use crate::LLVM;
use crate::abi::Abi;
use crate::error::err;
use inkwell::values::BasicMetadataValueEnum;
use inkwell::{
    AddressSpace,
    attributes::{Attribute, AttributeLoc},
    builder::Builder,
    context::Context,
    module::{Linkage, Module},
    targets::TargetData,
    types::{
        BasicMetadataTypeEnum, BasicType, BasicTypeEnum, FloatType, FunctionType, IntType,
        PointerType, StructType, VectorType, VoidType,
    },
    values::{
        BasicValue, BasicValueEnum, FloatValue, FunctionValue, InstructionValue, IntValue,
        PointerValue, VectorValue,
    },
};
use std::collections::{HashMap, hash_map::Entry};
use wasmer_types::entity::{EntityRef, PrimaryMap};
use wasmer_types::{
    CompileError, FunctionIndex, FunctionType as FuncType, GlobalIndex, LocalFunctionIndex,
    MemoryIndex, ModuleInfo as WasmerCompilerModule, Mutability, SignatureIndex, TableIndex, Type,
};
use wasmer_vm::{MemoryStyle, TrapCode, VMBuiltinFunctionIndex, VMOffsets};

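/// Returns the LLVM pointer type used to address a Wasm value of type `ty`.
/// Every value type is stored behind the same opaque pointer type, so `_ty` is unused.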
pub fn type_to_llvm_ptr<'ctx>(
    intrinsics: &Intrinsics<'ctx>,
    _ty: Type,
) -> Result<PointerType<'ctx>, CompileError> {
    Ok(intrinsics.ptr_ty)
}

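/// Maps a Wasm value type to the corresponding LLVM basic type.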
pub fn type_to_llvm<'ctx>(
    intrinsics: &Intrinsics<'ctx>,
    ty: Type,
) -> Result<BasicTypeEnum<'ctx>, CompileError> {
    match ty {
        Type::I32 => Ok(intrinsics.i32_ty.as_basic_type_enum()),
        Type::I64 => Ok(intrinsics.i64_ty.as_basic_type_enum()),
        Type::F32 => Ok(intrinsics.f32_ty.as_basic_type_enum()),
        Type::F64 => Ok(intrinsics.f64_ty.as_basic_type_enum()),
        Type::V128 => Ok(intrinsics.i128_ty.as_basic_type_enum()),
        Type::FuncRef | Type::ExceptionRef | Type::ExternRef => {
            Ok(intrinsics.ptr_ty.as_basic_type_enum())
        }
    }
}

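/// LLVM intrinsics, VM libcall declarations, and the common types and constants
/// that code generation needs, declared once per module.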
#[allow(dead_code)]
pub struct Intrinsics<'ctx> {
    pub ctlz_i32: FunctionValue<'ctx>,
    pub ctlz_i64: FunctionValue<'ctx>,

    pub cttz_i32: FunctionValue<'ctx>,
    pub cttz_i64: FunctionValue<'ctx>,

    pub ctpop_i32: FunctionValue<'ctx>,
    pub ctpop_i64: FunctionValue<'ctx>,
    pub ctpop_i8x16: FunctionValue<'ctx>,

    pub fp_rounding_md: BasicMetadataValueEnum<'ctx>,
    pub fp_exception_md: BasicMetadataValueEnum<'ctx>,
    pub fp_ogt_md: BasicMetadataValueEnum<'ctx>,
    pub fp_olt_md: BasicMetadataValueEnum<'ctx>,
    pub fp_uno_md: BasicMetadataValueEnum<'ctx>,

    pub add_f32: FunctionValue<'ctx>,
    pub add_f64: FunctionValue<'ctx>,
    pub add_f32x4: FunctionValue<'ctx>,
    pub add_f64x2: FunctionValue<'ctx>,

    pub sub_f32: FunctionValue<'ctx>,
    pub sub_f64: FunctionValue<'ctx>,
    pub sub_f32x4: FunctionValue<'ctx>,
    pub sub_f64x2: FunctionValue<'ctx>,

    pub mul_f32: FunctionValue<'ctx>,
    pub mul_f64: FunctionValue<'ctx>,
    pub mul_f32x4: FunctionValue<'ctx>,
    pub mul_f64x2: FunctionValue<'ctx>,

    pub div_f32: FunctionValue<'ctx>,
    pub div_f64: FunctionValue<'ctx>,
    pub div_f32x4: FunctionValue<'ctx>,
    pub div_f64x2: FunctionValue<'ctx>,

    pub sqrt_f32: FunctionValue<'ctx>,
    pub sqrt_f64: FunctionValue<'ctx>,
    pub sqrt_f32x4: FunctionValue<'ctx>,
    pub sqrt_f64x2: FunctionValue<'ctx>,

    pub cmp_f32: FunctionValue<'ctx>,
    pub cmp_f64: FunctionValue<'ctx>,
    pub cmp_f32x4: FunctionValue<'ctx>,
    pub cmp_f64x2: FunctionValue<'ctx>,

    pub ceil_f32: FunctionValue<'ctx>,
    pub ceil_f64: FunctionValue<'ctx>,
    pub ceil_f32x4: FunctionValue<'ctx>,
    pub ceil_f64x2: FunctionValue<'ctx>,

    pub floor_f32: FunctionValue<'ctx>,
    pub floor_f64: FunctionValue<'ctx>,
    pub floor_f32x4: FunctionValue<'ctx>,
    pub floor_f64x2: FunctionValue<'ctx>,

    pub trunc_f32: FunctionValue<'ctx>,
    pub trunc_f64: FunctionValue<'ctx>,
    pub trunc_f32x4: FunctionValue<'ctx>,
    pub trunc_f64x2: FunctionValue<'ctx>,

    pub fpext_f32: FunctionValue<'ctx>,
    pub fptrunc_f64: FunctionValue<'ctx>,

    pub nearbyint_f32: FunctionValue<'ctx>,
    pub nearbyint_f64: FunctionValue<'ctx>,
    pub nearbyint_f32x4: FunctionValue<'ctx>,
    pub nearbyint_f64x2: FunctionValue<'ctx>,

    pub fabs_f32: FunctionValue<'ctx>,
    pub fabs_f64: FunctionValue<'ctx>,
    pub fabs_f32x4: FunctionValue<'ctx>,
    pub fabs_f64x2: FunctionValue<'ctx>,

    pub copysign_f32: FunctionValue<'ctx>,
    pub copysign_f64: FunctionValue<'ctx>,
    pub copysign_f32x4: FunctionValue<'ctx>,
    pub copysign_f64x2: FunctionValue<'ctx>,

    pub sadd_sat_i8x16: FunctionValue<'ctx>,
    pub sadd_sat_i16x8: FunctionValue<'ctx>,
    pub uadd_sat_i8x16: FunctionValue<'ctx>,
    pub uadd_sat_i16x8: FunctionValue<'ctx>,

    pub ssub_sat_i8x16: FunctionValue<'ctx>,
    pub ssub_sat_i16x8: FunctionValue<'ctx>,
    pub usub_sat_i8x16: FunctionValue<'ctx>,
    pub usub_sat_i16x8: FunctionValue<'ctx>,

    pub expect_i1: FunctionValue<'ctx>,
    pub trap: FunctionValue<'ctx>,
    pub debug_trap: FunctionValue<'ctx>,

    pub personality: FunctionValue<'ctx>,
    pub personality2: FunctionValue<'ctx>,
    pub readonly: Attribute,
    pub stack_probe: Attribute,
    pub uwtable: Attribute,
    pub frame_pointer: Attribute,

    pub void_ty: VoidType<'ctx>,
    pub i1_ty: IntType<'ctx>,
    pub i2_ty: IntType<'ctx>,
    pub i4_ty: IntType<'ctx>,
    pub i8_ty: IntType<'ctx>,
    pub i16_ty: IntType<'ctx>,
    pub i32_ty: IntType<'ctx>,
    pub i64_ty: IntType<'ctx>,
    pub i128_ty: IntType<'ctx>,
    pub isize_ty: IntType<'ctx>,
    pub f32_ty: FloatType<'ctx>,
    pub f64_ty: FloatType<'ctx>,

    pub i1x128_ty: VectorType<'ctx>,
    pub i8x16_ty: VectorType<'ctx>,
    pub i16x8_ty: VectorType<'ctx>,
    pub i32x4_ty: VectorType<'ctx>,
    pub i64x2_ty: VectorType<'ctx>,
    pub f32x4_ty: VectorType<'ctx>,
    pub f64x2_ty: VectorType<'ctx>,
    pub i32x8_ty: VectorType<'ctx>,

    pub ptr_ty: PointerType<'ctx>,

    pub anyfunc_ty: StructType<'ctx>,
    pub exc_ty: StructType<'ctx>,

    pub i1_zero: IntValue<'ctx>,
    pub i8_zero: IntValue<'ctx>,
    pub i32_zero: IntValue<'ctx>,
    pub i64_zero: IntValue<'ctx>,
    pub i128_zero: IntValue<'ctx>,
    pub isize_zero: IntValue<'ctx>,
    pub f32_zero: FloatValue<'ctx>,
    pub f64_zero: FloatValue<'ctx>,
    pub f32x4_zero: VectorValue<'ctx>,
    pub f64x2_zero: VectorValue<'ctx>,
    pub i32_consts: [IntValue<'ctx>; 16],

    pub trap_unreachable: BasicValueEnum<'ctx>,
    pub trap_call_indirect_null: BasicValueEnum<'ctx>,
    pub trap_call_indirect_sig: BasicValueEnum<'ctx>,
    pub trap_memory_oob: BasicValueEnum<'ctx>,
    pub trap_illegal_arithmetic: BasicValueEnum<'ctx>,
    pub trap_integer_division_by_zero: BasicValueEnum<'ctx>,
    pub trap_bad_conversion_to_integer: BasicValueEnum<'ctx>,
    pub trap_unaligned_atomic: BasicValueEnum<'ctx>,
    pub trap_table_access_oob: BasicValueEnum<'ctx>,

    pub experimental_stackmap: FunctionValue<'ctx>,

    pub table_copy: FunctionValue<'ctx>,
    pub table_init: FunctionValue<'ctx>,
    pub table_fill: FunctionValue<'ctx>,
    pub table_size: FunctionValue<'ctx>,
    pub imported_table_size: FunctionValue<'ctx>,
    pub table_get: FunctionValue<'ctx>,
    pub imported_table_get: FunctionValue<'ctx>,
    pub table_set: FunctionValue<'ctx>,
    pub imported_table_set: FunctionValue<'ctx>,
    pub table_grow: FunctionValue<'ctx>,
    pub imported_table_grow: FunctionValue<'ctx>,
    pub memory_init: FunctionValue<'ctx>,
    pub data_drop: FunctionValue<'ctx>,
    pub func_ref: FunctionValue<'ctx>,
    pub elem_drop: FunctionValue<'ctx>,
    pub memory_copy: FunctionValue<'ctx>,
    pub imported_memory_copy: FunctionValue<'ctx>,
    pub memory_fill: FunctionValue<'ctx>,
    pub imported_memory_fill: FunctionValue<'ctx>,
    pub memory_size_ty: FunctionType<'ctx>,
    pub memory_grow_ty: FunctionType<'ctx>,
    pub memory_wait32: FunctionValue<'ctx>,
    pub memory_wait32_ty: FunctionType<'ctx>,
    pub imported_memory_wait32: FunctionValue<'ctx>,
    pub memory_wait64: FunctionValue<'ctx>,
    pub memory_wait64_ty: FunctionType<'ctx>,
    pub imported_memory_wait64: FunctionValue<'ctx>,
    pub memory_notify: FunctionValue<'ctx>,
    pub memory_notify_ty: FunctionType<'ctx>,
    pub imported_memory_notify: FunctionValue<'ctx>,

    pub throw_trap: FunctionValue<'ctx>,

    pub throw: FunctionValue<'ctx>,
    pub rethrow: FunctionValue<'ctx>,
    pub alloc_exception: FunctionValue<'ctx>,
    pub delete_exception: FunctionValue<'ctx>,
    pub read_exception: FunctionValue<'ctx>,

    pub debug_ptr: FunctionValue<'ctx>,
    pub debug_str: FunctionValue<'ctx>,

    pub vmfunction_import_ty: StructType<'ctx>,
    pub vmfunction_import_body_element: u32,
    pub vmfunction_import_vmctx_element: u32,

    pub vmmemory_definition_ty: StructType<'ctx>,
    pub vmmemory_definition_base_element: u32,
    pub vmmemory_definition_current_length_element: u32,
}

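/// Memory-related VM builtins whose function pointers are cached per memory index.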
#[derive(Debug, Hash, PartialEq, Eq)]
enum MemoryOp {
    Size,
    Grow,
    Wait32,
    Wait64,
    Notify,
}

impl<'ctx> Intrinsics<'ctx> {
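    /// Declares all LLVM intrinsics, Wasmer VM libcalls, types and constants on `module`.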
    pub fn declare(
        module: &Module<'ctx>,
        context: &'ctx Context,
        target_data: &TargetData,
        binary_fmt: &target_lexicon::BinaryFormat,
    ) -> Self {
        let void_ty = context.void_type();
        let i1_ty = context.bool_type();
        let i2_ty = context.custom_width_int_type(2);
        let i4_ty = context.custom_width_int_type(4);
        let i8_ty = context.i8_type();
        let i16_ty = context.i16_type();
        let i32_ty = context.i32_type();
        let i64_ty = context.i64_type();
        let i128_ty = context.i128_type();
        let isize_ty = context.ptr_sized_int_type(target_data, None);
        let f32_ty = context.f32_type();
        let f64_ty = context.f64_type();

        let i1x4_ty = i1_ty.vec_type(4);
        let i1x2_ty = i1_ty.vec_type(2);
        let i1x128_ty = i1_ty.vec_type(128);
        let i8x16_ty = i8_ty.vec_type(16);
        let i16x8_ty = i16_ty.vec_type(8);
        let i32x4_ty = i32_ty.vec_type(4);
        let i64x2_ty = i64_ty.vec_type(2);
        let f32x4_ty = f32_ty.vec_type(4);
        let f64x2_ty = f64_ty.vec_type(2);
        let i32x8_ty = i32_ty.vec_type(8);

        let ptr_ty = context.ptr_type(AddressSpace::default());

        let i1_zero = i1_ty.const_int(0, false);
        let i8_zero = i8_ty.const_int(0, false);
        let i32_zero = i32_ty.const_int(0, false);
        let i64_zero = i64_ty.const_int(0, false);
        let i128_zero = i128_ty.const_int(0, false);
        let isize_zero = isize_ty.const_int(0, false);
        let f32_zero = f32_ty.const_float(0.0);
        let f64_zero = f64_ty.const_float(0.0);
        let f32x4_zero = f32x4_ty.const_zero();
        let f64x2_zero = f64x2_ty.const_zero();
        let i32_consts = [
            i32_ty.const_int(0, false),
            i32_ty.const_int(1, false),
            i32_ty.const_int(2, false),
            i32_ty.const_int(3, false),
            i32_ty.const_int(4, false),
            i32_ty.const_int(5, false),
            i32_ty.const_int(6, false),
            i32_ty.const_int(7, false),
            i32_ty.const_int(8, false),
            i32_ty.const_int(9, false),
            i32_ty.const_int(10, false),
            i32_ty.const_int(11, false),
            i32_ty.const_int(12, false),
            i32_ty.const_int(13, false),
            i32_ty.const_int(14, false),
            i32_ty.const_int(15, false),
        ];

        let md_ty = context.metadata_type();

        let i8_ptr_ty_basic = ptr_ty.as_basic_type_enum();

        let i1_ty_basic_md: BasicMetadataTypeEnum = i1_ty.into();
        let i32_ty_basic_md: BasicMetadataTypeEnum = i32_ty.into();
        let i64_ty_basic_md: BasicMetadataTypeEnum = i64_ty.into();
        let f32_ty_basic_md: BasicMetadataTypeEnum = f32_ty.into();
        let f64_ty_basic_md: BasicMetadataTypeEnum = f64_ty.into();
        let i8x16_ty_basic_md: BasicMetadataTypeEnum = i8x16_ty.into();
        let i16x8_ty_basic_md: BasicMetadataTypeEnum = i16x8_ty.into();
        let f32x4_ty_basic_md: BasicMetadataTypeEnum = f32x4_ty.into();
        let f64x2_ty_basic_md: BasicMetadataTypeEnum = f64x2_ty.into();
        let md_ty_basic_md: BasicMetadataTypeEnum = md_ty.into();

        let ctx_ptr_ty = ptr_ty;
        let ctx_ptr_ty_basic = ctx_ptr_ty.as_basic_type_enum();
        let ctx_ptr_ty_basic_md: BasicMetadataTypeEnum = ctx_ptr_ty.into();

        let sigindex_ty = i32_ty;

        let anyfunc_ty = context.struct_type(
            &[i8_ptr_ty_basic, sigindex_ty.into(), ctx_ptr_ty_basic],
            false,
        );
        let funcref_ty = ptr_ty;
        let anyref_ty = ptr_ty;
        let anyref_ty_basic_md: BasicMetadataTypeEnum = anyref_ty.into();

        let ret_i8x16_take_i8x16 = i8x16_ty.fn_type(&[i8x16_ty_basic_md], false);
        let ret_i8x16_take_i8x16_i8x16 =
            i8x16_ty.fn_type(&[i8x16_ty_basic_md, i8x16_ty_basic_md], false);
        let ret_i16x8_take_i16x8_i16x8 =
            i16x8_ty.fn_type(&[i16x8_ty_basic_md, i16x8_ty_basic_md], false);

        let ret_i32_take_i32_i1 = i32_ty.fn_type(&[i32_ty_basic_md, i1_ty_basic_md], false);
        let ret_i64_take_i64_i1 = i64_ty.fn_type(&[i64_ty_basic_md, i1_ty_basic_md], false);

        let ret_i32_take_i32 = i32_ty.fn_type(&[i32_ty_basic_md], false);
        let ret_i64_take_i64 = i64_ty.fn_type(&[i64_ty_basic_md], false);

        let ret_f32_take_f32 = f32_ty.fn_type(&[f32_ty_basic_md], false);
        let ret_f64_take_f64 = f64_ty.fn_type(&[f64_ty_basic_md], false);
        let ret_f32x4_take_f32x4 = f32x4_ty.fn_type(&[f32x4_ty_basic_md], false);
        let ret_f64x2_take_f64x2 = f64x2_ty.fn_type(&[f64x2_ty_basic_md], false);

        let ret_f32_take_f32_f32 = f32_ty.fn_type(&[f32_ty_basic_md, f32_ty_basic_md], false);
        let ret_f64_take_f64_f64 = f64_ty.fn_type(&[f64_ty_basic_md, f64_ty_basic_md], false);
        let ret_f32x4_take_f32x4_f32x4 =
            f32x4_ty.fn_type(&[f32x4_ty_basic_md, f32x4_ty_basic_md], false);
        let ret_f64x2_take_f64x2_f64x2 =
            f64x2_ty.fn_type(&[f64x2_ty_basic_md, f64x2_ty_basic_md], false);

        let ret_f64_take_f32_md = f64_ty.fn_type(&[f32_ty_basic_md, md_ty_basic_md], false);
        let ret_f32_take_f64_md_md =
            f32_ty.fn_type(&[f64_ty_basic_md, md_ty_basic_md, md_ty_basic_md], false);

        let ret_i1_take_i1_i1 = i1_ty.fn_type(&[i1_ty_basic_md, i1_ty_basic_md], false);

        let ret_i1_take_f32_f32_md_md = i1_ty.fn_type(
            &[
                f32_ty_basic_md,
                f32_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );
        let ret_i1_take_f64_f64_md_md = i1_ty.fn_type(
            &[
                f64_ty_basic_md,
                f64_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );
        let ret_i1x4_take_f32x4_f32x4_md_md = i1x4_ty.fn_type(
            &[
                f32x4_ty_basic_md,
                f32x4_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );
        let ret_i1x2_take_f64x2_f64x2_md_md = i1x2_ty.fn_type(
            &[
                f64x2_ty_basic_md,
                f64x2_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );

        let ret_f32_take_f32_f32_md_md = f32_ty.fn_type(
            &[
                f32_ty_basic_md,
                f32_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );
        let ret_f64_take_f64_f64_md_md = f64_ty.fn_type(
            &[
                f64_ty_basic_md,
                f64_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );
        let ret_f32x4_take_f32x4_f32x4_md_md = f32x4_ty.fn_type(
            &[
                f32x4_ty_basic_md,
                f32x4_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );
        let ret_f64x2_take_f64x2_f64x2_md_md = f64x2_ty.fn_type(
            &[
                f64x2_ty_basic_md,
                f64x2_ty_basic_md,
                md_ty_basic_md,
                md_ty_basic_md,
            ],
            false,
        );

        let intrinsics = Self {
            ctlz_i32: module.add_function("llvm.ctlz.i32", ret_i32_take_i32_i1, None),
            ctlz_i64: module.add_function("llvm.ctlz.i64", ret_i64_take_i64_i1, None),

            cttz_i32: module.add_function("llvm.cttz.i32", ret_i32_take_i32_i1, None),
            cttz_i64: module.add_function("llvm.cttz.i64", ret_i64_take_i64_i1, None),

            ctpop_i32: module.add_function("llvm.ctpop.i32", ret_i32_take_i32, None),
            ctpop_i64: module.add_function("llvm.ctpop.i64", ret_i64_take_i64, None),
            ctpop_i8x16: module.add_function("llvm.ctpop.v16i8", ret_i8x16_take_i8x16, None),

            fp_rounding_md: context.metadata_string("round.tonearest").into(),
            fp_exception_md: context.metadata_string("fpexcept.strict").into(),

            fp_ogt_md: context.metadata_string("ogt").into(),
            fp_olt_md: context.metadata_string("olt").into(),
            fp_uno_md: context.metadata_string("uno").into(),

            sqrt_f32: module.add_function("llvm.sqrt.f32", ret_f32_take_f32, None),
            sqrt_f64: module.add_function("llvm.sqrt.f64", ret_f64_take_f64, None),
            sqrt_f32x4: module.add_function("llvm.sqrt.v4f32", ret_f32x4_take_f32x4, None),
            sqrt_f64x2: module.add_function("llvm.sqrt.v2f64", ret_f64x2_take_f64x2, None),

            ceil_f32: module.add_function("llvm.ceil.f32", ret_f32_take_f32, None),
            ceil_f64: module.add_function("llvm.ceil.f64", ret_f64_take_f64, None),
            ceil_f32x4: module.add_function("llvm.ceil.v4f32", ret_f32x4_take_f32x4, None),
            ceil_f64x2: module.add_function("llvm.ceil.v2f64", ret_f64x2_take_f64x2, None),

            floor_f32: module.add_function("llvm.floor.f32", ret_f32_take_f32, None),
            floor_f64: module.add_function("llvm.floor.f64", ret_f64_take_f64, None),
            floor_f32x4: module.add_function("llvm.floor.v4f32", ret_f32x4_take_f32x4, None),
            floor_f64x2: module.add_function("llvm.floor.v2f64", ret_f64x2_take_f64x2, None),

            trunc_f32: module.add_function("llvm.trunc.f32", ret_f32_take_f32, None),
            trunc_f64: module.add_function("llvm.trunc.f64", ret_f64_take_f64, None),
            trunc_f32x4: module.add_function("llvm.trunc.v4f32", ret_f32x4_take_f32x4, None),
            trunc_f64x2: module.add_function("llvm.trunc.v2f64", ret_f64x2_take_f64x2, None),

            nearbyint_f32: module.add_function("llvm.nearbyint.f32", ret_f32_take_f32, None),
            nearbyint_f64: module.add_function("llvm.nearbyint.f64", ret_f64_take_f64, None),
            nearbyint_f32x4: module.add_function(
                "llvm.nearbyint.v4f32",
                ret_f32x4_take_f32x4,
                None,
            ),
            nearbyint_f64x2: module.add_function(
                "llvm.nearbyint.v2f64",
                ret_f64x2_take_f64x2,
                None,
            ),

            add_f32: module.add_function(
                "llvm.experimental.constrained.fadd.f32",
                ret_f32_take_f32_f32_md_md,
                None,
            ),
            add_f64: module.add_function(
                "llvm.experimental.constrained.fadd.f64",
                ret_f64_take_f64_f64_md_md,
                None,
            ),
            add_f32x4: module.add_function(
                "llvm.experimental.constrained.fadd.v4f32",
                ret_f32x4_take_f32x4_f32x4_md_md,
                None,
            ),
            add_f64x2: module.add_function(
                "llvm.experimental.constrained.fadd.v2f64",
                ret_f64x2_take_f64x2_f64x2_md_md,
                None,
            ),

            sub_f32: module.add_function(
                "llvm.experimental.constrained.fsub.f32",
                ret_f32_take_f32_f32_md_md,
                None,
            ),
            sub_f64: module.add_function(
                "llvm.experimental.constrained.fsub.f64",
                ret_f64_take_f64_f64_md_md,
                None,
            ),
            sub_f32x4: module.add_function(
                "llvm.experimental.constrained.fsub.v4f32",
                ret_f32x4_take_f32x4_f32x4_md_md,
                None,
            ),
            sub_f64x2: module.add_function(
                "llvm.experimental.constrained.fsub.v2f64",
                ret_f64x2_take_f64x2_f64x2_md_md,
                None,
            ),

            mul_f32: module.add_function(
                "llvm.experimental.constrained.fmul.f32",
                ret_f32_take_f32_f32_md_md,
                None,
            ),
            mul_f64: module.add_function(
                "llvm.experimental.constrained.fmul.f64",
                ret_f64_take_f64_f64_md_md,
                None,
            ),
            mul_f32x4: module.add_function(
                "llvm.experimental.constrained.fmul.v4f32",
                ret_f32x4_take_f32x4_f32x4_md_md,
                None,
            ),
            mul_f64x2: module.add_function(
                "llvm.experimental.constrained.fmul.v2f64",
                ret_f64x2_take_f64x2_f64x2_md_md,
                None,
            ),

            div_f32: module.add_function(
                "llvm.experimental.constrained.fdiv.f32",
                ret_f32_take_f32_f32_md_md,
                None,
            ),
            div_f64: module.add_function(
                "llvm.experimental.constrained.fdiv.f64",
                ret_f64_take_f64_f64_md_md,
                None,
            ),
            div_f32x4: module.add_function(
                "llvm.experimental.constrained.fdiv.v4f32",
                ret_f32x4_take_f32x4_f32x4_md_md,
                None,
            ),
            div_f64x2: module.add_function(
                "llvm.experimental.constrained.fdiv.v2f64",
                ret_f64x2_take_f64x2_f64x2_md_md,
                None,
            ),

            cmp_f32: module.add_function(
                "llvm.experimental.constrained.fcmp.f32",
                ret_i1_take_f32_f32_md_md,
                None,
            ),
            cmp_f64: module.add_function(
                "llvm.experimental.constrained.fcmp.f64",
                ret_i1_take_f64_f64_md_md,
                None,
            ),
            cmp_f32x4: module.add_function(
                "llvm.experimental.constrained.fcmp.v4f32",
                ret_i1x4_take_f32x4_f32x4_md_md,
                None,
            ),
            cmp_f64x2: module.add_function(
                "llvm.experimental.constrained.fcmp.v2f64",
                ret_i1x2_take_f64x2_f64x2_md_md,
                None,
            ),

            fpext_f32: module.add_function(
                "llvm.experimental.constrained.fpext.f64.f32",
                ret_f64_take_f32_md,
                None,
            ),
            fptrunc_f64: module.add_function(
                "llvm.experimental.constrained.fptrunc.f32.f64",
                ret_f32_take_f64_md_md,
                None,
            ),

            fabs_f32: module.add_function("llvm.fabs.f32", ret_f32_take_f32, None),
            fabs_f64: module.add_function("llvm.fabs.f64", ret_f64_take_f64, None),
            fabs_f32x4: module.add_function("llvm.fabs.v4f32", ret_f32x4_take_f32x4, None),
            fabs_f64x2: module.add_function("llvm.fabs.v2f64", ret_f64x2_take_f64x2, None),

            copysign_f32: module.add_function("llvm.copysign.f32", ret_f32_take_f32_f32, None),
            copysign_f64: module.add_function("llvm.copysign.f64", ret_f64_take_f64_f64, None),
            copysign_f32x4: module.add_function(
                "llvm.copysign.v4f32",
                ret_f32x4_take_f32x4_f32x4,
                None,
            ),
            copysign_f64x2: module.add_function(
                "llvm.copysign.v2f64",
                ret_f64x2_take_f64x2_f64x2,
                None,
            ),

            sadd_sat_i8x16: module.add_function(
                "llvm.sadd.sat.v16i8",
                ret_i8x16_take_i8x16_i8x16,
                None,
            ),
            sadd_sat_i16x8: module.add_function(
                "llvm.sadd.sat.v8i16",
                ret_i16x8_take_i16x8_i16x8,
                None,
            ),
            uadd_sat_i8x16: module.add_function(
                "llvm.uadd.sat.v16i8",
                ret_i8x16_take_i8x16_i8x16,
                None,
            ),
            uadd_sat_i16x8: module.add_function(
                "llvm.uadd.sat.v8i16",
                ret_i16x8_take_i16x8_i16x8,
                None,
            ),

            ssub_sat_i8x16: module.add_function(
                "llvm.ssub.sat.v16i8",
                ret_i8x16_take_i8x16_i8x16,
                None,
            ),
            ssub_sat_i16x8: module.add_function(
                "llvm.ssub.sat.v8i16",
                ret_i16x8_take_i16x8_i16x8,
                None,
            ),
            usub_sat_i8x16: module.add_function(
                "llvm.usub.sat.v16i8",
                ret_i8x16_take_i8x16_i8x16,
                None,
            ),
            usub_sat_i16x8: module.add_function(
                "llvm.usub.sat.v8i16",
                ret_i16x8_take_i16x8_i16x8,
                None,
            ),

            expect_i1: module.add_function("llvm.expect.i1", ret_i1_take_i1_i1, None),
            trap: module.add_function("llvm.trap", void_ty.fn_type(&[], false), None),
            debug_trap: module.add_function("llvm.debugtrap", void_ty.fn_type(&[], false), None),
            personality: module.add_function(
                if matches!(binary_fmt, target_lexicon::BinaryFormat::Macho) {
                    "__gxx_personality_v0"
                } else {
                    "wasmer_eh_personality"
                },
                i32_ty.fn_type(
                    &[
                        i32_ty.into(),
                        i32_ty.into(),
                        i64_ty.into(),
                        ptr_ty.into(),
                        ptr_ty.into(),
                    ],
                    false,
                ),
                None,
            ),
            personality2: module.add_function(
                "wasmer_eh_personality2",
                i32_ty.fn_type(&[ptr_ty.into(), ptr_ty.into()], false),
                None,
            ),
            readonly: context
                .create_enum_attribute(Attribute::get_named_enum_kind_id("readonly"), 0),
            stack_probe: context.create_string_attribute("probe-stack", "inline-asm"),
            uwtable: context.create_enum_attribute(Attribute::get_named_enum_kind_id("uwtable"), 1),
            frame_pointer: context.create_string_attribute("frame-pointer", "non-leaf"),
            void_ty,
            i1_ty,
            i2_ty,
            i4_ty,
            i8_ty,
            i16_ty,
            i32_ty,
            i64_ty,
            i128_ty,
            isize_ty,
            f32_ty,
            f64_ty,

            i1x128_ty,
            i8x16_ty,
            i16x8_ty,
            i32x4_ty,
            i64x2_ty,
            f32x4_ty,
            f64x2_ty,
            i32x8_ty,

            anyfunc_ty,
            exc_ty: context.struct_type(&[i32_ty.into(), ptr_ty.into(), i64_ty.into()], false),
            i1_zero,
            i8_zero,
            i32_zero,
            i64_zero,
            i128_zero,
            isize_zero,
            f32_zero,
            f64_zero,
            f32x4_zero,
            f64x2_zero,
            i32_consts,

            trap_unreachable: i32_ty
                .const_int(TrapCode::UnreachableCodeReached as _, false)
                .as_basic_value_enum(),
            trap_call_indirect_null: i32_ty
                .const_int(TrapCode::IndirectCallToNull as _, false)
                .as_basic_value_enum(),
            trap_call_indirect_sig: i32_ty
                .const_int(TrapCode::BadSignature as _, false)
                .as_basic_value_enum(),
            trap_memory_oob: i32_ty
                .const_int(TrapCode::HeapAccessOutOfBounds as _, false)
                .as_basic_value_enum(),
            trap_illegal_arithmetic: i32_ty
                .const_int(TrapCode::IntegerOverflow as _, false)
                .as_basic_value_enum(),
            trap_integer_division_by_zero: i32_ty
                .const_int(TrapCode::IntegerDivisionByZero as _, false)
                .as_basic_value_enum(),
            trap_bad_conversion_to_integer: i32_ty
                .const_int(TrapCode::BadConversionToInteger as _, false)
                .as_basic_value_enum(),
            trap_unaligned_atomic: i32_ty
                .const_int(TrapCode::UnalignedAtomic as _, false)
                .as_basic_value_enum(),
            trap_table_access_oob: i32_ty
                .const_int(TrapCode::TableAccessOutOfBounds as _, false)
                .as_basic_value_enum(),

            experimental_stackmap: module.add_function(
                "llvm.experimental.stackmap",
                void_ty.fn_type(&[i64_ty_basic_md, i32_ty_basic_md], true),
                None,
            ),

            table_copy: module.add_function(
                "wasmer_vm_table_copy",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            table_init: module.add_function(
                "wasmer_vm_table_init",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            table_fill: module.add_function(
                "wasmer_vm_table_fill",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        anyref_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            table_size: module.add_function(
                "wasmer_vm_table_size",
                i32_ty.fn_type(&[ctx_ptr_ty_basic_md, i32_ty_basic_md], false),
                None,
            ),
            imported_table_size: module.add_function(
                "wasmer_vm_imported_table_size",
                i32_ty.fn_type(&[ctx_ptr_ty_basic_md, i32_ty_basic_md], false),
                None,
            ),
            table_get: module.add_function(
                "wasmer_vm_table_get",
                anyref_ty.fn_type(
                    &[ctx_ptr_ty_basic_md, i32_ty_basic_md, i32_ty_basic_md],
                    false,
                ),
                None,
            ),
            imported_table_get: module.add_function(
                "wasmer_vm_imported_table_get",
                anyref_ty.fn_type(
                    &[ctx_ptr_ty_basic_md, i32_ty_basic_md, i32_ty_basic_md],
                    false,
                ),
                None,
            ),
            table_set: module.add_function(
                "wasmer_vm_table_set",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        anyref_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            imported_table_set: module.add_function(
                "wasmer_vm_imported_table_set",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        anyref_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            table_grow: module.add_function(
                "wasmer_vm_table_grow",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        anyref_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            imported_table_grow: module.add_function(
                "wasmer_vm_imported_table_grow",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        anyref_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_init: module.add_function(
                "wasmer_vm_memory32_init",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_copy: module.add_function(
                "wasmer_vm_memory32_copy",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            imported_memory_copy: module.add_function(
                "wasmer_vm_imported_memory32_copy",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_fill: module.add_function(
                "wasmer_vm_memory32_fill",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            imported_memory_fill: module.add_function(
                "wasmer_vm_imported_memory32_fill",
                void_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_size_ty: i32_ty.fn_type(&[ctx_ptr_ty_basic_md, i32_ty_basic_md], false),
            memory_grow_ty: i32_ty.fn_type(
                &[ctx_ptr_ty_basic_md, i32_ty_basic_md, i32_ty_basic_md],
                false,
            ),
            data_drop: module.add_function(
                "wasmer_vm_data_drop",
                void_ty.fn_type(&[ctx_ptr_ty_basic_md, i32_ty_basic_md], false),
                None,
            ),
            func_ref: module.add_function(
                "wasmer_vm_func_ref",
                funcref_ty.fn_type(&[ctx_ptr_ty_basic_md, i32_ty_basic_md], false),
                None,
            ),
            elem_drop: module.add_function(
                "wasmer_vm_elem_drop",
                void_ty.fn_type(&[ctx_ptr_ty_basic_md, i32_ty_basic_md], false),
                None,
            ),
            throw_trap: module.add_function(
                "wasmer_vm_raise_trap",
                void_ty.fn_type(&[i32_ty_basic_md], false),
                None,
            ),

            throw: module.add_function(
                "wasmer_vm_throw",
                void_ty.fn_type(
                    &[i32_ty.into(), ptr_ty.into(), ptr_ty.into(), i64_ty.into()],
                    false,
                ),
                None,
            ),
            rethrow: module.add_function(
                "wasmer_vm_rethrow",
                void_ty.fn_type(&[ptr_ty.into()], false),
                None,
            ),
            alloc_exception: module.add_function(
                "wasmer_vm_alloc_exception",
                ptr_ty.fn_type(&[i64_ty.into()], false),
                None,
            ),
            delete_exception: module.add_function(
                "wasmer_vm_delete_exception",
                void_ty.fn_type(&[ptr_ty.into()], false),
                None,
            ),
            read_exception: module.add_function(
                "wasmer_vm_read_exception",
                ptr_ty.fn_type(&[ptr_ty.into()], false),
                None,
            ),

            debug_ptr: module.add_function(
                "wasmer_vm_dbg_usize",
                void_ty.fn_type(&[ptr_ty.into()], false),
                None,
            ),
            debug_str: module.add_function(
                "wasmer_vm_dbg_str",
                void_ty.fn_type(&[ptr_ty.into(), i32_ty.into()], false),
                None,
            ),
            memory_wait32: module.add_function(
                "wasmer_vm_memory32_atomic_wait32",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i64_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_wait32_ty: i32_ty.fn_type(
                &[
                    ctx_ptr_ty_basic_md,
                    i32_ty_basic_md,
                    i32_ty_basic_md,
                    i32_ty_basic_md,
                    i64_ty_basic_md,
                ],
                false,
            ),
            imported_memory_wait32: module.add_function(
                "wasmer_vm_imported_memory32_atomic_wait32",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i64_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_wait64: module.add_function(
                "wasmer_vm_memory32_atomic_wait64",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i64_ty_basic_md,
                        i64_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_wait64_ty: i32_ty.fn_type(
                &[
                    ctx_ptr_ty_basic_md,
                    i32_ty_basic_md,
                    i32_ty_basic_md,
                    i64_ty_basic_md,
                    i64_ty_basic_md,
                ],
                false,
            ),
            imported_memory_wait64: module.add_function(
                "wasmer_vm_imported_memory32_atomic_wait64",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i64_ty_basic_md,
                        i64_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_notify: module.add_function(
                "wasmer_vm_memory32_atomic_notify",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),
            memory_notify_ty: i32_ty.fn_type(
                &[
                    ctx_ptr_ty_basic_md,
                    i32_ty_basic_md,
                    i32_ty_basic_md,
                    i32_ty_basic_md,
                ],
                false,
            ),
            imported_memory_notify: module.add_function(
                "wasmer_vm_imported_memory32_atomic_notify",
                i32_ty.fn_type(
                    &[
                        ctx_ptr_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                        i32_ty_basic_md,
                    ],
                    false,
                ),
                None,
            ),

            vmfunction_import_ty: context.struct_type(&[i8_ptr_ty_basic, i8_ptr_ty_basic], false),
            vmfunction_import_body_element: 0,
            vmfunction_import_vmctx_element: 1,
            vmmemory_definition_ty: context.struct_type(&[i8_ptr_ty_basic, isize_ty.into()], false),
            vmmemory_definition_base_element: 0,
            vmmemory_definition_current_length_element: 1,

            ptr_ty,
        };

        let noreturn =
            context.create_enum_attribute(Attribute::get_named_enum_kind_id("noreturn"), 0);
        intrinsics
            .throw_trap
            .add_attribute(AttributeLoc::Function, noreturn);
        intrinsics
    }
}

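/// Cached pointers into a memory's `VMMemoryDefinition`: dynamic memories keep pointers to the
/// base and current length; static memories cache the base pointer itself.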
#[derive(Clone, Copy)]
pub enum MemoryCache<'ctx> {
    Dynamic {
        ptr_to_base_ptr: PointerValue<'ctx>,
        ptr_to_current_length: PointerValue<'ctx>,
    },
    Static { base_ptr: PointerValue<'ctx> },
}

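/// Cached pointers to a table's base-pointer and current-elements fields.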
#[derive(Clone)]
struct TableCache<'ctx> {
    ptr_to_base_ptr: PointerValue<'ctx>,
    ptr_to_bounds: PointerValue<'ctx>,
}

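/// Cached access to a global: a typed pointer for mutable globals, or the loaded value for
/// constant globals.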
#[derive(Clone, Copy)]
pub enum GlobalCache<'ctx> {
    Mut {
        ptr_to_value: PointerValue<'ctx>,
        value_type: BasicTypeEnum<'ctx>,
    },
    Const {
        value: BasicValueEnum<'ctx>,
    },
}

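/// Cached callee information: function pointer, LLVM signature, vmctx argument and attributes.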
#[derive(Clone)]
pub struct FunctionCache<'ctx> {
    pub func: PointerValue<'ctx>,
    pub llvm_func_type: FunctionType<'ctx>,
    pub vmctx: BasicValueEnum<'ctx>,
    pub attrs: Vec<(Attribute, AttributeLoc)>,
}

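/// Per-function cache of values derived from the `VMContext` pointer (memories, tables,
/// globals, signature ids, functions and builtin-function pointers).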
pub struct CtxType<'ctx, 'a> {
    ctx_ptr_value: PointerValue<'ctx>,

    config: &'a LLVM,
    wasm_module: &'a WasmerCompilerModule,
    cache_builder: &'a Builder<'ctx>,
    abi: &'a dyn Abi,

    cached_memories: HashMap<MemoryIndex, MemoryCache<'ctx>>,
    cached_tables: HashMap<TableIndex, TableCache<'ctx>>,
    cached_sigindices: HashMap<SignatureIndex, IntValue<'ctx>>,
    cached_globals: HashMap<GlobalIndex, GlobalCache<'ctx>>,
    cached_functions: HashMap<FunctionIndex, FunctionCache<'ctx>>,
    cached_memory_op: HashMap<(MemoryIndex, MemoryOp), PointerValue<'ctx>>,

    offsets: VMOffsets,
}

impl<'ctx, 'a> CtxType<'ctx, 'a> {
    pub fn new(
        wasm_module: &'a WasmerCompilerModule,
        func_value: &FunctionValue<'ctx>,
        cache_builder: &'a Builder<'ctx>,
        abi: &'a dyn Abi,
        config: &'a LLVM,
    ) -> CtxType<'ctx, 'a> {
        CtxType {
            config,
            ctx_ptr_value: abi.get_vmctx_ptr_param(func_value),

            wasm_module,
            cache_builder,
            abi,

            cached_memories: HashMap::new(),
            cached_tables: HashMap::new(),
            cached_sigindices: HashMap::new(),
            cached_globals: HashMap::new(),
            cached_functions: HashMap::new(),
            cached_memory_op: HashMap::new(),

            offsets: VMOffsets::new(8, wasm_module),
        }
    }

    pub fn basic(&self) -> BasicValueEnum<'ctx> {
        self.ctx_ptr_value.as_basic_value_enum()
    }

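    /// Returns the cached [`MemoryCache`] for `index`, emitting on first use the loads that
    /// locate its `VMMemoryDefinition` in the vmctx (directly for local memories, through the
    /// import record for imported ones).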
    pub fn memory(
        &mut self,
        index: MemoryIndex,
        intrinsics: &Intrinsics<'ctx>,
        module: &Module<'ctx>,
        memory_styles: &PrimaryMap<MemoryIndex, MemoryStyle>,
    ) -> Result<MemoryCache<'ctx>, CompileError> {
        let (cached_memories, wasm_module, ctx_ptr_value, cache_builder, offsets) = (
            &mut self.cached_memories,
            self.wasm_module,
            self.ctx_ptr_value,
            &self.cache_builder,
            &self.offsets,
        );
        let memory_style = &memory_styles[index];
        match cached_memories.get(&index) {
            Some(r) => Ok(*r),
            None => {
                let memory_definition_ptr =
                    if let Some(local_memory_index) = wasm_module.local_memory_index(index) {
                        let offset = offsets.vmctx_vmmemory_definition(local_memory_index);
                        let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                        unsafe {
                            err!(cache_builder.build_gep(
                                intrinsics.i8_ty,
                                ctx_ptr_value,
                                &[offset],
                                ""
                            ))
                        }
                    } else {
                        let offset = offsets.vmctx_vmmemory_import(index);
                        let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                        let memory_definition_ptr_ptr = unsafe {
                            err!(cache_builder.build_gep(
                                intrinsics.i8_ty,
                                ctx_ptr_value,
                                &[offset],
                                ""
                            ))
                        };
                        let memory_definition_ptr_ptr = err!(cache_builder.build_bit_cast(
                            memory_definition_ptr_ptr,
                            intrinsics.ptr_ty,
                            "",
                        ))
                        .into_pointer_value();
                        let memory_definition_ptr = err!(cache_builder.build_load(
                            intrinsics.ptr_ty,
                            memory_definition_ptr_ptr,
                            ""
                        ))
                        .into_pointer_value();
                        tbaa_label(
                            module,
                            intrinsics,
                            format!("memory {} definition", index.as_u32()),
                            memory_definition_ptr.as_instruction_value().unwrap(),
                        );
                        memory_definition_ptr
                    };
                let memory_definition_ptr = err!(cache_builder.build_bit_cast(
                    memory_definition_ptr,
                    intrinsics.ptr_ty,
                    "",
                ))
                .into_pointer_value();
                let base_ptr = err!(cache_builder.build_struct_gep(
                    intrinsics.vmmemory_definition_ty,
                    memory_definition_ptr,
                    intrinsics.vmmemory_definition_base_element,
                    "",
                ));
                let value = if let MemoryStyle::Dynamic { .. } = memory_style {
                    let current_length_ptr = err!(cache_builder.build_struct_gep(
                        intrinsics.vmmemory_definition_ty,
                        memory_definition_ptr,
                        intrinsics.vmmemory_definition_current_length_element,
                        "",
                    ));
                    MemoryCache::Dynamic {
                        ptr_to_base_ptr: base_ptr,
                        ptr_to_current_length: current_length_ptr,
                    }
                } else {
                    let base_ptr = err!(cache_builder.build_load(intrinsics.ptr_ty, base_ptr, ""))
                        .into_pointer_value();
                    tbaa_label(
                        module,
                        intrinsics,
                        format!("memory base_ptr {}", index.as_u32()),
                        base_ptr.as_instruction_value().unwrap(),
                    );
                    MemoryCache::Static { base_ptr }
                };

                self.cached_memories.insert(index, value);
                Ok(*self.cached_memories.get(&index).unwrap())
            }
        }
    }

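    /// Emits the GEPs, casts and loads that locate a table's base-pointer and bounds fields,
    /// handling both local and imported tables.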
    fn build_table_prepare(
        table_index: TableIndex,
        intrinsics: &Intrinsics<'ctx>,
        module: &Module<'ctx>,
        wasm_module: &WasmerCompilerModule,
        ctx_ptr_value: PointerValue<'ctx>,
        offsets: &VMOffsets,
        builder: &Builder<'ctx>,
    ) -> Result<(PointerValue<'ctx>, PointerValue<'ctx>), CompileError> {
        if let Some(local_table_index) = wasm_module.local_table_index(table_index) {
            let offset = intrinsics.i64_ty.const_int(
                offsets
                    .vmctx_vmtable_definition_base(local_table_index)
                    .into(),
                false,
            );
            let ptr_to_base_ptr =
                unsafe { err!(builder.build_gep(intrinsics.i8_ty, ctx_ptr_value, &[offset], "")) };
            let ptr_to_base_ptr =
                err!(builder.build_bit_cast(ptr_to_base_ptr, intrinsics.ptr_ty, ""))
                    .into_pointer_value();
            let offset = intrinsics.i64_ty.const_int(
                offsets
                    .vmctx_vmtable_definition_current_elements(local_table_index)
                    .into(),
                false,
            );
            let ptr_to_bounds =
                unsafe { err!(builder.build_gep(intrinsics.i8_ty, ctx_ptr_value, &[offset], "")) };
            let ptr_to_bounds = err!(builder.build_bit_cast(ptr_to_bounds, intrinsics.ptr_ty, ""))
                .into_pointer_value();
            Ok((ptr_to_base_ptr, ptr_to_bounds))
        } else {
            let offset = intrinsics.i64_ty.const_int(
                offsets.vmctx_vmtable_import_definition(table_index).into(),
                false,
            );
            let definition_ptr_ptr =
                unsafe { err!(builder.build_gep(intrinsics.i8_ty, ctx_ptr_value, &[offset], "")) };
            let definition_ptr_ptr =
                err!(builder.build_bit_cast(definition_ptr_ptr, intrinsics.ptr_ty, ""))
                    .into_pointer_value();
            let definition_ptr =
                err!(builder.build_load(intrinsics.ptr_ty, definition_ptr_ptr, ""))
                    .into_pointer_value();
            tbaa_label(
                module,
                intrinsics,
                format!("table {} definition", table_index.as_u32()),
                definition_ptr.as_instruction_value().unwrap(),
            );

            let offset = intrinsics
                .i64_ty
                .const_int(offsets.vmtable_definition_base().into(), false);
            let ptr_to_base_ptr =
                unsafe { err!(builder.build_gep(intrinsics.i8_ty, definition_ptr, &[offset], "")) };
            let ptr_to_base_ptr =
                err!(builder.build_bit_cast(ptr_to_base_ptr, intrinsics.ptr_ty, ""))
                    .into_pointer_value();
            let offset = intrinsics
                .i64_ty
                .const_int(offsets.vmtable_definition_current_elements().into(), false);
            let ptr_to_bounds =
                unsafe { err!(builder.build_gep(intrinsics.i8_ty, definition_ptr, &[offset], "")) };
            let ptr_to_bounds = err!(builder.build_bit_cast(ptr_to_bounds, intrinsics.ptr_ty, ""))
                .into_pointer_value();
            Ok((ptr_to_base_ptr, ptr_to_bounds))
        }
    }

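    /// Returns pointers to the table's base and bounds. Growable tables are re-read in the
    /// function body; non-growable tables are computed once with the cache builder and cached.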
    fn table_prepare(
        &mut self,
        table_index: TableIndex,
        intrinsics: &Intrinsics<'ctx>,
        module: &Module<'ctx>,
        body_builder: &Builder<'ctx>,
    ) -> Result<(PointerValue<'ctx>, PointerValue<'ctx>), CompileError> {
        let (cached_tables, wasm_module, ctx_ptr_value, cache_builder, offsets) = (
            &mut self.cached_tables,
            self.wasm_module,
            self.ctx_ptr_value,
            &self.cache_builder,
            &self.offsets,
        );

        let is_growable = is_table_growable(wasm_module, table_index).ok_or_else(|| {
            CompileError::Codegen(format!(
                "Table index out of bounds: {}",
                table_index.as_u32()
            ))
        })?;

        if is_growable {
            Self::build_table_prepare(
                table_index,
                intrinsics,
                module,
                wasm_module,
                ctx_ptr_value,
                offsets,
                body_builder,
            )
        } else {
            let TableCache {
                ptr_to_base_ptr,
                ptr_to_bounds,
            } = match cached_tables.entry(table_index) {
                Entry::Occupied(entry) => entry.get().clone(),
                Entry::Vacant(entry) => {
                    let (ptr_to_base_ptr, ptr_to_bounds) = Self::build_table_prepare(
                        table_index,
                        intrinsics,
                        module,
                        wasm_module,
                        ctx_ptr_value,
                        offsets,
                        cache_builder,
                    )?;

                    let v = TableCache {
                        ptr_to_base_ptr,
                        ptr_to_bounds,
                    };

                    entry.insert(v.clone());

                    v
                }
            };

            Ok((ptr_to_base_ptr, ptr_to_bounds))
        }
    }

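    /// Loads a table's base pointer and current bounds, attaching TBAA metadata to both loads.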
    pub fn table(
        &mut self,
        index: TableIndex,
        intrinsics: &Intrinsics<'ctx>,
        module: &Module<'ctx>,
        body_builder: &Builder<'ctx>,
    ) -> Result<(PointerValue<'ctx>, IntValue<'ctx>), CompileError> {
        let (ptr_to_base_ptr, ptr_to_bounds) =
            self.table_prepare(index, intrinsics, module, body_builder)?;

        let builder = if is_table_growable(self.wasm_module, index).unwrap() {
            &body_builder
        } else {
            &self.cache_builder
        };

        let base_ptr = err!(builder.build_load(intrinsics.ptr_ty, ptr_to_base_ptr, "base_ptr"))
            .into_pointer_value();
        let bounds =
            err!(builder.build_load(intrinsics.isize_ty, ptr_to_bounds, "bounds")).into_int_value();
        tbaa_label(
            module,
            intrinsics,
            format!("table_base_ptr {}", index.index()),
            base_ptr.as_instruction_value().unwrap(),
        );
        tbaa_label(
            module,
            intrinsics,
            format!("table_bounds {}", index.index()),
            bounds.as_instruction_value().unwrap(),
        );
        Ok((base_ptr, bounds))
    }

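    /// Loads (and caches) the shared signature id for `index` from the vmctx.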
    pub fn dynamic_sigindex(
        &mut self,
        index: SignatureIndex,
        intrinsics: &Intrinsics<'ctx>,
        module: &Module<'ctx>,
    ) -> Result<IntValue<'ctx>, CompileError> {
        let (cached_sigindices, ctx_ptr_value, cache_builder, offsets) = (
            &mut self.cached_sigindices,
            self.ctx_ptr_value,
            &self.cache_builder,
            &self.offsets,
        );

        match cached_sigindices.entry(index) {
            Entry::Occupied(entry) => Ok(*entry.get()),
            Entry::Vacant(entry) => {
                let byte_offset = intrinsics
                    .i64_ty
                    .const_int(offsets.vmctx_vmshared_signature_id(index).into(), false);

                let sigindex_ptr = unsafe {
                    err!(cache_builder.build_gep(
                        intrinsics.i8_ty,
                        ctx_ptr_value,
                        &[byte_offset],
                        "dynamic_sigindex",
                    ))
                };

                let sigindex_ptr =
                    err!(cache_builder.build_bit_cast(sigindex_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                let sigindex =
                    err!(cache_builder.build_load(intrinsics.i32_ty, sigindex_ptr, "sigindex"))
                        .into_int_value();
                tbaa_label(
                    module,
                    intrinsics,
                    format!("sigindex {}", index.as_u32()),
                    sigindex.as_instruction_value().unwrap(),
                );

                entry.insert(sigindex);
                Ok(sigindex)
            }
        }
    }

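    /// Returns the cached access path for a global: constant globals are loaded eagerly, while
    /// mutable globals keep a typed pointer to their definition.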
    pub fn global(
        &mut self,
        index: GlobalIndex,
        intrinsics: &Intrinsics<'ctx>,
        module: &Module<'ctx>,
    ) -> Result<&GlobalCache<'ctx>, CompileError> {
        let (cached_globals, wasm_module, ctx_ptr_value, cache_builder, offsets) = (
            &mut self.cached_globals,
            self.wasm_module,
            self.ctx_ptr_value,
            &self.cache_builder,
            &self.offsets,
        );
        match cached_globals.entry(index) {
            Entry::Occupied(entry) => Ok(entry.into_mut()),
            Entry::Vacant(entry) => {
                let global_type = wasm_module.globals[index];
                let global_value_type = global_type.ty;

                let global_mutability = global_type.mutability;
                let offset = if let Some(local_global_index) = wasm_module.local_global_index(index)
                {
                    offsets.vmctx_vmglobal_definition(local_global_index)
                } else {
                    offsets.vmctx_vmglobal_import(index)
                };
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let global_ptr = {
                    let global_ptr_ptr = unsafe {
                        err!(cache_builder.build_gep(
                            intrinsics.i8_ty,
                            ctx_ptr_value,
                            &[offset],
                            ""
                        ))
                    };
                    let global_ptr_ptr =
                        err!(cache_builder.build_bit_cast(global_ptr_ptr, intrinsics.ptr_ty, ""))
                            .into_pointer_value();
                    let global_ptr =
                        err!(cache_builder.build_load(intrinsics.ptr_ty, global_ptr_ptr, ""))
                            .into_pointer_value();
                    tbaa_label(
                        module,
                        intrinsics,
                        format!("global_ptr {}", index.as_u32()),
                        global_ptr.as_instruction_value().unwrap(),
                    );
                    global_ptr
                };
                let global_ptr = err!(cache_builder.build_bit_cast(
                    global_ptr,
                    type_to_llvm_ptr(intrinsics, global_value_type)?,
                    "",
                ))
                .into_pointer_value();

                let ret = entry.insert(match global_mutability {
                    Mutability::Const => {
                        let value = err!(cache_builder.build_load(
                            type_to_llvm(intrinsics, global_value_type)?,
                            global_ptr,
                            "",
                        ));
                        tbaa_label(
                            module,
                            intrinsics,
                            format!("global {}", index.as_u32()),
                            value.as_instruction_value().unwrap(),
                        );
                        GlobalCache::Const { value }
                    }
                    Mutability::Var => GlobalCache::Mut {
                        ptr_to_value: global_ptr,
                        value_type: type_to_llvm(intrinsics, global_value_type)?,
                    },
                });

                Ok(ret)
            }
        }
    }

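    /// Registers an already-resolved function in the cache; a duplicate index is a bug.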
    pub fn add_func(
        &mut self,
        function_index: FunctionIndex,
        func: PointerValue<'ctx>,
        llvm_func_type: FunctionType<'ctx>,
        vmctx: BasicValueEnum<'ctx>,
        attrs: &[(Attribute, AttributeLoc)],
    ) {
        match self.cached_functions.entry(function_index) {
            Entry::Occupied(_) => unreachable!("duplicate function"),
            Entry::Vacant(entry) => {
                entry.insert(FunctionCache {
                    func,
                    llvm_func_type,
                    vmctx,
                    attrs: attrs.to_vec(),
                });
            }
        }
    }

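    /// Declares (and caches) a function local to this module, applying the ABI's attributes.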
    #[allow(clippy::too_many_arguments)]
    pub fn local_func(
        &mut self,
        _local_function_index: LocalFunctionIndex,
        function_index: FunctionIndex,
        intrinsics: &Intrinsics<'ctx>,
        module: &Module<'ctx>,
        context: &'ctx Context,
        func_type: &FuncType,
        function_name: &str,
    ) -> Result<&FunctionCache<'ctx>, CompileError> {
        let (cached_functions, ctx_ptr_value, offsets) = (
            &mut self.cached_functions,
            &self.ctx_ptr_value,
            &self.offsets,
        );
        Ok(match cached_functions.entry(function_index) {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => {
                debug_assert!(module.get_function(function_name).is_none());
                let (llvm_func_type, llvm_func_attrs) = self.abi.func_type_to_llvm(
                    context,
                    intrinsics,
                    Some(offsets),
                    func_type,
                    if self.config.enable_g0m0_opt {
                        Some(crate::abi::G0M0FunctionKind::Local)
                    } else {
                        None
                    },
                )?;
                let func =
                    module.add_function(function_name, llvm_func_type, Some(Linkage::External));
                for (attr, attr_loc) in &llvm_func_attrs {
                    func.add_attribute(*attr_loc, *attr);
                }
                entry.insert(FunctionCache {
                    func: func.as_global_value().as_pointer_value(),
                    llvm_func_type,
                    vmctx: ctx_ptr_value.as_basic_value_enum(),
                    attrs: llvm_func_attrs,
                })
            }
        })
    }

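    /// Resolves (and caches) an imported function: loads its body and vmctx pointers from the
    /// `VMFunctionImport` record in the vmctx.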
    pub fn func(
        &mut self,
        function_index: FunctionIndex,
        intrinsics: &Intrinsics<'ctx>,
        context: &'ctx Context,
        func_type: &FuncType,
    ) -> Result<&FunctionCache<'ctx>, CompileError> {
        let (cached_functions, wasm_module, ctx_ptr_value, cache_builder, offsets) = (
            &mut self.cached_functions,
            self.wasm_module,
            &self.ctx_ptr_value,
            &self.cache_builder,
            &self.offsets,
        );
        match cached_functions.entry(function_index) {
            Entry::Occupied(entry) => Ok(entry.into_mut()),
            Entry::Vacant(entry) => {
                let (llvm_func_type, llvm_func_attrs) = self.abi.func_type_to_llvm(
                    context,
                    intrinsics,
                    Some(offsets),
                    func_type,
                    None,
                )?;
                debug_assert!(wasm_module.local_func_index(function_index).is_none());
                let offset = offsets.vmctx_vmfunction_import(function_index);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let vmfunction_import_ptr = unsafe {
                    err!(cache_builder.build_gep(intrinsics.i8_ty, *ctx_ptr_value, &[offset], ""))
                };
                let vmfunction_import_ptr = err!(cache_builder.build_bit_cast(
                    vmfunction_import_ptr,
                    intrinsics.ptr_ty,
                    "",
                ))
                .into_pointer_value();

                let body_ptr_ptr = err!(cache_builder.build_struct_gep(
                    intrinsics.vmfunction_import_ty,
                    vmfunction_import_ptr,
                    intrinsics.vmfunction_import_body_element,
                    "",
                ));
                let body_ptr = err!(cache_builder.build_load(intrinsics.ptr_ty, body_ptr_ptr, ""));
                let body_ptr = err!(cache_builder.build_bit_cast(body_ptr, intrinsics.ptr_ty, ""))
                    .into_pointer_value();
                let vmctx_ptr_ptr = err!(cache_builder.build_struct_gep(
                    intrinsics.vmfunction_import_ty,
                    vmfunction_import_ptr,
                    intrinsics.vmfunction_import_vmctx_element,
                    "",
                ));
                let vmctx_ptr =
                    err!(cache_builder.build_load(intrinsics.ptr_ty, vmctx_ptr_ptr, ""));

                Ok(entry.insert(FunctionCache {
                    func: body_ptr,
                    llvm_func_type,
                    vmctx: vmctx_ptr,
                    attrs: llvm_func_attrs,
                }))
            }
        }
    }

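    /// Loads (and caches) the builtin function pointer used for `memory.grow` on this memory.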
    pub fn memory_grow(
        &mut self,
        memory_index: MemoryIndex,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<PointerValue<'ctx>, CompileError> {
        let (cached_memory_op, wasm_module, offsets, cache_builder, ctx_ptr_value) = (
            &mut self.cached_memory_op,
            &self.wasm_module,
            &self.offsets,
            &self.cache_builder,
            &self.ctx_ptr_value,
        );
        match cached_memory_op.entry((memory_index, MemoryOp::Grow)) {
            Entry::Occupied(entry) => Ok(*entry.get()),
            Entry::Vacant(entry) => {
                let (grow_fn, grow_fn_ty) =
                    if wasm_module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory32_grow_index(),
                            intrinsics.ptr_ty,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory32_grow_index(),
                            intrinsics.ptr_ty,
                        )
                    };
                let offset = offsets.vmctx_builtin_function(grow_fn);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let grow_fn_ptr_ptr = unsafe {
                    err!(cache_builder.build_gep(intrinsics.i8_ty, *ctx_ptr_value, &[offset], ""))
                };

                let grow_fn_ptr_ptr =
                    err!(cache_builder.build_bit_cast(grow_fn_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();
                let val = err!(cache_builder.build_load(grow_fn_ty, grow_fn_ptr_ptr, ""))
                    .into_pointer_value();

                entry.insert(val);
                Ok(val)
            }
        }
    }

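    /// Loads (and caches) the builtin function pointers for `memory.size` and the atomic
    /// wait/notify operations; see the methods below.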
    pub fn memory_size(
        &mut self,
        memory_index: MemoryIndex,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<PointerValue<'ctx>, CompileError> {
        let (cached_memory_op, wasm_module, offsets, cache_builder, ctx_ptr_value) = (
            &mut self.cached_memory_op,
            &self.wasm_module,
            &self.offsets,
            &self.cache_builder,
            &self.ctx_ptr_value,
        );

        match cached_memory_op.entry((memory_index, MemoryOp::Size)) {
            Entry::Occupied(entry) => Ok(*entry.get()),
            Entry::Vacant(entry) => {
                let (size_fn, size_fn_ty) =
                    if wasm_module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory32_size_index(),
                            intrinsics.ptr_ty,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory32_size_index(),
                            intrinsics.ptr_ty,
                        )
                    };
                let offset = offsets.vmctx_builtin_function(size_fn);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let size_fn_ptr_ptr = unsafe {
                    err!(cache_builder.build_gep(intrinsics.i8_ty, *ctx_ptr_value, &[offset], ""))
                };

                let size_fn_ptr_ptr =
                    err!(cache_builder.build_bit_cast(size_fn_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                let val = err!(cache_builder.build_load(size_fn_ty, size_fn_ptr_ptr, ""))
                    .into_pointer_value();
                entry.insert(val);
                Ok(val)
            }
        }
    }

    pub fn memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<PointerValue<'ctx>, CompileError> {
        let (cached_memory_op, wasm_module, offsets, cache_builder, ctx_ptr_value) = (
            &mut self.cached_memory_op,
            &self.wasm_module,
            &self.offsets,
            &self.cache_builder,
            &self.ctx_ptr_value,
        );
        match cached_memory_op.entry((memory_index, MemoryOp::Wait32)) {
            Entry::Occupied(entry) => Ok(*entry.get()),
            Entry::Vacant(entry) => {
                let (size_fn, size_fn_ty) =
                    if wasm_module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
                            intrinsics.ptr_ty,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
                            intrinsics.ptr_ty,
                        )
                    };
                let offset = offsets.vmctx_builtin_function(size_fn);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let size_fn_ptr_ptr = unsafe {
                    err!(cache_builder.build_gep(intrinsics.i8_ty, *ctx_ptr_value, &[offset], ""))
                };

                let size_fn_ptr_ptr =
                    err!(cache_builder.build_bit_cast(size_fn_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                let val = err!(cache_builder.build_load(size_fn_ty, size_fn_ptr_ptr, ""))
                    .into_pointer_value();

                entry.insert(val);
                Ok(val)
            }
        }
    }

    pub fn memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<PointerValue<'ctx>, CompileError> {
        let (cached_memory_op, wasm_module, offsets, cache_builder, ctx_ptr_value) = (
            &mut self.cached_memory_op,
            &self.wasm_module,
            &self.offsets,
            &self.cache_builder,
            &self.ctx_ptr_value,
        );

        match cached_memory_op.entry((memory_index, MemoryOp::Wait64)) {
            Entry::Occupied(entry) => Ok(*entry.get()),
            Entry::Vacant(entry) => {
                let (size_fn, size_fn_ty) =
                    if wasm_module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
                            intrinsics.ptr_ty,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
                            intrinsics.ptr_ty,
                        )
                    };
                let offset = offsets.vmctx_builtin_function(size_fn);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let size_fn_ptr_ptr = unsafe {
                    err!(cache_builder.build_gep(intrinsics.i8_ty, *ctx_ptr_value, &[offset], ""))
                };

                let size_fn_ptr_ptr =
                    err!(cache_builder.build_bit_cast(size_fn_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                let val = err!(cache_builder.build_load(size_fn_ty, size_fn_ptr_ptr, ""))
                    .into_pointer_value();
                entry.insert(val);
                Ok(val)
            }
        }
    }

    pub fn memory_notify(
        &mut self,
        memory_index: MemoryIndex,
        intrinsics: &Intrinsics<'ctx>,
    ) -> Result<PointerValue<'ctx>, CompileError> {
        let (cached_memory_op, wasm_module, offsets, cache_builder, ctx_ptr_value) = (
            &mut self.cached_memory_op,
            &self.wasm_module,
            &self.offsets,
            &self.cache_builder,
            &self.ctx_ptr_value,
        );
        match cached_memory_op.entry((memory_index, MemoryOp::Notify)) {
            Entry::Occupied(entry) => Ok(*entry.get()),
            Entry::Vacant(entry) => {
                let (size_fn, size_fn_ty) =
                    if wasm_module.local_memory_index(memory_index).is_some() {
                        (
                            VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
                            intrinsics.ptr_ty,
                        )
                    } else {
                        (
                            VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
                            intrinsics.ptr_ty,
                        )
                    };
                let offset = offsets.vmctx_builtin_function(size_fn);
                let offset = intrinsics.i32_ty.const_int(offset.into(), false);
                let size_fn_ptr_ptr = unsafe {
                    err!(cache_builder.build_gep(intrinsics.i8_ty, *ctx_ptr_value, &[offset], ""))
                };

                let size_fn_ptr_ptr =
                    err!(cache_builder.build_bit_cast(size_fn_ptr_ptr, intrinsics.ptr_ty, ""))
                        .into_pointer_value();

                let val = err!(cache_builder.build_load(size_fn_ty, size_fn_ptr_ptr, ""))
                    .into_pointer_value();

                entry.insert(val);
                Ok(val)
            }
        }
    }

    pub fn get_offsets(&self) -> &VMOffsets {
        &self.offsets
    }
}

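/// Attaches TBAA (type-based alias analysis) metadata to `instruction`, creating the
/// module-level `wasmer_tbaa_root` and the per-label nodes on first use.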
pub fn tbaa_label<'ctx>(
    module: &Module<'ctx>,
    intrinsics: &Intrinsics<'ctx>,
    label: String,
    instruction: InstructionValue<'ctx>,
) {
    let context = module.get_context();

    let tbaa_root = module
        .get_global_metadata("wasmer_tbaa_root")
        .pop()
        .unwrap_or_else(|| {
            module
                .add_global_metadata("wasmer_tbaa_root", &context.metadata_node(&[]))
                .unwrap();
            module.get_global_metadata("wasmer_tbaa_root")[0]
        });

    let type_label = context.metadata_string(label.as_str());
    let type_tbaa = module
        .get_global_metadata(label.as_str())
        .pop()
        .unwrap_or_else(|| {
            module
                .add_global_metadata(
                    label.as_str(),
                    &context.metadata_node(&[type_label.into(), tbaa_root.into()]),
                )
                .unwrap();
            module.get_global_metadata(label.as_str())[0]
        });

    let label = label + "_memop";
    let type_tbaa = module
        .get_global_metadata(label.as_str())
        .pop()
        .unwrap_or_else(|| {
            module
                .add_global_metadata(
                    label.as_str(),
                    &context.metadata_node(&[
                        type_tbaa.into(),
                        type_tbaa.into(),
                        intrinsics.i64_zero.into(),
                    ]),
                )
                .unwrap();
            module.get_global_metadata(label.as_str())[0]
        });

    let tbaa_kind = context.get_kind_id("tbaa");
    instruction.set_metadata(type_tbaa, tbaa_kind).unwrap();
}

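/// Returns whether the table can grow at runtime (no maximum, or a maximum above the minimum);
/// `None` if the index is out of bounds.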
fn is_table_growable(module: &WasmerCompilerModule, index: TableIndex) -> Option<bool> {
    let table = module.tables.get(index)?;
    match table.maximum {
        None => Some(true),
        Some(max) => Some(max > table.minimum),
    }
}