wasmer_compiler_cranelift/
func_environ.rs

// This file contains code from external sources.
// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md

use crate::{
    HashMap,
    heap::{Heap, HeapData, HeapStyle},
    table::{TableData, TableSize},
    translator::{
        EXN_REF_TYPE, FuncEnvironment as BaseFuncEnvironment, GlobalVariable, LandingPad, TAG_TYPE,
        TargetEnvironment,
    },
};
use cranelift_codegen::{
    cursor::FuncCursor,
    ir::{
        self, AbiParam, ArgumentPurpose, BlockArg, Endianness, ExceptionTableData,
        ExceptionTableItem, ExceptionTag, Function, InstBuilder, MemFlags, Signature,
        UserExternalName,
        condcodes::IntCC,
        immediates::{Offset32, Uimm64},
        types::*,
    },
    isa::TargetFrontendConfig,
};
use cranelift_frontend::FunctionBuilder;
use smallvec::SmallVec;
use std::convert::TryFrom;
use wasmer_compiler::wasmparser::HeapType;
use wasmer_types::{
    FunctionIndex, FunctionType, GlobalIndex, LocalFunctionIndex, MemoryIndex, MemoryStyle,
    ModuleInfo, SignatureIndex, TableIndex, TableStyle, TagIndex, Type as WasmerType,
    VMBuiltinFunctionIndex, VMOffsets, WasmError, WasmResult,
    entity::{EntityRef, PrimaryMap, SecondaryMap},
};

/// Compute an `ir::ExternalName` for a given wasm function index.
pub fn get_function_name(func: &mut Function, func_index: FunctionIndex) -> ir::ExternalName {
    ir::ExternalName::user(
        func.params
            .ensure_user_func_name(UserExternalName::new(0, func_index.as_u32())),
    )
}
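// Editorial sketch (not from the original source): namespace 0 of the
// `UserExternalName` carries the module-level wasm function index. A caller
// that wants to reference a function for a direct call might do something
// like the following, where `callee_index` and `callee_signature` are
// hypothetical values supplied by the caller:
//
//     let name = get_function_name(&mut func, callee_index);
//     let sig = func.import_signature(callee_signature);
//     let callee = func.import_function(ir::ExtFuncData {
//         name,
//         signature: sig,
//         colocated: true,
//     });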

/// The type of the `current_elements` field.
#[allow(unused)]
pub fn type_of_vmtable_definition_current_elements(vmoffsets: &VMOffsets) -> ir::Type {
    ir::Type::int(u16::from(vmoffsets.size_of_vmtable_definition_current_elements()) * 8).unwrap()
}
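// Worked example (editorial, not from the original source): if
// `size_of_vmtable_definition_current_elements()` returns 4, the expression
// above evaluates to `ir::Type::int(32)`, i.e. `I32`; an 8-byte field would
// likewise yield `I64`.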

#[derive(Clone)]
struct ExceptionFieldLayout {
    offset: u32,
    ty: ir::Type,
}

#[derive(Clone)]
struct ExceptionTypeLayout {
    fields: SmallVec<[ExceptionFieldLayout; 4]>,
}

/// The `FuncEnvironment` implementation for use by the `ModuleEnvironment`.
pub struct FuncEnvironment<'module_environment> {
    /// Target-specified configuration.
    target_config: TargetFrontendConfig,

    /// The module-level environment which this function-level environment belongs to.
    module: &'module_environment ModuleInfo,

    /// A stack tracking the type of local variables.
    type_stack: Vec<WasmerType>,

    /// The module function signatures
    signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,

    /// Heaps implementing WebAssembly linear memories.
    heaps: PrimaryMap<Heap, HeapData>,

    /// The Cranelift global holding the vmctx address.
    vmctx: Option<ir::GlobalValue>,

    /// The external function signature for implementing wasm's `memory.size`
    /// for locally-defined 32-bit memories.
    memory32_size_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.size`
    /// for locally-defined tables.
    table_size_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.grow`
    /// for locally-defined memories.
    memory_grow_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.grow`
    /// for locally-defined tables.
    table_grow_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.copy`
    /// (it's the same for both local and imported tables).
    table_copy_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.init`.
    table_init_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `elem.drop`.
    elem_drop_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.copy`
    /// (it's the same for both local and imported memories).
    memory_copy_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.fill`
    /// (it's the same for both local and imported memories).
    memory_fill_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory.init`.
    memory_init_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `data.drop`.
    data_drop_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.get`.
    table_get_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.set`.
    table_set_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `ref.func`.
    func_ref_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `table.fill`.
    table_fill_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory32.atomic.wait32`.
    memory32_atomic_wait32_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory32.atomic.wait64`.
    memory32_atomic_wait64_sig: Option<ir::SigRef>,

    /// The external function signature for implementing wasm's `memory32.atomic.notify`.
    memory32_atomic_notify_sig: Option<ir::SigRef>,

    /// Cached signatures for exception helper builtins.
    personality2_sig: Option<ir::SigRef>,
    throw_sig: Option<ir::SigRef>,
    alloc_exception_sig: Option<ir::SigRef>,
    read_exception_sig: Option<ir::SigRef>,
    read_exnref_sig: Option<ir::SigRef>,

    /// Cached payload layouts for exception tags.
    exception_type_layouts: HashMap<u32, ExceptionTypeLayout>,

    /// Offsets to struct fields accessed by JIT code.
    offsets: VMOffsets,

    /// The memory styles
    memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,

    /// Cranelift tables we have created to implement Wasm tables.
    tables: SecondaryMap<TableIndex, Option<TableData>>,

    table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
}
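
// Editorial note (not from the original source): the many `Option<ir::SigRef>`
// fields above are per-function caches. Each `get_*_sig` helper below imports
// the corresponding builtin signature into the current `ir::Function` on
// first use and then reuses the cached `SigRef`, so a function that never
// touches, say, `table.grow` pays nothing for it.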

impl<'module_environment> FuncEnvironment<'module_environment> {
    pub fn new(
        target_config: TargetFrontendConfig,
        module: &'module_environment ModuleInfo,
        signatures: &'module_environment PrimaryMap<SignatureIndex, ir::Signature>,
        memory_styles: &'module_environment PrimaryMap<MemoryIndex, MemoryStyle>,
        table_styles: &'module_environment PrimaryMap<TableIndex, TableStyle>,
    ) -> Self {
        Self {
            target_config,
            module,
            signatures,
            type_stack: vec![],
            heaps: PrimaryMap::new(),
            vmctx: None,
            memory32_size_sig: None,
            table_size_sig: None,
            memory_grow_sig: None,
            table_grow_sig: None,
            table_copy_sig: None,
            table_init_sig: None,
            elem_drop_sig: None,
            memory_copy_sig: None,
            memory_fill_sig: None,
            memory_init_sig: None,
            table_get_sig: None,
            table_set_sig: None,
            data_drop_sig: None,
            func_ref_sig: None,
            table_fill_sig: None,
            memory32_atomic_wait32_sig: None,
            memory32_atomic_wait64_sig: None,
            memory32_atomic_notify_sig: None,
            personality2_sig: None,
            throw_sig: None,
            alloc_exception_sig: None,
            read_exception_sig: None,
            read_exnref_sig: None,
            exception_type_layouts: HashMap::new(),
            offsets: VMOffsets::new(target_config.pointer_bytes(), module),
            memory_styles,
            tables: Default::default(),
            table_styles,
        }
    }

    fn pointer_type(&self) -> ir::Type {
        self.target_config.pointer_type()
    }

    fn ensure_table_exists(&mut self, func: &mut ir::Function, index: TableIndex) {
        if self.tables[index].is_some() {
            return;
        }

        let pointer_type = self.pointer_type();

        let (ptr, base_offset, current_elements_offset) = {
            let vmctx = self.vmctx(func);
            if let Some(def_index) = self.module.local_table_index(index) {
                let base_offset =
                    i32::try_from(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
                let current_elements_offset = i32::try_from(
                    self.offsets
                        .vmctx_vmtable_definition_current_elements(def_index),
                )
                .unwrap();
                (vmctx, base_offset, current_elements_offset)
            } else {
                let from_offset = self.offsets.vmctx_vmtable_import(index);
                let table = func.create_global_value(ir::GlobalValueData::Load {
                    base: vmctx,
                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
                    global_type: pointer_type,
                    flags: MemFlags::trusted().with_readonly(),
                });
                let base_offset = i32::from(self.offsets.vmtable_definition_base());
                let current_elements_offset =
                    i32::from(self.offsets.vmtable_definition_current_elements());
                (table, base_offset, current_elements_offset)
            }
        };

        let table = &self.module.tables[index];
        let element_size = self.reference_type().bytes();

        let base_gv = func.create_global_value(ir::GlobalValueData::Load {
            base: ptr,
            offset: Offset32::new(base_offset),
            global_type: pointer_type,
            flags: if Some(table.minimum) == table.maximum {
                // A fixed-size table can't be resized so its base address won't
                // change.
                MemFlags::trusted().with_readonly()
            } else {
                MemFlags::trusted()
            },
        });

        let bound = if Some(table.minimum) == table.maximum {
            TableSize::Static {
                bound: table.minimum,
            }
        } else {
            TableSize::Dynamic {
                bound_gv: func.create_global_value(ir::GlobalValueData::Load {
                    base: ptr,
                    offset: Offset32::new(current_elements_offset),
                    global_type: ir::Type::int(
                        u16::from(self.offsets.size_of_vmtable_definition_current_elements()) * 8,
                    )
                    .unwrap(),
                    flags: MemFlags::trusted(),
                }),
            }
        };

        self.tables[index] = Some(TableData {
            base_gv,
            bound,
            element_size,
        });
    }
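
    // Editorial note (not from the original source): `ensure_table_exists`
    // resolves a table in two steps. For a locally-defined table the base and
    // `current_elements` fields live directly in the `VMContext`, so `ptr` is
    // the vmctx global itself; for an imported table an extra readonly load
    // fetches the table definition pointer first. A table whose minimum equals
    // its maximum can never grow, which is why its bound becomes
    // `TableSize::Static` and its base load is marked readonly, while a
    // growable table re-reads `current_elements` through a dynamic bound
    // global value.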

    fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue {
        self.vmctx.unwrap_or_else(|| {
            let vmctx = func.create_global_value(ir::GlobalValueData::VMContext);
            self.vmctx = Some(vmctx);
            vmctx
        })
    }

    fn get_table_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_fill_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // table index
                    AbiParam::new(I32),
                    // dst
                    AbiParam::new(I32),
                    // value
                    AbiParam::new(self.reference_type()),
                    // len
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_fill_sig = Some(sig);
        sig
    }

    fn get_table_fill_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        (
            self.get_table_fill_sig(func),
            table_index.index(),
            VMBuiltinFunctionIndex::get_table_fill_index(),
        )
    }
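
    // Editorial note (not from the original source): every `get_*_func` helper
    // in this file returns the same shape of triple: the imported `SigRef`, the
    // index argument to pass as an immediate, and the builtin's slot in
    // `VMBuiltinFunctionsArray`. A minimal sketch of the calling pattern, as
    // used by `translate_table_fill` further down (argument names follow that
    // method):
    //
    //     let (sig, index_arg, builtin) = self.get_table_fill_func(pos.func, table_index);
    //     let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, builtin);
    //     let index_arg = pos.ins().iconst(I32, index_arg as i64);
    //     pos.ins().call_indirect(sig, func_addr, &[vmctx, index_arg, dst, val, len]);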

    fn get_func_ref_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.func_ref_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(self.reference_type())],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.func_ref_sig = Some(sig);
        sig
    }

    fn get_func_ref_func(
        &mut self,
        func: &mut Function,
        function_index: FunctionIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        (
            self.get_func_ref_sig(func),
            function_index.index(),
            VMBuiltinFunctionIndex::get_func_ref_index(),
        )
    }

    fn get_table_get_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_get_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(self.reference_type())],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_get_sig = Some(sig);
        sig
    }

    fn get_table_get_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(table_index) {
            (
                self.get_table_get_sig(func),
                table_index.index(),
                VMBuiltinFunctionIndex::get_imported_table_get_index(),
            )
        } else {
            (
                self.get_table_get_sig(func),
                self.module.local_table_index(table_index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_get_index(),
            )
        }
    }

    fn get_table_set_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_set_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                    AbiParam::new(I32),
                    AbiParam::new(self.reference_type()),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_set_sig = Some(sig);
        sig
    }

    fn get_table_set_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(table_index) {
            (
                self.get_table_set_sig(func),
                table_index.index(),
                VMBuiltinFunctionIndex::get_imported_table_set_index(),
            )
        } else {
            (
                self.get_table_set_sig(func),
                self.module.local_table_index(table_index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_set_index(),
            )
        }
    }

    fn get_table_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_grow_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // TODO: figure out what the representation of a Wasm value is
                    AbiParam::new(self.reference_type()),
                    AbiParam::new(I32),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_grow_sig = Some(sig);
        sig
    }

    /// Return the table.grow function signature to call for the given index, along with the
    /// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
    fn get_table_grow_func(
        &mut self,
        func: &mut Function,
        index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(index) {
            (
                self.get_table_grow_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_table_grow_index(),
            )
        } else {
            (
                self.get_table_grow_sig(func),
                self.module.local_table_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_grow_index(),
            )
        }
    }
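
    // Editorial note (not from the original source): the imported/local split
    // above recurs in most `get_*_func` helpers. An imported table keeps its
    // module-level `TableIndex` and targets the `imported_*` builtin, while a
    // locally-defined table is renumbered to its local index and targets the
    // plain builtin; memories follow the same convention via
    // `local_memory_index`.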

    fn get_memory_grow_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_grow_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_grow_sig = Some(sig);
        sig
    }

    /// Return the memory.grow function signature to call for the given index, along with the
    /// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
    fn get_memory_grow_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory_grow_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory32_grow_index(),
            )
        } else {
            (
                self.get_memory_grow_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory32_grow_index(),
            )
        }
    }

    fn get_table_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_size_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_size_sig = Some(sig);
        sig
    }

    /// Return the table.size function signature to call for the given index, along with the
    /// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
    fn get_table_size_func(
        &mut self,
        func: &mut Function,
        index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_table(index) {
            (
                self.get_table_size_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_table_size_index(),
            )
        } else {
            (
                self.get_table_size_sig(func),
                self.module.local_table_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_table_size_index(),
            )
        }
    }

    fn get_memory32_size_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_size_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_size_sig = Some(sig);
        sig
    }

    /// Return the memory.size function signature to call for the given index, along with the
    /// translated index value to pass to it and its index in `VMBuiltinFunctionsArray`.
    fn get_memory_size_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_size_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory32_size_index(),
            )
        } else {
            (
                self.get_memory32_size_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory32_size_index(),
            )
        }
    }

    fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_copy_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Destination table index.
                    AbiParam::new(I32),
                    // Source table index.
                    AbiParam::new(I32),
                    // Index within destination table.
                    AbiParam::new(I32),
                    // Index within source table.
                    AbiParam::new(I32),
                    // Number of elements to copy.
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_copy_sig = Some(sig);
        sig
    }

    fn get_table_copy_func(
        &mut self,
        func: &mut Function,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
    ) -> (ir::SigRef, usize, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_table_copy_sig(func);
        (
            sig,
            dst_table_index.as_u32() as usize,
            src_table_index.as_u32() as usize,
            VMBuiltinFunctionIndex::get_table_copy_index(),
        )
    }

    fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.table_init_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Table index.
                    AbiParam::new(I32),
                    // Segment index.
                    AbiParam::new(I32),
                    // Destination index within table.
                    AbiParam::new(I32),
                    // Source index within segment.
                    AbiParam::new(I32),
                    // Number of elements to initialize.
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.table_init_sig = Some(sig);
        sig
    }

    fn get_table_init_func(
        &mut self,
        func: &mut Function,
        table_index: TableIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_table_init_sig(func);
        let table_index = table_index.as_u32() as usize;
        (
            sig,
            table_index,
            VMBuiltinFunctionIndex::get_table_init_index(),
        )
    }

    fn get_elem_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.elem_drop_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Element index.
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.elem_drop_sig = Some(sig);
        sig
    }

    fn get_elem_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.get_elem_drop_sig(func);
        (sig, VMBuiltinFunctionIndex::get_elem_drop_index())
    }

    fn get_memory_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_copy_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Memory index.
                    AbiParam::new(I32),
                    // Destination address.
                    AbiParam::new(I32),
                    // Source address.
                    AbiParam::new(I32),
                    // Length.
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_copy_sig = Some(sig);
        sig
    }

    fn get_memory_copy_func(
        &mut self,
        func: &mut Function,
        memory_index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_memory_copy_sig(func);
        if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
            (
                sig,
                local_memory_index.index(),
                VMBuiltinFunctionIndex::get_memory_copy_index(),
            )
        } else {
            (
                sig,
                memory_index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_copy_index(),
            )
        }
    }

    fn get_memory_fill_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_fill_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Memory index.
                    AbiParam::new(I32),
                    // Destination address.
                    AbiParam::new(I32),
                    // Value.
                    AbiParam::new(I32),
                    // Length.
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_fill_sig = Some(sig);
        sig
    }

    fn get_memory_fill_func(
        &mut self,
        func: &mut Function,
        memory_index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        let sig = self.get_memory_fill_sig(func);
        if let Some(local_memory_index) = self.module.local_memory_index(memory_index) {
            (
                sig,
                local_memory_index.index(),
                VMBuiltinFunctionIndex::get_memory_fill_index(),
            )
        } else {
            (
                sig,
                memory_index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_fill_index(),
            )
        }
    }

    fn get_memory_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory_init_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Memory index.
                    AbiParam::new(I32),
                    // Data index.
                    AbiParam::new(I32),
                    // Destination address.
                    AbiParam::new(I32),
                    // Source index within the data segment.
                    AbiParam::new(I32),
                    // Length.
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory_init_sig = Some(sig);
        sig
    }

    fn get_memory_init_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.get_memory_init_sig(func);
        (sig, VMBuiltinFunctionIndex::get_memory_init_index())
    }

    fn get_data_drop_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.data_drop_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Data index.
                    AbiParam::new(I32),
                ],
                returns: vec![],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.data_drop_sig = Some(sig);
        sig
    }

    fn get_data_drop_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.get_data_drop_sig(func);
        (sig, VMBuiltinFunctionIndex::get_data_drop_index())
    }

    fn get_memory32_atomic_wait32_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_atomic_wait32_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Memory Index
                    AbiParam::new(I32),
                    // Dst
                    AbiParam::new(I32),
                    // Val
                    AbiParam::new(I32),
                    // Timeout
                    AbiParam::new(I64),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_atomic_wait32_sig = Some(sig);
        sig
    }

    /// Return the memory.atomic.wait32 function signature to call for the given index,
    /// along with the translated index value to pass to it
    /// and its index in `VMBuiltinFunctionsArray`.
    fn get_memory_atomic_wait32_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_atomic_wait32_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index(),
            )
        } else {
            (
                self.get_memory32_atomic_wait32_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory_atomic_wait32_index(),
            )
        }
    }

    fn get_memory32_atomic_wait64_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_atomic_wait64_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Memory Index
                    AbiParam::new(I32),
                    // Dst
                    AbiParam::new(I32),
                    // Val
                    AbiParam::new(I64),
                    // Timeout
                    AbiParam::new(I64),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_atomic_wait64_sig = Some(sig);
        sig
    }

    /// Return the memory.atomic.wait64 function signature to call for the given index,
    /// along with the translated index value to pass to it
    /// and its index in `VMBuiltinFunctionsArray`.
    fn get_memory_atomic_wait64_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_atomic_wait64_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index(),
            )
        } else {
            (
                self.get_memory32_atomic_wait64_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory_atomic_wait64_index(),
            )
        }
    }

    fn get_memory32_atomic_notify_sig(&mut self, func: &mut Function) -> ir::SigRef {
        let sig = self.memory32_atomic_notify_sig.unwrap_or_else(|| {
            func.import_signature(Signature {
                params: vec![
                    AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext),
                    // Memory Index
                    AbiParam::new(I32),
                    // Dst
                    AbiParam::new(I32),
                    // Count
                    AbiParam::new(I32),
                ],
                returns: vec![AbiParam::new(I32)],
                call_conv: self.target_config.default_call_conv,
            })
        });
        self.memory32_atomic_notify_sig = Some(sig);
        sig
    }

    /// Return the memory.atomic.notify function signature to call for the given index,
    /// along with the translated index value to pass to it
    /// and its index in `VMBuiltinFunctionsArray`.
    fn get_memory_atomic_notify_func(
        &mut self,
        func: &mut Function,
        index: MemoryIndex,
    ) -> (ir::SigRef, usize, VMBuiltinFunctionIndex) {
        if self.module.is_imported_memory(index) {
            (
                self.get_memory32_atomic_notify_sig(func),
                index.index(),
                VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index(),
            )
        } else {
            (
                self.get_memory32_atomic_notify_sig(func),
                self.module.local_memory_index(index).unwrap().index(),
                VMBuiltinFunctionIndex::get_memory_atomic_notify_index(),
            )
        }
    }

    fn get_personality2_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.personality2_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::new(self.pointer_type()));
            signature.params.push(AbiParam::new(self.pointer_type()));
            signature.returns.push(AbiParam::new(TAG_TYPE));
            let sig = func.import_signature(signature);
            self.personality2_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_personality2_index(),
        )
    }

    fn get_throw_func(&mut self, func: &mut Function) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.throw_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::special(
                self.pointer_type(),
                ArgumentPurpose::VMContext,
            ));
            signature.params.push(AbiParam::new(EXN_REF_TYPE));
            let sig = func.import_signature(signature);
            self.throw_sig = Some(sig);
            sig
        });
        (sig, VMBuiltinFunctionIndex::get_imported_throw_index())
    }

    fn get_alloc_exception_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.alloc_exception_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::special(
                self.pointer_type(),
                ArgumentPurpose::VMContext,
            ));
            signature.params.push(AbiParam::new(TAG_TYPE));
            signature.returns.push(AbiParam::new(EXN_REF_TYPE));
            let sig = func.import_signature(signature);
            self.alloc_exception_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_alloc_exception_index(),
        )
    }

    fn get_read_exnref_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.read_exnref_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::special(
                self.pointer_type(),
                ArgumentPurpose::VMContext,
            ));
            signature.params.push(AbiParam::new(EXN_REF_TYPE));
            signature.returns.push(AbiParam::new(self.pointer_type()));
            let sig = func.import_signature(signature);
            self.read_exnref_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_read_exnref_index(),
        )
    }

    fn get_read_exception_func(
        &mut self,
        func: &mut Function,
    ) -> (ir::SigRef, VMBuiltinFunctionIndex) {
        let sig = self.read_exception_sig.unwrap_or_else(|| {
            let mut signature = Signature::new(self.target_config.default_call_conv);
            signature.params.push(AbiParam::new(self.pointer_type()));
            signature.returns.push(AbiParam::new(EXN_REF_TYPE));
            let sig = func.import_signature(signature);
            self.read_exception_sig = Some(sig);
            sig
        });
        (
            sig,
            VMBuiltinFunctionIndex::get_imported_exception_into_exnref_index(),
        )
    }

    fn exception_type_layout(&mut self, tag_index: TagIndex) -> WasmResult<&ExceptionTypeLayout> {
        let key = tag_index.as_u32();
        if !self.exception_type_layouts.contains_key(&key) {
            let layout = self.compute_exception_type_layout(tag_index)?;
            self.exception_type_layouts.insert(key, layout);
        }
        Ok(self.exception_type_layouts.get(&key).unwrap())
    }

    fn compute_exception_type_layout(
        &self,
        tag_index: TagIndex,
    ) -> WasmResult<ExceptionTypeLayout> {
        let sig_index = self.module.tags[tag_index];
        let func_type = &self.module.signatures[sig_index];
        let mut offset = 0u32;
        let mut max_align = 1u32;
        let mut fields = SmallVec::<[ExceptionFieldLayout; 4]>::new();

        for wasm_ty in func_type.params() {
            let ir_ty = self.map_wasmer_type_to_ir(*wasm_ty)?;
            let field_size = ir_ty.bytes();
            let align = field_size.max(1);
            max_align = max_align.max(align);
            offset = offset.next_multiple_of(align);
            fields.push(ExceptionFieldLayout { offset, ty: ir_ty });
            offset = offset
                .checked_add(field_size)
                .ok_or_else(|| WasmError::Unsupported("exception payload too large".to_string()))?;
        }

        Ok(ExceptionTypeLayout { fields })
    }
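
    // Worked example (editorial, not from the original source): for a tag whose
    // signature parameters are (i32, i64, f32), the loop above produces field
    // offsets 0, 8 and 16. Each field is aligned to its own size, so the i64 is
    // pushed from offset 4 up to 8, and the running offset ends at 20. Note
    // that `max_align` is computed but only the per-field offsets and types are
    // stored in the returned layout.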

    fn map_wasmer_type_to_ir(&self, ty: WasmerType) -> WasmResult<ir::Type> {
        Ok(match ty {
            WasmerType::I32 => ir::types::I32,
            WasmerType::I64 => ir::types::I64,
            WasmerType::F32 => ir::types::F32,
            WasmerType::F64 => ir::types::F64,
            WasmerType::V128 => ir::types::I8X16,
            WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
                self.reference_type()
            }
        })
    }

    fn call_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        callee: ir::FuncRef,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig_ref]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        if landing_pad.is_none() {
            let inst = builder.ins().call(callee, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        let continuation = builder.create_block();
        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }
        let etd = ExceptionTableData::new(sig_ref, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call(callee, args, et);
        builder.switch_to_block(continuation);
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }
        result_values
    }
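
    // Editorial note (not from the original source): when a landing pad is
    // present, `call_with_handlers` lowers the call to `try_call`. The normal
    // return edge branches to the freshly created continuation block, whose
    // parameters are wired to the call's results via `BlockArg::TryCallRet(i)`;
    // each exception clause branches to the landing-pad block and forwards the
    // two exception payload values via `BlockArg::TryCallExn(0)` and
    // `BlockArg::TryCallExn(1)`. A clause with `wasm_tag == None` becomes the
    // catch-all `ExceptionTableItem::Default` entry.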

    #[allow(clippy::too_many_arguments)]
    fn call_indirect_with_handlers(
        &mut self,
        builder: &mut FunctionBuilder,
        sig: ir::SigRef,
        func_addr: ir::Value,
        args: &[ir::Value],
        context: Option<ir::Value>,
        landing_pad: Option<LandingPad>,
        unreachable_on_return: bool,
    ) -> SmallVec<[ir::Value; 4]> {
        let return_types: SmallVec<[ir::Type; 4]> = builder.func.dfg.signatures[sig]
            .returns
            .iter()
            .map(|ret| ret.value_type)
            .collect();

        if landing_pad.is_none() {
            let inst = builder.ins().call_indirect(sig, func_addr, args);
            let results: SmallVec<[ir::Value; 4]> =
                builder.inst_results(inst).iter().copied().collect();
            if unreachable_on_return {
                builder.ins().trap(crate::TRAP_UNREACHABLE);
            }
            return results;
        }

        let continuation = builder.create_block();
        let current_block = builder.current_block().expect("current block");
        builder.insert_block_after(continuation, current_block);

        let mut normal_args = SmallVec::<[BlockArg; 4]>::with_capacity(return_types.len());
        let mut result_values = SmallVec::<[ir::Value; 4]>::with_capacity(return_types.len());
        for (i, ty) in return_types.iter().enumerate() {
            let val = builder.append_block_param(continuation, *ty);
            result_values.push(val);
            normal_args.push(BlockArg::TryCallRet(u32::try_from(i).unwrap()));
        }
        let continuation_call = builder
            .func
            .dfg
            .block_call(continuation, normal_args.iter());

        let mut table_items = Vec::new();
        if let Some(ctx) = context {
            table_items.push(ExceptionTableItem::Context(ctx));
        }
        if let Some(landing_pad) = landing_pad {
            for tag in landing_pad.clauses {
                let block_call = builder.func.dfg.block_call(
                    landing_pad.block,
                    &[BlockArg::TryCallExn(0), BlockArg::TryCallExn(1)],
                );
                table_items.push(match tag.wasm_tag {
                    Some(tag) => ExceptionTableItem::Tag(ExceptionTag::from_u32(tag), block_call),
                    None => ExceptionTableItem::Default(block_call),
                });
            }
        }

        let etd = ExceptionTableData::new(sig, continuation_call, table_items);
        let et = builder.func.dfg.exception_tables.push(etd);
        builder.ins().try_call_indirect(func_addr, args, et);
        builder.switch_to_block(continuation);
        builder.seal_block(continuation);
        if unreachable_on_return {
            builder.ins().trap(crate::TRAP_UNREACHABLE);
        }

        result_values
    }

    /// Translates a load of a builtin function's address and returns a pair of
    /// values: the `vmctx` base and the loaded function address.
    fn translate_load_builtin_function_address(
        &mut self,
        pos: &mut FuncCursor<'_>,
        callee_func_idx: VMBuiltinFunctionIndex,
    ) -> (ir::Value, ir::Value) {
        // We use an indirect call so that we don't have to patch the code at runtime.
        let pointer_type = self.pointer_type();
        let vmctx = self.vmctx(pos.func);
        let base = pos.ins().global_value(pointer_type, vmctx);

        let mut mem_flags = ir::MemFlags::trusted();
        mem_flags.set_readonly();

        // Load the callee address.
        let body_offset =
            i32::try_from(self.offsets.vmctx_builtin_function(callee_func_idx)).unwrap();
        let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);

        (base, func_addr)
    }
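
    // Editorial note (not from the original source): the load above is marked
    // `trusted` and `readonly` because the builtin-function table embedded in
    // the `VMContext` is not expected to change while the function runs, which
    // lets Cranelift freely reorder or deduplicate these address loads. The
    // returned `(vmctx, func_addr)` pair feeds directly into the
    // `call_indirect` sequences used by the `translate_*` methods below.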

    fn get_or_init_funcref_table_elem(
        &mut self,
        builder: &mut FunctionBuilder,
        table_index: TableIndex,
        index: ir::Value,
    ) -> ir::Value {
        let pointer_type = self.pointer_type();
        self.ensure_table_exists(builder.func, table_index);
        let table_data = self.tables[table_index].as_ref().unwrap();

        // To support lazy initialization of table
        // contents, we check for a null entry here, and
        // if null, we take a slow-path that invokes a
        // libcall.
        let (table_entry_addr, flags) =
            table_data.prepare_table_addr(builder, index, pointer_type, false);
        builder.ins().load(pointer_type, flags, table_entry_addr, 0)
    }
}

impl TargetEnvironment for FuncEnvironment<'_> {
    fn target_config(&self) -> TargetFrontendConfig {
        self.target_config
    }
}

impl BaseFuncEnvironment for FuncEnvironment<'_> {
    fn is_wasm_parameter(&self, _signature: &ir::Signature, index: usize) -> bool {
        // The first parameter is the vmctx. The rest are the wasm parameters.
        index >= 1
    }
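
    // Worked example (editorial, not from the original source): for a wasm
    // function of type (i32, i64) -> i32, the Cranelift signature built in this
    // crate is (vmctx, i32, i64) -> i32, so `is_wasm_parameter` reports false
    // for index 0 (the vmctx) and true for indices 1 and 2.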
1287
1288    fn translate_table_grow(
1289        &mut self,
1290        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
1291        table_index: TableIndex,
1292        delta: ir::Value,
1293        init_value: ir::Value,
1294    ) -> WasmResult<ir::Value> {
1295        self.ensure_table_exists(pos.func, table_index);
1296        let (func_sig, index_arg, func_idx) = self.get_table_grow_func(pos.func, table_index);
1297        let table_index = pos.ins().iconst(I32, index_arg as i64);
1298        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1299        let call_inst = pos.ins().call_indirect(
1300            func_sig,
1301            func_addr,
1302            &[vmctx, init_value, delta, table_index],
1303        );
1304        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1305    }
1306
1307    fn translate_table_get(
1308        &mut self,
1309        builder: &mut FunctionBuilder,
1310        table_index: TableIndex,
1311        index: ir::Value,
1312    ) -> WasmResult<ir::Value> {
1313        self.ensure_table_exists(builder.func, table_index);
1314        let mut pos = builder.cursor();
1315
1316        let (func_sig, table_index_arg, func_idx) = self.get_table_get_func(pos.func, table_index);
1317        let table_index = pos.ins().iconst(I32, table_index_arg as i64);
1318        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1319        let call_inst = pos
1320            .ins()
1321            .call_indirect(func_sig, func_addr, &[vmctx, table_index, index]);
1322        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1323    }
1324
1325    fn translate_table_set(
1326        &mut self,
1327        builder: &mut FunctionBuilder,
1328        table_index: TableIndex,
1329        value: ir::Value,
1330        index: ir::Value,
1331    ) -> WasmResult<()> {
1332        self.ensure_table_exists(builder.func, table_index);
1333        let mut pos = builder.cursor();
1334
1335        let (func_sig, table_index_arg, func_idx) = self.get_table_set_func(pos.func, table_index);
1336        let n_table_index = pos.ins().iconst(I32, table_index_arg as i64);
1337        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1338        pos.ins()
1339            .call_indirect(func_sig, func_addr, &[vmctx, n_table_index, index, value]);
1340        Ok(())
1341    }
1342
1343    fn translate_table_fill(
1344        &mut self,
1345        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
1346        table_index: TableIndex,
1347        dst: ir::Value,
1348        val: ir::Value,
1349        len: ir::Value,
1350    ) -> WasmResult<()> {
1351        self.ensure_table_exists(pos.func, table_index);
1352        let (func_sig, table_index_arg, func_idx) = self.get_table_fill_func(pos.func, table_index);
1353        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1354
1355        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
1356        pos.ins().call_indirect(
1357            func_sig,
1358            func_addr,
1359            &[vmctx, table_index_arg, dst, val, len],
1360        );
1361
1362        Ok(())
1363    }
1364
1365    fn translate_ref_null(
1366        &mut self,
1367        mut pos: cranelift_codegen::cursor::FuncCursor,
1368        ty: HeapType,
1369    ) -> WasmResult<ir::Value> {
1370        Ok(match ty {
1371            HeapType::Abstract { ty, .. } => match ty {
1372                wasmer_compiler::wasmparser::AbstractHeapType::Func
1373                | wasmer_compiler::wasmparser::AbstractHeapType::Extern => {
1374                    pos.ins().iconst(self.reference_type(), 0)
1375                }
1376                _ => {
1377                    return Err(WasmError::Unsupported(
1378                        "`ref.null T` that is not a `funcref` or an `externref`".into(),
1379                    ));
1380                }
1381            },
1382            HeapType::Concrete(_) => {
1383                return Err(WasmError::Unsupported(
1384                    "`ref.null T` that is not a `funcref` or an `externref`".into(),
1385                ));
1386            }
1387        })
1388    }
1389
1390    fn translate_ref_is_null(
1391        &mut self,
1392        mut pos: cranelift_codegen::cursor::FuncCursor,
1393        value: ir::Value,
1394    ) -> WasmResult<ir::Value> {
1395        let bool_is_null =
1396            pos.ins()
1397                .icmp_imm(cranelift_codegen::ir::condcodes::IntCC::Equal, value, 0);
1398        Ok(pos.ins().uextend(ir::types::I32, bool_is_null))
1399    }
1400
1401    fn translate_ref_func(
1402        &mut self,
1403        mut pos: cranelift_codegen::cursor::FuncCursor<'_>,
1404        func_index: FunctionIndex,
1405    ) -> WasmResult<ir::Value> {
1406        let (func_sig, func_index_arg, func_idx) = self.get_func_ref_func(pos.func, func_index);
1407        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1408
1409        let func_index_arg = pos.ins().iconst(I32, func_index_arg as i64);
1410        let call_inst = pos
1411            .ins()
1412            .call_indirect(func_sig, func_addr, &[vmctx, func_index_arg]);
1413
1414        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1415    }
1416
1417    fn translate_custom_global_get(
1418        &mut self,
1419        mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
1420        _index: GlobalIndex,
1421    ) -> WasmResult<ir::Value> {
1422        unreachable!("we don't make any custom globals")
1423    }
1424
1425    fn translate_custom_global_set(
1426        &mut self,
1427        mut _pos: cranelift_codegen::cursor::FuncCursor<'_>,
1428        _index: GlobalIndex,
1429        _value: ir::Value,
1430    ) -> WasmResult<()> {
1431        unreachable!("we don't make any custom globals")
1432    }
1433
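    // `make_heap` describes one linear memory to Cranelift as a `HeapData`:
    //
    //  * locally-defined memory: `base` and `current_length` are read straight out of
    //    the `VMMemoryDefinition` embedded in the vmctx;
    //  * imported memory: a pointer to the owning instance's `VMMemoryDefinition` is
    //    loaded first, and the same two fields are read through it.
    //
    // A `MemoryStyle::Dynamic` memory becomes a dynamic heap whose bound is reloaded
    // from `current_length` on every access, while a `MemoryStyle::Static` memory
    // becomes a static heap with a constant bound and a read-only base load (the
    // memory is reserved up front and never moves).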
1434    fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> WasmResult<Heap> {
1435        let pointer_type = self.pointer_type();
1436
1437        let (ptr, base_offset, current_length_offset) = {
1438            let vmctx = self.vmctx(func);
1439            if let Some(def_index) = self.module.local_memory_index(index) {
1440                let base_offset =
1441                    i32::try_from(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
1442                let current_length_offset = i32::try_from(
1443                    self.offsets
1444                        .vmctx_vmmemory_definition_current_length(def_index),
1445                )
1446                .unwrap();
1447                (vmctx, base_offset, current_length_offset)
1448            } else {
1449                let from_offset = self.offsets.vmctx_vmmemory_import_definition(index);
1450                let memory = func.create_global_value(ir::GlobalValueData::Load {
1451                    base: vmctx,
1452                    offset: Offset32::new(i32::try_from(from_offset).unwrap()),
1453                    global_type: pointer_type,
1454                    flags: ir::MemFlags::trusted().with_readonly(),
1455                });
1456                let base_offset = i32::from(self.offsets.vmmemory_definition_base());
1457                let current_length_offset =
1458                    i32::from(self.offsets.vmmemory_definition_current_length());
1459                (memory, base_offset, current_length_offset)
1460            }
1461        };
1462
1463        // If the memory style provides a fixed bound, we can make this a "static"
1464        // heap, which is allocated up front and never moved.
1465        let (offset_guard_size, heap_style, readonly_base) = match self.memory_styles[index] {
1466            MemoryStyle::Dynamic { offset_guard_size } => {
1467                let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
1468                    base: ptr,
1469                    offset: Offset32::new(current_length_offset),
1470                    global_type: pointer_type,
1471                    flags: ir::MemFlags::trusted(),
1472                });
1473                (
1474                    Uimm64::new(offset_guard_size),
1475                    HeapStyle::Dynamic {
1476                        bound_gv: heap_bound,
1477                    },
1478                    false,
1479                )
1480            }
1481            MemoryStyle::Static {
1482                bound,
1483                offset_guard_size,
1484            } => (
1485                Uimm64::new(offset_guard_size),
1486                HeapStyle::Static {
1487                    bound: bound.bytes().0 as u64,
1488                },
1489                true,
1490            ),
1491        };
1492
1493        let heap_base = func.create_global_value(ir::GlobalValueData::Load {
1494            base: ptr,
1495            offset: Offset32::new(base_offset),
1496            global_type: pointer_type,
1497            flags: if readonly_base {
1498                ir::MemFlags::trusted().with_readonly()
1499            } else {
1500                ir::MemFlags::trusted()
1501            },
1502        });
1503        Ok(self.heaps.push(HeapData {
1504            base: heap_base,
1505            min_size: 0,
1506            max_size: None,
1507            memory_type: None,
1508            offset_guard_size: offset_guard_size.into(),
1509            style: heap_style,
1510            index_type: I32,
1511            page_size_log2: self.target_config.page_size_align_log2,
1512        }))
1513    }
1514
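    // Globals are accessed indirectly: a pointer to the `VMGlobalDefinition` (local or
    // imported) is loaded from the vmctx and wrapped in a `GlobalVariable::Memory`, so
    // `global.get`/`global.set` become plain loads/stores at offset 0, with the IR type
    // derived from the wasm value type (reference types use the target's reference type).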
1515    fn make_global(
1516        &mut self,
1517        func: &mut ir::Function,
1518        index: GlobalIndex,
1519    ) -> WasmResult<GlobalVariable> {
1520        let pointer_type = self.pointer_type();
1521
1522        let (ptr, offset) = {
1523            let vmctx = self.vmctx(func);
1524
1525            let from_offset = if let Some(def_index) = self.module.local_global_index(index) {
1526                self.offsets.vmctx_vmglobal_definition(def_index)
1527            } else {
1528                self.offsets.vmctx_vmglobal_import_definition(index)
1529            };
1530
1531            let global = func.create_global_value(ir::GlobalValueData::Load {
1532                base: vmctx,
1533                offset: Offset32::new(i32::try_from(from_offset).unwrap()),
1534                global_type: pointer_type,
1535                flags: MemFlags::trusted(),
1536            });
1537
1538            (global, 0)
1539        };
1540
1541        Ok(GlobalVariable::Memory {
1542            gv: ptr,
1543            offset: offset.into(),
1544            ty: match self.module.globals[index].ty {
1545                WasmerType::I32 => ir::types::I32,
1546                WasmerType::I64 => ir::types::I64,
1547                WasmerType::F32 => ir::types::F32,
1548                WasmerType::F64 => ir::types::F64,
1549                WasmerType::V128 => ir::types::I8X16,
1550                WasmerType::FuncRef | WasmerType::ExternRef | WasmerType::ExceptionRef => {
1551                    self.reference_type()
1552                }
1553            },
1554        })
1555    }
1556
1557    fn make_indirect_sig(
1558        &mut self,
1559        func: &mut ir::Function,
1560        index: SignatureIndex,
1561    ) -> WasmResult<ir::SigRef> {
1562        Ok(func.import_signature(self.signatures[index].clone()))
1563    }
1564
1565    fn make_direct_func(
1566        &mut self,
1567        func: &mut ir::Function,
1568        index: FunctionIndex,
1569    ) -> WasmResult<ir::FuncRef> {
1570        let sigidx = self.module.functions[index];
1571        let signature = func.import_signature(self.signatures[sigidx].clone());
1572        let name = get_function_name(func, index);
1573
1574        Ok(func.import_function(ir::ExtFuncData {
1575            name,
1576            signature,
1577            colocated: true,
1578        }))
1579    }
1580
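    // `call_indirect` lowering, step by step:
    //
    //  1. fetch (and lazily initialize) the funcref at `callee` in the table;
    //  2. trap with `TRAP_INDIRECT_CALL_TO_NULL` if that funcref is null;
    //  3. load the raw function pointer out of the anyfunc record;
    //  4. for `TableStyle::CallerChecksSignature`, load the caller's and the callee's
    //     shared signature ids and trap with `TRAP_BAD_SIGNATURE` if they differ;
    //  5. prepend the callee's vmctx to the argument list and emit the indirect call,
    //     routed through `call_indirect_with_handlers` so an exception landing pad, if
    //     any, is attached.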
1581    fn translate_call_indirect(
1582        &mut self,
1583        builder: &mut FunctionBuilder,
1584        table_index: TableIndex,
1585        sig_index: SignatureIndex,
1586        sig_ref: ir::SigRef,
1587        callee: ir::Value,
1588        call_args: &[ir::Value],
1589        landing_pad: Option<LandingPad>,
1590    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
1591        let pointer_type = self.pointer_type();
1592
1593        // Get the anyfunc pointer (the funcref) from the table.
1594        let anyfunc_ptr = self.get_or_init_funcref_table_elem(builder, table_index, callee);
1595
1596        // Dereference the anyfunc pointer to get the function address.
1597        let mem_flags = ir::MemFlags::trusted();
1598
1599        // Trap if the funcref is null.
1600        builder
1601            .ins()
1602            .trapz(anyfunc_ptr, crate::TRAP_INDIRECT_CALL_TO_NULL);
1603
1604        let func_addr = builder.ins().load(
1605            pointer_type,
1606            mem_flags,
1607            anyfunc_ptr,
1608            i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()),
1609        );
1610
1611        // If necessary, check the signature.
1612        match self.table_styles[table_index] {
1613            TableStyle::CallerChecksSignature => {
1614                let sig_id_size = self.offsets.size_of_vmshared_signature_index();
1615                let sig_id_type = ir::Type::int(u16::from(sig_id_size) * 8).unwrap();
1616                let vmctx = self.vmctx(builder.func);
1617                let base = builder.ins().global_value(pointer_type, vmctx);
1618                let offset =
1619                    i32::try_from(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();
1620
1621                // Load the caller ID.
1622                let mut mem_flags = ir::MemFlags::trusted();
1623                mem_flags.set_readonly();
1624                let caller_sig_id = builder.ins().load(sig_id_type, mem_flags, base, offset);
1625
1626                // Load the callee ID.
1627                let mem_flags = ir::MemFlags::trusted();
1628                let callee_sig_id = builder.ins().load(
1629                    sig_id_type,
1630                    mem_flags,
1631                    anyfunc_ptr,
1632                    i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()),
1633                );
1634
1635                // Check that they match.
1636                let cmp = builder
1637                    .ins()
1638                    .icmp(IntCC::Equal, callee_sig_id, caller_sig_id);
1639                builder.ins().trapz(cmp, crate::TRAP_BAD_SIGNATURE);
1640            }
1641        }
1642
1643        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
1644
1645        // First append the callee vmctx address.
1646        let vmctx = builder.ins().load(
1647            pointer_type,
1648            mem_flags,
1649            anyfunc_ptr,
1650            i32::from(self.offsets.vmcaller_checked_anyfunc_vmctx()),
1651        );
1652        real_call_args.push(vmctx);
1653
1654        // Then append the regular call arguments.
1655        real_call_args.extend_from_slice(call_args);
1656
1657        let results = self.call_indirect_with_handlers(
1658            builder,
1659            sig_ref,
1660            func_addr,
1661            &real_call_args,
1662            Some(vmctx),
1663            landing_pad,
1664            false,
1665        );
1666        Ok(results)
1667    }
1668
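    // Direct calls come in two flavors:
    //
    //  * locally-defined callee: an ordinary colocated call, reusing the caller's vmctx
    //    as the callee vmctx;
    //  * imported callee: the body pointer and the callee's vmctx are loaded from the
    //    `VMFunctionImport` record in the vmctx and the call is made indirectly, so the
    //    generated code never has to be patched with the import's address at runtime.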
1669    fn translate_call(
1670        &mut self,
1671        builder: &mut FunctionBuilder,
1672        callee_index: FunctionIndex,
1673        callee: ir::FuncRef,
1674        call_args: &[ir::Value],
1675        landing_pad: Option<LandingPad>,
1676    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
1677        let mut real_call_args = Vec::with_capacity(call_args.len() + 2);
1678
1679        // Handle direct calls to locally-defined functions.
1680        if !self.module.is_imported_function(callee_index) {
1681            // Fetch the caller's vmctx from the function's special VMContext parameter.
1682            let caller_vmctx = builder
1683                .func
1684                .special_param(ArgumentPurpose::VMContext)
1685                .unwrap();
1686            // First append the callee vmctx address, which is the same as the caller vmctx in
1687            // this case.
1688            real_call_args.push(caller_vmctx);
1689
1690            // Then append the regular call arguments.
1691            real_call_args.extend_from_slice(call_args);
1692
1693            let results = self.call_with_handlers(
1694                builder,
1695                callee,
1696                &real_call_args,
1697                Some(caller_vmctx),
1698                landing_pad,
1699                false,
1700            );
1701            return Ok(results);
1702        }
1703
1704        // Handle direct calls to imported functions. We use an indirect call
1705        // so that we don't have to patch the code at runtime.
1706        let pointer_type = self.pointer_type();
1707        let sig_ref = builder.func.dfg.ext_funcs[callee].signature;
1708        let vmctx = self.vmctx(builder.func);
1709        let base = builder.ins().global_value(pointer_type, vmctx);
1710
1711        let mem_flags = ir::MemFlags::trusted();
1712
1713        // Load the callee address.
1714        let body_offset =
1715            i32::try_from(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
1716        let func_addr = builder
1717            .ins()
1718            .load(pointer_type, mem_flags, base, body_offset);
1719
1720        // First append the callee vmctx address.
1721        let vmctx_offset =
1722            i32::try_from(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
1723        let vmctx = builder
1724            .ins()
1725            .load(pointer_type, mem_flags, base, vmctx_offset);
1726        real_call_args.push(vmctx);
1727
1728        // Then append the regular call arguments.
1729        real_call_args.extend_from_slice(call_args);
1730
1731        let results = self.call_indirect_with_handlers(
1732            builder,
1733            sig_ref,
1734            func_addr,
1735            &real_call_args,
1736            Some(vmctx),
1737            landing_pad,
1738            false,
1739        );
1740        Ok(results)
1741    }
1742
1743    fn tag_param_arity(&self, tag_index: TagIndex) -> usize {
1744        let sig_index = self.module.tags[tag_index];
1745        let signature = &self.module.signatures[sig_index];
1746        signature.params().len()
1747    }
1748
1749    fn translate_exn_pointer_to_ref(
1750        &mut self,
1751        builder: &mut FunctionBuilder,
1752        exn_ptr: ir::Value,
1753    ) -> ir::Value {
1754        let (read_sig, read_idx) = self.get_read_exception_func(builder.func);
1755        let mut pos = builder.cursor();
1756        let (_, read_addr) = self.translate_load_builtin_function_address(&mut pos, read_idx);
1757        let read_call = builder.ins().call_indirect(read_sig, read_addr, &[exn_ptr]);
1758        builder.inst_results(read_call)[0]
1759    }
1760
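    // Unboxing an exception goes through a builtin that maps the exnref to its payload
    // buffer; each field is then loaded at the offset recorded in the tag's
    // `ExceptionTypeLayout`, in the same order as the tag signature's parameters.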
1761    fn translate_exn_unbox(
1762        &mut self,
1763        builder: &mut FunctionBuilder,
1764        tag_index: TagIndex,
1765        exnref: ir::Value,
1766    ) -> WasmResult<SmallVec<[ir::Value; 4]>> {
1767        let layout = self.exception_type_layout(tag_index)?.clone();
1768
1769        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
1770        let mut pos = builder.cursor();
1771        let (vmctx, read_exnref_addr) =
1772            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
1773        let read_exnref_call =
1774            builder
1775                .ins()
1776                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
1777        let payload_ptr = builder.inst_results(read_exnref_call)[0];
1778
1779        let mut values = SmallVec::<[ir::Value; 4]>::with_capacity(layout.fields.len());
1780        let data_flags = ir::MemFlags::trusted();
1781        for field in &layout.fields {
1782            let value = builder.ins().load(
1783                field.ty,
1784                data_flags,
1785                payload_ptr,
1786                Offset32::new(field.offset as i32),
1787            );
1788            values.push(value);
1789        }
1790
1791        Ok(values)
1792    }
1793
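    // `throw` is a three-step sequence, all through runtime builtins:
    //
    //  1. allocate an exception object for the tag via the alloc-exception builtin,
    //     yielding an exnref;
    //  2. fetch the exception's payload pointer via the read-exnref builtin and store
    //     each operand at its layout offset, after checking the operand count against
    //     the tag's arity;
    //  3. invoke the throw builtin, which unwinds. The call is emitted through
    //     `call_indirect_with_handlers` so an enclosing try block's landing pad can
    //     catch the exception.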
1794    fn translate_exn_throw(
1795        &mut self,
1796        builder: &mut FunctionBuilder,
1797        tag_index: TagIndex,
1798        args: &[ir::Value],
1799        landing_pad: Option<LandingPad>,
1800    ) -> WasmResult<()> {
1801        let layout = self.exception_type_layout(tag_index)?.clone();
1802        if layout.fields.len() != args.len() {
1803            return Err(WasmError::Generic(format!(
1804                "exception payload arity mismatch: expected {}, got {}",
1805                layout.fields.len(),
1806                args.len()
1807            )));
1808        }
1809
1810        let (alloc_sig, alloc_idx) = self.get_alloc_exception_func(builder.func);
1811        let mut pos = builder.cursor();
1812        let (vmctx, alloc_addr) = self.translate_load_builtin_function_address(&mut pos, alloc_idx);
1813        let tag_value = builder
1814            .ins()
1815            .iconst(TAG_TYPE, i64::from(tag_index.as_u32()));
1816        let alloc_call = builder
1817            .ins()
1818            .call_indirect(alloc_sig, alloc_addr, &[vmctx, tag_value]);
1819        let exnref = builder.inst_results(alloc_call)[0];
1820
1821        let (read_exnref_sig, read_exnref_idx) = self.get_read_exnref_func(builder.func);
1822        let mut pos = builder.cursor();
1823        let (vmctx, read_exnref_addr) =
1824            self.translate_load_builtin_function_address(&mut pos, read_exnref_idx);
1825        let read_exnref_call =
1826            builder
1827                .ins()
1828                .call_indirect(read_exnref_sig, read_exnref_addr, &[vmctx, exnref]);
1829        let payload_ptr = builder.inst_results(read_exnref_call)[0];
1830
1831        let store_flags = ir::MemFlags::trusted();
1832        for (field, value) in layout.fields.iter().zip(args.iter()) {
1833            debug_assert_eq!(
1834                builder.func.dfg.value_type(*value),
1835                field.ty,
1836                "exception payload type mismatch"
1837            );
1838            builder.ins().store(
1839                store_flags,
1840                *value,
1841                payload_ptr,
1842                Offset32::new(field.offset as i32),
1843            );
1844        }
1845
1846        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
1847        let mut pos = builder.cursor();
1848        let (vmctx_value, throw_addr) =
1849            self.translate_load_builtin_function_address(&mut pos, throw_idx);
1850        let call_args = [vmctx_value, exnref];
1851
1852        let _ = self.call_indirect_with_handlers(
1853            builder,
1854            throw_sig,
1855            throw_addr,
1856            &call_args,
1857            Some(vmctx_value),
1858            landing_pad,
1859            true,
1860        );
1861
1862        Ok(())
1863    }
1864
1865    fn translate_exn_throw_ref(
1866        &mut self,
1867        builder: &mut FunctionBuilder,
1868        exnref: ir::Value,
1869        landing_pad: Option<LandingPad>,
1870    ) -> WasmResult<()> {
1871        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
1872        let mut pos = builder.cursor();
1873        let (vmctx_value, throw_addr) =
1874            self.translate_load_builtin_function_address(&mut pos, throw_idx);
1875        let call_args = [vmctx_value, exnref];
1876
1877        let _ = self.call_indirect_with_handlers(
1878            builder,
1879            throw_sig,
1880            throw_addr,
1881            &call_args,
1882            Some(vmctx_value),
1883            landing_pad,
1884            true,
1885        );
1886
1887        Ok(())
1888    }
1889
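    // The personality selector is computed by a runtime builtin; the exception pointer
    // is bitcast to the native pointer type first if its IR type differs.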
1890    fn translate_exn_personality_selector(
1891        &mut self,
1892        builder: &mut FunctionBuilder,
1893        exn_ptr: ir::Value,
1894    ) -> WasmResult<ir::Value> {
1895        let (sig, idx) = self.get_personality2_func(builder.func);
1896        let pointer_type = self.pointer_type();
1897        let exn_ty = builder.func.dfg.value_type(exn_ptr);
1898        let exn_arg = if exn_ty == pointer_type {
1899            exn_ptr
1900        } else {
1901            let mut flags = MemFlags::new();
1902            flags.set_endianness(Endianness::Little);
1903            builder.ins().bitcast(pointer_type, flags, exn_ptr)
1904        };
1905
1906        let mut pos = builder.cursor();
1907        let (vmctx_value, func_addr) = self.translate_load_builtin_function_address(&mut pos, idx);
1908        let call = builder
1909            .ins()
1910            .call_indirect(sig, func_addr, &[vmctx_value, exn_arg]);
1911        Ok(builder.inst_results(call)[0])
1912    }
1913
1914    fn translate_exn_reraise_unmatched(
1915        &mut self,
1916        builder: &mut FunctionBuilder,
1917        exnref: ir::Value,
1918    ) -> WasmResult<()> {
1919        let (throw_sig, throw_idx) = self.get_throw_func(builder.func);
1920        let mut pos = builder.cursor();
1921        let (vmctx_value, throw_addr) =
1922            self.translate_load_builtin_function_address(&mut pos, throw_idx);
1923        builder
1924            .ins()
1925            .call_indirect(throw_sig, throw_addr, &[vmctx_value, exnref]);
1926        builder.ins().trap(crate::TRAP_UNREACHABLE);
1927        Ok(())
1928    }
1929
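    // `memory.grow` and `memory.size` also go through runtime builtins: the memory
    // index is materialized as an i32 constant, the builtin address is loaded from the
    // vmctx, and the single result of the indirect call is returned. Roughly, for grow:
    //
    //     mem_idx = iconst.i32 <memory_index>
    //     result  = call_indirect sig, <builtin addr>(vmctx, delta, mem_idx)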
1930    fn translate_memory_grow(
1931        &mut self,
1932        mut pos: FuncCursor<'_>,
1933        index: MemoryIndex,
1934        _heap: Heap,
1935        val: ir::Value,
1936    ) -> WasmResult<ir::Value> {
1937        let (func_sig, index_arg, func_idx) = self.get_memory_grow_func(pos.func, index);
1938        let memory_index = pos.ins().iconst(I32, index_arg as i64);
1939        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1940        let call_inst = pos
1941            .ins()
1942            .call_indirect(func_sig, func_addr, &[vmctx, val, memory_index]);
1943        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1944    }
1945
1946    fn translate_memory_size(
1947        &mut self,
1948        mut pos: FuncCursor<'_>,
1949        index: MemoryIndex,
1950        _heap: Heap,
1951    ) -> WasmResult<ir::Value> {
1952        let (func_sig, index_arg, func_idx) = self.get_memory_size_func(pos.func, index);
1953        let memory_index = pos.ins().iconst(I32, index_arg as i64);
1954        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1955        let call_inst = pos
1956            .ins()
1957            .call_indirect(func_sig, func_addr, &[vmctx, memory_index]);
1958        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
1959    }
1960
1961    fn translate_memory_copy(
1962        &mut self,
1963        mut pos: FuncCursor,
1964        src_index: MemoryIndex,
1965        _src_heap: Heap,
1966        _dst_index: MemoryIndex,
1967        _dst_heap: Heap,
1968        dst: ir::Value,
1969        src: ir::Value,
1970        len: ir::Value,
1971    ) -> WasmResult<()> {
1972        let (func_sig, src_index, func_idx) = self.get_memory_copy_func(pos.func, src_index);
1973
1974        let src_index_arg = pos.ins().iconst(I32, src_index as i64);
1975
1976        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1977
1978        pos.ins()
1979            .call_indirect(func_sig, func_addr, &[vmctx, src_index_arg, dst, src, len]);
1980
1981        Ok(())
1982    }
1983
1984    fn translate_memory_fill(
1985        &mut self,
1986        mut pos: FuncCursor,
1987        memory_index: MemoryIndex,
1988        _heap: Heap,
1989        dst: ir::Value,
1990        val: ir::Value,
1991        len: ir::Value,
1992    ) -> WasmResult<()> {
1993        let (func_sig, memory_index, func_idx) = self.get_memory_fill_func(pos.func, memory_index);
1994
1995        let memory_index_arg = pos.ins().iconst(I32, memory_index as i64);
1996
1997        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
1998
1999        pos.ins().call_indirect(
2000            func_sig,
2001            func_addr,
2002            &[vmctx, memory_index_arg, dst, val, len],
2003        );
2004
2005        Ok(())
2006    }
2007
2008    fn translate_memory_init(
2009        &mut self,
2010        mut pos: FuncCursor,
2011        memory_index: MemoryIndex,
2012        _heap: Heap,
2013        seg_index: u32,
2014        dst: ir::Value,
2015        src: ir::Value,
2016        len: ir::Value,
2017    ) -> WasmResult<()> {
2018        let (func_sig, func_idx) = self.get_memory_init_func(pos.func);
2019
2020        let memory_index_arg = pos.ins().iconst(I32, memory_index.index() as i64);
2021        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
2022
2023        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2024
2025        pos.ins().call_indirect(
2026            func_sig,
2027            func_addr,
2028            &[vmctx, memory_index_arg, seg_index_arg, dst, src, len],
2029        );
2030
2031        Ok(())
2032    }
2033
2034    fn translate_data_drop(&mut self, mut pos: FuncCursor, seg_index: u32) -> WasmResult<()> {
2035        let (func_sig, func_idx) = self.get_data_drop_func(pos.func);
2036        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
2037        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2038        pos.ins()
2039            .call_indirect(func_sig, func_addr, &[vmctx, seg_index_arg]);
2040        Ok(())
2041    }
2042
2043    fn translate_table_size(
2044        &mut self,
2045        mut pos: FuncCursor,
2046        table_index: TableIndex,
2047    ) -> WasmResult<ir::Value> {
2048        self.ensure_table_exists(pos.func, table_index);
2049        let (func_sig, index_arg, func_idx) = self.get_table_size_func(pos.func, table_index);
2050        let table_index = pos.ins().iconst(I32, index_arg as i64);
2051        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2052        let call_inst = pos
2053            .ins()
2054            .call_indirect(func_sig, func_addr, &[vmctx, table_index]);
2055        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
2056    }
2057
2058    fn translate_table_copy(
2059        &mut self,
2060        mut pos: FuncCursor,
2061        dst_table_index: TableIndex,
2062        src_table_index: TableIndex,
2063        dst: ir::Value,
2064        src: ir::Value,
2065        len: ir::Value,
2066    ) -> WasmResult<()> {
2067        self.ensure_table_exists(pos.func, src_table_index);
2068        self.ensure_table_exists(pos.func, dst_table_index);
2069        let (func_sig, dst_table_index_arg, src_table_index_arg, func_idx) =
2070            self.get_table_copy_func(pos.func, dst_table_index, src_table_index);
2071
2072        let dst_table_index_arg = pos.ins().iconst(I32, dst_table_index_arg as i64);
2073        let src_table_index_arg = pos.ins().iconst(I32, src_table_index_arg as i64);
2074
2075        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2076
2077        pos.ins().call_indirect(
2078            func_sig,
2079            func_addr,
2080            &[
2081                vmctx,
2082                dst_table_index_arg,
2083                src_table_index_arg,
2084                dst,
2085                src,
2086                len,
2087            ],
2088        );
2089
2090        Ok(())
2091    }
2092
2093    fn translate_table_init(
2094        &mut self,
2095        mut pos: FuncCursor,
2096        seg_index: u32,
2097        table_index: TableIndex,
2098        dst: ir::Value,
2099        src: ir::Value,
2100        len: ir::Value,
2101    ) -> WasmResult<()> {
2102        self.ensure_table_exists(pos.func, table_index);
2103        let (func_sig, table_index_arg, func_idx) = self.get_table_init_func(pos.func, table_index);
2104
2105        let table_index_arg = pos.ins().iconst(I32, table_index_arg as i64);
2106        let seg_index_arg = pos.ins().iconst(I32, seg_index as i64);
2107
2108        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2109
2110        pos.ins().call_indirect(
2111            func_sig,
2112            func_addr,
2113            &[vmctx, table_index_arg, seg_index_arg, dst, src, len],
2114        );
2115
2116        Ok(())
2117    }
2118
2119    fn translate_elem_drop(&mut self, mut pos: FuncCursor, elem_index: u32) -> WasmResult<()> {
2120        let (func_sig, func_idx) = self.get_elem_drop_func(pos.func);
2121
2122        let elem_index_arg = pos.ins().iconst(I32, elem_index as i64);
2123
2124        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2125
2126        pos.ins()
2127            .call_indirect(func_sig, func_addr, &[vmctx, elem_index_arg]);
2128
2129        Ok(())
2130    }
2131
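    // `memory.atomic.wait32` and `wait64` share one translation: the width of the
    // `expected` operand (i32 vs i64) selects which builtin to call; the builtin
    // receives the memory index, address, expected value and timeout and returns the
    // wait outcome.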
2132    fn translate_atomic_wait(
2133        &mut self,
2134        mut pos: FuncCursor,
2135        index: MemoryIndex,
2136        _heap: Heap,
2137        addr: ir::Value,
2138        expected: ir::Value,
2139        timeout: ir::Value,
2140    ) -> WasmResult<ir::Value> {
2141        let (func_sig, index_arg, func_idx) = if pos.func.dfg.value_type(expected) == I64 {
2142            self.get_memory_atomic_wait64_func(pos.func, index)
2143        } else {
2144            self.get_memory_atomic_wait32_func(pos.func, index)
2145        };
2146        let memory_index = pos.ins().iconst(I32, index_arg as i64);
2147        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2148        let call_inst = pos.ins().call_indirect(
2149            func_sig,
2150            func_addr,
2151            &[vmctx, memory_index, addr, expected, timeout],
2152        );
2153        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
2154    }
2155
2156    fn translate_atomic_notify(
2157        &mut self,
2158        mut pos: FuncCursor,
2159        index: MemoryIndex,
2160        _heap: Heap,
2161        addr: ir::Value,
2162        count: ir::Value,
2163    ) -> WasmResult<ir::Value> {
2164        let (func_sig, index_arg, func_idx) = self.get_memory_atomic_notify_func(pos.func, index);
2165        let memory_index = pos.ins().iconst(I32, index_arg as i64);
2166        let (vmctx, func_addr) = self.translate_load_builtin_function_address(&mut pos, func_idx);
2167        let call_inst =
2168            pos.ins()
2169                .call_indirect(func_sig, func_addr, &[vmctx, memory_index, addr, count]);
2170        Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap())
2171    }
2172
2173    fn get_global_type(&self, global_index: GlobalIndex) -> Option<WasmerType> {
2174        Some(self.module.globals.get(global_index)?.ty)
2175    }
2176
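    // `type_stack` mirrors the function's local index space: parameters are pushed
    // first (via `push_params_on_stack`), then each declared local, so
    // `get_local_type(i)` can answer queries by wasm local index.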
2177    fn push_local_decl_on_stack(&mut self, ty: WasmerType) {
2178        self.type_stack.push(ty);
2179    }
2180
2181    fn push_params_on_stack(&mut self, function_index: LocalFunctionIndex) {
2182        let func_index = self.module.func_index(function_index);
2183        let sig_idx = self.module.functions[func_index];
2184        let signature = &self.module.signatures[sig_idx];
2185        for param in signature.params() {
2186            self.type_stack.push(*param);
2187        }
2188    }
2189
2190    fn get_local_type(&self, local_index: u32) -> Option<WasmerType> {
2191        self.type_stack.get(local_index as usize).cloned()
2192    }
2193
2194    fn get_local_types(&self) -> &[WasmerType] {
2195        &self.type_stack
2196    }
2197
2198    fn get_function_type(&self, function_index: FunctionIndex) -> Option<&FunctionType> {
2199        let sig_idx = self.module.functions.get(function_index)?;
2200        Some(&self.module.signatures[*sig_idx])
2201    }
2202
2203    fn get_function_sig(&self, sig_index: SignatureIndex) -> Option<&FunctionType> {
2204        self.module.signatures.get(sig_index)
2205    }
2206
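    // Spectre-hardened bounds checks and proof-carrying-code metadata are both left
    // disabled for this environment; heap accesses rely on the offset-guard and bound
    // information carried by the `HeapData` entries built in `make_heap`.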
2207    fn heap_access_spectre_mitigation(&self) -> bool {
2208        false
2209    }
2210
2211    fn proof_carrying_code(&self) -> bool {
2212        false
2213    }
2214
2215    fn heaps(&self) -> &PrimaryMap<Heap, HeapData> {
2216        &self.heaps
2217    }
2218}