// wasmer_vm/instance/mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `VMInstance` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated.
8
9mod allocator;
10
11use crate::LinearMemory;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
18    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
19    VMMemoryImport, VMSharedTagIndex, VMSignatureHash, VMTableDefinition, VMTableImport,
20    VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
23use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
24use crate::{export::VMExtern, threadconditions::ExpectedValue};
25pub use allocator::InstanceAllocator;
26use itertools::Itertools;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
39use wasmer_types::{
40    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41    InitExpr, InitExprOp, LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex,
42    MemoryError, MemoryIndex, ModuleInfo, Pages, RawValue, SignatureIndex, TableIndex, TagIndex,
43    VMOffsets,
44};
45
/// A WebAssembly instance.
///
/// The type is dynamically-sized. Indeed, the `vmctx` field can
/// contain various data. That's why the type has a C representation
/// to ensure that the `vmctx` field is last. See the documentation of
/// the `vmctx` field to learn more.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    /// NOTE(review): this raw pointer is dereferenced in `context()`/
    /// `context_mut()`; the owning store must outlive the instance — confirm.
    context: *mut StoreObjects,

    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// WebAssembly tag data. Notably, this stores *all* tags, not just local ones.
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed. A missing entry is considered equivalent to an empty slice.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This
    /// field is last, and represents a dynamically-sized array that
    /// extends beyond the nominal end of the struct (similar to a
    /// flexible array member). Must stay the final field (enforced by
    /// `#[repr(C)]`): `vmctx_plus_offset` indexes past it.
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107        formatter.debug_struct("Instance").finish()
108    }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    /// `offset` must lie within the `vmctx` trailing region and the resulting
    /// pointer must be properly aligned for `T` before it is dereferenced.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }
122
    /// Return the shared `ModuleInfo` this instance was instantiated from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }
126
    /// Return a plain reference to the `ModuleInfo` (derefs through the `Arc`).
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }
130
    /// Shared access to the store objects owning this instance.
    // SAFETY assumption: `self.context` stays valid for the instance's
    // lifetime — NOTE(review): upheld by the owning store; confirm.
    pub(crate) fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }
134
    /// Exclusive access to the store objects owning this instance.
    // SAFETY assumption: `self.context` stays valid and uniquely borrowed
    // here — NOTE(review): upheld by the owning store; confirm.
    pub(crate) fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }
138
    /// Offsets in the `vmctx` region.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
143
144    /// Return the indexed `VMFunctionImport`.
145    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
146        let index = usize::try_from(index.as_u32()).unwrap();
147        unsafe { &*self.imported_functions_ptr().add(index) }
148    }
149
    /// Return a pointer to the `VMFunctionImport`s array inside `vmctx`.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }
154
155    /// Return the index `VMTableImport`.
156    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
157        let index = usize::try_from(index.as_u32()).unwrap();
158        unsafe { &*self.imported_tables_ptr().add(index) }
159    }
160
    /// Return a pointer to the `VMTableImport`s array inside `vmctx`.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }
165
166    /// Return the indexed `VMMemoryImport`.
167    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
168        let index = usize::try_from(index.as_u32()).unwrap();
169        unsafe { &*self.imported_memories_ptr().add(index) }
170    }
171
    /// Return a pointer to the `VMMemoryImport`s array inside `vmctx`.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }
176
177    /// Return the indexed `VMGlobalImport`.
178    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
179        let index = usize::try_from(index.as_u32()).unwrap();
180        unsafe { &*self.imported_globals_ptr().add(index) }
181    }
182
    /// Return a pointer to the `VMGlobalImport`s array inside `vmctx`.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }
187
188    /// Return the indexed `VMSharedTagIndex`.
189    #[cfg_attr(target_os = "windows", allow(dead_code))]
190    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
191        let index = usize::try_from(index.as_u32()).unwrap();
192        unsafe { &*self.shared_tags_ptr().add(index) }
193    }
194
    /// Return a pointer to the `VMSharedTagIndex`es array inside `vmctx`.
    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }
199
200    /// Return the indexed `VMTableDefinition`.
201    #[allow(dead_code)]
202    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
203        unsafe { *self.table_ptr(index).as_ref() }
204    }
205
206    #[allow(dead_code)]
207    /// Updates the value for a defined table to `VMTableDefinition`.
208    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
209        unsafe {
210            *self.table_ptr(index).as_ptr() = *table;
211        }
212    }
213
214    /// Return the indexed `VMTableDefinition`.
215    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
216        let index = usize::try_from(index.as_u32()).unwrap();
217        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
218    }
219
    /// Return a pointer to the `VMTableDefinition`s array inside `vmctx`.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
224
225    fn fixed_funcref_table_ptr(
226        &self,
227        index: LocalTableIndex,
228    ) -> Option<NonNull<VMCallerCheckedAnyfunc>> {
229        let offset = self.offsets.vmctx_fixed_funcref_table_anyfuncs(index)?;
230        Some(NonNull::new(unsafe { self.vmctx_plus_offset(offset) }).unwrap())
231    }
232
233    fn sync_fixed_funcref_table_element(
234        &self,
235        table_index: LocalTableIndex,
236        index: u32,
237        funcref: Option<VMFuncRef>,
238    ) {
239        let Some(base) = self.fixed_funcref_table_ptr(table_index) else {
240            return;
241        };
242        unsafe {
243            *base.as_ptr().add(index as usize) = anyfunc_from_funcref(funcref);
244        }
245    }
246
    /// Re-copy the entire table into its fixed funcref shadow array, if this
    /// table has one; a no-op otherwise.
    fn sync_fixed_funcref_table(&self, table_index: LocalTableIndex) {
        let Some(base) = self.fixed_funcref_table_ptr(table_index) else {
            return;
        };
        let table = self.tables[table_index].get(self.context());
        for index in 0..table.size() {
            // Invariant: a table that has a fixed funcref shadow only ever
            // holds funcrefs, so `get` within bounds cannot yield externrefs.
            let TableElement::FuncRef(funcref) = table.get(index).unwrap() else {
                unreachable!("fixed funcref tables cannot contain externrefs");
            };
            unsafe {
                *base.as_ptr().add(index as usize) = anyfunc_from_funcref(funcref);
            }
        }
    }
261
262    fn sync_fixed_funcref_table_by_index(&self, table_index: TableIndex) {
263        if let Some(local_table_index) = self.module.local_table_index(table_index) {
264            self.sync_fixed_funcref_table(local_table_index);
265        }
266    }
267
268    #[allow(dead_code)]
269    /// Get a locally defined or imported memory.
270    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
271        if let Some(local_index) = self.module.local_memory_index(index) {
272            self.memory(local_index)
273        } else {
274            let import = self.imported_memory(index);
275            unsafe { *import.definition.as_ref() }
276        }
277    }
278
279    /// Return the indexed `VMMemoryDefinition`.
280    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
281        unsafe { *self.memory_ptr(index).as_ref() }
282    }
283
284    #[allow(dead_code)]
285    /// Set the indexed memory to `VMMemoryDefinition`.
286    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
287        unsafe {
288            *self.memory_ptr(index).as_ptr() = *mem;
289        }
290    }
291
292    /// Return the indexed `VMMemoryDefinition`.
293    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
294        let index = usize::try_from(index.as_u32()).unwrap();
295        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
296    }
297
    /// Return a pointer to the `VMMemoryDefinition`s array inside `vmctx`.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
302
    /// Get a locally defined or imported memory.
    // SAFETY assumption: `self.context` is non-null and valid here
    // (`as_ref().unwrap()` panics on null) — NOTE(review): confirm the
    // store keeps it alive for the instance's lifetime.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }
317
    /// Get a locally defined or imported memory, mutably.
    // SAFETY assumption: `self.context` is non-null, valid, and not aliased
    // while this exclusive borrow lives — NOTE(review): confirm.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }
332
    /// Get a locally defined memory as mutable.
    // SAFETY assumption: same `self.context` validity requirements as
    // `get_vmmemory_mut` — NOTE(review): confirm.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
342
343    /// Return the indexed `VMGlobalDefinition`.
344    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
345        unsafe { self.global_ptr(index).as_ref().clone() }
346    }
347
348    /// Set the indexed global to `VMGlobalDefinition`.
349    #[allow(dead_code)]
350    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
351        unsafe {
352            *self.global_ptr(index).as_ptr() = global.clone();
353        }
354    }
355
356    /// Return the indexed `VMGlobalDefinition`.
357    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
358        let index = usize::try_from(index.as_u32()).unwrap();
359        NonNull::new(unsafe { self.globals_ptr().add(index) }).unwrap()
360    }
361
    /// Return a pointer to the `VMGlobalDefinition`s array inside `vmctx`.
    fn globals_ptr(&self) -> *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }
366
    /// Return a pointer to the `VMBuiltinFunctionsArray` inside `vmctx`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
371
    /// Return a reference to the vmctx used by compiled wasm code.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }
376
377    /// Return a raw pointer to the vmctx used by compiled wasm code.
378    fn vmctx_ptr(&self) -> *mut VMContext {
379        self.vmctx() as *const VMContext as *mut VMContext
380    }
381
    /// Invoke the WebAssembly start function of the instance, if one is present.
    ///
    /// Returns `Ok(())` immediately when the module declares no start function.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: a function defined in this module uses our own
        // vmctx, while an imported one carries its body/context in the import.
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        // A start function takes no parameters and returns no results, so an
        // empty values buffer suffices.
        let mut values_vec = vec![];

        unsafe {
            // Even though we already know the type of the function we need to call, in certain
            // specific cases the trampoline prepares callee arguments for specific optimizations,
            // such as passing g0 and m0_base_ptr as parameters.
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec.as_mut_ptr(),
            )
        }
    }
432
    /// Return the offset from the vmctx pointer to its containing `Instance`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
438
439    /// Return the table index for the given `VMTableDefinition`.
440    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
441        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
442        let end: *const VMTableDefinition = table;
443        // TODO: Use `offset_from` once it stablizes.
444        let index = LocalTableIndex::new(
445            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
446        );
447        assert_lt!(index.index(), self.tables.len());
448        index
449    }
450
451    /// Return the memory index for the given `VMMemoryDefinition`.
452    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
453        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
454        let end: *const VMMemoryDefinition = memory;
455        // TODO: Use `offset_from` once it stablizes.
456        let index = LocalMemoryIndex::new(
457            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
458        );
459        assert_lt!(index.index(), self.memories.len());
460        index
461    }
462
463    /// Grow memory by the specified amount of pages.
464    ///
465    /// Returns `None` if memory can't be grown by the specified amount
466    /// of pages.
467    pub(crate) fn memory_grow<IntoPages>(
468        &mut self,
469        memory_index: LocalMemoryIndex,
470        delta: IntoPages,
471    ) -> Result<Pages, MemoryError>
472    where
473        IntoPages: Into<Pages>,
474    {
475        let mem = *self
476            .memories
477            .get(memory_index)
478            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
479        mem.get_mut(self.context_mut()).grow(delta.into())
480    }
481
482    /// Grow imported memory by the specified amount of pages.
483    ///
484    /// Returns `None` if memory can't be grown by the specified amount
485    /// of pages.
486    ///
487    /// # Safety
488    /// This and `imported_memory_size` are currently unsafe because they
489    /// dereference the memory import's pointers.
490    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
491        &mut self,
492        memory_index: MemoryIndex,
493        delta: IntoPages,
494    ) -> Result<Pages, MemoryError>
495    where
496        IntoPages: Into<Pages>,
497    {
498        let import = self.imported_memory(memory_index);
499        let mem = import.handle;
500        mem.get_mut(self.context_mut()).grow(delta.into())
501    }
502
503    /// Returns the number of allocated wasm pages.
504    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
505        let mem = *self
506            .memories
507            .get(memory_index)
508            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
509        mem.get(self.context()).size()
510    }
511
512    /// Returns the number of allocated wasm pages in an imported memory.
513    ///
514    /// # Safety
515    /// This and `imported_memory_grow` are currently unsafe because they
516    /// dereference the memory import's pointers.
517    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
518        let import = self.imported_memory(memory_index);
519        let mem = import.handle;
520        mem.get(self.context()).size()
521    }
522
523    /// Returns the number of elements in a given table.
524    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
525        let table = self
526            .tables
527            .get(table_index)
528            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
529        table.get(self.context()).size()
530    }
531
532    /// Returns the number of elements in a given imported table.
533    ///
534    /// # Safety
535    /// `table_index` must be a valid, imported table index.
536    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
537        let import = self.imported_table(table_index);
538        let table = import.handle;
539        table.get(self.context()).size()
540    }
541
542    /// Grow table by the specified amount of elements.
543    ///
544    /// Returns `None` if table can't be grown by the specified amount
545    /// of elements.
546    pub(crate) fn table_grow(
547        &mut self,
548        table_index: LocalTableIndex,
549        delta: u32,
550        init_value: TableElement,
551    ) -> Option<u32> {
552        let table = *self
553            .tables
554            .get(table_index)
555            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
556        table.get_mut(self.context_mut()).grow(delta, init_value)
557    }
558
559    /// Grow table by the specified amount of elements.
560    ///
561    /// # Safety
562    /// `table_index` must be a valid, imported table index.
563    pub(crate) unsafe fn imported_table_grow(
564        &mut self,
565        table_index: TableIndex,
566        delta: u32,
567        init_value: TableElement,
568    ) -> Option<u32> {
569        let import = self.imported_table(table_index);
570        let table = import.handle;
571        table.get_mut(self.context_mut()).grow(delta, init_value)
572    }
573
574    /// Get table element by index.
575    pub(crate) fn table_get(
576        &self,
577        table_index: LocalTableIndex,
578        index: u32,
579    ) -> Option<TableElement> {
580        let table = self
581            .tables
582            .get(table_index)
583            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
584        table.get(self.context()).get(index)
585    }
586
587    /// Returns the element at the given index.
588    ///
589    /// # Safety
590    /// `table_index` must be a valid, imported table index.
591    pub(crate) unsafe fn imported_table_get(
592        &self,
593        table_index: TableIndex,
594        index: u32,
595    ) -> Option<TableElement> {
596        let import = self.imported_table(table_index);
597        let table = import.handle;
598        table.get(self.context()).get(index)
599    }
600
601    /// Set table element by index.
602    pub(crate) fn table_set(
603        &mut self,
604        table_index: LocalTableIndex,
605        index: u32,
606        val: TableElement,
607    ) -> Result<(), Trap> {
608        let table = *self
609            .tables
610            .get(table_index)
611            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
612        let funcref = match &val {
613            TableElement::FuncRef(funcref) => Some(*funcref),
614            TableElement::ExternRef(_) => None,
615        };
616        table.get_mut(self.context_mut()).set(index, val)?;
617        if let Some(funcref) = funcref {
618            self.sync_fixed_funcref_table_element(table_index, index, funcref);
619        }
620        Ok(())
621    }
622
623    /// Set table element by index for an imported table.
624    ///
625    /// # Safety
626    /// `table_index` must be a valid, imported table index.
627    pub(crate) unsafe fn imported_table_set(
628        &mut self,
629        table_index: TableIndex,
630        index: u32,
631        val: TableElement,
632    ) -> Result<(), Trap> {
633        let import = self.imported_table(table_index);
634        let table = import.handle;
635        table.get_mut(self.context_mut()).set(index, val)
636    }
637
638    /// Get a `VMFuncRef` for the given `FunctionIndex`.
639    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
640        if function_index == FunctionIndex::reserved_value() {
641            None
642        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
643            Some(VMFuncRef(NonNull::from(
644                &self.funcrefs[local_function_index],
645            )))
646        } else {
647            Some(VMFuncRef(self.imported_funcrefs[function_index]))
648        }
649    }
650
    /// The `table.init` operation: initializes a portion of a table with a
    /// passive element.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table_handle(table_index);
        let table = unsafe { table.get_mut(&mut *self.context) };
        // A dropped (or never-passive) element behaves like an empty slice.
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Bounds check up front; `checked_add` also rejects index+len overflow.
        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        // Mirror the new contents into the fixed funcref shadow table, if any.
        self.sync_fixed_funcref_table_by_index(table_index);

        Ok(())
    }
691
    /// The `table.fill` operation: fills a portion of a table with a given value.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-fill

        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        // Bounds check up front; `checked_add` also rejects index+len overflow.
        if start_index
            .checked_add(len)
            .is_none_or(|n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        // Mirror the new contents into the fixed funcref shadow table, if any.
        self.sync_fixed_funcref_table_by_index(table_index);

        Ok(())
    }
726
    /// The `table.copy` operation.
    ///
    /// Distinguishes three aliasing cases: same table index, distinct indices
    /// that resolve to the same underlying table (e.g. via imports), and two
    /// genuinely different tables.
    pub(crate) fn table_copy(
        &mut self,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let result = if dst_table_index == src_table_index {
            let table = self.get_table(dst_table_index);
            table.copy_within(dst, src, len)
        } else {
            let dst_table = self.get_table_handle(dst_table_index);
            let src_table = self.get_table_handle(src_table_index);
            if dst_table == src_table {
                // Same underlying table reached through two indices: must use
                // the overlap-safe `copy_within`.
                unsafe {
                    dst_table
                        .get_mut(&mut *self.context)
                        .copy_within(dst, src, len)
                }
            } else {
                unsafe {
                    dst_table.get_mut(&mut *self.context).copy(
                        src_table.get(&*self.context),
                        dst,
                        src,
                        len,
                    )
                }
            }
        };
        result?;
        // Mirror the new contents into the fixed funcref shadow table, if any.
        self.sync_fixed_funcref_table_by_index(dst_table_index);

        Ok(())
    }
764
765    /// Drop an element.
766    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
767        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
768
769        let mut passive_elements = self.passive_elements.borrow_mut();
770        passive_elements.remove(&elem_index);
771        // Note that we don't check that we actually removed an element because
772        // dropping a non-passive element is a no-op (not a trap).
773    }
774
775    /// Do a `memory.copy` for a locally defined memory.
776    ///
777    /// # Errors
778    ///
779    /// Returns a `Trap` error when the source or destination ranges are out of
780    /// bounds.
781    pub(crate) fn local_memory_copy(
782        &self,
783        memory_index: LocalMemoryIndex,
784        dst: u32,
785        src: u32,
786        len: u32,
787    ) -> Result<(), Trap> {
788        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy
789
790        let memory = self.memory(memory_index);
791        // The following memory copy is not synchronized and is not atomic:
792        unsafe { memory_copy(&memory, dst, src, len) }
793    }
794
795    /// Perform a `memory.copy` on an imported memory.
796    pub(crate) fn imported_memory_copy(
797        &self,
798        memory_index: MemoryIndex,
799        dst: u32,
800        src: u32,
801        len: u32,
802    ) -> Result<(), Trap> {
803        let import = self.imported_memory(memory_index);
804        let memory = unsafe { import.definition.as_ref() };
805        // The following memory copy is not synchronized and is not atomic:
806        unsafe { memory_copy(memory, dst, src, len) }
807    }
808
809    /// Perform the `memory.fill` operation on a locally defined memory.
810    ///
811    /// # Errors
812    ///
813    /// Returns a `Trap` error if the memory range is out of bounds.
814    pub(crate) fn local_memory_fill(
815        &self,
816        memory_index: LocalMemoryIndex,
817        dst: u32,
818        val: u32,
819        len: u32,
820    ) -> Result<(), Trap> {
821        let memory = self.memory(memory_index);
822        // The following memory fill is not synchronized and is not atomic:
823        unsafe { memory_fill(&memory, dst, val, len) }
824    }
825
826    /// Perform the `memory.fill` operation on an imported memory.
827    ///
828    /// # Errors
829    ///
830    /// Returns a `Trap` error if the memory range is out of bounds.
831    pub(crate) fn imported_memory_fill(
832        &self,
833        memory_index: MemoryIndex,
834        dst: u32,
835        val: u32,
836        len: u32,
837    ) -> Result<(), Trap> {
838        let import = self.imported_memory(memory_index);
839        let memory = unsafe { import.definition.as_ref() };
840        // The following memory fill is not synchronized and is not atomic:
841        unsafe { memory_fill(memory, dst, val, len) }
842    }
843
    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        // A dropped (or absent) segment behaves as an empty slice: a
        // zero-length init still succeeds, any non-zero access traps below.
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Bounds checks: `checked_add` guards against u32 overflow of
        // `src + len` / `dst + len` before the range comparisons are made.
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        // `src + len` cannot overflow or exceed the segment here: verified above.
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
876
877    /// Drop the given data segment, truncating its length to zero.
878    pub(crate) fn data_drop(&self, data_index: DataIndex) {
879        let mut passive_data = self.passive_data.borrow_mut();
880        passive_data.remove(&data_index);
881    }
882
883    /// Get a table by index regardless of whether it is locally-defined or an
884    /// imported, foreign table.
885    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
886        if let Some(local_table_index) = self.module.local_table_index(table_index) {
887            self.get_local_table(local_table_index)
888        } else {
889            self.get_foreign_table(table_index)
890        }
891    }
892
893    /// Get a locally-defined table.
894    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
895        let table = self.tables[index];
896        table.get_mut(self.context_mut())
897    }
898
899    /// Get an imported, foreign table.
900    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
901        let import = self.imported_table(index);
902        let table = import.handle;
903        table.get_mut(self.context_mut())
904    }
905
906    /// Get a table handle by index regardless of whether it is locally-defined
907    /// or an imported, foreign table.
908    pub(crate) fn get_table_handle(
909        &mut self,
910        table_index: TableIndex,
911    ) -> InternalStoreHandle<VMTable> {
912        if let Some(local_table_index) = self.module.local_table_index(table_index) {
913            self.tables[local_table_index]
914        } else {
915            self.imported_table(table_index).handle
916        }
917    }
918
919    /// # Safety
920    /// See [`LinearMemory::do_wait`].
921    unsafe fn memory_wait(
922        memory: &mut VMMemory,
923        dst: u32,
924        expected: ExpectedValue,
925        timeout: i64,
926    ) -> Result<u32, Trap> {
927        let timeout = if timeout < 0 {
928            None
929        } else {
930            Some(std::time::Duration::from_nanos(timeout as u64))
931        };
932        match unsafe { memory.do_wait(dst, expected, timeout) } {
933            Ok(count) => Ok(count),
934            Err(_err) => {
935                // ret is None if there is more than 2^32 waiter in queue or some other error
936                // TODO: why THIS specific trap code tho? -.-
937                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
938            }
939        }
940    }
941
942    /// Perform an Atomic.Wait32
943    pub(crate) fn local_memory_wait32(
944        &mut self,
945        memory_index: LocalMemoryIndex,
946        dst: u32,
947        val: u32,
948        timeout: i64,
949    ) -> Result<u32, Trap> {
950        let memory = self.memory(memory_index);
951        //if ! memory.shared {
952        // We should trap according to spec, but official test rely on not trapping...
953        //}
954
955        // Do a fast-path check of the expected value, and also ensure proper alignment
956        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
957
958        if let Ok(mut ret) = ret {
959            if ret == 0 {
960                let memory = self.get_local_vmmemory_mut(memory_index);
961                // Safety: we have already checked alignment and bounds in memory32_atomic_check32
962                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
963            }
964            Ok(ret)
965        } else {
966            ret
967        }
968    }
969
970    /// Perform an Atomic.Wait32
971    pub(crate) fn imported_memory_wait32(
972        &mut self,
973        memory_index: MemoryIndex,
974        dst: u32,
975        val: u32,
976        timeout: i64,
977    ) -> Result<u32, Trap> {
978        let import = self.imported_memory(memory_index);
979        let memory = unsafe { import.definition.as_ref() };
980        //if ! memory.shared {
981        // We should trap according to spec, but official test rely on not trapping...
982        //}
983
984        // Do a fast-path check of the expected value, and also ensure proper alignment
985        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
986
987        if let Ok(mut ret) = ret {
988            if ret == 0 {
989                let memory = self.get_vmmemory_mut(memory_index);
990                // Safety: we have already checked alignment and bounds in memory32_atomic_check32
991                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
992            }
993            Ok(ret)
994        } else {
995            ret
996        }
997    }
998
999    /// Perform an Atomic.Wait64
1000    pub(crate) fn local_memory_wait64(
1001        &mut self,
1002        memory_index: LocalMemoryIndex,
1003        dst: u32,
1004        val: u64,
1005        timeout: i64,
1006    ) -> Result<u32, Trap> {
1007        let memory = self.memory(memory_index);
1008        //if ! memory.shared {
1009        // We should trap according to spec, but official test rely on not trapping...
1010        //}
1011
1012        // Do a fast-path check of the expected value, and also ensure proper alignment
1013        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
1014
1015        if let Ok(mut ret) = ret {
1016            if ret == 0 {
1017                let memory = self.get_local_vmmemory_mut(memory_index);
1018                // Safety: we have already checked alignment and bounds in memory32_atomic_check64
1019                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
1020            }
1021            Ok(ret)
1022        } else {
1023            ret
1024        }
1025    }
1026
1027    /// Perform an Atomic.Wait64
1028    pub(crate) fn imported_memory_wait64(
1029        &mut self,
1030        memory_index: MemoryIndex,
1031        dst: u32,
1032        val: u64,
1033        timeout: i64,
1034    ) -> Result<u32, Trap> {
1035        let import = self.imported_memory(memory_index);
1036        let memory = unsafe { import.definition.as_ref() };
1037        //if ! memory.shared {
1038        // We should trap according to spec, but official test rely on not trapping...
1039        //}
1040
1041        // Do a fast-path check of the expected value, and also ensure proper alignment
1042        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
1043
1044        if let Ok(mut ret) = ret {
1045            if ret == 0 {
1046                let memory = self.get_vmmemory_mut(memory_index);
1047                // Safety: we have already checked alignment and bounds in memory32_atomic_check64
1048                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
1049            }
1050            Ok(ret)
1051        } else {
1052            ret
1053        }
1054    }
1055
1056    /// Perform an Atomic.Notify
1057    pub(crate) fn local_memory_notify(
1058        &mut self,
1059        memory_index: LocalMemoryIndex,
1060        dst: u32,
1061        count: u32,
1062    ) -> Result<u32, Trap> {
1063        let memory = self.get_local_vmmemory_mut(memory_index);
1064        Ok(memory.do_notify(dst, count))
1065    }
1066
1067    /// Perform an Atomic.Notify
1068    pub(crate) fn imported_memory_notify(
1069        &mut self,
1070        memory_index: MemoryIndex,
1071        dst: u32,
1072        count: u32,
1073    ) -> Result<u32, Trap> {
1074        let memory = self.get_vmmemory_mut(memory_index);
1075        Ok(memory.do_notify(dst, count))
1076    }
1077}
1078
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public facade of the private `Instance`,
/// providing useful higher-level API.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary).
    ///
    /// `Drop` must hand this exact layout back to `std::alloc::dealloc`.
    instance_layout: Layout,

    /// The `Instance` itself.
    ///
    /// `Instance` must not be dropped manually by Rust, because it's
    /// allocated manually with `alloc` and a specific layout (Rust
    /// would be able to drop `Instance` itself but it will imply a
    /// memory leak because of `alloc`).
    ///
    /// No one in the code has a copy of the `Instance`'s
    /// pointer. `Self` is the only one.
    instance: NonNull<Instance>,
}
1099
1100/// VMInstance are created with an InstanceAllocator
1101/// and it will "consume" the memory
1102/// So the Drop here actualy free it (else it would be leaked)
1103impl Drop for VMInstance {
1104    fn drop(&mut self) {
1105        let instance_ptr = self.instance.as_ptr();
1106
1107        unsafe {
1108            // Need to drop all the actual Instance members
1109            instance_ptr.drop_in_place();
1110            // And then free the memory allocated for the Instance itself
1111            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
1112        }
1113    }
1114}
1115
impl VMInstance {
    /// Create a new `VMInstance` pointing at freshly allocated instance data.
    ///
    /// # Safety
    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been really
    /// audited for safety that much. As a result the unsafety here on this
    /// method is a low-overhead way of saying “this is an extremely unsafe type
    /// to work with”.
    ///
    /// Extreme care must be taken when working with `VMInstance` and it's
    /// recommended to have relatively intimate knowledge of how it works
    /// internally if you'd like to do so. If possible it's recommended to use
    /// the `wasmer` crate API rather than this type since that is vetted for
    /// safety.
    ///
    /// However the following must be taken care of before calling this function:
    /// - The memory at `instance.tables_ptr()` must be initialized with data for
    ///   all the local tables.
    /// - The memory at `instance.memories_ptr()` must be initialized with data for
    ///   all the local memories.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSignatureHash>,
    ) -> Result<Self, Trap> {
        unsafe {
            // Pre-compute the shared tag indices from the tag handles; they
            // are copied into the vmctx-adjacent region further below.
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            // Wrap each passive data segment in an `Arc` so later reads
            // (e.g. `memory.init`) can reference them cheaply.
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // use dummy value to create an instance so we can get the vmctx pointer
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                // Create the `Instance`. The unique, the One.
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                let mut instance_handle = allocator.into_vminstance(instance);

                // Set the funcrefs after we've built the instance
                // (they are built only now because `build_funcrefs` needs the
                // final vmctx pointer, which exists only after allocation).
                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                    for local_table_index in instance.tables.keys() {
                        instance.sync_fixed_funcref_table(local_table_index);
                    }
                }

                instance_handle
            };
            let instance = handle.instance();

            // Copy the shared tags and the import vectors into the memory
            // regions addressed through the instance's raw pointers.
            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            // these should already be set, add asserts here? for:
            // - instance.tables_ptr() as *mut VMTableDefinition
            // - instance.memories_ptr() as *mut VMMemoryDefinition
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            // Perform infallible initialization in this constructor, while fallible
            // initialization is deferred to the `initialize` method.
            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }

    /// Return a reference to the contained `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }

    /// Finishes the instantiation process started by `Instance::new`.
    ///
    /// Applies the table and memory initializers and then runs the module's
    /// start function (if any), as required by the WebAssembly spec.
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initializers.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The WebAssembly spec specifies that the start function is
        // invoked automatically at instantiation time.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` to get offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer
    /// arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference-counting pointer to a module.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to a module.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Lookup an export with the given name.
    ///
    /// Returns `None` if the module has no export with that name.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Lookup an export with the given export declaration.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // A VMFunction is lazily created only for functions that are
                    // exported.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        // Any function received is already static at this point as:
                        // 1. All locally defined functions in the Wasm have a static signature.
                        // 2. All the imported functions are already static (because
                        //    they point to the trampolines rather than the dynamic addresses).
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow table in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get table element reference.
    ///
    /// Returns `None` if index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set table element reference.
    ///
    /// Returns an error if the index is out of bounds
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this module.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1461
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
/// Return a byte-slice view of a memory's data.
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    unsafe {
        // Resolve the initializer's target memory to its `VMMemoryDefinition`,
        // whether the memory is defined locally or imported.
        let memory = if let Some(local_memory_index) = instance
            .module
            .local_memory_index(init.location.memory_index)
        {
            instance.memory(local_memory_index)
        } else {
            let import = instance.imported_memory(init.location.memory_index);
            *import.definition.as_ref()
        };
        // NOTE(review): this derives a `&mut [u8]` from a shared `&Instance`
        // (hence the `clippy::mut_from_ref` allow above); callers must
        // guarantee exclusive access to the memory for the slice's lifetime.
        slice::from_raw_parts_mut(memory.base, memory.current_length)
    }
}
1482
1483fn get_global(index: GlobalIndex, instance: &Instance) -> RawValue {
1484    unsafe {
1485        if let Some(local_global_index) = instance.module.local_global_index(index) {
1486            instance.global(local_global_index).val
1487        } else {
1488            instance.imported_global(index).definition.as_ref().val
1489        }
1490    }
1491}
1492
/// The result of evaluating a constant initializer expression: either a
/// 32-bit or a 64-bit integer value.
enum EvaluatedInitExpr {
    I32(i32),
    I64(i64),
}
1497
1498fn eval_init_expr(expr: &InitExpr, instance: &Instance) -> EvaluatedInitExpr {
1499    if expr
1500        .ops()
1501        .first()
1502        .expect("missing expression")
1503        .is_32bit_expression()
1504    {
1505        let mut stack = Vec::with_capacity(expr.ops().len());
1506        for op in expr.ops() {
1507            match *op {
1508                InitExprOp::I32Const(value) => stack.push(value),
1509                InitExprOp::GlobalGetI32(global) => {
1510                    stack.push(unsafe { get_global(global, instance).i32 })
1511                }
1512                InitExprOp::I32Add => {
1513                    let rhs = stack.pop().expect("invalid init expr stack for i32.add");
1514                    let lhs = stack.pop().expect("invalid init expr stack for i32.add");
1515                    stack.push(lhs.wrapping_add(rhs));
1516                }
1517                InitExprOp::I32Sub => {
1518                    let rhs = stack.pop().expect("invalid init expr stack for i32.sub");
1519                    let lhs = stack.pop().expect("invalid init expr stack for i32.sub");
1520                    stack.push(lhs.wrapping_sub(rhs));
1521                }
1522                InitExprOp::I32Mul => {
1523                    let rhs = stack.pop().expect("invalid init expr stack for i32.mul");
1524                    let lhs = stack.pop().expect("invalid init expr stack for i32.mul");
1525                    stack.push(lhs.wrapping_mul(rhs));
1526                }
1527                _ => {
1528                    panic!("unexpected init expr statement: {op:?}");
1529                }
1530            }
1531        }
1532        EvaluatedInitExpr::I32(
1533            stack
1534                .into_iter()
1535                .exactly_one()
1536                .expect("invalid init expr stack shape"),
1537        )
1538    } else {
1539        let mut stack = Vec::with_capacity(expr.ops().len());
1540        for op in expr.ops() {
1541            match *op {
1542                InitExprOp::I64Const(value) => stack.push(value),
1543                InitExprOp::GlobalGetI64(global) => {
1544                    stack.push(unsafe { get_global(global, instance).i64 })
1545                }
1546                InitExprOp::I64Add => {
1547                    let rhs = stack.pop().expect("invalid init expr stack for i64.add");
1548                    let lhs = stack.pop().expect("invalid init expr stack for i64.add");
1549                    stack.push(lhs.wrapping_add(rhs));
1550                }
1551                InitExprOp::I64Sub => {
1552                    let rhs = stack.pop().expect("invalid init expr stack for i64.sub");
1553                    let lhs = stack.pop().expect("invalid init expr stack for i64.sub");
1554                    stack.push(lhs.wrapping_sub(rhs));
1555                }
1556                InitExprOp::I64Mul => {
1557                    let rhs = stack.pop().expect("invalid init expr stack for i64.mul");
1558                    let lhs = stack.pop().expect("invalid init expr stack for i64.mul");
1559                    stack.push(lhs.wrapping_mul(rhs));
1560                }
1561                _ => {
1562                    panic!("unexpected init expr statement: {op:?}");
1563                }
1564            }
1565        }
1566        EvaluatedInitExpr::I64(
1567            stack
1568                .into_iter()
1569                .exactly_one()
1570                .expect("invalid init expr stack shape"),
1571        )
1572    }
1573}
1574
/// Initialize the table memory from the provided initializers.
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    // Clone the `Arc` so the initializers can be iterated while `instance`
    // is also borrowed mutably inside the loop.
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.offset_expr, instance) else {
            panic!("unexpected expression type, expected i32");
        };
        // A negative offset can never be in bounds.
        if start < 0 {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }
        let start = start as usize;
        let table = instance.get_table_handle(init.table_index);
        let table = unsafe { table.get_mut(&mut *instance.context) };

        // `checked_add` guards against overflow of `start + elements.len()`
        // before the comparison against the table size.
        if start
            .checked_add(init.elements.len())
            .is_none_or(|end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            // Funcref tables: resolve every element to its anyfunc.
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set_with_construction(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                        true,
                    )
                    .unwrap();
            }
        } else {
            // Non-funcref tables are filled with null extern references.
            for i in 0..init.elements.len() {
                table
                    .set_with_construction(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                        true,
                    )
                    .unwrap();
            }
        }

        instance.sync_fixed_funcref_table_by_index(init.table_index);
    }

    Ok(())
}
1624
1625/// Initialize the `Instance::passive_elements` map by resolving the
1626/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1627/// this instance.
1628fn initialize_passive_elements(instance: &Instance) {
1629    let mut passive_elements = instance.passive_elements.borrow_mut();
1630    debug_assert!(
1631        passive_elements.is_empty(),
1632        "should only be called once, at initialization time"
1633    );
1634
1635    passive_elements.extend(instance.module.passive_elements.iter().filter_map(
1636        |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
1637            if segments.is_empty() {
1638                None
1639            } else {
1640                Some((
1641                    idx,
1642                    segments
1643                        .iter()
1644                        .map(|s| instance.func_ref(*s))
1645                        .collect::<Box<[Option<VMFuncRef>]>>(),
1646                ))
1647            }
1648        },
1649    ));
1650}
1651
/// Initialize the linear memories from the provided data-segment initializers.
///
/// Each active data segment is copied into its target memory at the offset
/// produced by evaluating the segment's offset expression. Returns a
/// `HeapAccessOutOfBounds` trap when the offset is negative or the segment
/// does not fit within the memory's current byte length.
fn initialize_memories(
    instance: &mut Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.get_vmmemory(init.location.memory_index);

        // Active data segments use an i32 offset expression; anything else
        // indicates a malformed module and is a bug upstream of us.
        let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.location.offset_expr, instance)
        else {
            panic!("unexpected expression type, expected i32");
        };
        if start < 0 {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let start = start as usize;
        unsafe {
            // Bounds-check before writing: `start + data.len()` must neither
            // overflow nor exceed the memory's current length.
            let current_length = memory.vmmemory().as_ref().current_length;
            if start
                .checked_add(init.data.len())
                .is_none_or(|end| end > current_length)
            {
                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
            }
            memory.initialize_with_data(start, init.data)?;
        }
    }

    Ok(())
}
1682
/// Write the initial value of every locally-defined global into its
/// `VMGlobalDefinition` slot for this instance.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            // Raw pointer into this instance's global storage. Writes below go
            // through the untagged `val` union, so the field written must
            // match the global's declared type (guaranteed by validation).
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    // Copy the current definition of the referenced global,
                    // whether it lives locally or in an imported instance.
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // A null reference is represented as an all-zero funcref slot.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
                // Extended const expressions evaluate to an i32 or i64.
                GlobalInit::Expr(expr) => match eval_init_expr(expr, instance) {
                    EvaluatedInitExpr::I32(value) => (*to).val.i32 = value,
                    EvaluatedInitExpr::I64(value) => (*to).val.i64 = value,
                },
            }
        }
    }
}
1716
1717fn anyfunc_from_funcref(funcref: Option<VMFuncRef>) -> VMCallerCheckedAnyfunc {
1718    match funcref {
1719        Some(funcref) => unsafe { *funcref.0.as_ptr() },
1720        None => VMCallerCheckedAnyfunc::null(),
1721    }
1722}
1723
1724/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1725/// future funcref operations are just looking up this data.
1726fn build_funcrefs(
1727    module_info: &ModuleInfo,
1728    ctx: &StoreObjects,
1729    imports: &Imports,
1730    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1731    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSignatureHash>,
1732    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1733    vmctx_ptr: *mut VMContext,
1734) -> (
1735    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1736    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1737) {
1738    let mut func_refs =
1739        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1740    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1741
1742    // do imported functions
1743    for import in imports.functions.values() {
1744        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1745    }
1746
1747    // do local functions
1748    for (local_index, func_ptr) in finished_functions.iter() {
1749        let index = module_info.func_index(local_index);
1750        let sig_index = module_info.functions[index];
1751        let type_signature_hash = vmshared_signatures[sig_index];
1752        let call_trampoline = function_call_trampolines[sig_index];
1753        let anyfunc = VMCallerCheckedAnyfunc {
1754            func_ptr: func_ptr.0,
1755            type_signature_hash,
1756            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1757            call_trampoline,
1758        };
1759        func_refs.push(anyfunc);
1760    }
1761    (
1762        func_refs.into_boxed_slice(),
1763        imported_func_refs.into_boxed_slice(),
1764    )
1765}