wasmer_vm/instance/
mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `VMInstance` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated.
8
9mod allocator;
10
11use crate::LinearMemory;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
18    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
19    VMMemoryImport, VMSharedSignatureIndex, VMSharedTagIndex, VMTableDefinition, VMTableImport,
20    VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
23use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
24use crate::{export::VMExtern, threadconditions::ExpectedValue};
25pub use allocator::InstanceAllocator;
26use itertools::Itertools;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
39use wasmer_types::{
40    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41    InitExpr, InitExprOp, LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex,
42    MemoryError, MemoryIndex, ModuleInfo, Pages, RawValue, SignatureIndex, TableIndex, TagIndex,
43    VMOffsets,
44};
45
46/// A WebAssembly instance.
47///
48/// The type is dynamically-sized. Indeed, the `vmctx` field can
49/// contain various data. That's why the type has a C representation
50/// to ensure that the `vmctx` field is last. See the documentation of
51/// the `vmctx` field to learn more.
/// A WebAssembly instance.
///
/// The type is dynamically-sized. Indeed, the `vmctx` field can
/// contain various data. That's why the type has a C representation
/// to ensure that the `vmctx` field is last. See the documentation of
/// the `vmctx` field to learn more.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    ///
    /// NOTE(review): a raw pointer, not a borrow — the store is assumed to
    /// outlive the instance; every accessor dereferences it unchecked.
    context: *mut StoreObjects,

    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// WebAssembly tag data. Notably, this stores *all* tags, not just local ones.
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed. A missing entry is considered equivalent to an empty slice.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This
    /// field is last, and represents a dynamically-sized array that
    /// extends beyond the nominal end of the struct (similar to a
    /// flexible array member).
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107        formatter.debug_struct("Instance").finish()
108    }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    /// `offset` must lie within the `vmctx` region laid out by `self.offsets`,
    /// and the returned pointer must only be used as a `T` if that location
    /// actually holds a `T`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }
122
    /// Return the `ModuleInfo` this instance was instantiated from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }
126
    /// Return a plain `ModuleInfo` reference (deref of the `Arc` held by
    /// this instance).
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }
130
    /// Return a shared reference to the store objects owning this instance.
    pub(crate) fn context(&self) -> &StoreObjects {
        // SAFETY: `self.context` is set at instantiation and assumed to
        // point at a live store for the lifetime of the instance.
        unsafe { &*self.context }
    }
134
    /// Return a mutable reference to the store objects owning this instance.
    pub(crate) fn context_mut(&mut self) -> &mut StoreObjects {
        // SAFETY: `self.context` is set at instantiation and assumed to
        // point at a live store for the lifetime of the instance.
        unsafe { &mut *self.context }
    }
138
    /// Offsets in the `vmctx` region; used by the `*_ptr` accessors below.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
143
    /// Return a pointer to the `VMSharedSignatureIndex`s.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        // Start of the signature-id array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }
148
    /// Return the indexed `VMFunctionImport`.
    ///
    /// NOTE(review): no bounds check — `index` is trusted to be a valid
    /// imported-function index for this module.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }
154
    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        // Start of the `VMFunctionImport` array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }
159
    /// Return the indexed `VMTableImport`.
    ///
    /// NOTE(review): no bounds check — `index` is trusted to be a valid
    /// imported-table index for this module.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }
165
    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        // Start of the `VMTableImport` array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }
170
    /// Return the indexed `VMMemoryImport`.
    ///
    /// NOTE(review): no bounds check — `index` is trusted to be a valid
    /// imported-memory index for this module.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }
176
    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        // Start of the `VMMemoryImport` array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }
181
    /// Return the indexed `VMGlobalImport`.
    ///
    /// NOTE(review): no bounds check — `index` is trusted to be a valid
    /// imported-global index for this module.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }
187
    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        // Start of the `VMGlobalImport` array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }
192
    /// Return the indexed `VMSharedTagIndex`.
    ///
    /// NOTE(review): unchecked indexing into the tag-id section of `vmctx` —
    /// `index` is trusted to be a valid tag index.
    #[cfg_attr(target_os = "windows", allow(dead_code))]
    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.shared_tags_ptr().add(index) }
    }
199
    /// Return a pointer to the `VMSharedTagIndex`s.
    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        // Start of the tag-id array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }
204
    /// Return the indexed `VMTableDefinition`, copied by value out of the
    /// `vmctx` region.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }
210
    #[allow(dead_code)]
    /// Updates the value for a defined table to `VMTableDefinition`.
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        // Overwrites the definition stored inside `vmctx` in place.
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }
218
    /// Return a pointer to the indexed `VMTableDefinition` inside `vmctx`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // `NonNull::new(..).unwrap()` asserts the computed address is non-null.
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }
224
    /// Return a pointer to the `VMTableDefinition`s.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        // Start of the `VMTableDefinition` array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
229
230    #[allow(dead_code)]
231    /// Get a locally defined or imported memory.
232    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
233        if let Some(local_index) = self.module.local_memory_index(index) {
234            self.memory(local_index)
235        } else {
236            let import = self.imported_memory(index);
237            unsafe { *import.definition.as_ref() }
238        }
239    }
240
    /// Return the indexed `VMMemoryDefinition`, copied by value out of the
    /// `vmctx` region.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }
245
    #[allow(dead_code)]
    /// Set the indexed memory to `VMMemoryDefinition`.
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        // Overwrites the definition stored inside `vmctx` in place.
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }
253
    /// Return a pointer to the indexed `VMMemoryDefinition` inside `vmctx`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // `NonNull::new(..).unwrap()` asserts the computed address is non-null.
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }
259
    /// Return a pointer to the `VMMemoryDefinition`s.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        // Start of the `VMMemoryDefinition` array inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
264
    /// Get a locally defined or imported memory as a `VMMemory` reference.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // Local memory: resolve the store handle against the owning store.
            // SAFETY: `self.context` is non-null for a live instance.
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            // Imported memory: go through the import's store handle instead.
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }
279
    /// Get a locally defined or imported memory as a mutable `VMMemory`.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // Local memory: resolve the store handle against the owning store.
            // SAFETY: `self.context` is non-null for a live instance.
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            // Imported memory: go through the import's store handle instead.
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }
294
    /// Get a locally defined memory as mutable.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        // SAFETY: `self.context` is non-null for a live instance.
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
304
    /// Return the indexed `VMGlobalDefinition`, cloned out of the definition
    /// the `vmctx` slot points to (globals live behind a pointer, unlike
    /// table/memory definitions — see `globals_ptr`).
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }
309
    /// Set the indexed global to `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        // Overwrites the pointed-to definition in place.
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }
317
    /// Return a pointer to the indexed `VMGlobalDefinition`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // The `vmctx` slot holds a *pointer* to the global definition, so a
        // double dereference is needed here (compare `globals_ptr`'s type).
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }
324
    /// Return a pointer to the `VMGlobalDefinition`s.
    ///
    /// Note the double pointer: `vmctx` stores pointers to the definitions,
    /// not the definitions themselves.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }
329
    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        // Start of the builtin-functions table inside the `vmctx` region.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
334
    /// Return a reference to the vmctx used by compiled wasm code.
    ///
    /// Only the fixed-size header is covered by this reference; the region
    /// extends past the struct's nominal end (see the `vmctx` field docs).
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }
339
340    /// Return a raw pointer to the vmctx used by compiled wasm code.
341    fn vmctx_ptr(&self) -> *mut VMContext {
342        self.vmctx() as *const VMContext as *mut VMContext
343    }
344
    /// Invoke the WebAssembly start function of the instance, if one is present.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        // No start function declared: nothing to do.
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: either a function defined in this module
        // (paired with our own vmctx) or an imported function (paired with
        // the import's environment).
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        // The wasm start function takes no parameters and returns no results,
        // so an empty values buffer is sufficient.
        let mut values_vec = vec![];

        unsafe {
            // Even though we already know the type of the function we need to
            // call, in certain specific cases the trampoline prepares callee
            // arguments for specific optimizations, such as passing g0 and
            // m0_base_ptr as parameters.
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec.as_mut_ptr(),
            )
        }
    }
395
    /// Return the offset from the vmctx pointer to its containing `Instance`.
    ///
    /// Because `Instance` is `repr(C)` with `vmctx` last, this is the size of
    /// the fixed-layout prefix of the struct.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
401
402    /// Return the table index for the given `VMTableDefinition`.
403    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
404        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
405        let end: *const VMTableDefinition = table;
406        // TODO: Use `offset_from` once it stablizes.
407        let index = LocalTableIndex::new(
408            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
409        );
410        assert_lt!(index.index(), self.tables.len());
411        index
412    }
413
414    /// Return the memory index for the given `VMMemoryDefinition`.
415    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
416        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
417        let end: *const VMMemoryDefinition = memory;
418        // TODO: Use `offset_from` once it stablizes.
419        let index = LocalMemoryIndex::new(
420            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
421        );
422        assert_lt!(index.index(), self.memories.len());
423        index
424    }
425
    /// Grow memory by the specified amount of pages.
    ///
    /// Returns a `MemoryError` if memory can't be grown by the specified
    /// amount of pages.
    ///
    /// # Panics
    /// Panics if `memory_index` has no associated local memory.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
444
    /// Grow imported memory by the specified amount of pages.
    ///
    /// Returns a `MemoryError` if memory can't be grown by the specified
    /// amount of pages.
    ///
    /// # Safety
    /// This and `imported_memory_size` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        // Handles are copied out so the borrow of `self` can be reused below.
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
465
    /// Returns the number of allocated wasm pages.
    ///
    /// # Panics
    /// Panics if `memory_index` has no associated local memory.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }
474
    /// Returns the number of allocated wasm pages in an imported memory.
    ///
    /// # Safety
    /// This and `imported_memory_grow` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }
485
    /// Returns the number of elements in a given table.
    ///
    /// # Panics
    /// Panics if `table_index` has no associated local table.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }
494
    /// Returns the number of elements in a given imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
504
    /// Grow table by the specified amount of elements.
    ///
    /// Returns `None` if table can't be grown by the specified amount
    /// of elements.
    ///
    /// # Panics
    /// Panics if `table_index` has no associated local table.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }
521
    /// Grow an imported table by the specified amount of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified amount
    /// of elements.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }
536
    /// Get table element by index (`None` presumably when `index` is out of
    /// bounds of the table — confirm against `VMTable::get`).
    ///
    /// # Panics
    /// Panics if `table_index` has no associated local table.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }
549
    /// Returns the element at the given index of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }
563
    /// Set table element by index.
    ///
    /// # Panics
    /// Panics if `table_index` has no associated local table.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }
577
    /// Set table element by index for an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
592
593    /// Get a `VMFuncRef` for the given `FunctionIndex`.
594    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
595        if function_index == FunctionIndex::reserved_value() {
596            None
597        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
598            Some(VMFuncRef(NonNull::from(
599                &self.funcrefs[local_function_index],
600            )))
601        } else {
602            Some(VMFuncRef(self.imported_funcrefs[function_index]))
603        }
604    }
605
    /// The `table.init` operation: initializes a portion of a table with a
    /// passive element.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table_handle(table_index);
        // SAFETY: `self.context` is non-null for a live instance.
        let table = unsafe { table.get_mut(&mut *self.context) };
        // A dropped or missing passive element behaves as an empty slice.
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Bounds-check both ranges up front; `checked_add` also rejects an
        // overflowing `src + len` / `dst + len`.
        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
644
    /// The `table.fill` operation: fills a portion of a table with a given value.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-fill

        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        // `checked_add` also rejects an overflowing `start_index + len`.
        if start_index
            .checked_add(len)
            .is_none_or(|n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
677
678    /// Drop an element.
679    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
680        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
681
682        let mut passive_elements = self.passive_elements.borrow_mut();
683        passive_elements.remove(&elem_index);
684        // Note that we don't check that we actually removed an element because
685        // dropping a non-passive element is a no-op (not a trap).
686    }
687
    /// Do a `memory.copy` for a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy

        let memory = self.memory(memory_index);
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(&memory, dst, src, len) }
    }
707
    /// Perform a `memory.copy` on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // SAFETY: the import's definition pointer is trusted to be valid.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(memory, dst, src, len) }
    }
721
    /// Perform the `memory.fill` operation on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(&memory, dst, val, len) }
    }
738
    /// Perform the `memory.fill` operation on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // SAFETY: the import's definition pointer is trusted to be valid.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(memory, dst, val, len) }
    }
756
    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_vmmemory(memory_index);
        // A dropped or missing passive data segment behaves as an empty slice.
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Bounds-check both ranges up front; `checked_add` also rejects an
        // overflowing `src + len` / `dst + len`.
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
789
790    /// Drop the given data segment, truncating its length to zero.
791    pub(crate) fn data_drop(&self, data_index: DataIndex) {
792        let mut passive_data = self.passive_data.borrow_mut();
793        passive_data.remove(&data_index);
794    }
795
796    /// Get a table by index regardless of whether it is locally-defined or an
797    /// imported, foreign table.
798    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
799        if let Some(local_table_index) = self.module.local_table_index(table_index) {
800            self.get_local_table(local_table_index)
801        } else {
802            self.get_foreign_table(table_index)
803        }
804    }
805
    /// Get a locally-defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        // The handle is copied out first so `self` can be reborrowed mutably.
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }
811
    /// Get an imported, foreign table.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        // The handle is copied out first so `self` can be reborrowed mutably.
        let table = import.handle;
        table.get_mut(self.context_mut())
    }
818
819    /// Get a table handle by index regardless of whether it is locally-defined
820    /// or an imported, foreign table.
821    pub(crate) fn get_table_handle(
822        &mut self,
823        table_index: TableIndex,
824    ) -> InternalStoreHandle<VMTable> {
825        if let Some(local_table_index) = self.module.local_table_index(table_index) {
826            self.tables[local_table_index]
827        } else {
828            self.imported_table(table_index).handle
829        }
830    }
831
832    /// # Safety
833    /// See [`LinearMemory::do_wait`].
834    unsafe fn memory_wait(
835        memory: &mut VMMemory,
836        dst: u32,
837        expected: ExpectedValue,
838        timeout: i64,
839    ) -> Result<u32, Trap> {
840        let timeout = if timeout < 0 {
841            None
842        } else {
843            Some(std::time::Duration::from_nanos(timeout as u64))
844        };
845        match unsafe { memory.do_wait(dst, expected, timeout) } {
846            Ok(count) => Ok(count),
847            Err(_err) => {
848                // ret is None if there is more than 2^32 waiter in queue or some other error
849                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
850            }
851        }
852    }
853
854    /// Perform an Atomic.Wait32
855    pub(crate) fn local_memory_wait32(
856        &mut self,
857        memory_index: LocalMemoryIndex,
858        dst: u32,
859        val: u32,
860        timeout: i64,
861    ) -> Result<u32, Trap> {
862        let memory = self.memory(memory_index);
863        //if ! memory.shared {
864        // We should trap according to spec, but official test rely on not trapping...
865        //}
866
867        // Do a fast-path check of the expected value, and also ensure proper alignment
868        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
869
870        if let Ok(mut ret) = ret {
871            if ret == 0 {
872                let memory = self.get_local_vmmemory_mut(memory_index);
873                // Safety: we have already checked alignment and bounds in memory32_atomic_check32
874                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
875            }
876            Ok(ret)
877        } else {
878            ret
879        }
880    }
881
882    /// Perform an Atomic.Wait32
883    pub(crate) fn imported_memory_wait32(
884        &mut self,
885        memory_index: MemoryIndex,
886        dst: u32,
887        val: u32,
888        timeout: i64,
889    ) -> Result<u32, Trap> {
890        let import = self.imported_memory(memory_index);
891        let memory = unsafe { import.definition.as_ref() };
892        //if ! memory.shared {
893        // We should trap according to spec, but official test rely on not trapping...
894        //}
895
896        // Do a fast-path check of the expected value, and also ensure proper alignment
897        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
898
899        if let Ok(mut ret) = ret {
900            if ret == 0 {
901                let memory = self.get_vmmemory_mut(memory_index);
902                // Safety: we have already checked alignment and bounds in memory32_atomic_check32
903                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
904            }
905            Ok(ret)
906        } else {
907            ret
908        }
909    }
910
911    /// Perform an Atomic.Wait64
912    pub(crate) fn local_memory_wait64(
913        &mut self,
914        memory_index: LocalMemoryIndex,
915        dst: u32,
916        val: u64,
917        timeout: i64,
918    ) -> Result<u32, Trap> {
919        let memory = self.memory(memory_index);
920        //if ! memory.shared {
921        // We should trap according to spec, but official test rely on not trapping...
922        //}
923
924        // Do a fast-path check of the expected value, and also ensure proper alignment
925        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
926
927        if let Ok(mut ret) = ret {
928            if ret == 0 {
929                let memory = self.get_local_vmmemory_mut(memory_index);
930                // Safety: we have already checked alignment and bounds in memory32_atomic_check64
931                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
932            }
933            Ok(ret)
934        } else {
935            ret
936        }
937    }
938
939    /// Perform an Atomic.Wait64
940    pub(crate) fn imported_memory_wait64(
941        &mut self,
942        memory_index: MemoryIndex,
943        dst: u32,
944        val: u64,
945        timeout: i64,
946    ) -> Result<u32, Trap> {
947        let import = self.imported_memory(memory_index);
948        let memory = unsafe { import.definition.as_ref() };
949        //if ! memory.shared {
950        // We should trap according to spec, but official test rely on not trapping...
951        //}
952
953        // Do a fast-path check of the expected value, and also ensure proper alignment
954        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
955
956        if let Ok(mut ret) = ret {
957            if ret == 0 {
958                let memory = self.get_vmmemory_mut(memory_index);
959                // Safety: we have already checked alignment and bounds in memory32_atomic_check64
960                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
961            }
962            Ok(ret)
963        } else {
964            ret
965        }
966    }
967
968    /// Perform an Atomic.Notify
969    pub(crate) fn local_memory_notify(
970        &mut self,
971        memory_index: LocalMemoryIndex,
972        dst: u32,
973        count: u32,
974    ) -> Result<u32, Trap> {
975        let memory = self.get_local_vmmemory_mut(memory_index);
976        Ok(memory.do_notify(dst, count))
977    }
978
979    /// Perform an Atomic.Notify
980    pub(crate) fn imported_memory_notify(
981        &mut self,
982        memory_index: MemoryIndex,
983        dst: u32,
984        count: u32,
985    ) -> Result<u32, Trap> {
986        let memory = self.get_vmmemory_mut(memory_index);
987        Ok(memory.do_notify(dst, count))
988    }
989}
990
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public facade of the private `Instance`,
/// providing useful higher-level API.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary).
    ///
    /// Kept so `Drop` can `dealloc` with exactly the layout used at
    /// allocation time.
    instance_layout: Layout,

    /// The `Instance` itself.
    ///
    /// `Instance` must not be dropped manually by Rust, because it's
    /// allocated manually with `alloc` and a specific layout (Rust
    /// would be able to drop `Instance` itself but it will imply a
    /// memory leak because of `alloc`).
    ///
    /// No one in the code has a copy of the `Instance`'s
    /// pointer. `Self` is the only one.
    instance: NonNull<Instance>,
}
1011
/// A `VMInstance` is created from an `InstanceAllocator`, which "consumes"
/// the memory it allocated. So this `Drop` impl actually frees that memory
/// (otherwise it would be leaked).
impl Drop for VMInstance {
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // Need to drop all the actual Instance members
            instance_ptr.drop_in_place();
            // And then free the memory allocated for the Instance itself
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1027
impl VMInstance {
    /// Create a new `VMInstance` pointing at freshly allocated instance data.
    ///
    /// # Safety
    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been really
    /// audited for safety that much. As a result the unsafety here on this
    /// method is a low-overhead way of saying “this is an extremely unsafe type
    /// to work with”.
    ///
    /// Extreme care must be taken when working with `VMInstance` and it's
    /// recommended to have relatively intimate knowledge of how it works
    /// internally if you'd like to do so. If possible it's recommended to use
    /// the `wasmer` crate API rather than this type since that is vetted for
    /// safety.
    ///
    /// However the following must be taken care of before calling this function:
    /// - The memory at `instance.tables_ptr()` must be initialized with data for
    ///   all the local tables.
    /// - The memory at `instance.memories_ptr()` must be initialized with data for
    ///   all the local memories.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        unsafe {
            // Pre-compute the per-vmctx views of tags and globals before the
            // `Instance` takes ownership of the handles below.
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            let vmctx_globals = finished_globals
                .values()
                .map(|m: &InternalStoreHandle<VMGlobal>| m.get(context).vmglobal())
                .collect::<PrimaryMap<LocalGlobalIndex, NonNull<VMGlobalDefinition>>>()
                .into_boxed_slice();
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // use dummy value to create an instance so we can get the vmctx pointer
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                // Create the `Instance`. The unique, the One.
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                let mut instance_handle = allocator.into_vminstance(instance);

                // Set the funcrefs after we've built the instance, because
                // building them requires the final vmctx pointer.
                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                }

                instance_handle
            };
            let instance = handle.instance();

            // Copy the pre-computed tag/signature/import arrays into the
            // vmctx-adjacent memory described by the instance's `VMOffsets`.
            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                vmshared_signatures.values().as_slice().as_ptr(),
                instance.signature_ids_ptr(),
                vmshared_signatures.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            // these should already be set, add asserts here? for:
            // - instance.tables_ptr() as *mut VMTableDefinition
            // - instance.memories_ptr() as *mut VMMemoryDefinition
            ptr::copy(
                vmctx_globals.values().as_slice().as_ptr(),
                instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
                vmctx_globals.len(),
            );
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            // Perform infallible initialization in this constructor, while fallible
            // initialization is deferred to the `initialize` method.
            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }

    /// Return a reference to the contained `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }

    /// Finishes the instantiation process started by `Instance::new`.
    ///
    /// Applies table and data initializers, then runs the module's start
    /// function if any.
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initializers.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The WebAssembly spec specifies that the start function is
        // invoked automatically at instantiation time.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` to get offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer
    /// arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference-counting pointer to a module.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to a module.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Lookup an export with the given name.
    ///
    /// Returns `None` if the module has no export with that name.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Lookup an export with the given export declaration.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // A VMFunction is lazily created only for functions that are
                    // exported.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        // Any function received is already static at this point as:
                        // 1. All locally defined functions in the Wasm have a static signature.
                        // 2. All the imported functions are already static (because
                        //    they point to the trampolines rather than the dynamic addresses).
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow table in this instance by the specified number of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified number
    /// of elements.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get table element reference.
    ///
    /// Returns `None` if index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set table element reference.
    ///
    /// Returns an error if the index is out of bounds
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this module.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1385
1386#[allow(clippy::mut_from_ref)]
1387#[allow(dead_code)]
1388/// Return a byte-slice view of a memory's data.
1389unsafe fn get_memory_slice<'instance>(
1390    init: &DataInitializer<'_>,
1391    instance: &'instance Instance,
1392) -> &'instance mut [u8] {
1393    unsafe {
1394        let memory = if let Some(local_memory_index) = instance
1395            .module
1396            .local_memory_index(init.location.memory_index)
1397        {
1398            instance.memory(local_memory_index)
1399        } else {
1400            let import = instance.imported_memory(init.location.memory_index);
1401            *import.definition.as_ref()
1402        };
1403        slice::from_raw_parts_mut(memory.base, memory.current_length)
1404    }
1405}
1406
1407fn get_global(index: GlobalIndex, instance: &Instance) -> RawValue {
1408    unsafe {
1409        if let Some(local_global_index) = instance.module.local_global_index(index) {
1410            instance.global(local_global_index).val
1411        } else {
1412            instance.imported_global(index).definition.as_ref().val
1413        }
1414    }
1415}
1416
/// Result of evaluating a constant initializer expression: either a 32-bit
/// or a 64-bit integer value.
enum EvaluatedInitExpr {
    I32(i32),
    I64(i64),
}
1421
/// Evaluate a constant initializer expression with a small stack machine.
///
/// The first opcode decides whether the whole expression is evaluated with
/// 32-bit or 64-bit wrapping integer arithmetic. Malformed stacks and
/// unsupported opcodes panic, on the premise that the expression was
/// validated earlier in the pipeline.
fn eval_init_expr(expr: &InitExpr, instance: &Instance) -> EvaluatedInitExpr {
    if expr
        .ops()
        .first()
        .expect("missing expression")
        .is_32bit_expression()
    {
        // 32-bit evaluation path.
        let mut stack = Vec::with_capacity(expr.ops().len());
        for op in expr.ops() {
            match *op {
                InitExprOp::I32Const(value) => stack.push(value),
                InitExprOp::GlobalGetI32(global) => {
                    stack.push(unsafe { get_global(global, instance).i32 })
                }
                InitExprOp::I32Add => {
                    let rhs = stack.pop().expect("invalid init expr stack for i32.add");
                    let lhs = stack.pop().expect("invalid init expr stack for i32.add");
                    stack.push(lhs.wrapping_add(rhs));
                }
                InitExprOp::I32Sub => {
                    let rhs = stack.pop().expect("invalid init expr stack for i32.sub");
                    let lhs = stack.pop().expect("invalid init expr stack for i32.sub");
                    stack.push(lhs.wrapping_sub(rhs));
                }
                InitExprOp::I32Mul => {
                    let rhs = stack.pop().expect("invalid init expr stack for i32.mul");
                    let lhs = stack.pop().expect("invalid init expr stack for i32.mul");
                    stack.push(lhs.wrapping_mul(rhs));
                }
                _ => {
                    panic!("unexpected init expr statement: {op:?}");
                }
            }
        }
        // A well-formed expression leaves exactly one value on the stack.
        EvaluatedInitExpr::I32(
            stack
                .into_iter()
                .exactly_one()
                .expect("invalid init expr stack shape"),
        )
    } else {
        // 64-bit evaluation path; mirrors the 32-bit one above.
        let mut stack = Vec::with_capacity(expr.ops().len());
        for op in expr.ops() {
            match *op {
                InitExprOp::I64Const(value) => stack.push(value),
                InitExprOp::GlobalGetI64(global) => {
                    stack.push(unsafe { get_global(global, instance).i64 })
                }
                InitExprOp::I64Add => {
                    let rhs = stack.pop().expect("invalid init expr stack for i64.add");
                    let lhs = stack.pop().expect("invalid init expr stack for i64.add");
                    stack.push(lhs.wrapping_add(rhs));
                }
                InitExprOp::I64Sub => {
                    let rhs = stack.pop().expect("invalid init expr stack for i64.sub");
                    let lhs = stack.pop().expect("invalid init expr stack for i64.sub");
                    stack.push(lhs.wrapping_sub(rhs));
                }
                InitExprOp::I64Mul => {
                    let rhs = stack.pop().expect("invalid init expr stack for i64.mul");
                    let lhs = stack.pop().expect("invalid init expr stack for i64.mul");
                    stack.push(lhs.wrapping_mul(rhs));
                }
                _ => {
                    panic!("unexpected init expr statement: {op:?}");
                }
            }
        }
        // A well-formed expression leaves exactly one value on the stack.
        EvaluatedInitExpr::I64(
            stack
                .into_iter()
                .exactly_one()
                .expect("invalid init expr stack shape"),
        )
    }
}
1498
/// Initialize the tables from the module's active element initializers.
///
/// Traps with `TableAccessOutOfBounds` if an initializer's offset or extent
/// lies outside the target table.
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        // Table offsets are i32 expressions per the Wasm spec.
        let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.offset_expr, instance) else {
            panic!("unexpected expression type, expected i32");
        };
        if start < 0 {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }
        let start = start as usize;
        let table = instance.get_table_handle(init.table_index);
        let table = unsafe { table.get_mut(&mut *instance.context) };

        // Bounds check the whole segment before writing any element.
        if start
            .checked_add(init.elements.len())
            .is_none_or(|end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                    )
                    .unwrap();
            }
        } else {
            // Non-funcref tables are filled with null externrefs.
            for i in 0..init.elements.len() {
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                    )
                    .unwrap();
            }
        }
    }

    Ok(())
}
1544
1545/// Initialize the `Instance::passive_elements` map by resolving the
1546/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1547/// this instance.
1548fn initialize_passive_elements(instance: &Instance) {
1549    let mut passive_elements = instance.passive_elements.borrow_mut();
1550    debug_assert!(
1551        passive_elements.is_empty(),
1552        "should only be called once, at initialization time"
1553    );
1554
1555    passive_elements.extend(instance.module.passive_elements.iter().filter_map(
1556        |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
1557            if segments.is_empty() {
1558                None
1559            } else {
1560                Some((
1561                    idx,
1562                    segments
1563                        .iter()
1564                        .map(|s| instance.func_ref(*s))
1565                        .collect::<Box<[Option<VMFuncRef>]>>(),
1566                ))
1567            }
1568        },
1569    ));
1570}
1571
/// Initialize linear memories from the provided active data initializers.
///
/// Traps with `HeapAccessOutOfBounds` if an initializer's offset or extent
/// lies outside the target memory.
fn initialize_memories(
    instance: &mut Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.get_vmmemory(init.location.memory_index);

        // Data offsets are i32 expressions per the Wasm spec.
        let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.location.offset_expr, instance)
        else {
            panic!("unexpected expression type, expected i32");
        };
        if start < 0 {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let start = start as usize;
        unsafe {
            // Bounds check the whole segment before copying any byte.
            let current_length = memory.vmmemory().as_ref().current_length;
            if start
                .checked_add(init.data.len())
                .is_none_or(|end| end > current_length)
            {
                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
            }
            memory.initialize_with_data(start, init.data)?;
        }
    }

    Ok(())
}
1602
/// Initialize this instance's global definitions from the module's
/// `global_initializers`, writing each initial value directly into the
/// instance's global storage.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        // SAFETY: `global_ptr` points at this instance's storage for the
        // global at `index`; the write does not alias any other live
        // reference during initialization.
        unsafe {
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                // Plain constants: write the raw value into the matching
                // union field of the global's definition.
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                // Copy the value of another global: either one defined
                // locally in this instance or one imported from elsewhere.
                GlobalInit::GetGlobal(x) => {
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // A null funcref is represented as 0.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
                // General constant expression; evaluation yields an i32 or
                // i64 which is stored in the corresponding union field.
                GlobalInit::Expr(expr) => match eval_init_expr(expr, instance) {
                    EvaluatedInitExpr::I32(value) => (*to).val.i32 = value,
                    EvaluatedInitExpr::I64(value) => (*to).val.i64 = value,
                },
            }
        }
    }
}
1636
1637/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1638/// future funcref operations are just looking up this data.
1639fn build_funcrefs(
1640    module_info: &ModuleInfo,
1641    ctx: &StoreObjects,
1642    imports: &Imports,
1643    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1644    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1645    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1646    vmctx_ptr: *mut VMContext,
1647) -> (
1648    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1649    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1650) {
1651    let mut func_refs =
1652        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1653    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1654
1655    // do imported functions
1656    for import in imports.functions.values() {
1657        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1658    }
1659
1660    // do local functions
1661    for (local_index, func_ptr) in finished_functions.iter() {
1662        let index = module_info.func_index(local_index);
1663        let sig_index = module_info.functions[index];
1664        let type_index = vmshared_signatures[sig_index];
1665        let call_trampoline = function_call_trampolines[sig_index];
1666        let anyfunc = VMCallerCheckedAnyfunc {
1667            func_ptr: func_ptr.0,
1668            type_index,
1669            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1670            call_trampoline,
1671        };
1672        func_refs.push(anyfunc);
1673    }
1674    (
1675        func_refs.into_boxed_slice(),
1676        imported_func_refs.into_boxed_slice(),
1677    )
1678}