// wasmer_vm/instance/mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `VMInstance` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated.
8
9mod allocator;
10
11use crate::LinearMemory;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
18    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
19    VMMemoryImport, VMSharedSignatureIndex, VMSharedTagIndex, VMTableDefinition, VMTableImport,
20    VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
23use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
24use crate::{export::VMExtern, threadconditions::ExpectedValue};
25pub use allocator::InstanceAllocator;
26use memoffset::offset_of;
27use more_asserts::assert_lt;
28use std::alloc::Layout;
29use std::cell::RefCell;
30use std::collections::HashMap;
31use std::convert::TryFrom;
32use std::fmt;
33use std::mem;
34use std::ptr::{self, NonNull};
35use std::slice;
36use std::sync::Arc;
37use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
38use wasmer_types::{
39    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
40    LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryError,
41    MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer, TagIndex,
42    VMOffsets,
43};
44
/// A WebAssembly instance.
///
/// The type is dynamically-sized. Indeed, the `vmctx` field can
/// contain various data. That's why the type has a C representation
/// to ensure that the `vmctx` field is last. See the documentation of
/// the `vmctx` field to learn more.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    /// Raw (not a borrow) because the store and instance have entangled
    /// lifetimes managed by the embedder; see `context()`/`context_mut()`.
    context: *mut StoreObjects,

    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data (locally-defined memories only).
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data (locally-defined tables only).
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data (locally-defined globals only).
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// WebAssembly tag data. Notably, this stores *all* tags, not just local ones.
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed. A missing entry is treated as an empty segment.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed. A missing entry is considered equivalent to an empty slice.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This
    /// field is last, and represents a dynamically-sized array that
    /// extends beyond the nominal end of the struct (similar to a
    /// flexible array member).
    vmctx: VMContext,
}
103
104impl fmt::Debug for Instance {
105    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
106        formatter.debug_struct("Instance").finish()
107    }
108}
109
110#[allow(clippy::cast_ptr_alignment)]
111impl Instance {
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    /// `offset` must lie within the vmctx region described by `self.offsets`,
    /// and the returned pointer must only be used as a `T` if a valid `T`
    /// actually lives at that offset.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }
121
    /// Return the `ModuleInfo` this instance was instantiated from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }

    /// Return a plain reference to the `ModuleInfo` (without the `Arc`).
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }

    /// Return the store objects of the context owning this instance.
    ///
    /// Dereferences the raw `context` pointer; the owning store must
    /// outlive this instance for this to be sound.
    pub(crate) fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    /// Mutable variant of [`Self::context`].
    pub(crate) fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }

    /// Offsets in the `vmctx` region.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
142
    /// Return a pointer to the `VMSharedSignatureIndex`s.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }

    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// Return the indexed `VMSharedTagIndex`.
    ///
    /// Note: `tags` stores *all* tags (see the struct field), so `index`
    /// is a module-wide `TagIndex`, not a local one.
    #[cfg_attr(target_os = "windows", allow(dead_code))]
    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.shared_tags_ptr().add(index) }
    }

    /// Return a pointer to the `VMSharedTagIndex`s.
    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }
203
    /// Return the indexed `VMTableDefinition` (by value).
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    /// Updates the value for a defined table to `VMTableDefinition`.
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Return a pointer to the indexed `VMTableDefinition` inside the vmctx.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // `tables_ptr` is derived from the vmctx and is never null, so the
        // `NonNull::new(...).unwrap()` cannot fail for in-range indices.
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the `VMTableDefinition`s.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
228
    #[allow(dead_code)]
    /// Get a locally defined or imported memory's `VMMemoryDefinition`.
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Return the indexed `VMMemoryDefinition` (by value).
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    /// Set the indexed memory to `VMMemoryDefinition`.
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Return a pointer to the indexed `VMMemoryDefinition` inside the vmctx.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // Derived from the vmctx, so non-null for in-range indices.
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the `VMMemoryDefinition`s.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
263
    /// Get a locally defined or imported memory as a `VMMemory` reference.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // Local memory: resolve our own store handle against the
            // raw context pointer.
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            // Imported memory: resolve through the import's store handle.
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Get a locally defined or imported memory, mutably.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Get a locally defined memory as mutable.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
303
    /// Return the indexed `VMGlobalDefinition` (cloned out).
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    /// Set the indexed global to `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Return a pointer to the indexed `VMGlobalDefinition`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // Unlike tables/memories, the vmctx stores a *pointer* per global
        // (note `globals_ptr`'s `*mut *mut` return type), so we load that
        // pointer here and assert it is non-null.
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the array of `VMGlobalDefinition` pointers.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
333
    /// Return a reference to the vmctx used by compiled wasm code.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    ///
    /// The `*const` → `*mut` cast is deliberate: generated code mutates
    /// the vmctx region through this pointer.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
343
    /// Invoke the WebAssembly start function of the instance, if one is present.
    ///
    /// Returns `Ok(())` immediately when the module declares no start
    /// function; traps raised while running it are propagated as `Err`.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: a locally-defined function lives in our own
        // `functions` table and runs with our vmctx; an imported one brings
        // its own body pointer and environment.
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        // Per the wasm spec a start function has type [] -> [], so an empty
        // values buffer suffices here.
        let mut values_vec = vec![];

        unsafe {
            // Even though we already know the type of the function we need to call, in certain
            // specific cases the trampoline prepares callee arguments for specific optimizations,
            // such as passing g0 and m0_base_ptr as parameters.
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec.as_mut_ptr(),
            )
        }
    }
394
    /// Return the offset from the vmctx pointer to its containing `Instance`.
    ///
    /// Because `Instance` is `#[repr(C)]` with `vmctx` as its last field,
    /// subtracting this offset from a vmctx pointer recovers the `Instance`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
400
401    /// Return the table index for the given `VMTableDefinition`.
402    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
403        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
404        let end: *const VMTableDefinition = table;
405        // TODO: Use `offset_from` once it stablizes.
406        let index = LocalTableIndex::new(
407            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
408        );
409        assert_lt!(index.index(), self.tables.len());
410        index
411    }
412
413    /// Return the memory index for the given `VMMemoryDefinition`.
414    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
415        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
416        let end: *const VMMemoryDefinition = memory;
417        // TODO: Use `offset_from` once it stablizes.
418        let index = LocalMemoryIndex::new(
419            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
420        );
421        assert_lt!(index.index(), self.memories.len());
422        index
423    }
424
425    /// Grow memory by the specified amount of pages.
426    ///
427    /// Returns `None` if memory can't be grown by the specified amount
428    /// of pages.
429    pub(crate) fn memory_grow<IntoPages>(
430        &mut self,
431        memory_index: LocalMemoryIndex,
432        delta: IntoPages,
433    ) -> Result<Pages, MemoryError>
434    where
435        IntoPages: Into<Pages>,
436    {
437        let mem = *self
438            .memories
439            .get(memory_index)
440            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
441        mem.get_mut(self.context_mut()).grow(delta.into())
442    }
443
444    /// Grow imported memory by the specified amount of pages.
445    ///
446    /// Returns `None` if memory can't be grown by the specified amount
447    /// of pages.
448    ///
449    /// # Safety
450    /// This and `imported_memory_size` are currently unsafe because they
451    /// dereference the memory import's pointers.
452    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
453        &mut self,
454        memory_index: MemoryIndex,
455        delta: IntoPages,
456    ) -> Result<Pages, MemoryError>
457    where
458        IntoPages: Into<Pages>,
459    {
460        let import = self.imported_memory(memory_index);
461        let mem = import.handle;
462        mem.get_mut(self.context_mut()).grow(delta.into())
463    }
464
465    /// Returns the number of allocated wasm pages.
466    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
467        let mem = *self
468            .memories
469            .get(memory_index)
470            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
471        mem.get(self.context()).size()
472    }
473
474    /// Returns the number of allocated wasm pages in an imported memory.
475    ///
476    /// # Safety
477    /// This and `imported_memory_grow` are currently unsafe because they
478    /// dereference the memory import's pointers.
479    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
480        let import = self.imported_memory(memory_index);
481        let mem = import.handle;
482        mem.get(self.context()).size()
483    }
484
485    /// Returns the number of elements in a given table.
486    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
487        let table = self
488            .tables
489            .get(table_index)
490            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
491        table.get(self.context()).size()
492    }
493
494    /// Returns the number of elements in a given imported table.
495    ///
496    /// # Safety
497    /// `table_index` must be a valid, imported table index.
498    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
499        let import = self.imported_table(table_index);
500        let table = import.handle;
501        table.get(self.context()).size()
502    }
503
504    /// Grow table by the specified amount of elements.
505    ///
506    /// Returns `None` if table can't be grown by the specified amount
507    /// of elements.
508    pub(crate) fn table_grow(
509        &mut self,
510        table_index: LocalTableIndex,
511        delta: u32,
512        init_value: TableElement,
513    ) -> Option<u32> {
514        let table = *self
515            .tables
516            .get(table_index)
517            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
518        table.get_mut(self.context_mut()).grow(delta, init_value)
519    }
520
521    /// Grow table by the specified amount of elements.
522    ///
523    /// # Safety
524    /// `table_index` must be a valid, imported table index.
525    pub(crate) unsafe fn imported_table_grow(
526        &mut self,
527        table_index: TableIndex,
528        delta: u32,
529        init_value: TableElement,
530    ) -> Option<u32> {
531        let import = self.imported_table(table_index);
532        let table = import.handle;
533        table.get_mut(self.context_mut()).grow(delta, init_value)
534    }
535
536    /// Get table element by index.
537    pub(crate) fn table_get(
538        &self,
539        table_index: LocalTableIndex,
540        index: u32,
541    ) -> Option<TableElement> {
542        let table = self
543            .tables
544            .get(table_index)
545            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
546        table.get(self.context()).get(index)
547    }
548
549    /// Returns the element at the given index.
550    ///
551    /// # Safety
552    /// `table_index` must be a valid, imported table index.
553    pub(crate) unsafe fn imported_table_get(
554        &self,
555        table_index: TableIndex,
556        index: u32,
557    ) -> Option<TableElement> {
558        let import = self.imported_table(table_index);
559        let table = import.handle;
560        table.get(self.context()).get(index)
561    }
562
563    /// Set table element by index.
564    pub(crate) fn table_set(
565        &mut self,
566        table_index: LocalTableIndex,
567        index: u32,
568        val: TableElement,
569    ) -> Result<(), Trap> {
570        let table = *self
571            .tables
572            .get(table_index)
573            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
574        table.get_mut(self.context_mut()).set(index, val)
575    }
576
577    /// Set table element by index for an imported table.
578    ///
579    /// # Safety
580    /// `table_index` must be a valid, imported table index.
581    pub(crate) unsafe fn imported_table_set(
582        &mut self,
583        table_index: TableIndex,
584        index: u32,
585        val: TableElement,
586    ) -> Result<(), Trap> {
587        let import = self.imported_table(table_index);
588        let table = import.handle;
589        table.get_mut(self.context_mut()).set(index, val)
590    }
591
592    /// Get a `VMFuncRef` for the given `FunctionIndex`.
593    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
594        if function_index == FunctionIndex::reserved_value() {
595            None
596        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
597            Some(VMFuncRef(NonNull::from(
598                &self.funcrefs[local_function_index],
599            )))
600        } else {
601            Some(VMFuncRef(self.imported_funcrefs[function_index]))
602        }
603    }
604
    /// The `table.init` operation: initializes a portion of a table with a
    /// passive element.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table_handle(table_index);
        // Raw deref of `self.context` (rather than `self.context_mut()`)
        // keeps this borrow independent of `self`, so `passive_elements`
        // can still be borrowed below.
        let table = unsafe { table.get_mut(&mut *self.context) };
        // A dropped (or never-present) passive segment behaves as empty.
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Overflow-safe bounds checks: `checked_add` rejects u32 overflow
        // before the sums are compared against segment/table sizes.
        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
643
644    /// The `table.fill` operation: fills a portion of a table with a given value.
645    ///
646    /// # Errors
647    ///
648    /// Returns a `Trap` error when the range within the table is out of bounds
649    pub(crate) fn table_fill(
650        &mut self,
651        table_index: TableIndex,
652        start_index: u32,
653        item: TableElement,
654        len: u32,
655    ) -> Result<(), Trap> {
656        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init
657
658        let table = self.get_table(table_index);
659        let table_size = table.size() as usize;
660
661        if start_index
662            .checked_add(len)
663            .is_none_or(|n| n as usize > table_size)
664        {
665            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
666        }
667
668        for i in start_index..(start_index + len) {
669            table
670                .set(i, item.clone())
671                .expect("should never panic because we already did the bounds check above");
672        }
673
674        Ok(())
675    }
676
677    /// Drop an element.
678    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
679        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
680
681        let mut passive_elements = self.passive_elements.borrow_mut();
682        passive_elements.remove(&elem_index);
683        // Note that we don't check that we actually removed an element because
684        // dropping a non-passive element is a no-op (not a trap).
685    }
686
    /// Do a `memory.copy` for a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy

        // Bounds checking happens inside `memory_copy` (hence its `Result`).
        let memory = self.memory(memory_index);
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// Perform a `memory.copy` on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(memory, dst, src, len) }
    }
720
    /// Perform the `memory.fill` operation on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // Bounds checking happens inside `memory_fill` (hence its `Result`).
        let memory = self.memory(memory_index);
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// Perform the `memory.fill` operation on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(memory, dst, val, len) }
    }
755
    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_vmmemory(memory_index);
        // A dropped (or never-present) passive data segment behaves as empty.
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        // Overflow-safe bounds checks against the segment length and the
        // memory's current byte length.
        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        // SAFETY: the range [dst, dst + len) was checked against
        // `current_length` above.
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
788
789    /// Drop the given data segment, truncating its length to zero.
790    pub(crate) fn data_drop(&self, data_index: DataIndex) {
791        let mut passive_data = self.passive_data.borrow_mut();
792        passive_data.remove(&data_index);
793    }
794
795    /// Get a table by index regardless of whether it is locally-defined or an
796    /// imported, foreign table.
797    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
798        if let Some(local_table_index) = self.module.local_table_index(table_index) {
799            self.get_local_table(local_table_index)
800        } else {
801            self.get_foreign_table(table_index)
802        }
803    }
804
805    /// Get a locally-defined table.
806    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
807        let table = self.tables[index];
808        table.get_mut(self.context_mut())
809    }
810
811    /// Get an imported, foreign table.
812    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
813        let import = self.imported_table(index);
814        let table = import.handle;
815        table.get_mut(self.context_mut())
816    }
817
818    /// Get a table handle by index regardless of whether it is locally-defined
819    /// or an imported, foreign table.
820    pub(crate) fn get_table_handle(
821        &mut self,
822        table_index: TableIndex,
823    ) -> InternalStoreHandle<VMTable> {
824        if let Some(local_table_index) = self.module.local_table_index(table_index) {
825            self.tables[local_table_index]
826        } else {
827            self.imported_table(table_index).handle
828        }
829    }
830
831    /// # Safety
832    /// See [`LinearMemory::do_wait`].
833    unsafe fn memory_wait(
834        memory: &mut VMMemory,
835        dst: u32,
836        expected: ExpectedValue,
837        timeout: i64,
838    ) -> Result<u32, Trap> {
839        let timeout = if timeout < 0 {
840            None
841        } else {
842            Some(std::time::Duration::from_nanos(timeout as u64))
843        };
844        match unsafe { memory.do_wait(dst, expected, timeout) } {
845            Ok(count) => Ok(count),
846            Err(_err) => {
847                // ret is None if there is more than 2^32 waiter in queue or some other error
848                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
849            }
850        }
851    }
852
853    /// Perform an Atomic.Wait32
854    pub(crate) fn local_memory_wait32(
855        &mut self,
856        memory_index: LocalMemoryIndex,
857        dst: u32,
858        val: u32,
859        timeout: i64,
860    ) -> Result<u32, Trap> {
861        let memory = self.memory(memory_index);
862        //if ! memory.shared {
863        // We should trap according to spec, but official test rely on not trapping...
864        //}
865
866        // Do a fast-path check of the expected value, and also ensure proper alignment
867        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
868
869        if let Ok(mut ret) = ret {
870            if ret == 0 {
871                let memory = self.get_local_vmmemory_mut(memory_index);
872                // Safety: we have already checked alignment and bounds in memory32_atomic_check32
873                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
874            }
875            Ok(ret)
876        } else {
877            ret
878        }
879    }
880
881    /// Perform an Atomic.Wait32
882    pub(crate) fn imported_memory_wait32(
883        &mut self,
884        memory_index: MemoryIndex,
885        dst: u32,
886        val: u32,
887        timeout: i64,
888    ) -> Result<u32, Trap> {
889        let import = self.imported_memory(memory_index);
890        let memory = unsafe { import.definition.as_ref() };
891        //if ! memory.shared {
892        // We should trap according to spec, but official test rely on not trapping...
893        //}
894
895        // Do a fast-path check of the expected value, and also ensure proper alignment
896        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
897
898        if let Ok(mut ret) = ret {
899            if ret == 0 {
900                let memory = self.get_vmmemory_mut(memory_index);
901                // Safety: we have already checked alignment and bounds in memory32_atomic_check32
902                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
903            }
904            Ok(ret)
905        } else {
906            ret
907        }
908    }
909
910    /// Perform an Atomic.Wait64
911    pub(crate) fn local_memory_wait64(
912        &mut self,
913        memory_index: LocalMemoryIndex,
914        dst: u32,
915        val: u64,
916        timeout: i64,
917    ) -> Result<u32, Trap> {
918        let memory = self.memory(memory_index);
919        //if ! memory.shared {
920        // We should trap according to spec, but official test rely on not trapping...
921        //}
922
923        // Do a fast-path check of the expected value, and also ensure proper alignment
924        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
925
926        if let Ok(mut ret) = ret {
927            if ret == 0 {
928                let memory = self.get_local_vmmemory_mut(memory_index);
929                // Safety: we have already checked alignment and bounds in memory32_atomic_check64
930                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
931            }
932            Ok(ret)
933        } else {
934            ret
935        }
936    }
937
938    /// Perform an Atomic.Wait64
939    pub(crate) fn imported_memory_wait64(
940        &mut self,
941        memory_index: MemoryIndex,
942        dst: u32,
943        val: u64,
944        timeout: i64,
945    ) -> Result<u32, Trap> {
946        let import = self.imported_memory(memory_index);
947        let memory = unsafe { import.definition.as_ref() };
948        //if ! memory.shared {
949        // We should trap according to spec, but official test rely on not trapping...
950        //}
951
952        // Do a fast-path check of the expected value, and also ensure proper alignment
953        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
954
955        if let Ok(mut ret) = ret {
956            if ret == 0 {
957                let memory = self.get_vmmemory_mut(memory_index);
958                // Safety: we have already checked alignment and bounds in memory32_atomic_check64
959                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
960            }
961            Ok(ret)
962        } else {
963            ret
964        }
965    }
966
967    /// Perform an Atomic.Notify
968    pub(crate) fn local_memory_notify(
969        &mut self,
970        memory_index: LocalMemoryIndex,
971        dst: u32,
972        count: u32,
973    ) -> Result<u32, Trap> {
974        let memory = self.get_local_vmmemory_mut(memory_index);
975        Ok(memory.do_notify(dst, count))
976    }
977
978    /// Perform an Atomic.Notify
979    pub(crate) fn imported_memory_notify(
980        &mut self,
981        memory_index: MemoryIndex,
982        dst: u32,
983        count: u32,
984    ) -> Result<u32, Trap> {
985        let memory = self.get_vmmemory_mut(memory_index);
986        Ok(memory.do_notify(dst, count))
987    }
988}
989
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public facade of the private `Instance`,
/// providing useful higher-level API.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary).
    ///
    /// The layout depends on the module's contents, so it is recorded
    /// here for the matching `dealloc` call in `Drop::drop`.
    instance_layout: Layout,

    /// The `Instance` itself.
    ///
    /// `Instance` must not be dropped manually by Rust, because it's
    /// allocated manually with `alloc` and a specific layout (Rust
    /// would be able to drop `Instance` itself but it will imply a
    /// memory leak because of `alloc`).
    ///
    /// No one in the code has a copy of the `Instance`'s
    /// pointer. `Self` is the only one.
    instance: NonNull<Instance>,
}
1010
/// VMInstance are created with an InstanceAllocator
/// and it will "consume" the memory
/// So the Drop here actually frees it (else it would be leaked)
impl Drop for VMInstance {
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // Need to drop all the actual Instance members
            instance_ptr.drop_in_place();
            // And then free the memory allocated for the Instance itself,
            // using the same layout it was originally allocated with.
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1026
impl VMInstance {
    /// Create a new `VMInstance` pointing at freshly allocated instance data.
    ///
    /// # Safety
    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been really
    /// audited for safety that much. As a result the unsafety here on this
    /// method is a low-overhead way of saying “this is an extremely unsafe type
    /// to work with”.
    ///
    /// Extreme care must be taken when working with `VMInstance` and it's
    /// recommended to have relatively intimate knowledge of how it works
    /// internally if you'd like to do so. If possible it's recommended to use
    /// the `wasmer` crate API rather than this type since that is vetted for
    /// safety.
    ///
    /// However the following must be taken care of before calling this function:
    /// - The memory at `instance.tables_ptr()` must be initialized with data for
    ///   all the local tables.
    /// - The memory at `instance.memories_ptr()` must be initialized with data for
    ///   all the local memories.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        unsafe {
            // Pre-compute the vmctx-resident views: shared tag indices, and raw
            // pointers to the global definitions held in the store.
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            let vmctx_globals = finished_globals
                .values()
                .map(|m: &InternalStoreHandle<VMGlobal>| m.get(context).vmglobal())
                .collect::<PrimaryMap<LocalGlobalIndex, NonNull<VMGlobalDefinition>>>()
                .into_boxed_slice();
            // Passive data segments are stored behind `Arc` so `memory.init`
            // can read them cheaply and `data.drop` can remove them later.
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // use dummy value to create an instance so we can get the vmctx pointer
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                // Create the `Instance`. The unique, the One.
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                let mut instance_handle = allocator.into_vminstance(instance);

                // Set the funcrefs after we've built the instance: building them
                // needs the final vmctx pointer, which only exists once the
                // instance lives at its allocated address.
                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                }

                instance_handle
            };
            let instance = handle.instance();

            // Copy the per-module vectors into their fixed slots inside the
            // vmctx region. NOTE(review): soundness relies on the allocator
            // having sized the vmctx for exactly these element counts (see
            // `VMOffsets`) — confirm when touching the allocator.
            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                vmshared_signatures.values().as_slice().as_ptr(),
                instance.signature_ids_ptr(),
                vmshared_signatures.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            // these should already be set, add asserts here? for:
            // - instance.tables_ptr() as *mut VMTableDefinition
            // - instance.memories_ptr() as *mut VMMemoryDefinition
            ptr::copy(
                vmctx_globals.values().as_slice().as_ptr(),
                instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
                vmctx_globals.len(),
            );
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            // Perform infallible initialization in this constructor, while fallible
            // initialization is deferred to the `initialize` method.
            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }

    /// Return a reference to the contained `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }

    /// Finishes the instantiation process started by `Instance::new`.
    ///
    /// Applies the table and data-segment initializers, then runs the
    /// module's start function (if any).
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initializers.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The WebAssembly spec specifies that the start function is
        // invoked automatically at instantiation time.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` to get offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer
    /// arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference-counting pointer to a module.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to a module.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Lookup an export with the given name.
    ///
    /// Returns `None` if the module has no export with that name.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Lookup an export with the given export declaration.
    ///
    /// Note: for a locally-defined exported function, this allocates a new
    /// `VMFunction` handle in the store on every call.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // A VMFunction is lazily created only for functions that are
                    // exported.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        // Any function received is already static at this point as:
                        // 1. All locally defined functions in the Wasm have a static signature.
                        // 2. All the imported functions are already static (because
                        //    they point to the trampolines rather than the dynamic addresses).
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns a `MemoryError` if memory can't be grown by the specified
    /// amount of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow table in this instance by the specified number of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified number
    /// of elements.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get table element reference.
    ///
    /// Returns `None` if index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set table element reference.
    ///
    /// Returns an error if the index is out of bounds
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this module.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1384
1385/// Compute the offset for a memory data initializer.
1386fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1387    let mut start = init.location.offset;
1388
1389    if let Some(base) = init.location.base {
1390        let val = unsafe {
1391            if let Some(def_index) = instance.module.local_global_index(base) {
1392                instance.global(def_index).val.u32
1393            } else {
1394                instance.imported_global(base).definition.as_ref().val.u32
1395            }
1396        };
1397        start += usize::try_from(val).unwrap();
1398    }
1399
1400    start
1401}
1402
1403#[allow(clippy::mut_from_ref)]
1404#[allow(dead_code)]
1405/// Return a byte-slice view of a memory's data.
1406unsafe fn get_memory_slice<'instance>(
1407    init: &DataInitializer<'_>,
1408    instance: &'instance Instance,
1409) -> &'instance mut [u8] {
1410    unsafe {
1411        let memory = if let Some(local_memory_index) = instance
1412            .module
1413            .local_memory_index(init.location.memory_index)
1414        {
1415            instance.memory(local_memory_index)
1416        } else {
1417            let import = instance.imported_memory(init.location.memory_index);
1418            *import.definition.as_ref()
1419        };
1420        slice::from_raw_parts_mut(memory.base, memory.current_length)
1421    }
1422}
1423
1424/// Compute the offset for a table element initializer.
1425fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1426    let mut start = init.offset;
1427
1428    if let Some(base) = init.base {
1429        let val = unsafe {
1430            if let Some(def_index) = instance.module.local_global_index(base) {
1431                instance.global(def_index).val.u32
1432            } else {
1433                instance.imported_global(base).definition.as_ref().val.u32
1434            }
1435        };
1436        start += usize::try_from(val).unwrap();
1437    }
1438
1439    start
1440}
1441
1442/// Initialize the table memory from the provided initializers.
1443fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
1444    let module = Arc::clone(&instance.module);
1445    for init in &module.table_initializers {
1446        let start = get_table_init_start(init, instance);
1447        let table = instance.get_table_handle(init.table_index);
1448        let table = unsafe { table.get_mut(&mut *instance.context) };
1449
1450        if start
1451            .checked_add(init.elements.len())
1452            .is_none_or(|end| end > table.size() as usize)
1453        {
1454            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
1455        }
1456
1457        if let wasmer_types::Type::FuncRef = table.ty().ty {
1458            for (i, func_idx) in init.elements.iter().enumerate() {
1459                let anyfunc = instance.func_ref(*func_idx);
1460                table
1461                    .set(
1462                        u32::try_from(start + i).unwrap(),
1463                        TableElement::FuncRef(anyfunc),
1464                    )
1465                    .unwrap();
1466            }
1467        } else {
1468            for i in 0..init.elements.len() {
1469                table
1470                    .set(
1471                        u32::try_from(start + i).unwrap(),
1472                        TableElement::ExternRef(None),
1473                    )
1474                    .unwrap();
1475            }
1476        }
1477    }
1478
1479    Ok(())
1480}
1481
1482/// Initialize the `Instance::passive_elements` map by resolving the
1483/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1484/// this instance.
1485fn initialize_passive_elements(instance: &Instance) {
1486    let mut passive_elements = instance.passive_elements.borrow_mut();
1487    debug_assert!(
1488        passive_elements.is_empty(),
1489        "should only be called once, at initialization time"
1490    );
1491
1492    passive_elements.extend(instance.module.passive_elements.iter().filter_map(
1493        |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
1494            if segments.is_empty() {
1495                None
1496            } else {
1497                Some((
1498                    idx,
1499                    segments
1500                        .iter()
1501                        .map(|s| instance.func_ref(*s))
1502                        .collect::<Box<[Option<VMFuncRef>]>>(),
1503                ))
1504            }
1505        },
1506    ));
1507}
1508
1509/// Initialize the table memory from the provided initializers.
1510fn initialize_memories(
1511    instance: &mut Instance,
1512    data_initializers: &[DataInitializer<'_>],
1513) -> Result<(), Trap> {
1514    for init in data_initializers {
1515        let memory = instance.get_vmmemory(init.location.memory_index);
1516
1517        let start = get_memory_init_start(init, instance);
1518        unsafe {
1519            let current_length = memory.vmmemory().as_ref().current_length;
1520            if start
1521                .checked_add(init.data.len())
1522                .is_none_or(|end| end > current_length)
1523            {
1524                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1525            }
1526            memory.initialize_with_data(start, init.data)?;
1527        }
1528    }
1529
1530    Ok(())
1531}
1532
/// Write the module's global initializers into the instance's global storage.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            // Raw pointer to this global's definition inside the vmctx region.
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                // Constant initializers write directly into the value union.
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                // `global.get` initializers copy the whole definition of the
                // referenced (local or imported) global.
                GlobalInit::GetGlobal(x) => {
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // Null references are represented as a zero funcref.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
            }
        }
    }
}
1562
1563/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1564/// future funcref operations are just looking up this data.
1565fn build_funcrefs(
1566    module_info: &ModuleInfo,
1567    ctx: &StoreObjects,
1568    imports: &Imports,
1569    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1570    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1571    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1572    vmctx_ptr: *mut VMContext,
1573) -> (
1574    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1575    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1576) {
1577    let mut func_refs =
1578        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1579    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1580
1581    // do imported functions
1582    for import in imports.functions.values() {
1583        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1584    }
1585
1586    // do local functions
1587    for (local_index, func_ptr) in finished_functions.iter() {
1588        let index = module_info.func_index(local_index);
1589        let sig_index = module_info.functions[index];
1590        let type_index = vmshared_signatures[sig_index];
1591        let call_trampoline = function_call_trampolines[sig_index];
1592        let anyfunc = VMCallerCheckedAnyfunc {
1593            func_ptr: func_ptr.0,
1594            type_index,
1595            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1596            call_trampoline,
1597        };
1598        func_refs.push(anyfunc);
1599    }
1600    (
1601        func_refs.into_boxed_slice(),
1602        imported_func_refs.into_boxed_slice(),
1603    )
1604}