wasmer_vm/instance/
mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `VMInstance` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated.
8
9mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
18    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
19    VMMemoryImport, VMSharedSignatureIndex, VMSharedTagIndex, VMTableDefinition, VMTableImport,
20    VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
23use crate::{LinearMemory, NotifyLocation};
24use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
25pub use allocator::InstanceAllocator;
26use memoffset::offset_of;
27use more_asserts::assert_lt;
28use std::alloc::Layout;
29use std::cell::RefCell;
30use std::collections::HashMap;
31use std::convert::TryFrom;
32use std::fmt;
33use std::mem;
34use std::ptr::{self, NonNull};
35use std::slice;
36use std::sync::Arc;
37use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
38use wasmer_types::{
39    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
40    LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryError,
41    MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer, TagIndex,
42    VMOffsets,
43};
44
/// A WebAssembly instance.
///
/// The type is dynamically-sized: the trailing `vmctx` field is the
/// header of a variable-length region whose layout is described by the
/// `offsets` field. The C representation guarantees that `vmctx` stays
/// the last field so that region can extend past the nominal end of
/// the struct. See the documentation of the `vmctx` field to learn more.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    /// Raw (non-owning) pointer: the store is expected to outlive the
    /// instance — TODO confirm this invariant with the allocator.
    context: *mut StoreObjects,

    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data (handles into the store).
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data (handles into the store).
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data (handles into the store).
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// WebAssembly tag data. Notably, this stores *all* tags, not just local ones.
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory,
    /// one per signature.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed. A missing entry is considered equivalent to an empty slice.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This
    /// field is last, and represents a dynamically-sized array that
    /// extends beyond the nominal end of the struct (similar to a
    /// flexible array member).
    vmctx: VMContext,
}
103
104impl fmt::Debug for Instance {
105    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
106        formatter.debug_struct("Instance").finish()
107    }
108}
109
110#[allow(clippy::cast_ptr_alignment)]
111impl Instance {
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    /// `offset` must lie within the `vmctx` region described by
    /// `self.offsets`, and a valid `T` must actually live at that offset;
    /// no bounds or alignment checking is performed here.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            // Byte-wise pointer arithmetic from the start of the vmctx
            // region, then cast to the requested element type.
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }
121
122    fn module(&self) -> &Arc<ModuleInfo> {
123        &self.module
124    }
125
126    pub(crate) fn module_ref(&self) -> &ModuleInfo {
127        &self.module
128    }
129
    /// Shared view of the store objects owning this instance.
    ///
    /// Dereferences the raw `context` pointer; relies on the store
    /// outliving the instance (see the `context` field).
    fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    /// Exclusive view of the store objects owning this instance.
    fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }
137
    /// Offsets in the `vmctx` region.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }

    /// Return a pointer to the `VMSharedSignatureIndex`s.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }

    /// Return the indexed `VMFunctionImport`.
    ///
    /// The index is not bounds-checked here; callers must pass a valid
    /// imported-function index.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// Return the indexed `VMTableImport`.
    ///
    /// The index is not bounds-checked here; callers must pass a valid
    /// imported-table index.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// Return the indexed `VMMemoryImport`.
    ///
    /// The index is not bounds-checked here; callers must pass a valid
    /// imported-memory index.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// Return the indexed `VMGlobalImport`.
    ///
    /// The index is not bounds-checked here; callers must pass a valid
    /// imported-global index.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// Return the indexed `VMSharedTagIndex`.
    #[cfg_attr(target_os = "windows", allow(dead_code))]
    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.shared_tags_ptr().add(index) }
    }

    /// Return a pointer to the `VMSharedTagIndex`s.
    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }
203
    /// Return a copy of the indexed local `VMTableDefinition`.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    /// Overwrite the definition of a locally defined table in the vmctx.
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Return a non-null pointer to the indexed `VMTableDefinition`.
    /// The index is not bounds-checked beyond the pointer arithmetic.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the start of the `VMTableDefinition`s array.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
228
    #[allow(dead_code)]
    /// Get a locally defined or imported memory's definition, by copy.
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            // Imported memory: read the definition through the import's
            // pointer into the exporting instance.
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Return a copy of the indexed local `VMMemoryDefinition`.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    /// Overwrite the definition of a locally defined memory in the vmctx.
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Return a non-null pointer to the indexed `VMMemoryDefinition`.
    /// The index is not bounds-checked beyond the pointer arithmetic.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the start of the `VMMemoryDefinition`s array.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
263
    /// Get a locally defined or imported memory, resolved through the store.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                // Panics if the raw `context` pointer is null.
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Get a locally defined or imported memory, mutably.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Get a locally defined memory as mutable.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
303
    /// Return a copy of the indexed local `VMGlobalDefinition`.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    /// Set the indexed global to `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Return a non-null pointer to the indexed `VMGlobalDefinition`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // Unlike tables/memories, globals are stored *behind a pointer* in
        // the vmctx (`globals_ptr` yields `*mut *mut`), so we dereference
        // the slot to obtain the definition's address.
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Return a pointer to the array of `*mut VMGlobalDefinition`s.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
333
334    /// Return a reference to the vmctx used by compiled wasm code.
335    fn vmctx(&self) -> &VMContext {
336        &self.vmctx
337    }
338
339    /// Return a raw pointer to the vmctx used by compiled wasm code.
340    fn vmctx_ptr(&self) -> *mut VMContext {
341        self.vmctx() as *const VMContext as *mut VMContext
342    }
343
    /// Invoke the WebAssembly start function of the instance, if one is present.
    ///
    /// Returns `Ok(())` immediately when the module declares no start
    /// function; otherwise any trap raised during the call is surfaced
    /// as `Err`.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: either a function defined in this module
        // (using our own vmctx) or an imported one (which carries the
        // exporting instance's function context).
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        // Per the wasm spec the start function takes no parameters and
        // returns nothing, so the value buffer stays empty.
        let mut values_vec = vec![];

        unsafe {
            // Even though we already know the type of the function we need to call, in certain
            // specific cases trampolines prepare callee arguments for specific optimizations, such
            // as passing g0 and m0_base_ptr as parameters.
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec.as_mut_ptr(),
            )
        }
    }
394
    /// Return the offset from the vmctx pointer to its containing `Instance`.
    ///
    /// Since `vmctx` is the last field of `Instance`, compiled code holding
    /// a `*mut VMContext` can subtract this offset to reach the `Instance`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
400
401    /// Return the table index for the given `VMTableDefinition`.
402    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
403        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
404        let end: *const VMTableDefinition = table;
405        // TODO: Use `offset_from` once it stablizes.
406        let index = LocalTableIndex::new(
407            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
408        );
409        assert_lt!(index.index(), self.tables.len());
410        index
411    }
412
413    /// Return the memory index for the given `VMMemoryDefinition`.
414    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
415        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
416        let end: *const VMMemoryDefinition = memory;
417        // TODO: Use `offset_from` once it stablizes.
418        let index = LocalMemoryIndex::new(
419            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
420        );
421        assert_lt!(index.index(), self.memories.len());
422        index
423    }
424
    /// Grow memory by the specified amount of pages.
    ///
    /// Returns an error if memory can't be grown by the specified amount
    /// of pages.
    ///
    /// # Panics
    /// Panics if `memory_index` does not refer to a local memory.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Grow imported memory by the specified amount of pages.
    ///
    /// Returns an error if memory can't be grown by the specified amount
    /// of pages.
    ///
    /// # Safety
    /// This and `imported_memory_size` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Returns the number of allocated wasm pages.
    ///
    /// # Panics
    /// Panics if `memory_index` does not refer to a local memory.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }

    /// Returns the number of allocated wasm pages in an imported memory.
    ///
    /// # Safety
    /// This and `imported_memory_grow` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }

    /// Returns the number of elements in a given table.
    ///
    /// # Panics
    /// Panics if `table_index` does not refer to a local table.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }

    /// Returns the number of elements in a given imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
503
    /// Grow table by the specified amount of elements.
    ///
    /// Returns `None` if table can't be grown by the specified amount
    /// of elements.
    ///
    /// # Panics
    /// Panics if `table_index` does not refer to a local table.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Grow an imported table by the specified amount of elements.
    ///
    /// Returns `None` if table can't be grown by the specified amount
    /// of elements.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Get table element by index; `None` when `index` is out of bounds.
    ///
    /// # Panics
    /// Panics if `table_index` does not refer to a local table.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }

    /// Returns the element at the given index of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }

    /// Set table element by index.
    ///
    /// # Errors
    /// Propagates the table's own trap when `index` is invalid.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// Set table element by index for an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
591
592    /// Get a `VMFuncRef` for the given `FunctionIndex`.
593    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
594        if function_index == FunctionIndex::reserved_value() {
595            None
596        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
597            Some(VMFuncRef(NonNull::from(
598                &self.funcrefs[local_function_index],
599            )))
600        } else {
601            Some(VMFuncRef(self.imported_funcrefs[function_index]))
602        }
603    }
604
    /// The `table.init` operation: initializes a portion of a table with a
    /// passive element.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table_handle(table_index);
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        // A dropped (or never-passive) segment behaves as an empty slice.
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Bounds check both ranges up front; `checked_add` also rejects
        // u32 overflow of `src + len` / `dst + len`.
        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
643
644    /// The `table.fill` operation: fills a portion of a table with a given value.
645    ///
646    /// # Errors
647    ///
648    /// Returns a `Trap` error when the range within the table is out of bounds
649    pub(crate) fn table_fill(
650        &mut self,
651        table_index: TableIndex,
652        start_index: u32,
653        item: TableElement,
654        len: u32,
655    ) -> Result<(), Trap> {
656        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init
657
658        let table = self.get_table(table_index);
659        let table_size = table.size() as usize;
660
661        if start_index
662            .checked_add(len)
663            .is_none_or(|n| n as usize > table_size)
664        {
665            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
666        }
667
668        for i in start_index..(start_index + len) {
669            table
670                .set(i, item.clone())
671                .expect("should never panic because we already did the bounds check above");
672        }
673
674        Ok(())
675    }
676
677    /// Drop an element.
678    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
679        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
680
681        let mut passive_elements = self.passive_elements.borrow_mut();
682        passive_elements.remove(&elem_index);
683        // Note that we don't check that we actually removed an element because
684        // dropping a non-passive element is a no-op (not a trap).
685    }
686
    /// Do a `memory.copy` for a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy

        let memory = self.memory(memory_index);
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// Perform a `memory.copy` on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(memory, dst, src, len) }
    }
720
    /// Perform the `memory.fill` operation on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// Perform the `memory.fill` operation on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(memory, dst, val, len) }
    }
755
    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        // A dropped (or never-passive) segment behaves as an empty slice.
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Bounds check both ranges up front; `checked_add` also rejects
        // u32 overflow of `src + len` / `dst + len`.
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
788
789    /// Drop the given data segment, truncating its length to zero.
790    pub(crate) fn data_drop(&self, data_index: DataIndex) {
791        let mut passive_data = self.passive_data.borrow_mut();
792        passive_data.remove(&data_index);
793    }
794
795    /// Get a table by index regardless of whether it is locally-defined or an
796    /// imported, foreign table.
797    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
798        if let Some(local_table_index) = self.module.local_table_index(table_index) {
799            self.get_local_table(local_table_index)
800        } else {
801            self.get_foreign_table(table_index)
802        }
803    }
804
    /// Get a locally-defined table, resolved through the store.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }

    /// Get an imported, foreign table, resolved through the store.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        let table = import.handle;
        table.get_mut(self.context_mut())
    }
817
818    /// Get a table handle by index regardless of whether it is locally-defined
819    /// or an imported, foreign table.
820    pub(crate) fn get_table_handle(
821        &mut self,
822        table_index: TableIndex,
823    ) -> InternalStoreHandle<VMTable> {
824        if let Some(local_table_index) = self.module.local_table_index(table_index) {
825            self.tables[local_table_index]
826        } else {
827            self.imported_table(table_index).handle
828        }
829    }
830
831    fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
832        let location = NotifyLocation { address: dst };
833        let timeout = if timeout < 0 {
834            None
835        } else {
836            Some(std::time::Duration::from_nanos(timeout as u64))
837        };
838        match memory.do_wait(location, timeout) {
839            Ok(count) => Ok(count),
840            Err(_err) => {
841                // ret is None if there is more than 2^32 waiter in queue or some other error
842                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
843            }
844        }
845    }
846
847    /// Perform an Atomic.Wait32
848    pub(crate) fn local_memory_wait32(
849        &mut self,
850        memory_index: LocalMemoryIndex,
851        dst: u32,
852        val: u32,
853        timeout: i64,
854    ) -> Result<u32, Trap> {
855        let memory = self.memory(memory_index);
856        //if ! memory.shared {
857        // We should trap according to spec, but official test rely on not trapping...
858        //}
859
860        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
861
862        if let Ok(mut ret) = ret {
863            if ret == 0 {
864                let memory = self.get_local_vmmemory_mut(memory_index);
865                ret = Self::memory_wait(memory, dst, timeout)?;
866            }
867            Ok(ret)
868        } else {
869            ret
870        }
871    }
872
873    /// Perform an Atomic.Wait32
874    pub(crate) fn imported_memory_wait32(
875        &mut self,
876        memory_index: MemoryIndex,
877        dst: u32,
878        val: u32,
879        timeout: i64,
880    ) -> Result<u32, Trap> {
881        let import = self.imported_memory(memory_index);
882        let memory = unsafe { import.definition.as_ref() };
883        //if ! memory.shared {
884        // We should trap according to spec, but official test rely on not trapping...
885        //}
886
887        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
888        if let Ok(mut ret) = ret {
889            if ret == 0 {
890                let memory = self.get_vmmemory_mut(memory_index);
891                ret = Self::memory_wait(memory, dst, timeout)?;
892            }
893            Ok(ret)
894        } else {
895            ret
896        }
897    }
898
899    /// Perform an Atomic.Wait64
900    pub(crate) fn local_memory_wait64(
901        &mut self,
902        memory_index: LocalMemoryIndex,
903        dst: u32,
904        val: u64,
905        timeout: i64,
906    ) -> Result<u32, Trap> {
907        let memory = self.memory(memory_index);
908        //if ! memory.shared {
909        // We should trap according to spec, but official test rely on not trapping...
910        //}
911
912        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
913
914        if let Ok(mut ret) = ret {
915            if ret == 0 {
916                let memory = self.get_local_vmmemory_mut(memory_index);
917                ret = Self::memory_wait(memory, dst, timeout)?;
918            }
919            Ok(ret)
920        } else {
921            ret
922        }
923    }
924
925    /// Perform an Atomic.Wait64
926    pub(crate) fn imported_memory_wait64(
927        &mut self,
928        memory_index: MemoryIndex,
929        dst: u32,
930        val: u64,
931        timeout: i64,
932    ) -> Result<u32, Trap> {
933        let import = self.imported_memory(memory_index);
934        let memory = unsafe { import.definition.as_ref() };
935        //if ! memory.shared {
936        // We should trap according to spec, but official test rely on not trapping...
937        //}
938
939        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
940
941        if let Ok(mut ret) = ret {
942            if ret == 0 {
943                let memory = self.get_vmmemory_mut(memory_index);
944                ret = Self::memory_wait(memory, dst, timeout)?;
945            }
946            Ok(ret)
947        } else {
948            ret
949        }
950    }
951
952    /// Perform an Atomic.Notify
953    pub(crate) fn local_memory_notify(
954        &mut self,
955        memory_index: LocalMemoryIndex,
956        dst: u32,
957        count: u32,
958    ) -> Result<u32, Trap> {
959        let memory = self.get_local_vmmemory_mut(memory_index);
960        // fetch the notifier
961        let location = NotifyLocation { address: dst };
962        Ok(memory.do_notify(location, count))
963    }
964
965    /// Perform an Atomic.Notify
966    pub(crate) fn imported_memory_notify(
967        &mut self,
968        memory_index: MemoryIndex,
969        dst: u32,
970        count: u32,
971    ) -> Result<u32, Trap> {
972        let memory = self.get_vmmemory_mut(memory_index);
973        // fetch the notifier
974        let location = NotifyLocation { address: dst };
975        Ok(memory.do_notify(location, count))
976    }
977}
978
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public facade of the private `Instance`,
/// providing useful higher-level API.
///
/// Dropping a `VMInstance` runs the `Instance`'s destructors and then frees
/// the manual allocation (see the `Drop` impl below).
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary).
    ///
    /// Recorded at allocation time so `Drop` can `dealloc` with the exact
    /// same layout, as required by the allocator contract.
    instance_layout: Layout,

    /// The `Instance` itself.
    ///
    /// `Instance` must not be dropped manually by Rust, because it's
    /// allocated manually with `alloc` and a specific layout (Rust
    /// would be able to drop `Instance` itself but it will imply a
    /// memory leak because of `alloc`).
    ///
    /// No one in the code has a copy of the `Instance`'s
    /// pointer. `Self` is the only one.
    instance: NonNull<Instance>,
}
999
1000/// VMInstance are created with an InstanceAllocator
1001/// and it will "consume" the memory
1002/// So the Drop here actualy free it (else it would be leaked)
1003impl Drop for VMInstance {
1004    fn drop(&mut self) {
1005        let instance_ptr = self.instance.as_ptr();
1006
1007        unsafe {
1008            // Need to drop all the actual Instance members
1009            instance_ptr.drop_in_place();
1010            // And then free the memory allocated for the Instance itself
1011            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
1012        }
1013    }
1014}
1015
impl VMInstance {
    /// Create a new `VMInstance` pointing at freshly allocated instance data.
    ///
    /// # Safety
    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been really
    /// audited for safety that much. As a result the unsafety here on this
    /// method is a low-overhead way of saying “this is an extremely unsafe type
    /// to work with”.
    ///
    /// Extreme care must be taken when working with `VMInstance` and it's
    /// recommended to have relatively intimate knowledge of how it works
    /// internally if you'd like to do so. If possible it's recommended to use
    /// the `wasmer` crate API rather than this type since that is vetted for
    /// safety.
    ///
    /// However the following must be taken care of before calling this function:
    /// - The memory at `instance.tables_ptr()` must be initialized with data for
    ///   all the local tables.
    /// - The memory at `instance.memories_ptr()` must be initialized with data for
    ///   all the local memories.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        unsafe {
            // Pre-compute the per-entity data that will be copied into the
            // vmctx region once the `Instance` has been allocated.
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            let vmctx_globals = finished_globals
                .values()
                .map(|m: &InternalStoreHandle<VMGlobal>| m.get(context).vmglobal())
                .collect::<PrimaryMap<LocalGlobalIndex, NonNull<VMGlobalDefinition>>>()
                .into_boxed_slice();
            // Passive data segments are shared via `Arc` so `memory.init` can
            // read them cheaply and `data.drop` can release them.
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // use dummy value to create an instance so we can get the vmctx pointer
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                // Create the `Instance`. The unique, the One.
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                // Move the `Instance` into the allocator-provided memory;
                // from now on it lives behind `instance_handle`'s pointer.
                let mut instance_handle = allocator.into_vminstance(instance);

                // Set the funcrefs after we've built the instance
                // (building them requires the final vmctx pointer, which only
                // exists once the instance has its permanent address).
                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                }

                instance_handle
            };
            let instance = handle.instance();

            // Populate the vmctx region that trails the `Instance` struct.
            // Each `ptr::copy` writes one section at the offset computed by
            // `VMOffsets`; the allocator reserved space for all of them.
            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                vmshared_signatures.values().as_slice().as_ptr(),
                instance.signature_ids_ptr(),
                vmshared_signatures.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            // these should already be set, add asserts here? for:
            // - instance.tables_ptr() as *mut VMTableDefinition
            // - instance.memories_ptr() as *mut VMMemoryDefinition
            ptr::copy(
                vmctx_globals.values().as_slice().as_ptr(),
                instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
                vmctx_globals.len(),
            );
            // `ptr::write` (not copy) because the destination is uninitialized
            // and the array is built in place.
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            // Perform infallible initialization in this constructor, while fallible
            // initialization is deferred to the `initialize` method.
            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }

    /// Return a reference to the contained `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }

    /// Finishes the instantiation process started by `Instance::new`.
    ///
    /// Applies table/data initializers and runs the module's start function
    /// (if any), as mandated by the WebAssembly spec.
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initializers.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The WebAssembly spec specifies that the start function is
        // invoked automatically at instantiation time.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` to get offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer
    /// arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference-counting pointer to a module.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to a module.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Lookup an export with the given name.
    ///
    /// Returns `None` if the module declares no export under `field`.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Lookup an export with the given export declaration.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // A VMFunction is lazily created only for functions that are
                    // exported.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        // Any function received is already static at this point as:
                        // 1. All locally defined functions in the Wasm have a static signature.
                        // 2. All the imported functions are already static (because
                        //    they point to the trampolines rather than the dynamic addresses).
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow table in this instance by the specified number of elements,
    /// filling the new slots with `init_value`.
    ///
    /// Returns `None` if the table can't be grown by the specified amount
    /// of elements.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get table element reference.
    ///
    /// Returns `None` if index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set table element reference.
    ///
    /// Returns an error if the index is out of bounds
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this module.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1373
1374/// Compute the offset for a memory data initializer.
1375fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1376    let mut start = init.location.offset;
1377
1378    if let Some(base) = init.location.base {
1379        let val = unsafe {
1380            if let Some(def_index) = instance.module.local_global_index(base) {
1381                instance.global(def_index).val.u32
1382            } else {
1383                instance.imported_global(base).definition.as_ref().val.u32
1384            }
1385        };
1386        start += usize::try_from(val).unwrap();
1387    }
1388
1389    start
1390}
1391
1392#[allow(clippy::mut_from_ref)]
1393#[allow(dead_code)]
1394/// Return a byte-slice view of a memory's data.
1395unsafe fn get_memory_slice<'instance>(
1396    init: &DataInitializer<'_>,
1397    instance: &'instance Instance,
1398) -> &'instance mut [u8] {
1399    unsafe {
1400        let memory = if let Some(local_memory_index) = instance
1401            .module
1402            .local_memory_index(init.location.memory_index)
1403        {
1404            instance.memory(local_memory_index)
1405        } else {
1406            let import = instance.imported_memory(init.location.memory_index);
1407            *import.definition.as_ref()
1408        };
1409        slice::from_raw_parts_mut(memory.base, memory.current_length)
1410    }
1411}
1412
1413/// Compute the offset for a table element initializer.
1414fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1415    let mut start = init.offset;
1416
1417    if let Some(base) = init.base {
1418        let val = unsafe {
1419            if let Some(def_index) = instance.module.local_global_index(base) {
1420                instance.global(def_index).val.u32
1421            } else {
1422                instance.imported_global(base).definition.as_ref().val.u32
1423            }
1424        };
1425        start += usize::try_from(val).unwrap();
1426    }
1427
1428    start
1429}
1430
1431/// Initialize the table memory from the provided initializers.
1432fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
1433    let module = Arc::clone(&instance.module);
1434    for init in &module.table_initializers {
1435        let start = get_table_init_start(init, instance);
1436        let table = instance.get_table_handle(init.table_index);
1437        let table = unsafe { table.get_mut(&mut *instance.context) };
1438
1439        if start
1440            .checked_add(init.elements.len())
1441            .is_none_or(|end| end > table.size() as usize)
1442        {
1443            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
1444        }
1445
1446        if let wasmer_types::Type::FuncRef = table.ty().ty {
1447            for (i, func_idx) in init.elements.iter().enumerate() {
1448                let anyfunc = instance.func_ref(*func_idx);
1449                table
1450                    .set(
1451                        u32::try_from(start + i).unwrap(),
1452                        TableElement::FuncRef(anyfunc),
1453                    )
1454                    .unwrap();
1455            }
1456        } else {
1457            for i in 0..init.elements.len() {
1458                table
1459                    .set(
1460                        u32::try_from(start + i).unwrap(),
1461                        TableElement::ExternRef(None),
1462                    )
1463                    .unwrap();
1464            }
1465        }
1466    }
1467
1468    Ok(())
1469}
1470
1471/// Initialize the `Instance::passive_elements` map by resolving the
1472/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1473/// this instance.
1474fn initialize_passive_elements(instance: &Instance) {
1475    let mut passive_elements = instance.passive_elements.borrow_mut();
1476    debug_assert!(
1477        passive_elements.is_empty(),
1478        "should only be called once, at initialization time"
1479    );
1480
1481    passive_elements.extend(instance.module.passive_elements.iter().filter_map(
1482        |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
1483            if segments.is_empty() {
1484                None
1485            } else {
1486                Some((
1487                    idx,
1488                    segments
1489                        .iter()
1490                        .map(|s| instance.func_ref(*s))
1491                        .collect::<Box<[Option<VMFuncRef>]>>(),
1492                ))
1493            }
1494        },
1495    ));
1496}
1497
1498/// Initialize the table memory from the provided initializers.
1499fn initialize_memories(
1500    instance: &mut Instance,
1501    data_initializers: &[DataInitializer<'_>],
1502) -> Result<(), Trap> {
1503    for init in data_initializers {
1504        let memory = instance.get_vmmemory(init.location.memory_index);
1505
1506        let start = get_memory_init_start(init, instance);
1507        unsafe {
1508            let current_length = memory.vmmemory().as_ref().current_length;
1509            if start
1510                .checked_add(init.data.len())
1511                .is_none_or(|end| end > current_length)
1512            {
1513                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1514            }
1515            memory.initialize_with_data(start, init.data)?;
1516        }
1517    }
1518
1519    Ok(())
1520}
1521
1522fn initialize_globals(instance: &Instance) {
1523    let module = Arc::clone(&instance.module);
1524    for (index, initializer) in module.global_initializers.iter() {
1525        unsafe {
1526            let to = instance.global_ptr(index).as_ptr();
1527            match initializer {
1528                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
1529                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
1530                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
1531                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
1532                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
1533                GlobalInit::GetGlobal(x) => {
1534                    let from: VMGlobalDefinition =
1535                        if let Some(def_x) = module.local_global_index(*x) {
1536                            instance.global(def_x)
1537                        } else {
1538                            instance.imported_global(*x).definition.as_ref().clone()
1539                        };
1540                    *to = from;
1541                }
1542                GlobalInit::RefNullConst => (*to).val.funcref = 0,
1543                GlobalInit::RefFunc(func_idx) => {
1544                    let funcref = instance.func_ref(*func_idx).unwrap();
1545                    (*to).val = funcref.into_raw();
1546                }
1547            }
1548        }
1549    }
1550}
1551
1552/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1553/// future funcref operations are just looking up this data.
1554fn build_funcrefs(
1555    module_info: &ModuleInfo,
1556    ctx: &StoreObjects,
1557    imports: &Imports,
1558    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1559    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1560    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1561    vmctx_ptr: *mut VMContext,
1562) -> (
1563    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1564    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1565) {
1566    let mut func_refs =
1567        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1568    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1569
1570    // do imported functions
1571    for import in imports.functions.values() {
1572        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1573    }
1574
1575    // do local functions
1576    for (local_index, func_ptr) in finished_functions.iter() {
1577        let index = module_info.func_index(local_index);
1578        let sig_index = module_info.functions[index];
1579        let type_index = vmshared_signatures[sig_index];
1580        let call_trampoline = function_call_trampolines[sig_index];
1581        let anyfunc = VMCallerCheckedAnyfunc {
1582            func_ptr: func_ptr.0,
1583            type_index,
1584            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1585            call_trampoline,
1586        };
1587        func_refs.push(anyfunc);
1588    }
1589    (
1590        func_refs.into_boxed_slice(),
1591        imported_func_refs.into_boxed_slice(),
1592    )
1593}