1mod allocator;
10
11use crate::LinearMemory;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17 VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
18 VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
19 VMMemoryImport, VMSharedSignatureIndex, VMSharedTagIndex, VMTableDefinition, VMTableImport,
20 VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
23use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
24use crate::{export::VMExtern, threadconditions::ExpectedValue};
25pub use allocator::InstanceAllocator;
26use itertools::Itertools;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
39use wasmer_types::{
40 DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41 InitExpr, InitExprOp, LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex,
42 MemoryError, MemoryIndex, ModuleInfo, Pages, RawValue, SignatureIndex, TableIndex, TagIndex,
43 VMOffsets,
44};
45
/// A WebAssembly instance.
///
/// NOTE(layout): `#[repr(C)]` fixes the field order, and the `vmctx` field
/// is last — the runtime addresses additional per-instance data *past* it
/// (see `vmctx_plus_offset`) and recovers the `Instance` from a raw
/// `*mut VMContext` via `Instance::vmctx_offset()`.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The module this instance was created from.
    module: Arc<ModuleInfo>,

    /// Raw pointer to the store's objects; presumably kept alive by the
    /// owning store — the deref in `context()` relies on it.
    context: *mut StoreObjects,

    /// Offsets used to address fields inside the trailing `VMContext` area.
    offsets: VMOffsets,

    /// Store handles of the locally-defined memories.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// Store handles of the locally-defined tables.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// Store handles of the locally-defined globals.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// Store handles of the instance's tags.
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Entry points of the locally-defined functions.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Host-to-wasm call trampolines, one per signature.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive element segments, consumed by `table.init` / `elem.drop`.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments, consumed by `memory.init` / `data.drop`.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Anyfunc records for local functions, handed out by `func_ref`.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Pointers to the anyfunc records of imported functions.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Field-less marker (constructed as `VMContext {}` in `VMInstance::new`);
    /// the real context data lives after it in the same allocation.
    /// Must remain the last field.
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107 formatter.debug_struct("Instance").finish()
108 }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Returns a typed pointer `offset` bytes past the start of this
    /// instance's `VMContext`.
    ///
    /// # Safety
    /// `offset` must come from this instance's `VMOffsets` so the result
    /// stays inside the context allocation and is suitably aligned for `T`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }

    /// Shared reference to the module this instance was built from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }

    /// Borrow of the underlying `ModuleInfo`.
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }

    /// Shared access to the store's objects.
    pub(crate) fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    /// Exclusive access to the store's objects.
    pub(crate) fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }

    /// Offsets describing the layout of the `VMContext` area.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }

    /// Pointer to the shared signature-id array inside the `VMContext`.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }
148
    /// The `VMFunctionImport` record for imported function `index`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Pointer to the imported-function records in the `VMContext`.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// The `VMTableImport` record for imported table `index`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Pointer to the imported-table records in the `VMContext`.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// The `VMMemoryImport` record for imported memory `index`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Pointer to the imported-memory records in the `VMContext`.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// The `VMGlobalImport` record for imported global `index`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Pointer to the imported-global records in the `VMContext`.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// The shared tag id for tag `index`.
    #[cfg_attr(target_os = "windows", allow(dead_code))]
    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.shared_tags_ptr().add(index) }
    }

    /// Pointer to the shared tag-id array in the `VMContext`.
    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }
204
    /// Copies out the `VMTableDefinition` of local table `index`.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    /// Overwrites the `VMTableDefinition` of local table `index`.
    #[allow(dead_code)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Pointer to the definition of local table `index` in the `VMContext`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Pointer to the local table definitions in the `VMContext`.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }

    /// Copies out the memory definition for `index`, whether local or imported.
    #[allow(dead_code)]
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Copies out the `VMMemoryDefinition` of local memory `index`.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    /// Overwrites the `VMMemoryDefinition` of local memory `index`.
    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Pointer to the definition of local memory `index` in the `VMContext`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Pointer to the local memory definitions in the `VMContext`.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
264
    /// Borrows the `VMMemory` for `index`, resolving imported memories
    /// through their store handle.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Mutable variant of `get_vmmemory`.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Mutable access to a locally-defined memory.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
304
    /// Clones out the `VMGlobalDefinition` of local global `index`.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    /// Overwrites the `VMGlobalDefinition` of local global `index`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Pointer to the definition of local global `index`.
    ///
    /// Note the extra indirection: the `VMContext` stores *pointers* to
    /// global definitions (see `globals_ptr`), not the definitions inline.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Pointer to the array of local-global definition pointers.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Pointer to the built-in runtime functions table in the `VMContext`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }

    /// Reference to this instance's `VMContext` marker field.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Raw mutable pointer to the `VMContext`, as handed to wasm code.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
344
    /// Runs the module's start function, if any, returning any trap it raises.
    ///
    /// Resolves the start function either to a locally-compiled body or to
    /// an imported function, then invokes it through the signature's
    /// host-to-wasm trampoline.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            // No start function: nothing to do.
            None => return Ok(()),
        };

        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        // The values buffer is only ever handed to the trampoline as a raw
        // pointer; it is empty here.
        let mut values_vec = vec![];

        unsafe {
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec.as_mut_ptr(),
            )
        }
    }
395
    /// Byte offset of the `vmctx` field within `Instance`; used to recover
    /// the `Instance` from a raw `*mut VMContext`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

    /// Recovers the local index of a table definition by pointer arithmetic
    /// against the table-definition array in the `VMContext`.
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Recovers the local index of a memory definition by pointer arithmetic
    /// against the memory-definition array in the `VMContext`.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }
425
    /// Grows local memory `memory_index` by `delta` pages.
    ///
    /// # Panics
    /// Panics if there is no memory at `memory_index`.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Grows imported memory `memory_index` by `delta` pages.
    ///
    /// # Safety
    /// The import record for `memory_index` must still refer to a live
    /// memory in this instance's store.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Current size, in pages, of local memory `memory_index`.
    ///
    /// # Panics
    /// Panics if there is no memory at `memory_index`.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }

    /// Current size, in pages, of imported memory `memory_index`.
    ///
    /// # Safety
    /// The import record for `memory_index` must still refer to a live
    /// memory in this instance's store.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }

    /// Current size, in elements, of local table `table_index`.
    ///
    /// # Panics
    /// Panics if there is no table at `table_index`.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }

    /// Current size, in elements, of imported table `table_index`.
    ///
    /// # Safety
    /// The import record for `table_index` must still refer to a live table
    /// in this instance's store.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
504
    /// Grows local table `table_index` by `delta` elements filled with
    /// `init_value`; returns the previous size, or `None` on failure.
    ///
    /// # Panics
    /// Panics if there is no table at `table_index`.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Grows imported table `table_index` by `delta` elements.
    ///
    /// # Safety
    /// The import record for `table_index` must still refer to a live table
    /// in this instance's store.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Reads element `index` of local table `table_index`.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }

    /// Reads element `index` of imported table `table_index`.
    ///
    /// # Safety
    /// The import record for `table_index` must still refer to a live table
    /// in this instance's store.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }

    /// Writes `val` into element `index` of local table `table_index`.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// Writes `val` into element `index` of imported table `table_index`.
    ///
    /// # Safety
    /// The import record for `table_index` must still refer to a live table
    /// in this instance's store.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
592
593 pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
595 if function_index == FunctionIndex::reserved_value() {
596 None
597 } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
598 Some(VMFuncRef(NonNull::from(
599 &self.funcrefs[local_function_index],
600 )))
601 } else {
602 Some(VMFuncRef(self.imported_funcrefs[function_index]))
603 }
604 }
605
    /// Implements the `table.init` instruction: copies `len` elements of
    /// passive element segment `elem_index`, starting at `src`, into table
    /// `table_index` starting at `dst`.
    ///
    /// A dropped or never-defined segment behaves as an empty one, so any
    /// non-zero-length access to it traps.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table_handle(table_index);
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Bounds-check both the segment range and the destination range,
        // guarding against `u32` overflow with `checked_add`.
        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
644
    /// Implements the `table.fill` instruction: writes `item` into `len`
    /// consecutive slots of `table_index` starting at `start_index`.
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        // Guard against `u32` overflow and out-of-bounds fills.
        if start_index
            .checked_add(len)
            .is_none_or(|n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
677
678 pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
680 let mut passive_elements = self.passive_elements.borrow_mut();
683 passive_elements.remove(&elem_index);
684 }
687
    /// Implements the `memory.copy` instruction for a locally-defined memory.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// Implements the `memory.copy` instruction for an imported memory.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_copy(memory, dst, src, len) }
    }

    /// Implements the `memory.fill` instruction for a locally-defined memory.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// Implements the `memory.fill` instruction for an imported memory.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_fill(memory, dst, val, len) }
    }
756
    /// Implements the `memory.init` instruction: copies `len` bytes of
    /// passive data segment `data_index`, starting at `src`, into memory
    /// `memory_index` starting at `dst`.
    ///
    /// A dropped or never-defined segment behaves as an empty one, so any
    /// non-zero-length access to it traps.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Bounds-check both the segment range and the destination range,
        // guarding against `u32` overflow with `checked_add`.
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
789
790 pub(crate) fn data_drop(&self, data_index: DataIndex) {
792 let mut passive_data = self.passive_data.borrow_mut();
793 passive_data.remove(&data_index);
794 }
795
    /// Mutable access to table `table_index`, whether local or imported.
    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.get_local_table(local_table_index)
        } else {
            self.get_foreign_table(table_index)
        }
    }

    /// Mutable access to a locally-defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }

    /// Mutable access to an imported table.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        let table = import.handle;
        table.get_mut(self.context_mut())
    }

    /// Store handle for table `table_index`, whether local or imported.
    pub(crate) fn get_table_handle(
        &mut self,
        table_index: TableIndex,
    ) -> InternalStoreHandle<VMTable> {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.tables[local_table_index]
        } else {
            self.imported_table(table_index).handle
        }
    }
831
832 unsafe fn memory_wait(
835 memory: &mut VMMemory,
836 dst: u32,
837 expected: ExpectedValue,
838 timeout: i64,
839 ) -> Result<u32, Trap> {
840 let timeout = if timeout < 0 {
841 None
842 } else {
843 Some(std::time::Duration::from_nanos(timeout as u64))
844 };
845 match unsafe { memory.do_wait(dst, expected, timeout) } {
846 Ok(count) => Ok(count),
847 Err(_err) => {
848 Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
850 }
851 }
852 }
853
    /// Implements `memory.atomic.wait32` for a locally-defined memory:
    /// checks that the 32-bit value at `dst` still equals `val` and, if so,
    /// blocks until notified or until `timeout` (nanoseconds; negative means
    /// forever) elapses.
    pub(crate) fn local_memory_wait32(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                // Value still matches: actually block on the location.
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implements `memory.atomic.wait32` for an imported memory.
    pub(crate) fn imported_memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                // Value still matches: actually block on the location.
                let memory = self.get_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implements `memory.atomic.wait64` for a locally-defined memory.
    pub(crate) fn local_memory_wait64(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                // Value still matches: actually block on the location.
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implements `memory.atomic.wait64` for an imported memory.
    pub(crate) fn imported_memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                // Value still matches: actually block on the location.
                let memory = self.get_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }
967
    /// Implements `memory.atomic.notify` for a locally-defined memory,
    /// waking up to `count` waiters at `dst` and returning how many woke.
    pub(crate) fn local_memory_notify(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_local_vmmemory_mut(memory_index);
        Ok(memory.do_notify(dst, count))
    }

    /// Implements `memory.atomic.notify` for an imported memory.
    pub(crate) fn imported_memory_notify(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_vmmemory_mut(memory_index);
        Ok(memory.do_notify(dst, count))
    }
989}
990
/// An owning handle to a heap-allocated `Instance`.
///
/// The allocation is sized by `instance_layout` (larger than
/// `size_of::<Instance>()`, to hold the trailing `VMContext` data) and is
/// freed with that same layout in `Drop`.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// Layout used to allocate — and later deallocate — `instance`.
    instance_layout: Layout,

    /// Pointer to the owned `Instance`.
    instance: NonNull<Instance>,
}
1011
impl Drop for VMInstance {
    /// Runs the `Instance` destructor in place, then frees the backing
    /// allocation with the layout it was created with.
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // SAFETY: `instance` is exclusively owned by this handle and was
            // allocated with `instance_layout`, so we may destroy it and
            // return the memory to the allocator.
            instance_ptr.drop_in_place();
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1027
1028impl VMInstance {
    /// Creates a `VMInstance` from the allocator and all pre-built parts,
    /// fills the trailing `VMContext` area (tag ids, signature ids, import
    /// records, global pointers, builtin functions), and runs
    /// passive-element and global initialization.
    ///
    /// # Safety
    /// The caller must ensure every pre-built part (functions, trampolines,
    /// memories, tables, globals, tags, imports, signatures) belongs to
    /// `context` and matches `module` — the raw copies below trust them.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        unsafe {
            // Shared tag ids, derived from the tags' store indices.
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            // Pointers to the local globals' definitions.
            let vmctx_globals = finished_globals
                .values()
                .map(|m: &InternalStoreHandle<VMGlobal>| m.get(context).vmglobal())
                .collect::<PrimaryMap<LocalGlobalIndex, NonNull<VMGlobalDefinition>>>()
                .into_boxed_slice();
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // Funcrefs start empty; they are built only once the
                // instance (and thus its vmctx pointer) has a fixed address.
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                let mut instance_handle = allocator.into_vminstance(instance);

                // Now that the instance is placed, build the funcrefs that
                // embed its vmctx pointer.
                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                }

                instance_handle
            };
            let instance = handle.instance();

            // Copy each per-instance array into its slot in the VMContext
            // area, at the offsets computed by `VMOffsets`.
            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                vmshared_signatures.values().as_slice().as_ptr(),
                instance.signature_ids_ptr(),
                vmshared_signatures.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            ptr::copy(
                vmctx_globals.values().as_slice().as_ptr(),
                instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
                vmctx_globals.len(),
            );
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }
1179
    /// Shared reference to the wrapped `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Exclusive reference to the wrapped `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }
1189
    /// Finishes instantiation: initializes tables and memories from the
    /// module's initializers, then runs the start function (if any).
    ///
    /// # Safety
    /// `data_initializers` must belong to the same module this instance was
    /// created from; they are written into the instance's memories.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }
1212
    /// Reference to this instance's `VMContext`.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Raw pointer to this instance's `VMContext`.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// The `VMOffsets` describing this instance's context layout.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// The module this instance was created from.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Borrow of the underlying `ModuleInfo`.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }
1239
    /// Looks up an export by name; returns `None` if the module has no
    /// export called `field`.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }
1246
    /// Resolves an export index to a `VMExtern` handle.
    ///
    /// Local entities resolve to this instance's own handles; imported ones
    /// reuse the handle stored in the corresponding import record. Exported
    /// local functions get a fresh `VMFunction` registered in the store.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }
1312
    /// Iterator over the module's exports (name / export-index pairs).
    pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Forwards to `Instance::memory_index`.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grows a local memory; forwards to `Instance::memory_grow`.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Forwards to `Instance::table_index`.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grows a local table; forwards to `Instance::table_grow`.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Reads a local table element; forwards to `Instance::table_get`.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Writes a local table element; forwards to `Instance::table_set`.
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Mutable access to a locally-defined table.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
1384}
1385
/// Resolves the memory targeted by data initializer `init` and returns its
/// entire contents as a mutable byte slice.
///
/// # Safety
/// Produces a `&mut [u8]` from a shared `&Instance` (hence the
/// `clippy::mut_from_ref` allow): the caller must guarantee no other
/// references into this memory are alive while the slice is in use.
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    unsafe {
        let memory = if let Some(local_memory_index) = instance
            .module
            .local_memory_index(init.location.memory_index)
        {
            instance.memory(local_memory_index)
        } else {
            let import = instance.imported_memory(init.location.memory_index);
            *import.definition.as_ref()
        };
        slice::from_raw_parts_mut(memory.base, memory.current_length)
    }
}
1406
1407fn get_global(index: GlobalIndex, instance: &Instance) -> RawValue {
1408 unsafe {
1409 if let Some(local_global_index) = instance.module.local_global_index(index) {
1410 instance.global(local_global_index).val
1411 } else {
1412 instance.imported_global(index).definition.as_ref().val
1413 }
1414 }
1415}
1416
/// Result of evaluating a constant initializer expression.
///
/// Initializer expressions evaluate on either a 32-bit or a 64-bit integer
/// stack (see `eval_init_expr`), so the result is one of these two widths.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EvaluatedInitExpr {
    /// The expression evaluated to an `i32`.
    I32(i32),
    /// The expression evaluated to an `i64`.
    I64(i64),
}
1421
1422fn eval_init_expr(expr: &InitExpr, instance: &Instance) -> EvaluatedInitExpr {
1423 if expr
1424 .ops()
1425 .first()
1426 .expect("missing expression")
1427 .is_32bit_expression()
1428 {
1429 let mut stack = Vec::with_capacity(expr.ops().len());
1430 for op in expr.ops() {
1431 match *op {
1432 InitExprOp::I32Const(value) => stack.push(value),
1433 InitExprOp::GlobalGetI32(global) => {
1434 stack.push(unsafe { get_global(global, instance).i32 })
1435 }
1436 InitExprOp::I32Add => {
1437 let rhs = stack.pop().expect("invalid init expr stack for i32.add");
1438 let lhs = stack.pop().expect("invalid init expr stack for i32.add");
1439 stack.push(lhs.wrapping_add(rhs));
1440 }
1441 InitExprOp::I32Sub => {
1442 let rhs = stack.pop().expect("invalid init expr stack for i32.sub");
1443 let lhs = stack.pop().expect("invalid init expr stack for i32.sub");
1444 stack.push(lhs.wrapping_sub(rhs));
1445 }
1446 InitExprOp::I32Mul => {
1447 let rhs = stack.pop().expect("invalid init expr stack for i32.mul");
1448 let lhs = stack.pop().expect("invalid init expr stack for i32.mul");
1449 stack.push(lhs.wrapping_mul(rhs));
1450 }
1451 _ => {
1452 panic!("unexpected init expr statement: {op:?}");
1453 }
1454 }
1455 }
1456 EvaluatedInitExpr::I32(
1457 stack
1458 .into_iter()
1459 .exactly_one()
1460 .expect("invalid init expr stack shape"),
1461 )
1462 } else {
1463 let mut stack = Vec::with_capacity(expr.ops().len());
1464 for op in expr.ops() {
1465 match *op {
1466 InitExprOp::I64Const(value) => stack.push(value),
1467 InitExprOp::GlobalGetI64(global) => {
1468 stack.push(unsafe { get_global(global, instance).i64 })
1469 }
1470 InitExprOp::I64Add => {
1471 let rhs = stack.pop().expect("invalid init expr stack for i64.add");
1472 let lhs = stack.pop().expect("invalid init expr stack for i64.add");
1473 stack.push(lhs.wrapping_add(rhs));
1474 }
1475 InitExprOp::I64Sub => {
1476 let rhs = stack.pop().expect("invalid init expr stack for i64.sub");
1477 let lhs = stack.pop().expect("invalid init expr stack for i64.sub");
1478 stack.push(lhs.wrapping_sub(rhs));
1479 }
1480 InitExprOp::I64Mul => {
1481 let rhs = stack.pop().expect("invalid init expr stack for i64.mul");
1482 let lhs = stack.pop().expect("invalid init expr stack for i64.mul");
1483 stack.push(lhs.wrapping_mul(rhs));
1484 }
1485 _ => {
1486 panic!("unexpected init expr statement: {op:?}");
1487 }
1488 }
1489 }
1490 EvaluatedInitExpr::I64(
1491 stack
1492 .into_iter()
1493 .exactly_one()
1494 .expect("invalid init expr stack shape"),
1495 )
1496 }
1497}
1498
/// Apply the module's active table (element segment) initializers to the
/// instance's tables, returning a table-access trap on any out-of-bounds
/// segment.
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    // Clone the Arc so module metadata can be iterated while `instance`
    // is mutably borrowed inside the loop.
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        // The segment offset must evaluate to an i32.
        let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.offset_expr, instance) else {
            panic!("unexpected expression type, expected i32");
        };
        // A negative offset can never be in bounds.
        if start < 0 {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }
        let start = start as usize;
        let table = instance.get_table_handle(init.table_index);
        let table = unsafe { table.get_mut(&mut *instance.context) };

        // Reject segments whose end overflows usize or exceeds the table size.
        if start
            .checked_add(init.elements.len())
            .is_none_or(|end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            // Funcref tables: resolve each function index to its anyfunc and
            // store it at `start + i`.
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                    )
                    .unwrap();
            }
        } else {
            // Non-funcref tables: every slot in the segment's range is set to
            // a null externref. NOTE(review): the segment's element indices
            // are ignored on this path — confirm that only null initialization
            // is intended for externref tables.
            for i in 0..init.elements.len() {
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                    )
                    .unwrap();
            }
        }
    }

    Ok(())
}
1544
1545fn initialize_passive_elements(instance: &Instance) {
1549 let mut passive_elements = instance.passive_elements.borrow_mut();
1550 debug_assert!(
1551 passive_elements.is_empty(),
1552 "should only be called once, at initialization time"
1553 );
1554
1555 passive_elements.extend(instance.module.passive_elements.iter().filter_map(
1556 |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
1557 if segments.is_empty() {
1558 None
1559 } else {
1560 Some((
1561 idx,
1562 segments
1563 .iter()
1564 .map(|s| instance.func_ref(*s))
1565 .collect::<Box<[Option<VMFuncRef>]>>(),
1566 ))
1567 }
1568 },
1569 ));
1570}
1571
1572fn initialize_memories(
1574 instance: &mut Instance,
1575 data_initializers: &[DataInitializer<'_>],
1576) -> Result<(), Trap> {
1577 for init in data_initializers {
1578 let memory = instance.get_vmmemory(init.location.memory_index);
1579
1580 let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.location.offset_expr, instance)
1581 else {
1582 panic!("unexpected expression type, expected i32");
1583 };
1584 if start < 0 {
1585 return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1586 }
1587 let start = start as usize;
1588 unsafe {
1589 let current_length = memory.vmmemory().as_ref().current_length;
1590 if start
1591 .checked_add(init.data.len())
1592 .is_none_or(|end| end > current_length)
1593 {
1594 return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1595 }
1596 memory.initialize_with_data(start, init.data)?;
1597 }
1598 }
1599
1600 Ok(())
1601}
1602
/// Write the initial values of the module's globals into their definition
/// slots on the instance.
fn initialize_globals(instance: &Instance) {
    // Clone the Arc so the initializer list can be iterated independently
    // of the instance borrow used below.
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            // Raw pointer to this global's VMGlobalDefinition slot.
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                // Constant initializers write the literal straight into the
                // matching union field of the definition's raw value.
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    // Copy the whole definition of the source global, which
                    // may itself be local or imported.
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // Null reference is represented as a zero funcref.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
                // General constant expressions are evaluated and stored at
                // the width the expression produced.
                GlobalInit::Expr(expr) => match eval_init_expr(expr, instance) {
                    EvaluatedInitExpr::I32(value) => (*to).val.i32 = value,
                    EvaluatedInitExpr::I64(value) => (*to).val.i64 = value,
                },
            }
        }
    }
}
1636
1637fn build_funcrefs(
1640 module_info: &ModuleInfo,
1641 ctx: &StoreObjects,
1642 imports: &Imports,
1643 finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1644 vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1645 function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1646 vmctx_ptr: *mut VMContext,
1647) -> (
1648 BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1649 BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1650) {
1651 let mut func_refs =
1652 PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1653 let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1654
1655 for import in imports.functions.values() {
1657 imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1658 }
1659
1660 for (local_index, func_ptr) in finished_functions.iter() {
1662 let index = module_info.func_index(local_index);
1663 let sig_index = module_info.functions[index];
1664 let type_index = vmshared_signatures[sig_index];
1665 let call_trampoline = function_call_trampolines[sig_index];
1666 let anyfunc = VMCallerCheckedAnyfunc {
1667 func_ptr: func_ptr.0,
1668 type_index,
1669 vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1670 call_trampoline,
1671 };
1672 func_refs.push(anyfunc);
1673 }
1674 (
1675 func_refs.into_boxed_slice(),
1676 imported_func_refs.into_boxed_slice(),
1677 )
1678}