1mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17 VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
18 VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
19 VMMemoryImport, VMSharedSignatureIndex, VMSharedTagIndex, VMTableDefinition, VMTableImport,
20 VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
23use crate::{LinearMemory, NotifyLocation};
24use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
25pub use allocator::InstanceAllocator;
26use memoffset::offset_of;
27use more_asserts::assert_lt;
28use std::alloc::Layout;
29use std::cell::RefCell;
30use std::collections::HashMap;
31use std::convert::TryFrom;
32use std::fmt;
33use std::mem;
34use std::ptr::{self, NonNull};
35use std::slice;
36use std::sync::Arc;
37use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
38use wasmer_types::{
39 DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
40 LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryError,
41 MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer, TagIndex,
42 VMOffsets,
43};
44
/// The runtime state of a single WebAssembly instance.
///
/// `#[repr(C)]` because layout matters: the `vmctx` field must remain the
/// *last* field — the variable-size VM context data (imported functions,
/// tables, memories, globals, signature ids, ...) is allocated directly
/// after this struct and addressed relative to `vmctx` via
/// `vmctx_plus_offset` using the offsets in `self.offsets`.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The module this is an instance of.
    module: Arc<ModuleInfo>,

    /// Raw pointer to the store's objects; assumed valid for the life of
    /// the instance (see `context()` / `context_mut()`).
    context: *mut StoreObjects,

    /// Offsets of the vmctx sections for this particular module.
    offsets: VMOffsets,

    /// Store handles to the locally-defined memories.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// Store handles to the locally-defined tables.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// Store handles to the locally-defined globals.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// Store handles to the instance's tags.
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Compiled body pointers for the locally-defined functions.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// One call trampoline per signature in the module.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive element segments; consumed by `table_init`, removed by `elem_drop`.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments; consumed by `memory_init`, removed by `data_drop`.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Anyfunc records for local functions, handed out by `func_ref`.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Anyfunc pointers for imported functions, handed out by `func_ref`.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Marker for the start of the trailing vmctx data (constructed as
    /// `VMContext {}` — it carries no fields itself). Must stay last.
    vmctx: VMContext,
}
103
104impl fmt::Debug for Instance {
105 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
106 formatter.debug_struct("Instance").finish()
107 }
108}
109
110#[allow(clippy::cast_ptr_alignment)]
111impl Instance {
    /// Return a raw pointer `offset` bytes past the start of the
    /// `VMContext`, cast to `*mut T`.
    ///
    /// # Safety
    /// `offset` must lie within the vmctx data allocated after this
    /// `Instance`, and the bytes there must be valid for a `T`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }
121
122 fn module(&self) -> &Arc<ModuleInfo> {
123 &self.module
124 }
125
126 pub(crate) fn module_ref(&self) -> &ModuleInfo {
127 &self.module
128 }
129
    /// Shared view of the store's objects.
    // SAFETY (assumed): `self.context` is set at instantiation from a live
    // `&mut StoreObjects` and the store is expected to outlive the instance.
    fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    /// Exclusive view of the store's objects (same validity assumption as
    /// `context()`).
    fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }

    /// The vmctx layout offsets for this module.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
142
    /// Pointer to the signature-id array inside the vmctx data.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }

    /// The `VMFunctionImport` for the given imported-function index.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Base pointer of the imported-functions section of the vmctx.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// The `VMTableImport` for the given imported-table index.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Base pointer of the imported-tables section of the vmctx.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// The `VMMemoryImport` for the given imported-memory index.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Base pointer of the imported-memories section of the vmctx.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// The `VMGlobalImport` for the given imported-global index.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Base pointer of the imported-globals section of the vmctx.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// The shared tag id for the given tag index.
    #[cfg_attr(target_os = "windows", allow(dead_code))]
    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.shared_tags_ptr().add(index) }
    }

    /// Base pointer of the tag-id section of the vmctx.
    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }
203
    /// Copy out the `VMTableDefinition` for a local table.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    /// Overwrite the `VMTableDefinition` for a local table in the vmctx.
    #[allow(dead_code)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Non-null pointer to the vmctx slot of a local table definition.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the local-table-definitions section of the vmctx.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }

    /// Copy out the `VMMemoryDefinition` for any memory index, local or
    /// imported (imported ones are read through the import's definition
    /// pointer).
    #[allow(dead_code)]
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Copy out the `VMMemoryDefinition` for a local memory.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    /// Overwrite the `VMMemoryDefinition` for a local memory in the vmctx.
    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Non-null pointer to the vmctx slot of a local memory definition.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the local-memory-definitions section of the vmctx.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
263
    /// Borrow the `VMMemory` object for any memory index, resolving local
    /// indices through `self.memories` and imports through their handles.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Mutable variant of [`Self::get_vmmemory`].
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Mutable borrow of a locally-defined memory, skipping the import check.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
303
    /// Copy out the `VMGlobalDefinition` for a local global.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    /// Overwrite the `VMGlobalDefinition` a local global points at.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Non-null pointer to a local global's definition. Note the double
    /// indirection: the vmctx stores a *pointer* per global, which is
    /// loaded here.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the local-global-pointers section of the vmctx.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Pointer to the builtin-functions array inside the vmctx.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
333
    /// Borrow the vmctx marker field.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Raw mutable pointer to the vmctx; the const-to-mut cast mirrors how
    /// generated wasm code addresses the context.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
343
    /// Invoke the module's start function, if it declares one.
    ///
    /// Resolves the callee either to a locally-compiled body (paired with
    /// this instance's vmctx) or to an imported function's body and
    /// environment, then calls it through the signature's trampoline.
    /// Returns `Ok(())` immediately when there is no start function.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                // Not local, so it must be one of the imported functions.
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        // A start function takes no parameters and returns nothing, so the
        // values buffer can be empty.
        let mut values_vec = vec![];

        // SAFETY: callee address/vmctx and trampoline were resolved above
        // for a matching signature.
        unsafe {
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec.as_mut_ptr(),
            )
        }
    }
394
    /// Byte offset of the `vmctx` field from the start of `Instance`;
    /// used to recover the `Instance` pointer from a vmctx pointer.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
400
    /// Recover the local table index of a `VMTableDefinition` that lives
    /// inside this instance's vmctx, by pointer arithmetic from the start
    /// of the tables section.
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Recover the local memory index of a `VMMemoryDefinition` that lives
    /// inside this instance's vmctx (same technique as `table_index`).
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }
424
    /// Grow a locally-defined memory by `delta` pages; returns the previous
    /// size in pages, or an error if growth fails.
    ///
    /// # Panics
    /// Panics if `memory_index` has no corresponding local memory.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Grow an *imported* memory by `delta` pages.
    ///
    /// # Safety
    /// `memory_index` must identify a valid imported memory of this
    /// instance; its handle is resolved through the vmctx import section.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Current size, in pages, of a locally-defined memory.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }

    /// Current size, in pages, of an imported memory.
    ///
    /// # Safety
    /// `memory_index` must identify a valid imported memory of this instance.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }

    /// Current element count of a locally-defined table.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }

    /// Current element count of an imported table.
    ///
    /// # Safety
    /// `table_index` must identify a valid imported table of this instance.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
503
    /// Grow a locally-defined table by `delta` elements filled with
    /// `init_value`; returns the previous size, or `None` on failure.
    ///
    /// # Panics
    /// Panics if `table_index` has no corresponding local table.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Grow an imported table by `delta` elements.
    ///
    /// # Safety
    /// `table_index` must identify a valid imported table of this instance.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Read element `index` of a locally-defined table; `None` if out of
    /// bounds.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }

    /// Read element `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must identify a valid imported table of this instance.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }

    /// Write element `index` of a locally-defined table; traps on an
    /// out-of-bounds index.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// Write element `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must identify a valid imported table of this instance.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
591
592 pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
594 if function_index == FunctionIndex::reserved_value() {
595 None
596 } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
597 Some(VMFuncRef(NonNull::from(
598 &self.funcrefs[local_function_index],
599 )))
600 } else {
601 Some(VMFuncRef(self.imported_funcrefs[function_index]))
602 }
603 }
604
    /// Implement `table.init`: copy `len` funcrefs from passive element
    /// segment `elem_index` (starting at `src`) into `table_index`
    /// (starting at `dst`). A dropped/absent segment behaves as empty.
    /// Traps with `TableAccessOutOfBounds` if either range overflows or
    /// exceeds its source/destination.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // Resolve the table handle first; the RefCell borrow of
        // `passive_elements` must not overlap any `&mut self` call.
        let table = self.get_table_handle(table_index);
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Checked adds both guard u32 overflow and bounds at once.
        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
643
    /// Implement `table.fill`: write `len` copies of `item` into the table
    /// starting at `start_index`. Traps with `TableAccessOutOfBounds` if
    /// the range overflows or exceeds the table size.
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index
            .checked_add(len)
            .is_none_or(|n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
676
677 pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
679 let mut passive_elements = self.passive_elements.borrow_mut();
682 passive_elements.remove(&elem_index);
683 }
686
    /// Implement `memory.copy` within a locally-defined memory
    /// (`dst` and `src` are byte offsets, `len` a byte count).
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        // SAFETY: `memory` is this instance's live definition; bounds are
        // enforced inside `memory_copy`.
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// Implement `memory.copy` within an imported memory.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_copy(memory, dst, src, len) }
    }

    /// Implement `memory.fill` for a locally-defined memory (`val` is the
    /// byte value to store).
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// Implement `memory.fill` for an imported memory.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_fill(memory, dst, val, len) }
    }
755
    /// Implement `memory.init`: copy `len` bytes from passive data segment
    /// `data_index` (starting at `src`) into the memory (starting at byte
    /// `dst`). A dropped/absent segment behaves as empty. Traps with
    /// `HeapAccessOutOfBounds` if either range overflows or exceeds its
    /// source/destination.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        // SAFETY: destination range was bounds-checked against the memory's
        // current length above.
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
788
789 pub(crate) fn data_drop(&self, data_index: DataIndex) {
791 let mut passive_data = self.passive_data.borrow_mut();
792 passive_data.remove(&data_index);
793 }
794
    /// Mutable borrow of any table, dispatching on whether the index is
    /// locally-defined or imported.
    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.get_local_table(local_table_index)
        } else {
            self.get_foreign_table(table_index)
        }
    }

    /// Mutable borrow of a locally-defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }

    /// Mutable borrow of an imported table, through its import handle.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        let table = import.handle;
        table.get_mut(self.context_mut())
    }

    /// The store handle for any table index (local or imported); useful
    /// when the borrow of `self` must be released before using the table.
    pub(crate) fn get_table_handle(
        &mut self,
        table_index: TableIndex,
    ) -> InternalStoreHandle<VMTable> {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.tables[local_table_index]
        } else {
            self.imported_table(table_index).handle
        }
    }
830
    /// Block on an atomic wait at address `dst` of `memory`.
    ///
    /// A negative `timeout` means wait indefinitely; otherwise `timeout`
    /// is interpreted as nanoseconds. Returns the wake count from
    /// `do_wait` on success.
    fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
        let location = NotifyLocation { address: dst };
        let timeout = if timeout < 0 {
            None
        } else {
            Some(std::time::Duration::from_nanos(timeout as u64))
        };
        match memory.do_wait(location, timeout) {
            Ok(count) => Ok(count),
            Err(_err) => {
                // NOTE(review): a *memory* wait failure traps with the
                // table trap code `TableAccessOutOfBounds`; this looks like
                // it should be a heap/memory code. Confirm before changing —
                // embedders may match on the current code.
                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
            }
        }
    }
846
    /// Implement `memory.atomic.wait32` for a locally-defined memory:
    /// first check `*dst == val` atomically; only if the check returns 0
    /// (values matched) do we actually block.
    pub(crate) fn local_memory_wait32(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implement `memory.atomic.wait32` for an imported memory.
    pub(crate) fn imported_memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implement `memory.atomic.wait64` for a locally-defined memory
    /// (same check-then-wait shape as the 32-bit variant).
    pub(crate) fn local_memory_wait64(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implement `memory.atomic.wait64` for an imported memory.
    pub(crate) fn imported_memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }
951
    /// Implement `memory.atomic.notify` for a locally-defined memory:
    /// wake up to `count` waiters parked at `dst`, returning how many
    /// were woken.
    pub(crate) fn local_memory_notify(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_local_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }

    /// Implement `memory.atomic.notify` for an imported memory.
    pub(crate) fn imported_memory_notify(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }
977}
978
/// An owning handle to an `Instance` plus the vmctx data allocated after
/// it. Deallocation of the combined allocation happens in `Drop`.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// Layout of the combined `Instance` + vmctx allocation, needed to
    /// call `dealloc` with the exact layout it was allocated with.
    instance_layout: Layout,

    /// Pointer to the heap-allocated `Instance`.
    instance: NonNull<Instance>,
}
999
impl Drop for VMInstance {
    /// Run the `Instance` destructor in place, then free the combined
    /// `Instance` + vmctx allocation with the layout it was created with.
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        // SAFETY: `instance` uniquely owns the allocation described by
        // `instance_layout`; after this the pointer is never used again.
        unsafe {
            instance_ptr.drop_in_place();
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1015
1016impl VMInstance {
    /// Create and initialize a `VMInstance` from pre-built parts.
    ///
    /// Builds the `Instance` value, moves it into the allocation obtained
    /// from `allocator`, fills in its funcref tables, then copies the
    /// import/signature/tag/global pointer sections into the trailing
    /// vmctx data, and finally runs passive-element and global
    /// initialization.
    ///
    /// # Safety
    /// The allocator, module and all the finished parts must belong
    /// together and to `context`; the copied vmctx sections must match the
    /// layout described by `allocator.offsets()`.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        unsafe {
            // Materialize the per-instance views that get copied into the
            // vmctx sections below.
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            let vmctx_globals = finished_globals
                .values()
                .map(|m: &InternalStoreHandle<VMGlobal>| m.get(context).vmglobal())
                .collect::<PrimaryMap<LocalGlobalIndex, NonNull<VMGlobalDefinition>>>()
                .into_boxed_slice();
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // funcrefs are filled in after the instance is at its
                // final address, since they embed the vmctx pointer.
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                let mut instance_handle = allocator.into_vminstance(instance);

                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                }

                instance_handle
            };
            let instance = handle.instance();

            // Copy each prepared section into its slot in the trailing
            // vmctx data; destinations come from `VMOffsets`.
            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                vmshared_signatures.values().as_slice().as_ptr(),
                instance.signature_ids_ptr(),
                vmshared_signatures.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            ptr::copy(
                vmctx_globals.values().as_slice().as_ptr(),
                instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
                vmctx_globals.len(),
            );
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            // Segment/global setup that does not require data initializers
            // (those run later in `finish_instantiation`).
            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }
1167
    /// Shared borrow of the owned `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Exclusive borrow of the owned `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }
1177
1178 pub unsafe fn finish_instantiation(
1184 &mut self,
1185 config: &VMConfig,
1186 trap_handler: Option<*const TrapHandlerFn<'static>>,
1187 data_initializers: &[DataInitializer<'_>],
1188 ) -> Result<(), Trap> {
1189 let instance = self.instance_mut();
1190
1191 initialize_tables(instance)?;
1193 initialize_memories(instance, data_initializers)?;
1194
1195 instance.invoke_start_function(config, trap_handler)?;
1198 Ok(())
1199 }
1200
    /// Borrow the instance's `VMContext` marker.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Raw pointer to the instance's `VMContext`.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// The vmctx layout offsets for this instance's module.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// The ref-counted module metadata.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// The module metadata itself.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }
1227
1228 pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
1230 let export = *self.module_ref().exports.get(field)?;
1231
1232 Some(self.lookup_by_declaration(export))
1233 }
1234
    /// Materialize an `ExportIndex` into a `VMExtern` store handle.
    ///
    /// Locally-defined tables/memories/globals/tags reuse their existing
    /// store handles; imported ones reuse the import's handle. A local
    /// *function* export allocates a fresh `VMFunction` in the store on
    /// every call.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        // The anyfunc lives in this instance's `funcrefs`,
                        // so the instance owns it.
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }
1300
    /// Iterate over the module's exports (name → export index).
    pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
        self.module().exports.iter()
    }

    /// See [`Instance::memory_index`].
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// See [`Instance::memory_grow`].
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// See [`Instance::table_index`].
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// See [`Instance::table_grow`].
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// See [`Instance::table_get`].
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// See [`Instance::table_set`].
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// See [`Instance::get_local_table`].
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
1373
/// Compute the absolute start offset of a data initializer: its static
/// offset plus, when present, the value of the base global (read as u32
/// from the global definition's value union).
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
    let mut start = init.location.offset;

    if let Some(base) = init.location.base {
        // SAFETY (assumed): the base global was validated to be an i32,
        // so reading the `u32` union field is the intended view.
        let val = unsafe {
            if let Some(def_index) = instance.module.local_global_index(base) {
                instance.global(def_index).val.u32
            } else {
                instance.imported_global(base).definition.as_ref().val.u32
            }
        };
        start += usize::try_from(val).unwrap();
    }

    start
}
1391
1392#[allow(clippy::mut_from_ref)]
1393#[allow(dead_code)]
1394unsafe fn get_memory_slice<'instance>(
1396 init: &DataInitializer<'_>,
1397 instance: &'instance Instance,
1398) -> &'instance mut [u8] {
1399 unsafe {
1400 let memory = if let Some(local_memory_index) = instance
1401 .module
1402 .local_memory_index(init.location.memory_index)
1403 {
1404 instance.memory(local_memory_index)
1405 } else {
1406 let import = instance.imported_memory(init.location.memory_index);
1407 *import.definition.as_ref()
1408 };
1409 slice::from_raw_parts_mut(memory.base, memory.current_length)
1410 }
1411}
1412
1413fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1415 let mut start = init.offset;
1416
1417 if let Some(base) = init.base {
1418 let val = unsafe {
1419 if let Some(def_index) = instance.module.local_global_index(base) {
1420 instance.global(def_index).val.u32
1421 } else {
1422 instance.imported_global(base).definition.as_ref().val.u32
1423 }
1424 };
1425 start += usize::try_from(val).unwrap();
1426 }
1427
1428 start
1429}
1430
1431fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
1433 let module = Arc::clone(&instance.module);
1434 for init in &module.table_initializers {
1435 let start = get_table_init_start(init, instance);
1436 let table = instance.get_table_handle(init.table_index);
1437 let table = unsafe { table.get_mut(&mut *instance.context) };
1438
1439 if start
1440 .checked_add(init.elements.len())
1441 .is_none_or(|end| end > table.size() as usize)
1442 {
1443 return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
1444 }
1445
1446 if let wasmer_types::Type::FuncRef = table.ty().ty {
1447 for (i, func_idx) in init.elements.iter().enumerate() {
1448 let anyfunc = instance.func_ref(*func_idx);
1449 table
1450 .set(
1451 u32::try_from(start + i).unwrap(),
1452 TableElement::FuncRef(anyfunc),
1453 )
1454 .unwrap();
1455 }
1456 } else {
1457 for i in 0..init.elements.len() {
1458 table
1459 .set(
1460 u32::try_from(start + i).unwrap(),
1461 TableElement::ExternRef(None),
1462 )
1463 .unwrap();
1464 }
1465 }
1466 }
1467
1468 Ok(())
1469}
1470
1471fn initialize_passive_elements(instance: &Instance) {
1475 let mut passive_elements = instance.passive_elements.borrow_mut();
1476 debug_assert!(
1477 passive_elements.is_empty(),
1478 "should only be called once, at initialization time"
1479 );
1480
1481 passive_elements.extend(instance.module.passive_elements.iter().filter_map(
1482 |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
1483 if segments.is_empty() {
1484 None
1485 } else {
1486 Some((
1487 idx,
1488 segments
1489 .iter()
1490 .map(|s| instance.func_ref(*s))
1491 .collect::<Box<[Option<VMFuncRef>]>>(),
1492 ))
1493 }
1494 },
1495 ));
1496}
1497
1498fn initialize_memories(
1500 instance: &mut Instance,
1501 data_initializers: &[DataInitializer<'_>],
1502) -> Result<(), Trap> {
1503 for init in data_initializers {
1504 let memory = instance.get_vmmemory(init.location.memory_index);
1505
1506 let start = get_memory_init_start(init, instance);
1507 unsafe {
1508 let current_length = memory.vmmemory().as_ref().current_length;
1509 if start
1510 .checked_add(init.data.len())
1511 .is_none_or(|end| end > current_length)
1512 {
1513 return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1514 }
1515 memory.initialize_with_data(start, init.data)?;
1516 }
1517 }
1518
1519 Ok(())
1520}
1521
/// Initialize this instance's `VMGlobalDefinition`s from the module's
/// global initializer expressions, writing each value directly into the
/// global's storage via a raw pointer.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            // Raw pointer into this instance's storage for global `index`.
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                // Constant initializers: write the matching variant of the
                // global's value union in place.
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                // `global.get` initializer: copy the whole definition of the
                // source global, whether it is local or imported.
                GlobalInit::GetGlobal(x) => {
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // A null reference is represented by a zero funcref value.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                // `ref.func`: resolve the function to its anyfunc and store
                // the raw reference value.
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
            }
        }
    }
}
1551
1552fn build_funcrefs(
1555 module_info: &ModuleInfo,
1556 ctx: &StoreObjects,
1557 imports: &Imports,
1558 finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1559 vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1560 function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1561 vmctx_ptr: *mut VMContext,
1562) -> (
1563 BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1564 BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1565) {
1566 let mut func_refs =
1567 PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1568 let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1569
1570 for import in imports.functions.values() {
1572 imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1573 }
1574
1575 for (local_index, func_ptr) in finished_functions.iter() {
1577 let index = module_info.func_index(local_index);
1578 let sig_index = module_info.functions[index];
1579 let type_index = vmshared_signatures[sig_index];
1580 let call_trampoline = function_call_trampolines[sig_index];
1581 let anyfunc = VMCallerCheckedAnyfunc {
1582 func_ptr: func_ptr.0,
1583 type_index,
1584 vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1585 call_trampoline,
1586 };
1587 func_refs.push(anyfunc);
1588 }
1589 (
1590 func_refs.into_boxed_slice(),
1591 imported_func_refs.into_boxed_slice(),
1592 )
1593}