mod allocator;

use crate::LinearMemory;
use crate::imports::Imports;
use crate::store::{InternalStoreHandle, StoreObjects};
use crate::table::TableElement;
use crate::trap::{Trap, TrapCode};
use crate::vmcontext::{
    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
    VMMemoryImport, VMSharedSignatureIndex, VMSharedTagIndex, VMTableDefinition, VMTableImport,
    VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
};
use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
use crate::{export::VMExtern, threadconditions::ExpectedValue};
pub use allocator::InstanceAllocator;
use memoffset::offset_of;
use more_asserts::assert_lt;
use std::alloc::Layout;
use std::cell::RefCell;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::fmt;
use std::mem;
use std::ptr::{self, NonNull};
use std::slice;
use std::sync::Arc;
use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
use wasmer_types::{
    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
    LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryError,
    MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer, TagIndex,
    VMOffsets,
};

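/// A WebAssembly instance.
///
/// The type is dynamically sized: `vmctx` is the last field and marks the
/// start of a variable-length region laid out according to `self.offsets`,
/// which is why values of this type are only created through
/// `InstanceAllocator`.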
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    context: *mut StoreObjects,

    /// Offsets into the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// WebAssembly exception tags.
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen,
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen,
    /// entries get removed.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Backing data for `VMFuncRef`s to functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Backing data for `VMFuncRef`s to functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This field is
    /// last, since it begins the variable-sized region described by
    /// `offsets`.
    vmctx: VMContext,
}

impl fmt::Debug for Instance {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_struct("Instance").finish()
    }
}

#[allow(clippy::cast_ptr_alignment)]
impl Instance {
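    /// Helper function to access a location `offset` bytes from the start of
    /// the `VMContext` region, cast to `*mut T`.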
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }

    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }

    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }

    pub(crate) fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    pub(crate) fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }

    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }

    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }

    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

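    /// Return a reference to the shared tag index stored for `index` in the
    /// `vmctx` region.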
    #[cfg_attr(target_os = "windows", allow(dead_code))]
    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.shared_tags_ptr().add(index) }
    }

    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }

    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }

    #[allow(dead_code)]
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }

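    /// Get a locally defined or imported memory as a `&VMMemory`.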
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }

    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

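    /// Return a pointer to the `VMBuiltinFunctionsArray` in the `vmctx` region.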
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }

    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }

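    /// Invoke the WebAssembly start function of the instance, if one is present.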
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        let sig = self.module.functions[start_index];
        let trampoline = self.function_call_trampolines[sig];
        let mut values_vec = vec![];

        unsafe {
            wasmer_call_trampoline(
                trap_handler,
                config,
                callee_vmctx,
                trampoline,
                callee_address,
                values_vec.as_mut_ptr(),
            )
        }
    }

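    /// Return the offset of the `vmctx` field within an `Instance`.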
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

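    /// Return the index of the table described by the given
    /// `VMTableDefinition`, computed from its position in the `vmctx` tables
    /// array.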
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Return the index of the memory described by the given
    /// `VMMemoryDefinition`, computed from its position in the `vmctx`
    /// memories array.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }

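    /// Grow memory by the specified number of pages, returning the previous
    /// size in pages or a `MemoryError` if growth failed.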
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

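    /// Grow an imported memory by the specified number of pages.
    ///
    /// # Safety
    ///
    /// `memory_index` must refer to an imported memory of this instance.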
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Return the current number of pages of a locally defined memory.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }

    /// Return the current number of pages of an imported memory.
    ///
    /// # Safety
    ///
    /// `memory_index` must refer to an imported memory of this instance.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }

    /// Return the number of elements in a locally defined table.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }

    /// Return the number of elements in an imported table.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }

    /// Grow a locally defined table by `delta` elements, returning the
    /// previous size or `None` if growth failed.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Grow an imported table by `delta` elements.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Get an element from a locally defined table.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }

    /// Get an element from an imported table.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }

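    /// Set an element in a locally defined table, trapping if `index` is out
    /// of bounds.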
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// Set an element in an imported table.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }

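    /// Get a `VMFuncRef` for the given function index, or `None` if the index
    /// is the reserved sentinel value.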
    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
        if function_index == FunctionIndex::reserved_value() {
            None
        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
            Some(VMFuncRef(NonNull::from(
                &self.funcrefs[local_function_index],
            )))
        } else {
            Some(VMFuncRef(self.imported_funcrefs[function_index]))
        }
    }

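    /// The `table.init` operation: copy `len` elements of the passive element
    /// segment `elem_index`, starting at `src`, into the table at `dst`.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out
    /// of bounds.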
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table_handle(table_index);
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

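    /// The `table.fill` operation: fill `len` entries of a table starting at
    /// `start_index` with `item`, trapping on an out-of-bounds range.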
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index
            .checked_add(len)
            .is_none_or(|n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// The `elem.drop` operation: drop a passive element segment.
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
    }

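    /// Perform a `memory.copy` on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out
    /// of bounds.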
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// Perform a `memory.copy` on an imported memory.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_copy(memory, dst, src, len) }
    }

    /// Perform a `memory.fill` on a locally defined memory.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// Perform a `memory.fill` on an imported memory.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_fill(memory, dst, val, len) }
    }

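    /// The `memory.init` operation: copy `len` bytes of the passive data
    /// segment `data_index`, starting at `src`, into memory at `dst`,
    /// trapping on out-of-bounds ranges.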
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }

    /// The `data.drop` operation: drop a passive data segment.
    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }

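    /// Get a table by index regardless of whether it is locally defined or
    /// imported.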
    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.get_local_table(local_table_index)
        } else {
            self.get_foreign_table(table_index)
        }
    }

    /// Get a locally defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }

    /// Get an imported table.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        let table = import.handle;
        table.get_mut(self.context_mut())
    }

    /// Get the store handle of a table, resolving local and imported indices.
    pub(crate) fn get_table_handle(
        &mut self,
        table_index: TableIndex,
    ) -> InternalStoreHandle<VMTable> {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.tables[local_table_index]
        } else {
            self.imported_table(table_index).handle
        }
    }

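    /// Block the current thread on a memory location until it is notified or
    /// the timeout (in nanoseconds) expires; a negative timeout waits
    /// indefinitely.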
    unsafe fn memory_wait(
        memory: &mut VMMemory,
        dst: u32,
        expected: ExpectedValue,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let timeout = if timeout < 0 {
            None
        } else {
            Some(std::time::Duration::from_nanos(timeout as u64))
        };
        match unsafe { memory.do_wait(dst, expected, timeout) } {
            Ok(count) => Ok(count),
            Err(_err) => {
                // `do_wait` failed (for example, too many waiters on the same
                // location); surface the failure as an access trap.
                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
            }
        }
    }

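    /// Perform an `atomic.wait32` on a locally defined memory.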
    pub(crate) fn local_memory_wait32(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Perform an `atomic.wait32` on an imported memory.
    pub(crate) fn imported_memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Perform an `atomic.wait64` on a locally defined memory.
    pub(crate) fn local_memory_wait64(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Perform an `atomic.wait64` on an imported memory.
    pub(crate) fn imported_memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Perform an `atomic.notify` on a locally defined memory, waking up to
    /// `count` waiters at `dst`.
    pub(crate) fn local_memory_notify(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_local_vmmemory_mut(memory_index);
        Ok(memory.do_notify(dst, count))
    }

    /// Perform an `atomic.notify` on an imported memory, waking up to `count`
    /// waiters at `dst`.
    pub(crate) fn imported_memory_notify(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_vmmemory_mut(memory_index);
        Ok(memory.do_notify(dst, count))
    }
}

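/// A handle holding an `Instance` of a WebAssembly module.
///
/// The handle owns the instance allocation and frees it on drop.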
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of the `Instance` allocation. Because the allocation is
    /// dynamically sized, the layout is recorded here for deallocation.
    instance_layout: Layout,

    /// The `Instance` itself.
    instance: NonNull<Instance>,
}

impl Drop for VMInstance {
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            instance_ptr.drop_in_place();
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}

impl VMInstance {
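    /// Create a new `VMInstance` backed by the given allocator.
    ///
    /// # Safety
    ///
    /// This is highly unsafe: all of the inputs (functions, trampolines,
    /// memories, tables, globals, tags, imports, and signatures) must be
    /// valid and mutually consistent for the module being instantiated.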
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        unsafe {
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            let vmctx_globals = finished_globals
                .values()
                .map(|m: &InternalStoreHandle<VMGlobal>| m.get(context).vmglobal())
                .collect::<PrimaryMap<LocalGlobalIndex, NonNull<VMGlobalDefinition>>>()
                .into_boxed_slice();
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // Both funcref tables start out empty and are filled in
                // below, once the `VMContext` pointer is known.
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                let mut instance_handle = allocator.into_vminstance(instance);

                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                }

                instance_handle
            };
            let instance = handle.instance();

            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                vmshared_signatures.values().as_slice().as_ptr(),
                instance.signature_ids_ptr(),
                vmshared_signatures.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            ptr::copy(
                vmctx_globals.values().as_slice().as_ptr(),
                instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
                vmctx_globals.len(),
            );
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }

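    /// Return a reference to the contained `Instance`.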
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }

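    /// Finish the instantiation process started by `VMInstance::new`: run the
    /// table, memory, and start-function initializers.
    ///
    /// # Safety
    ///
    /// Only to be called once, immediately after creating the instance.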
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

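    /// Return a reference to the `VMContext` of this instance.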
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the `VMContext` of this instance.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` used to lay out this instance.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference to the module of this instance.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to the module's `ModuleInfo`.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

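    /// Lookup an exported entity by name.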
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

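    /// Lookup an exported entity by its export declaration.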
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }

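    /// Return an iterator over the exports of this instance.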
    pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this
    /// instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified number of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this
    /// instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow a table in this instance by the specified number of elements.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get a table element by index.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set a table element by index.
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a locally defined table.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}

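/// Compute the effective offset of a memory data initializer, adding the
/// value of its global base (if any) to the static offset.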
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
    let mut start = init.location.offset;

    if let Some(base) = init.location.base {
        let val = unsafe {
            if let Some(def_index) = instance.module.local_global_index(base) {
                instance.global(def_index).val.u32
            } else {
                instance.imported_global(base).definition.as_ref().val.u32
            }
        };
        start += usize::try_from(val).unwrap();
    }

    start
}

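/// Return a byte-slice view of a memory's data.
///
/// # Safety
///
/// The returned slice aliases the memory, so it is invalidated by any
/// concurrent modification or growth of that memory.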
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    unsafe {
        let memory = if let Some(local_memory_index) = instance
            .module
            .local_memory_index(init.location.memory_index)
        {
            instance.memory(local_memory_index)
        } else {
            let import = instance.imported_memory(init.location.memory_index);
            *import.definition.as_ref()
        };
        slice::from_raw_parts_mut(memory.base, memory.current_length)
    }
}

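/// Compute the effective offset of a table element initializer, adding the
/// value of its global base (if any) to the static offset.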
fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
    let mut start = init.offset;

    if let Some(base) = init.base {
        let val = unsafe {
            if let Some(def_index) = instance.module.local_global_index(base) {
                instance.global(def_index).val.u32
            } else {
                instance.imported_global(base).definition.as_ref().val.u32
            }
        };
        start += usize::try_from(val).unwrap();
    }

    start
}

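/// Initialize the table memory from the provided table initializers, trapping
/// if any initializer is out of bounds for its table.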
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table_handle(init.table_index);
        let table = unsafe { table.get_mut(&mut *instance.context) };

        if start
            .checked_add(init.elements.len())
            .is_none_or(|end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                    )
                    .unwrap();
            }
        } else {
            for i in 0..init.elements.len() {
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                    )
                    .unwrap();
            }
        }
    }

    Ok(())
}

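/// Initialize the `Instance::passive_elements` map by resolving the
/// `ModuleInfo::passive_elements`'s function indices into `VMFuncRef`s. Empty
/// segments are skipped: a missing entry behaves like an already-dropped
/// segment.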
fn initialize_passive_elements(instance: &Instance) {
    let mut passive_elements = instance.passive_elements.borrow_mut();
    debug_assert!(
        passive_elements.is_empty(),
        "should only be called once, at initialization time"
    );

    passive_elements.extend(instance.module.passive_elements.iter().filter_map(
        |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
            if segments.is_empty() {
                None
            } else {
                Some((
                    idx,
                    segments
                        .iter()
                        .map(|s| instance.func_ref(*s))
                        .collect::<Box<[Option<VMFuncRef>]>>(),
                ))
            }
        },
    ));
}

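/// Initialize the instance's memories from the provided data initializers,
/// trapping if any initializer is out of bounds for its memory.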
fn initialize_memories(
    instance: &mut Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.get_vmmemory(init.location.memory_index);

        let start = get_memory_init_start(init, instance);
        unsafe {
            let current_length = memory.vmmemory().as_ref().current_length;
            if start
                .checked_add(init.data.len())
                .is_none_or(|end| end > current_length)
            {
                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
            }
            memory.initialize_with_data(start, init.data)?;
        }
    }

    Ok(())
}

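/// Initialize the instance's globals from the module's global initializers.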
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
            }
        }
    }
}

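/// Eagerly build the `VMCallerCheckedAnyfunc`s for local and imported
/// functions so that `VMFuncRef`s can point at them.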
fn build_funcrefs(
    module_info: &ModuleInfo,
    ctx: &StoreObjects,
    imports: &Imports,
    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
    vmctx_ptr: *mut VMContext,
) -> (
    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
) {
    let mut func_refs =
        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);

    // Imported functions: reuse the anyfunc owned by the imported function itself.
    for import in imports.functions.values() {
        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
    }

    // Local functions: build an anyfunc from the compiled body, shared
    // signature, and call trampoline.
    for (local_index, func_ptr) in finished_functions.iter() {
        let index = module_info.func_index(local_index);
        let sig_index = module_info.functions[index];
        let type_index = vmshared_signatures[sig_index];
        let call_trampoline = function_call_trampolines[sig_index];
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: func_ptr.0,
            type_index,
            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
            call_trampoline,
        };
        func_refs.push(anyfunc);
    }
    (
        func_refs.into_boxed_slice(),
        imported_func_refs.into_boxed_slice(),
    )
}