1mod allocator;
10
11use crate::LinearMemory;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{Trap, TrapCode};
16use crate::vmcontext::{
17 VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
18 VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
19 VMMemoryImport, VMSharedTagIndex, VMSignatureHash, VMTableDefinition, VMTableImport,
20 VMTrampoline, memory_copy, memory_fill, memory32_atomic_check32, memory32_atomic_check64,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMTag, wasmer_call_trampoline};
23use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
24use crate::{export::VMExtern, threadconditions::ExpectedValue};
25pub use allocator::InstanceAllocator;
26use itertools::Itertools;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{BoxedSlice, EntityRef, PrimaryMap, packed_option::ReservedValue};
39use wasmer_types::{
40 DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41 InitExpr, InitExprOp, LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex,
42 MemoryError, MemoryIndex, ModuleInfo, Pages, RawValue, SignatureIndex, TableIndex, TagIndex,
43 VMOffsets,
44};
45
/// A WebAssembly instance.
///
/// NOTE: `#[repr(C)]` matters here — `vmctx` must remain the *last* field,
/// because the variable-sized `VMContext` payload is allocated immediately
/// after this struct and is addressed by offsetting from `vmctx` (see
/// `vmctx_plus_offset`).
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The module this instance was instantiated from.
    module: Arc<ModuleInfo>,

    /// Raw pointer into the owning store's objects.
    // NOTE(review): stored as a raw pointer, so the store must outlive the
    // instance; every dereference assumes this invariant.
    context: *mut StoreObjects,

    /// Offsets of the fields laid out inside the trailing `VMContext`.
    offsets: VMOffsets,

    /// Store handles for the locally-defined memories.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// Store handles for the locally-defined tables.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// Store handles for the locally-defined globals.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// Store handles for the tags (exception handling).
    tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to the compiled function bodies defined in this module.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// One call trampoline per signature in the module.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive element segments, dropped by `elem.drop`.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments, dropped by `data.drop`.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// `VMCallerCheckedAnyfunc`s for local functions; `func_ref` hands out
    /// pointers into this slice.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Pointers to the anyfuncs of imported functions (owned elsewhere).
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context; MUST be last (see the `#[repr(C)]` note above).
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107 formatter.debug_struct("Instance").finish()
108 }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Compute a typed pointer `offset` bytes past the start of this
    /// instance's `VMContext`.
    ///
    /// # Safety
    /// `offset` must come from `self.offsets` and the resulting pointer must
    /// be valid for `T` at that location.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        unsafe {
            (self.vmctx_ptr() as *mut u8)
                .add(usize::try_from(offset).unwrap())
                .cast()
        }
    }

    /// The module this instance was created from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }

    /// The module metadata, without the `Arc` wrapper.
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }

    /// Shared access to the store's objects.
    // SAFETY: `self.context` is set at construction and the store is assumed
    // to outlive the instance.
    pub(crate) fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    /// Exclusive access to the store's objects.
    pub(crate) fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }

    /// Offsets of the `VMContext` fields for this module.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }

    /// The `VMFunctionImport` for the given imported-function index.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Base of the `VMFunctionImport` array inside the `VMContext`.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// The `VMTableImport` for the given imported-table index.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Base of the `VMTableImport` array inside the `VMContext`.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// The `VMMemoryImport` for the given imported-memory index.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Base of the `VMMemoryImport` array inside the `VMContext`.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// The `VMGlobalImport` for the given imported-global index.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Base of the `VMGlobalImport` array inside the `VMContext`.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// The shared tag id stored in the `VMContext` for the given tag index.
    #[cfg_attr(target_os = "windows", allow(dead_code))]
    pub(crate) fn shared_tag_ptr(&self, index: TagIndex) -> &VMSharedTagIndex {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.shared_tags_ptr().add(index) }
    }

    /// Base of the `VMSharedTagIndex` array inside the `VMContext`.
    pub(crate) fn shared_tags_ptr(&self) -> *mut VMSharedTagIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tag_ids_begin()) }
    }
199
    /// Copy of the indexed local `VMTableDefinition`.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    /// Overwrite the indexed local `VMTableDefinition` in the `VMContext`.
    #[allow(dead_code)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Pointer to the indexed `VMTableDefinition` inside the `VMContext`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Base of the `VMTableDefinition` array inside the `VMContext`.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }

    /// Base of the fixed funcref shadow array for the given table, or `None`
    /// when `VMOffsets` reports the table has no such array.
    fn fixed_funcref_table_ptr(
        &self,
        index: LocalTableIndex,
    ) -> Option<NonNull<VMCallerCheckedAnyfunc>> {
        let offset = self.offsets.vmctx_fixed_funcref_table_anyfuncs(index)?;
        Some(NonNull::new(unsafe { self.vmctx_plus_offset(offset) }).unwrap())
    }
232
    /// Mirror a single table element into the fixed funcref shadow array.
    /// No-op when the table has no such array.
    fn sync_fixed_funcref_table_element(
        &self,
        table_index: LocalTableIndex,
        index: u32,
        funcref: Option<VMFuncRef>,
    ) {
        let Some(base) = self.fixed_funcref_table_ptr(table_index) else {
            return;
        };
        // SAFETY: `index` is assumed in-bounds for the shadow array; callers
        // only invoke this after a successful `set` on the real table.
        unsafe {
            *base.as_ptr().add(index as usize) = anyfunc_from_funcref(funcref);
        }
    }

    /// Re-mirror every element of a local table into its fixed funcref shadow
    /// array. No-op when the table has no such array.
    fn sync_fixed_funcref_table(&self, table_index: LocalTableIndex) {
        let Some(base) = self.fixed_funcref_table_ptr(table_index) else {
            return;
        };
        let table = self.tables[table_index].get(self.context());
        for index in 0..table.size() {
            // Tables with a fixed funcref shadow are funcref-only.
            let TableElement::FuncRef(funcref) = table.get(index).unwrap() else {
                unreachable!("fixed funcref tables cannot contain externrefs");
            };
            unsafe {
                *base.as_ptr().add(index as usize) = anyfunc_from_funcref(funcref);
            }
        }
    }

    /// As `sync_fixed_funcref_table`, but accepts a module-level table index;
    /// imported tables are skipped.
    fn sync_fixed_funcref_table_by_index(&self, table_index: TableIndex) {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.sync_fixed_funcref_table(local_table_index);
        }
    }
267
    /// Copy of the `VMMemoryDefinition` for any memory, local or imported.
    #[allow(dead_code)]
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Copy of the indexed local `VMMemoryDefinition`.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    /// Overwrite the indexed local `VMMemoryDefinition` in the `VMContext`.
    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Pointer to the indexed `VMMemoryDefinition` inside the `VMContext`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Base of the `VMMemoryDefinition` array inside the `VMContext`.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }

    /// Borrow the `VMMemory` for any memory index, local or imported.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Mutably borrow the `VMMemory` for any memory index, local or imported.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Mutably borrow a locally-defined `VMMemory`.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
342
    /// Clone of the indexed local `VMGlobalDefinition`.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    /// Overwrite the indexed local `VMGlobalDefinition` in the `VMContext`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Pointer to the indexed `VMGlobalDefinition` inside the `VMContext`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.globals_ptr().add(index) }).unwrap()
    }

    /// Base of the `VMGlobalDefinition` array inside the `VMContext`.
    fn globals_ptr(&self) -> *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Pointer to the builtin-functions table inside the `VMContext`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }

    /// The `VMContext` trailer of this instance.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Raw pointer to the `VMContext` trailer.
    // NOTE: cast from `&` to `*mut` — callers must uphold aliasing rules.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
381
382 fn invoke_start_function(
384 &self,
385 config: &VMConfig,
386 trap_handler: Option<*const TrapHandlerFn<'static>>,
387 ) -> Result<(), Trap> {
388 let start_index = match self.module.start_function {
389 Some(idx) => idx,
390 None => return Ok(()),
391 };
392
393 let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
394 Some(local_index) => {
395 let body = self
396 .functions
397 .get(local_index)
398 .expect("function index is out of bounds")
399 .0;
400 (
401 body as *const _,
402 VMFunctionContext {
403 vmctx: self.vmctx_ptr(),
404 },
405 )
406 }
407 None => {
408 assert_lt!(start_index.index(), self.module.num_imported_functions);
409 let import = self.imported_function(start_index);
410 (import.body, import.environment)
411 }
412 };
413
414 let sig = self.module.functions[start_index];
415 let trampoline = self.function_call_trampolines[sig];
416 let mut values_vec = vec![];
417
418 unsafe {
419 wasmer_call_trampoline(
423 trap_handler,
424 config,
425 callee_vmctx,
426 trampoline,
427 callee_address,
428 values_vec.as_mut_ptr(),
429 )
430 }
431 }
432
    /// Byte offset of the `vmctx` field inside `Instance` (it is the last
    /// field; everything after it is the dynamically-sized `VMContext` data).
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

    /// Recover the local table index from a pointer into the `VMContext`'s
    /// `VMTableDefinition` array.
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        // Pointer subtraction in element units; `table` must point into the
        // array for the result to be meaningful (checked by the assert).
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Recover the local memory index from a pointer into the `VMContext`'s
    /// `VMMemoryDefinition` array.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }
462
463 pub(crate) fn memory_grow<IntoPages>(
468 &mut self,
469 memory_index: LocalMemoryIndex,
470 delta: IntoPages,
471 ) -> Result<Pages, MemoryError>
472 where
473 IntoPages: Into<Pages>,
474 {
475 let mem = *self
476 .memories
477 .get(memory_index)
478 .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
479 mem.get_mut(self.context_mut()).grow(delta.into())
480 }
481
482 pub(crate) unsafe fn imported_memory_grow<IntoPages>(
491 &mut self,
492 memory_index: MemoryIndex,
493 delta: IntoPages,
494 ) -> Result<Pages, MemoryError>
495 where
496 IntoPages: Into<Pages>,
497 {
498 let import = self.imported_memory(memory_index);
499 let mem = import.handle;
500 mem.get_mut(self.context_mut()).grow(delta.into())
501 }
502
503 pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
505 let mem = *self
506 .memories
507 .get(memory_index)
508 .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
509 mem.get(self.context()).size()
510 }
511
512 pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
518 let import = self.imported_memory(memory_index);
519 let mem = import.handle;
520 mem.get(self.context()).size()
521 }
522
523 pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
525 let table = self
526 .tables
527 .get(table_index)
528 .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
529 table.get(self.context()).size()
530 }
531
532 pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
537 let import = self.imported_table(table_index);
538 let table = import.handle;
539 table.get(self.context()).size()
540 }
541
542 pub(crate) fn table_grow(
547 &mut self,
548 table_index: LocalTableIndex,
549 delta: u32,
550 init_value: TableElement,
551 ) -> Option<u32> {
552 let table = *self
553 .tables
554 .get(table_index)
555 .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
556 table.get_mut(self.context_mut()).grow(delta, init_value)
557 }
558
559 pub(crate) unsafe fn imported_table_grow(
564 &mut self,
565 table_index: TableIndex,
566 delta: u32,
567 init_value: TableElement,
568 ) -> Option<u32> {
569 let import = self.imported_table(table_index);
570 let table = import.handle;
571 table.get_mut(self.context_mut()).grow(delta, init_value)
572 }
573
574 pub(crate) fn table_get(
576 &self,
577 table_index: LocalTableIndex,
578 index: u32,
579 ) -> Option<TableElement> {
580 let table = self
581 .tables
582 .get(table_index)
583 .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
584 table.get(self.context()).get(index)
585 }
586
587 pub(crate) unsafe fn imported_table_get(
592 &self,
593 table_index: TableIndex,
594 index: u32,
595 ) -> Option<TableElement> {
596 let import = self.imported_table(table_index);
597 let table = import.handle;
598 table.get(self.context()).get(index)
599 }
600
    /// Write an element into a locally-defined table, keeping any fixed
    /// funcref shadow array in sync.
    ///
    /// # Panics
    /// Panics if `table_index` does not name a local table.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        // Capture the funcref before `val` is moved into `set` below.
        let funcref = match &val {
            TableElement::FuncRef(funcref) => Some(*funcref),
            TableElement::ExternRef(_) => None,
        };
        table.get_mut(self.context_mut()).set(index, val)?;
        // Only mirrored after a successful `set`, so the shadow never holds
        // an entry the real table rejected.
        if let Some(funcref) = funcref {
            self.sync_fixed_funcref_table_element(table_index, index, funcref);
        }
        Ok(())
    }

    /// Write an element into an imported table.
    ///
    /// # Safety
    /// `table_index` must name an imported table of this instance.
    // NOTE(review): unlike `table_set`, this does not sync a fixed funcref
    // shadow array — presumably imported tables never have one here; confirm.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
637
638 pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
640 if function_index == FunctionIndex::reserved_value() {
641 None
642 } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
643 Some(VMFuncRef(NonNull::from(
644 &self.funcrefs[local_function_index],
645 )))
646 } else {
647 Some(VMFuncRef(self.imported_funcrefs[function_index]))
648 }
649 }
650
    /// The `table.init` operation: copy `len` elements of passive element
    /// segment `elem_index`, starting at `src`, into `table_index` at `dst`.
    ///
    /// # Errors
    /// Returns a `TableAccessOutOfBounds` trap when either range does not fit
    /// (a dropped/absent segment counts as empty).
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table_handle(table_index);
        // SAFETY: `&mut self` guarantees exclusive access; the store behind
        // `self.context` is assumed to outlive this call.
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        // A dropped or never-present segment behaves as an empty one.
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Bounds checks: both additions must not overflow and both ranges
        // must fit their respective containers.
        if src.checked_add(len).is_none_or(|n| n as usize > elem.len())
            || dst.checked_add(len).is_none_or(|m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        // Keep any fixed funcref shadow array consistent with the new entries.
        self.sync_fixed_funcref_table_by_index(table_index);

        Ok(())
    }
691
    /// The `table.fill` operation: write `item` into `len` consecutive slots
    /// of `table_index` starting at `start_index`.
    ///
    /// # Errors
    /// Returns a `TableAccessOutOfBounds` trap when the target range does not
    /// fit the table (including overflow of `start_index + len`).
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index
            .checked_add(len)
            .is_none_or(|n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        // Keep any fixed funcref shadow array consistent with the new entries.
        self.sync_fixed_funcref_table_by_index(table_index);

        Ok(())
    }
726
    /// The `table.copy` operation: copy `len` elements from `src_table_index`
    /// at `src` into `dst_table_index` at `dst`.
    ///
    /// Handles three aliasing cases: same module-level index, two indices
    /// resolving to the same underlying table (overlap-safe `copy_within`),
    /// and two distinct tables (plain `copy`).
    pub(crate) fn table_copy(
        &mut self,
        dst_table_index: TableIndex,
        src_table_index: TableIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let result = if dst_table_index == src_table_index {
            let table = self.get_table(dst_table_index);
            table.copy_within(dst, src, len)
        } else {
            let dst_table = self.get_table_handle(dst_table_index);
            let src_table = self.get_table_handle(src_table_index);
            if dst_table == src_table {
                // Same underlying table reached through two indices (e.g. an
                // import of a local table): must still use the overlap-safe path.
                unsafe {
                    dst_table
                        .get_mut(&mut *self.context)
                        .copy_within(dst, src, len)
                }
            } else {
                // SAFETY: distinct handles, so the mutable and shared borrows
                // of the store's objects refer to different tables.
                unsafe {
                    dst_table.get_mut(&mut *self.context).copy(
                        src_table.get(&*self.context),
                        dst,
                        src,
                        len,
                    )
                }
            }
        };
        // Sync runs even though `result` may be an error only after the `?`
        // below would return — propagate first, then mirror.
        result?;
        self.sync_fixed_funcref_table_by_index(dst_table_index);

        Ok(())
    }
764
765 pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
767 let mut passive_elements = self.passive_elements.borrow_mut();
770 passive_elements.remove(&elem_index);
771 }
774
    /// The `memory.copy` operation on a locally-defined memory.
    ///
    /// # Errors
    /// Traps when either range is out of bounds of the memory.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        // SAFETY: the definition was just read from this instance's VMContext.
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// The `memory.copy` operation on an imported memory.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_copy(memory, dst, src, len) }
    }

    /// The `memory.fill` operation on a locally-defined memory.
    ///
    /// # Errors
    /// Traps when the target range is out of bounds of the memory.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// The `memory.fill` operation on an imported memory.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_fill(memory, dst, val, len) }
    }
843
    /// The `memory.init` operation: copy `len` bytes of passive data segment
    /// `data_index`, starting at `src`, into `memory_index` at `dst`.
    ///
    /// # Errors
    /// Returns a `HeapAccessOutOfBounds` trap when either range does not fit
    /// (a dropped/absent segment counts as empty).
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        // A dropped or never-present segment behaves as an empty one.
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Bounds checks: additions must not overflow and both ranges must fit.
        if src.checked_add(len).is_none_or(|n| n as usize > data.len())
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
876
877 pub(crate) fn data_drop(&self, data_index: DataIndex) {
879 let mut passive_data = self.passive_data.borrow_mut();
880 passive_data.remove(&data_index);
881 }
882
883 pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
886 if let Some(local_table_index) = self.module.local_table_index(table_index) {
887 self.get_local_table(local_table_index)
888 } else {
889 self.get_foreign_table(table_index)
890 }
891 }
892
893 pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
895 let table = self.tables[index];
896 table.get_mut(self.context_mut())
897 }
898
899 pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
901 let import = self.imported_table(index);
902 let table = import.handle;
903 table.get_mut(self.context_mut())
904 }
905
906 pub(crate) fn get_table_handle(
909 &mut self,
910 table_index: TableIndex,
911 ) -> InternalStoreHandle<VMTable> {
912 if let Some(local_table_index) = self.module.local_table_index(table_index) {
913 self.tables[local_table_index]
914 } else {
915 self.imported_table(table_index).handle
916 }
917 }
918
919 unsafe fn memory_wait(
922 memory: &mut VMMemory,
923 dst: u32,
924 expected: ExpectedValue,
925 timeout: i64,
926 ) -> Result<u32, Trap> {
927 let timeout = if timeout < 0 {
928 None
929 } else {
930 Some(std::time::Duration::from_nanos(timeout as u64))
931 };
932 match unsafe { memory.do_wait(dst, expected, timeout) } {
933 Ok(count) => Ok(count),
934 Err(_err) => {
935 Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
938 }
939 }
940 }
941
    /// The `memory.atomic.wait32` operation on a locally-defined memory.
    ///
    /// First performs the atomic compare (`memory32_atomic_check32`); only if
    /// the value still equals `val` (check returns 0) does the thread park.
    pub(crate) fn local_memory_wait32(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                // Value matched: actually wait on the location.
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// The `memory.atomic.wait32` operation on an imported memory.
    pub(crate) fn imported_memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U32(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// The `memory.atomic.wait64` operation on a locally-defined memory.
    pub(crate) fn local_memory_wait64(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// The `memory.atomic.wait64` operation on an imported memory.
    pub(crate) fn imported_memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = unsafe { Self::memory_wait(memory, dst, ExpectedValue::U64(val), timeout)? };
            }
            Ok(ret)
        } else {
            ret
        }
    }
1055
1056 pub(crate) fn local_memory_notify(
1058 &mut self,
1059 memory_index: LocalMemoryIndex,
1060 dst: u32,
1061 count: u32,
1062 ) -> Result<u32, Trap> {
1063 let memory = self.get_local_vmmemory_mut(memory_index);
1064 Ok(memory.do_notify(dst, count))
1065 }
1066
1067 pub(crate) fn imported_memory_notify(
1069 &mut self,
1070 memory_index: MemoryIndex,
1071 dst: u32,
1072 count: u32,
1073 ) -> Result<u32, Trap> {
1074 let memory = self.get_vmmemory_mut(memory_index);
1075 Ok(memory.do_notify(dst, count))
1076 }
1077}
1078
/// An owning handle to an `Instance` plus the `VMContext` data allocated
/// immediately after it; freed with the recorded layout on drop.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// Layout used to allocate `instance` (struct + trailing `VMContext`
    /// data); required to deallocate correctly.
    instance_layout: Layout,

    /// Pointer to the heap allocation holding the `Instance`.
    instance: NonNull<Instance>,
}
1099
impl Drop for VMInstance {
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // SAFETY: we own the allocation. Run `Instance`'s destructor in
            // place first, then free the raw block (which also covers the
            // trailing `VMContext` data) with the layout recorded at
            // allocation time.
            instance_ptr.drop_in_place();
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1115
1116impl VMInstance {
1117 #[allow(clippy::too_many_arguments)]
    /// Create a new `VMInstance`, writing imports, tags, builtin functions,
    /// passive segments and global initializers into the freshly allocated
    /// `VMContext`.
    ///
    /// The instance is not fully usable until `finish_instantiation` has run
    /// (table/memory initializers and the start function).
    ///
    /// # Safety
    /// The caller must supply an `allocator` sized for `module`, and imports,
    /// signatures and store objects that are all consistent with `module`.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        tags: BoxedSlice<TagIndex, InternalStoreHandle<VMTag>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSignatureHash>,
    ) -> Result<Self, Trap> {
        unsafe {
            // Shared tag ids to be copied into the VMContext below.
            let vmctx_tags = tags
                .values()
                .map(|m: &InternalStoreHandle<VMTag>| VMSharedTagIndex::new(m.index() as u32))
                .collect::<PrimaryMap<TagIndex, VMSharedTagIndex>>()
                .into_boxed_slice();
            // Passive data segments start out as the module's, behind Arc so
            // `memory.init` can read them cheaply.
            let passive_data = RefCell::new(
                module
                    .passive_data
                    .clone()
                    .into_iter()
                    .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                    .collect::<HashMap<_, _>>(),
            );

            let handle = {
                let offsets = allocator.offsets().clone();
                // Placeholders; replaced by `build_funcrefs` once the
                // instance has a stable address (funcrefs point into it).
                let funcrefs = PrimaryMap::new().into_boxed_slice();
                let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
                // Move all the storage into the heap allocation managed by
                // the allocator.
                let instance = Instance {
                    module,
                    context,
                    offsets,
                    memories: finished_memories,
                    tables: finished_tables,
                    tags,
                    globals: finished_globals,
                    functions: finished_functions,
                    function_call_trampolines: finished_function_call_trampolines,
                    passive_elements: Default::default(),
                    passive_data,
                    funcrefs,
                    imported_funcrefs,
                    vmctx: VMContext {},
                };

                let mut instance_handle = allocator.into_vminstance(instance);

                // Set the funcrefs after we've built the instance: they hold
                // pointers into the (now pinned) instance allocation.
                {
                    let instance = instance_handle.instance_mut();
                    let vmctx_ptr = instance.vmctx_ptr();
                    (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                        &instance.module,
                        context,
                        &imports,
                        &instance.functions,
                        &vmshared_signatures,
                        &instance.function_call_trampolines,
                        vmctx_ptr,
                    );
                    // Populate the fixed funcref shadow arrays for any tables
                    // that have them.
                    for local_table_index in instance.tables.keys() {
                        instance.sync_fixed_funcref_table(local_table_index);
                    }
                }

                instance_handle
            };
            let instance = handle.instance();

            // Copy the import records and tag ids into the VMContext arrays.
            ptr::copy(
                vmctx_tags.values().as_slice().as_ptr(),
                instance.shared_tags_ptr(),
                vmctx_tags.len(),
            );
            ptr::copy(
                imports.functions.values().as_slice().as_ptr(),
                instance.imported_functions_ptr(),
                imports.functions.len(),
            );
            ptr::copy(
                imports.tables.values().as_slice().as_ptr(),
                instance.imported_tables_ptr(),
                imports.tables.len(),
            );
            ptr::copy(
                imports.memories.values().as_slice().as_ptr(),
                instance.imported_memories_ptr(),
                imports.memories.len(),
            );
            ptr::copy(
                imports.globals.values().as_slice().as_ptr(),
                instance.imported_globals_ptr(),
                imports.globals.len(),
            );
            // Install the builtin (libcall) function table.
            ptr::write(
                instance.builtin_functions_ptr(),
                VMBuiltinFunctionsArray::initialized(),
            );

            // Passive element segments and global initial values can be set
            // up now; tables/memories wait for `finish_instantiation`.
            initialize_passive_elements(instance);
            initialize_globals(instance);

            Ok(handle)
        }
    }
1255
    /// Shared access to the owned `Instance`.
    // SAFETY: `self.instance` is valid for the lifetime of `self`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Exclusive access to the owned `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }
1265
    /// Finish instantiation: apply table and memory initializers, then run
    /// the start function (if any).
    ///
    /// # Safety
    /// Must be called exactly once, after `VMInstance::new` has written the
    /// imports and globals into the `VMContext`.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initialization data: element segments first, then data
        // segments.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The start function runs last, once the instance is fully set up.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }
1288
    /// The `VMContext` of the owned instance.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Raw pointer to the `VMContext` of the owned instance.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// The `VMOffsets` describing this instance's `VMContext` layout.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// The module this instance was created from.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// The module metadata, without the `Arc` wrapper.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }
1315
1316 pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
1318 let export = *self.module_ref().exports.get(field)?;
1319
1320 Some(self.lookup_by_declaration(export))
1321 }
1322
    /// Resolve an `ExportIndex` to the corresponding store handle.
    ///
    /// For locally-defined functions this creates a fresh `VMFunction` store
    /// object on each call (takes `&mut self` for that reason); other kinds
    /// reuse the existing handles.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // Local function: wrap its instance-owned anyfunc in a
                    // new store-level VMFunction.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    // Imported function: reuse the imported handle.
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = instance.tags[index];
                VMExtern::Tag(handle)
            }
        }
    }
1388
1389 pub fn exports(&self) -> indexmap::map::Iter<'_, String, ExportIndex> {
1395 self.module().exports.iter()
1396 }
1397
1398 pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
1400 self.instance().memory_index(memory)
1401 }
1402
1403 pub fn memory_grow<IntoPages>(
1408 &mut self,
1409 memory_index: LocalMemoryIndex,
1410 delta: IntoPages,
1411 ) -> Result<Pages, MemoryError>
1412 where
1413 IntoPages: Into<Pages>,
1414 {
1415 self.instance_mut().memory_grow(memory_index, delta)
1416 }
1417
1418 pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
1420 self.instance().table_index(table)
1421 }
1422
1423 pub fn table_grow(
1428 &mut self,
1429 table_index: LocalTableIndex,
1430 delta: u32,
1431 init_value: TableElement,
1432 ) -> Option<u32> {
1433 self.instance_mut()
1434 .table_grow(table_index, delta, init_value)
1435 }
1436
1437 pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
1441 self.instance().table_get(table_index, index)
1442 }
1443
1444 pub fn table_set(
1448 &mut self,
1449 table_index: LocalTableIndex,
1450 index: u32,
1451 val: TableElement,
1452 ) -> Result<(), Trap> {
1453 self.instance_mut().table_set(table_index, index, val)
1454 }
1455
1456 pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
1458 self.instance_mut().get_local_table(index)
1459 }
1460}
1461
/// Return the full contents of the memory targeted by a data initializer as a
/// mutable byte slice.
///
/// The memory is resolved from `init.location.memory_index`: a locally-defined
/// memory is read via `instance.memory(..)`, an imported one via the import's
/// `definition` pointer.
///
/// # Safety
///
/// The returned slice aliases the instance's linear memory: the caller must
/// ensure no other reference to that memory is live for the slice's lifetime,
/// that `memory.base` stays valid (e.g. the memory is not grown/moved while
/// the slice is held), and — for imported memories — that
/// `import.definition` points to a valid `VMMemoryDefinition`.
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    unsafe {
        let memory = if let Some(local_memory_index) = instance
            .module
            .local_memory_index(init.location.memory_index)
        {
            instance.memory(local_memory_index)
        } else {
            let import = instance.imported_memory(init.location.memory_index);
            *import.definition.as_ref()
        };
        // `current_length` is in bytes here (used directly as the slice length).
        slice::from_raw_parts_mut(memory.base, memory.current_length)
    }
}
1482
1483fn get_global(index: GlobalIndex, instance: &Instance) -> RawValue {
1484 unsafe {
1485 if let Some(local_global_index) = instance.module.local_global_index(index) {
1486 instance.global(local_global_index).val
1487 } else {
1488 instance.imported_global(index).definition.as_ref().val
1489 }
1490 }
1491}
1492
/// Result of evaluating a constant initializer expression: either a 32-bit or
/// a 64-bit integer, depending on the width of the expression's first opcode.
enum EvaluatedInitExpr {
    I32(i32),
    I64(i64),
}
1497
/// Evaluate a constant initializer expression with a small operand stack.
///
/// The width (i32 vs i64) is chosen from the expression's first opcode via
/// `is_32bit_expression()`; the matching branch then interprets only the
/// const / global.get / add / sub / mul opcodes of that width, with wrapping
/// arithmetic.
///
/// # Panics
/// Panics on an empty expression, an opcode of the wrong width or an
/// unsupported opcode, an underflowing operand stack, or a final stack that
/// does not contain exactly one value.
fn eval_init_expr(expr: &InitExpr, instance: &Instance) -> EvaluatedInitExpr {
    if expr
        .ops()
        .first()
        .expect("missing expression")
        .is_32bit_expression()
    {
        // 32-bit evaluation.
        let mut stack = Vec::with_capacity(expr.ops().len());
        for op in expr.ops() {
            match *op {
                InitExprOp::I32Const(value) => stack.push(value),
                InitExprOp::GlobalGetI32(global) => {
                    // SAFETY: reading the `i32` union field; the 32-bit branch
                    // only sees globals tagged as i32 by the opcode.
                    stack.push(unsafe { get_global(global, instance).i32 })
                }
                InitExprOp::I32Add => {
                    let rhs = stack.pop().expect("invalid init expr stack for i32.add");
                    let lhs = stack.pop().expect("invalid init expr stack for i32.add");
                    stack.push(lhs.wrapping_add(rhs));
                }
                InitExprOp::I32Sub => {
                    let rhs = stack.pop().expect("invalid init expr stack for i32.sub");
                    let lhs = stack.pop().expect("invalid init expr stack for i32.sub");
                    stack.push(lhs.wrapping_sub(rhs));
                }
                InitExprOp::I32Mul => {
                    let rhs = stack.pop().expect("invalid init expr stack for i32.mul");
                    let lhs = stack.pop().expect("invalid init expr stack for i32.mul");
                    stack.push(lhs.wrapping_mul(rhs));
                }
                _ => {
                    panic!("unexpected init expr statement: {op:?}");
                }
            }
        }
        // A well-formed expression leaves exactly one value on the stack.
        EvaluatedInitExpr::I32(
            stack
                .into_iter()
                .exactly_one()
                .expect("invalid init expr stack shape"),
        )
    } else {
        // 64-bit evaluation: same structure as above, over i64 opcodes.
        let mut stack = Vec::with_capacity(expr.ops().len());
        for op in expr.ops() {
            match *op {
                InitExprOp::I64Const(value) => stack.push(value),
                InitExprOp::GlobalGetI64(global) => {
                    // SAFETY: reading the `i64` union field; the 64-bit branch
                    // only sees globals tagged as i64 by the opcode.
                    stack.push(unsafe { get_global(global, instance).i64 })
                }
                InitExprOp::I64Add => {
                    let rhs = stack.pop().expect("invalid init expr stack for i64.add");
                    let lhs = stack.pop().expect("invalid init expr stack for i64.add");
                    stack.push(lhs.wrapping_add(rhs));
                }
                InitExprOp::I64Sub => {
                    let rhs = stack.pop().expect("invalid init expr stack for i64.sub");
                    let lhs = stack.pop().expect("invalid init expr stack for i64.sub");
                    stack.push(lhs.wrapping_sub(rhs));
                }
                InitExprOp::I64Mul => {
                    let rhs = stack.pop().expect("invalid init expr stack for i64.mul");
                    let lhs = stack.pop().expect("invalid init expr stack for i64.mul");
                    stack.push(lhs.wrapping_mul(rhs));
                }
                _ => {
                    panic!("unexpected init expr statement: {op:?}");
                }
            }
        }
        EvaluatedInitExpr::I64(
            stack
                .into_iter()
                .exactly_one()
                .expect("invalid init expr stack shape"),
        )
    }
}
1574
/// Apply the module's active table initializers (element segments) to the
/// instance's tables.
///
/// For each initializer: evaluate the (i32) offset expression, bounds-check
/// `start + len` against the table size, then write funcrefs (for FuncRef
/// tables) or null externrefs (otherwise) into the table. Returns a
/// `TableAccessOutOfBounds` trap on a negative or out-of-range offset.
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    // Clone the Arc so the module can be read while `instance` is borrowed
    // mutably inside the loop.
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.offset_expr, instance) else {
            panic!("unexpected expression type, expected i32");
        };
        if start < 0 {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }
        let start = start as usize;
        let table = instance.get_table_handle(init.table_index);
        // SAFETY: `instance.context` points at the live store objects for this
        // instance; the handle was just obtained from the same instance.
        let table = unsafe { table.get_mut(&mut *instance.context) };

        // `checked_add` guards against overflow of `start + len` before the
        // range check against the current table size.
        if start
            .checked_add(init.elements.len())
            .is_none_or(|end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            // FuncRef table: resolve each function index to its funcref slot.
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set_with_construction(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                        true,
                    )
                    .unwrap();
            }
        } else {
            // Non-FuncRef table: fill the range with null externrefs.
            for i in 0..init.elements.len() {
                table
                    .set_with_construction(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                        true,
                    )
                    .unwrap();
            }
        }

        // Keep the instance's funcref view of this table in sync after the
        // direct writes above.
        instance.sync_fixed_funcref_table_by_index(init.table_index);
    }

    Ok(())
}
1624
1625fn initialize_passive_elements(instance: &Instance) {
1629 let mut passive_elements = instance.passive_elements.borrow_mut();
1630 debug_assert!(
1631 passive_elements.is_empty(),
1632 "should only be called once, at initialization time"
1633 );
1634
1635 passive_elements.extend(instance.module.passive_elements.iter().filter_map(
1636 |(&idx, segments)| -> Option<(ElemIndex, Box<[Option<VMFuncRef>]>)> {
1637 if segments.is_empty() {
1638 None
1639 } else {
1640 Some((
1641 idx,
1642 segments
1643 .iter()
1644 .map(|s| instance.func_ref(*s))
1645 .collect::<Box<[Option<VMFuncRef>]>>(),
1646 ))
1647 }
1648 },
1649 ));
1650}
1651
/// Apply the module's active data initializers to the instance's memories.
///
/// For each initializer: evaluate the (i32) offset expression, bounds-check
/// `start + data.len()` against the memory's current byte length, then copy
/// the data in. Returns a `HeapAccessOutOfBounds` trap on a negative or
/// out-of-range offset.
fn initialize_memories(
    instance: &mut Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.get_vmmemory(init.location.memory_index);

        let EvaluatedInitExpr::I32(start) = eval_init_expr(&init.location.offset_expr, instance)
        else {
            panic!("unexpected expression type, expected i32");
        };
        if start < 0 {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let start = start as usize;
        // SAFETY: `vmmemory()` yields a pointer to this memory's live
        // definition; it is only read (`current_length`) before the
        // bounds-checked `initialize_with_data` call.
        unsafe {
            let current_length = memory.vmmemory().as_ref().current_length;
            // `checked_add` guards against overflow of `start + len` before
            // comparing against the current memory length.
            if start
                .checked_add(init.data.len())
                .is_none_or(|end| end > current_length)
            {
                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
            }
            memory.initialize_with_data(start, init.data)?;
        }
    }

    Ok(())
}
1682
/// Write the module's global initializers into the instance's global
/// definitions.
///
/// Each initializer stores directly into the raw `VMGlobalDefinition` slot via
/// `global_ptr`; `GetGlobal` copies the whole definition from another (local
/// or imported) global, and `Expr` evaluates a constant expression.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        // SAFETY: `global_ptr` points at this instance's own global slot, and
        // for `GetGlobal` on an import the `definition` pointer is valid for a
        // live instance. Union-field writes pick the variant matching the
        // initializer's type.
        unsafe {
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    // Copy the source global's whole definition, wherever it
                    // lives (local slot or imported definition).
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // Null reference is represented as a zero funcref.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
                GlobalInit::Expr(expr) => match eval_init_expr(expr, instance) {
                    EvaluatedInitExpr::I32(value) => (*to).val.i32 = value,
                    EvaluatedInitExpr::I64(value) => (*to).val.i64 = value,
                },
            }
        }
    }
}
1716
1717fn anyfunc_from_funcref(funcref: Option<VMFuncRef>) -> VMCallerCheckedAnyfunc {
1718 match funcref {
1719 Some(funcref) => unsafe { *funcref.0.as_ptr() },
1720 None => VMCallerCheckedAnyfunc::null(),
1721 }
1722}
1723
/// Build the two funcref tables for a new instance:
/// the anyfuncs for locally-defined functions (indexed by
/// `LocalFunctionIndex`) and the pointers to imported functions' anyfuncs
/// (indexed by `FunctionIndex`).
///
/// Push order must match the entity indices: imports are pushed in the
/// `imports.functions` order, locals in `finished_functions` order.
fn build_funcrefs(
    module_info: &ModuleInfo,
    ctx: &StoreObjects,
    imports: &Imports,
    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSignatureHash>,
    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
    vmctx_ptr: *mut VMContext,
) -> (
    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
) {
    let mut func_refs =
        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);

    // Imported functions: record pointers to the anyfuncs owned by the
    // imports' store entries.
    for import in imports.functions.values() {
        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
    }

    // Local functions: assemble a fresh anyfunc from the compiled body, its
    // signature hash, the shared trampoline, and this instance's vmctx.
    for (local_index, func_ptr) in finished_functions.iter() {
        let index = module_info.func_index(local_index);
        let sig_index = module_info.functions[index];
        let type_signature_hash = vmshared_signatures[sig_index];
        let call_trampoline = function_call_trampolines[sig_index];
        let anyfunc = VMCallerCheckedAnyfunc {
            func_ptr: func_ptr.0,
            type_signature_hash,
            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
            call_trampoline,
        };
        func_refs.push(anyfunc);
    }
    (
        func_refs.into_boxed_slice(),
        imported_func_refs.into_boxed_slice(),
    )
}