use crate::VMFunctionBody;
use crate::VMTable;
use crate::global::VMGlobal;
use crate::instance::Instance;
use crate::memory::VMMemory;
use crate::store::InternalStoreHandle;
use crate::trap::{Trap, TrapCode};
use crate::{VMBuiltinFunctionIndex, VMFunction};
use std::convert::TryFrom;
use std::hash::{Hash, Hasher};
use std::ptr::{self, NonNull};
use std::sync::atomic::{AtomicPtr, Ordering};
use wasmer_types::RawValue;

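/// Union representing the first parameter passed when calling a function.
///
/// It may either be a `*mut VMContext` to Wasm functions defined within an
/// instance, or a `*mut c_void` to host functions.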
#[derive(Copy, Clone, Eq)]
#[repr(C)]
pub union VMFunctionContext {
    /// A pointer to the `VMContext` for Wasm functions defined within an
    /// instance.
    pub vmctx: *mut VMContext,
    /// A pointer to the host environment for host-defined functions.
    pub host_env: *mut std::ffi::c_void,
}

impl VMFunctionContext {
    /// Check whether the stored pointer is null. Both union variants share
    /// the same representation, so reading `host_env` is valid either way.
    pub fn is_null(&self) -> bool {
        unsafe { self.host_env.is_null() }
    }
}

impl std::fmt::Debug for VMFunctionContext {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("VMFunctionContext")
            .field("vmctx_or_hostenv", unsafe { &self.host_env })
            .finish()
    }
}

impl std::cmp::PartialEq for VMFunctionContext {
    fn eq(&self, rhs: &Self) -> bool {
        unsafe { std::ptr::eq(self.host_env, rhs.host_env) }
    }
}

impl std::hash::Hash for VMFunctionContext {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        unsafe {
            self.vmctx.hash(state);
        }
    }
}

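/// An imported function.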
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: *const VMFunctionBody,

    /// A pointer to the `VMContext` or host env that owns the function.
    pub environment: VMFunctionContext,

    /// A handle to the `VMFunction` in the store.
    pub handle: InternalStoreHandle<VMFunction>,
}

#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;
    use wasmer_types::VMOffsets;

    #[test]
    fn check_vmfunction_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMFunctionImport>(),
            usize::from(offsets.size_of_vmfunction_import())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, body),
            usize::from(offsets.vmfunction_import_body())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, environment),
            usize::from(offsets.vmfunction_import_vmctx())
        );
    }
}

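/// The context used by dynamic functions: the address of the underlying
/// function together with the context value it receives.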
#[repr(C)]
pub struct VMDynamicFunctionContext<T> {
    /// The address of the inner dynamic function.
    pub address: *const VMFunctionBody,

    /// The context that the inner dynamic function will receive.
    pub ctx: T,
}

// SAFETY: the `ctx` itself is required to be `Send`; `address` is a plain
// function pointer that may be passed between threads.
unsafe impl<T: Sized + Send + Sync> Send for VMDynamicFunctionContext<T> {}
// SAFETY: the `ctx` itself is required to be `Sync`; `address` is a plain
// function pointer that may be shared between threads.
unsafe impl<T: Sized + Send + Sync> Sync for VMDynamicFunctionContext<T> {}

impl<T: Sized + Clone + Send + Sync> Clone for VMDynamicFunctionContext<T> {
    fn clone(&self) -> Self {
        Self {
            address: self.address,
            ctx: self.ctx.clone(),
        }
    }
}

#[cfg(test)]
mod test_vmdynamicfunction_import_context {
    use super::VMDynamicFunctionContext;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmdynamicfunction_import_context_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMDynamicFunctionContext<usize>>(),
            usize::from(offsets.size_of_vmdynamicfunction_import_context())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, address),
            usize::from(offsets.vmdynamicfunction_import_context_address())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, ctx),
            usize::from(offsets.vmdynamicfunction_import_context_ctx())
        );
    }
}

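/// The kind of underlying VM function.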
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
    /// A static function, called with the native calling convention:
    /// `extern "C" (vmctx, arg1, arg2, ...) -> (result1, ...)`.
    Static,

    /// A dynamic function, called through a trampoline that packs the
    /// arguments into a value vector.
    Dynamic,
}

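/// An imported table.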
#[derive(Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub definition: NonNull<VMTableDefinition>,

    /// A handle to the `VMTable` in the store.
    pub handle: InternalStoreHandle<VMTable>,
}

#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableImport>(),
            usize::from(offsets.size_of_vmtable_import())
        );
        assert_eq!(
            offset_of!(VMTableImport, definition),
            usize::from(offsets.vmtable_import_definition())
        );
    }
}

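/// An imported memory.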
#[derive(Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub definition: NonNull<VMMemoryDefinition>,

    /// A handle to the `VMMemory` in the store.
    pub handle: InternalStoreHandle<VMMemory>,
}

#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryImport>(),
            usize::from(offsets.size_of_vmmemory_import())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, definition),
            usize::from(offsets.vmmemory_import_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, handle),
            usize::from(offsets.vmmemory_import_handle())
        );
    }
}

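/// An imported global.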
#[derive(Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub definition: NonNull<VMGlobalDefinition>,

    /// A handle to the `VMGlobal` in the store.
    pub handle: InternalStoreHandle<VMGlobal>,
}

// SAFETY: this is plain data; it's the user's responsibility to synchronize
// access to the pointed-to global definition between threads.
unsafe impl Send for VMGlobalImport {}
// SAFETY: same as `Send` above; sharing the import by reference gives no
// more access than passing it by value.
unsafe impl Sync for VMGlobalImport {}

#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMGlobalImport>(),
            usize::from(offsets.size_of_vmglobal_import())
        );
        assert_eq!(
            offset_of!(VMGlobalImport, definition),
            usize::from(offsets.vmglobal_import_definition())
        );
    }
}

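/// Perform the `memory.copy` operation for a 32-bit memory.
///
/// # Errors
///
/// Returns a `Trap` error when the source or destination ranges are out of
/// bounds.
///
/// # Safety
/// The memory is not copied atomically and is not synchronized: it's the
/// caller's responsibility to synchronize.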
pub(crate) unsafe fn memory_copy(
    mem: &VMMemoryDefinition,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    unsafe {
        if src
            .checked_add(len)
            .is_none_or(|n| usize::try_from(n).unwrap() > mem.current_length)
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > mem.current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = usize::try_from(dst).unwrap();
        let src = usize::try_from(src).unwrap();

        // Bounds and casts are checked above; by this point we know
        // everything is in range.
        let dst = mem.base.add(dst);
        let src = mem.base.add(src);
        ptr::copy(src, dst, len as usize);

        Ok(())
    }
}

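/// Perform the `memory.fill` operation for a 32-bit memory.
///
/// # Errors
///
/// Returns a `Trap` error when the destination range is out of bounds.
///
/// # Safety
/// The memory is not filled atomically and is not synchronized: it's the
/// caller's responsibility to synchronize.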
pub(crate) unsafe fn memory_fill(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
    len: u32,
) -> Result<(), Trap> {
    unsafe {
        if dst
            .checked_add(len)
            .is_none_or(|m| usize::try_from(m).unwrap() > mem.current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        let val = val as u8;

        // Bounds and casts are checked above; by this point we know
        // everything is in range.
        let dst = mem.base.offset(dst);
        ptr::write_bytes(dst, val, len as usize);

        Ok(())
    }
}

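/// Atomically load the 32-bit value at `dst` and compare it against `val`,
/// as used by the `memory.atomic.wait32` instruction. Returns `0` when the
/// values are equal and `1` otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when `dst` is out of bounds or not 4-byte aligned.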
pub(crate) unsafe fn memory32_atomic_check32(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
) -> Result<u32, Trap> {
    unsafe {
        if usize::try_from(dst).unwrap() > mem.current_length {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        if dst & 0b11 != 0 {
            return Err(Trap::lib(TrapCode::UnalignedAtomic));
        }

        // Bounds and alignment are checked above; by this point we know
        // the access is in range and 4-byte aligned.
        let dst = mem.base.offset(dst) as *mut u32;
        let atomic_dst = AtomicPtr::new(dst);
        let read_val = *atomic_dst.load(Ordering::Acquire);
        let ret = if read_val == val { 0 } else { 1 };
        Ok(ret)
    }
}

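/// Atomically load the 64-bit value at `dst` and compare it against `val`,
/// as used by the `memory.atomic.wait64` instruction. Returns `0` when the
/// values are equal and `1` otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when `dst` is out of bounds or not 8-byte aligned.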
pub(crate) unsafe fn memory32_atomic_check64(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u64,
) -> Result<u32, Trap> {
    unsafe {
        if usize::try_from(dst).unwrap() > mem.current_length {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        if dst & 0b111 != 0 {
            return Err(Trap::lib(TrapCode::UnalignedAtomic));
        }

        // Bounds and alignment are checked above; by this point we know
        // the access is in range and 8-byte aligned.
        let dst = mem.base.offset(dst) as *mut u64;
        let atomic_dst = AtomicPtr::new(dst);
        let read_val = *atomic_dst.load(Ordering::Acquire);
        let ret = if read_val == val { 0 } else { 1 };
        Ok(ret)
    }
}

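/// The fields compiled code needs to access to utilize a WebAssembly table
/// defined within the instance.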
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table data.
    pub base: *mut u8,

    /// The current number of elements in the table.
    pub current_elements: u32,
}

#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableDefinition>(),
            usize::from(offsets.size_of_vmtable_definition())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, base),
            usize::from(offsets.vmtable_definition_base())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, current_elements),
            usize::from(offsets.vmtable_definition_current_elements())
        );
    }
}

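/// The storage for a WebAssembly global defined within the instance.
///
/// The definition is aligned to 16 bytes so that a `v128` value can be
/// stored in it.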
#[derive(Debug, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    /// The raw value of the global.
    pub val: RawValue,
}

#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use crate::{VMFuncRef, VMOffsets};
    use more_asserts::assert_ge;
    use std::mem::{align_of, size_of};
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_definition_alignment() {
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<VMFuncRef>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<[u8; 16]>());
    }

    #[test]
    fn check_vmglobal_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<*const VMGlobalDefinition>(),
            usize::from(offsets.size_of_vmglobal_local())
        );
    }

    #[test]
    fn check_vmglobal_begins_aligned() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }
}

impl VMGlobalDefinition {
    /// Construct a `VMGlobalDefinition` with a zeroed value.
    pub fn new() -> Self {
        Self {
            val: Default::default(),
        }
    }
}

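/// An index into the shared tag registry, usable for comparing tags across
/// instances.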
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
pub struct VMSharedTagIndex(u32);

impl VMSharedTagIndex {
    /// Create a new `VMSharedTagIndex`.
    pub fn new(value: u32) -> Self {
        Self(value)
    }

    /// Return the underlying index value.
    pub fn index(&self) -> u32 {
        self.0
    }
}

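/// An index into the shared signature registry, usable for checking
/// signatures at indirect calls.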
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedSignatureIndex(u32);

#[cfg(test)]
mod test_vmshared_signature_index {
    use super::VMSharedSignatureIndex;
    use std::mem::size_of;
    use wasmer_types::{ModuleInfo, TargetSharedSignatureIndex, VMOffsets};

    #[test]
    fn check_vmshared_signature_index() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            usize::from(offsets.size_of_vmshared_signature_index())
        );
    }

    #[test]
    fn check_target_shared_signature_index() {
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            size_of::<TargetSharedSignatureIndex>()
        );
    }
}

impl VMSharedSignatureIndex {
    /// Create a new `VMSharedSignatureIndex`.
    pub fn new(value: u32) -> Self {
        Self(value)
    }
}

impl Default for VMSharedSignatureIndex {
    fn default() -> Self {
        Self::new(u32::MAX)
    }
}

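/// The VM caller-checked "anyfunc" record, for caller-side signature
/// checking. It's "any" because it's not type-specific: the `type_index`
/// is compared against the expected signature at `call_indirect` time.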
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    /// Function body.
    pub func_ptr: *const VMFunctionBody,
    /// Function signature's index within the shared signature registry.
    pub type_index: VMSharedSignatureIndex,
    /// Function `VMContext` or host env.
    pub vmctx: VMFunctionContext,
    /// Trampoline for caller-side dynamic calls into this function.
    pub call_trampoline: VMTrampoline,
}

impl PartialEq for VMCallerCheckedAnyfunc {
    fn eq(&self, other: &Self) -> bool {
        self.func_ptr == other.func_ptr
            && self.type_index == other.type_index
            && self.vmctx == other.vmctx
            && ptr::fn_addr_eq(self.call_trampoline, other.call_trampoline)
    }
}

impl Eq for VMCallerCheckedAnyfunc {}

impl Hash for VMCallerCheckedAnyfunc {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.func_ptr.hash(state);
        self.type_index.hash(state);
        self.vmctx.hash(state);
        ptr::hash(self.call_trampoline as *const (), state);
    }
}

#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
    use super::VMCallerCheckedAnyfunc;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmcaller_checked_anyfunc_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMCallerCheckedAnyfunc>(),
            usize::from(offsets.size_of_vmcaller_checked_anyfunc())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, func_ptr),
            usize::from(offsets.vmcaller_checked_anyfunc_func_ptr())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, type_index),
            usize::from(offsets.vmcaller_checked_anyfunc_type_index())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, vmctx),
            usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
        );
    }
}

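/// An array that stores addresses of builtin functions. Compiled code calls
/// these functions indirectly through this array, so the code itself never
/// needs to be patched with the functions' addresses.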
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
    ptrs: [usize; Self::len()],
}

impl VMBuiltinFunctionsArray {
    /// The number of builtin function pointers in the array.
    pub const fn len() -> usize {
        VMBuiltinFunctionIndex::builtin_functions_total_number() as usize
    }

    /// Construct the array, resolving every builtin function pointer.
    pub fn initialized() -> Self {
        use crate::libcalls::*;

        let mut ptrs = [0; Self::len()];

        ptrs[VMBuiltinFunctionIndex::get_memory32_grow_index().index() as usize] =
            wasmer_vm_memory32_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_grow_index().index() as usize] =
            wasmer_vm_imported_memory32_grow as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory32_size_index().index() as usize] =
            wasmer_vm_memory32_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
            wasmer_vm_imported_memory32_size as usize;

        ptrs[VMBuiltinFunctionIndex::get_table_copy_index().index() as usize] =
            wasmer_vm_table_copy as usize;

        ptrs[VMBuiltinFunctionIndex::get_table_init_index().index() as usize] =
            wasmer_vm_table_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
            wasmer_vm_elem_drop as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
            wasmer_vm_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
            wasmer_vm_imported_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_fill_index().index() as usize] =
            wasmer_vm_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_fill_index().index() as usize] =
            wasmer_vm_imported_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_init_index().index() as usize] =
            wasmer_vm_memory32_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_data_drop_index().index() as usize] =
            wasmer_vm_data_drop as usize;
        ptrs[VMBuiltinFunctionIndex::get_raise_trap_index().index() as usize] =
            wasmer_vm_raise_trap as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_size_index().index() as usize] =
            wasmer_vm_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_size_index().index() as usize] =
            wasmer_vm_imported_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_grow_index().index() as usize] =
            wasmer_vm_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_grow_index().index() as usize] =
            wasmer_vm_imported_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_get_index().index() as usize] =
            wasmer_vm_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_get_index().index() as usize] =
            wasmer_vm_imported_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_set_index().index() as usize] =
            wasmer_vm_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_set_index().index() as usize] =
            wasmer_vm_imported_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_func_ref_index().index() as usize] =
            wasmer_vm_func_ref as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_fill_index().index() as usize] =
            wasmer_vm_table_fill as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_memory32_atomic_notify as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_notify as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_throw_index().index() as usize] =
            wasmer_vm_throw as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_rethrow_index().index() as usize] =
            wasmer_vm_rethrow as usize;

        ptrs[VMBuiltinFunctionIndex::get_imported_alloc_exception_index().index() as usize] =
            wasmer_vm_alloc_exception as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_delete_exception_index().index() as usize] =
            wasmer_vm_delete_exception as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_read_exception_index().index() as usize] =
            wasmer_vm_read_exception as usize;

        ptrs[VMBuiltinFunctionIndex::get_imported_debug_usize_index().index() as usize] =
            wasmer_vm_dbg_usize as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_debug_str_index().index() as usize] =
            wasmer_vm_dbg_str as usize;

        debug_assert!(ptrs.iter().cloned().all(|p| p != 0));

        Self { ptrs }
    }
}

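/// The VM "context", which is pointed to by the `vmctx` arg in the compiler.
/// This has information about globals, memories, tables, and other runtime
/// state associated with the current instance.
///
/// The struct here is empty, as the sizes of these fields are dynamic, and
/// we can't describe them in Rust's type system. Sufficient memory is
/// allocated at runtime.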
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {}

impl VMContext {
    /// Return a reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    /// This doesn't work on just any `VMContext`: it must be a `VMContext`
    /// allocated as part of an `Instance`.
    #[allow(clippy::cast_ptr_alignment)]
    #[inline]
    pub(crate) unsafe fn instance(&self) -> &Instance {
        unsafe {
            &*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset())
                as *const Instance)
        }
    }

    /// Return a mutable reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    /// Same constraint as [`VMContext::instance`]: the `VMContext` must be
    /// allocated as part of an `Instance`.
    #[inline]
    pub(crate) unsafe fn instance_mut(&mut self) -> &mut Instance {
        unsafe {
            &mut *((self as *const Self as *mut u8).offset(-Instance::vmctx_offset())
                as *mut Instance)
        }
    }
}

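/// The type of a trampoline used to call a Wasm function through a uniform
/// interface: the callee `VMContext`, the function to call, and a buffer
/// that carries the arguments and receives the return values.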
pub type VMTrampoline = unsafe extern "C" fn(
    *mut VMContext,        // callee vmctx
    *const VMFunctionBody, // function we're actually calling
    *mut RawValue,         // space for arguments and return values
);

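/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory defined within the instance, namely the start address and the
/// size in bytes.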
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address, which is always valid even if the memory grows.
    pub base: *mut u8,

    /// The current logical size of this linear memory in bytes.
    pub current_length: usize,
}

// SAFETY: this is plain data; it's the user's responsibility to synchronize
// access to the underlying memory between threads.
unsafe impl Send for VMMemoryDefinition {}
// SAFETY: same as `Send` above; the definition is `Copy`, so sharing it by
// reference gives no more access than passing it by value.
unsafe impl Sync for VMMemoryDefinition {}

#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryDefinition>(),
            usize::from(offsets.size_of_vmmemory_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, base),
            usize::from(offsets.vmmemory_definition_base())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, current_length),
            usize::from(offsets.vmmemory_definition_current_length())
        );
    }
}