//! This file declares `VMContext` and several related structs which contain
//! fields that compiled wasm code accesses directly.

use crate::VMFunctionBody;
use crate::VMTable;
use crate::global::VMGlobal;
use crate::instance::Instance;
use crate::memory::VMMemory;
use crate::store::InternalStoreHandle;
use crate::trap::{Trap, TrapCode};
use crate::{VMBuiltinFunctionIndex, VMFunction};
use std::convert::TryFrom;
use std::hash::{Hash, Hasher};
use std::ptr::{self, NonNull};
use std::sync::atomic::{AtomicPtr, Ordering};
use wasmer_types::RawValue;

/// Union representing the first parameter passed when calling a function.
///
/// It may either be a pointer to the [`VMContext`] if it's a Wasm function,
/// or a pointer to arbitrary data controlled by the host if it's a host
/// function.
#[derive(Copy, Clone, Eq)]
#[repr(C)]
pub union VMFunctionContext {
    /// Wasm functions take a pointer to [`VMContext`].
    pub vmctx: *mut VMContext,
    /// Host functions can have custom environments.
    pub host_env: *mut std::ffi::c_void,
}

impl VMFunctionContext {
    /// Check whether the stored pointer is null. Both union variants share
    /// the same representation, so checking `host_env` covers both cases.
    pub fn is_null(&self) -> bool {
        unsafe { self.host_env.is_null() }
    }
}

impl std::fmt::Debug for VMFunctionContext {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("VMFunctionContext")
            .field("vmctx_or_hostenv", unsafe { &self.host_env })
            .finish()
    }
}

impl std::cmp::PartialEq for VMFunctionContext {
    fn eq(&self, rhs: &Self) -> bool {
        unsafe { std::ptr::eq(self.host_env, rhs.host_env) }
    }
}

impl std::hash::Hash for VMFunctionContext {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        unsafe {
            self.vmctx.hash(state);
        }
    }
}
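
// A minimal test-only sketch of how the union's two views alias the same
// pointer bits; the `Box` round-trip is illustrative and not part of this
// crate's API.
#[cfg(test)]
mod test_vmfunction_context {
    use super::VMFunctionContext;

    #[test]
    fn host_env_and_vmctx_share_representation() {
        let raw = Box::into_raw(Box::new(42u64)) as *mut std::ffi::c_void;
        let ctx = VMFunctionContext { host_env: raw };
        assert!(!ctx.is_null());
        // Reading either variant yields the same address.
        let vmctx_view = unsafe { ctx.vmctx };
        assert_eq!(unsafe { ctx.host_env }, vmctx_view.cast());
        // Reclaim the allocation so the example doesn't leak.
        unsafe { drop(Box::from_raw(raw as *mut u64)) };
    }
}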

/// An imported function.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: *const VMFunctionBody,

    /// A pointer to the `VMContext` that owns the function or host env.
    pub environment: VMFunctionContext,

    /// Handle to the `VMFunction` in the context.
    pub handle: InternalStoreHandle<VMFunction>,

    pub include_m0_param: bool,
}

#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;
    use wasmer_types::VMOffsets;

    #[test]
    fn check_vmfunction_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMFunctionImport>(),
            usize::from(offsets.size_of_vmfunction_import())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, body),
            usize::from(offsets.vmfunction_import_body())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, environment),
            usize::from(offsets.vmfunction_import_vmctx())
        );
    }
}

/// The `VMDynamicFunctionContext` is the context that dynamic functions
/// receive when called (rather than `vmctx`). A dynamic function is one
/// whose signature is not known until runtime, so the context carries
/// everything needed to run the function indicated in `address`.
#[repr(C)]
pub struct VMDynamicFunctionContext<T> {
    /// The address of the inner dynamic function.
    pub address: *const VMFunctionBody,

    /// The context that the inner dynamic function will receive.
    pub ctx: T,
}

// The raw `address` pointer is only used to locate code, so the context is
// as thread-safe as its payload `T`.
unsafe impl<T: Sized + Send + Sync> Send for VMDynamicFunctionContext<T> {}
unsafe impl<T: Sized + Send + Sync> Sync for VMDynamicFunctionContext<T> {}

impl<T: Sized + Clone + Send + Sync> Clone for VMDynamicFunctionContext<T> {
    fn clone(&self) -> Self {
        Self {
            address: self.address,
            ctx: self.ctx.clone(),
        }
    }
}
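
// A minimal test-only sketch of cloning a dynamic-function context; the
// null body pointer is a stand-in, real contexts point at callable code.
#[cfg(test)]
mod test_vmdynamicfunction_context_clone {
    use super::VMDynamicFunctionContext;

    #[test]
    fn clone_copies_address_and_ctx() {
        let original = VMDynamicFunctionContext {
            address: std::ptr::null(),
            ctx: 7usize,
        };
        let cloned = original.clone();
        // The code address is copied bitwise, the payload via `T::clone`.
        assert_eq!(cloned.address, original.address);
        assert_eq!(cloned.ctx, 7);
    }
}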

#[cfg(test)]
mod test_vmdynamicfunction_import_context {
    use super::VMDynamicFunctionContext;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmdynamicfunction_import_context_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMDynamicFunctionContext<usize>>(),
            usize::from(offsets.size_of_vmdynamicfunction_import_context())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, address),
            usize::from(offsets.vmdynamicfunction_import_context_address())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, ctx),
            usize::from(offsets.vmdynamicfunction_import_context_ctx())
        );
    }
}

/// A function kind is a calling convention into and out of wasm code.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
    /// A static function has the native signature:
    /// `extern "C" (vmctx, arg1, arg2...) -> (result1, result2, ...)`.
    Static,

    /// A dynamic function has its signature erased until runtime; arguments
    /// and results are exchanged through a values buffer instead.
    Dynamic,
}

/// The fields compiled code needs to access to utilize a
/// WebAssembly table imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub definition: NonNull<VMTableDefinition>,

    /// A handle to the `VMTable` that owns the table description.
    pub handle: InternalStoreHandle<VMTable>,
}

#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableImport>(),
            usize::from(offsets.size_of_vmtable_import())
        );
        assert_eq!(
            offset_of!(VMTableImport, definition),
            usize::from(offsets.vmtable_import_definition())
        );
    }
}

/// The fields compiled code needs to access to utilize a
/// WebAssembly linear memory imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub definition: NonNull<VMMemoryDefinition>,

    /// A handle to the `VMMemory` that owns the memory description.
    pub handle: InternalStoreHandle<VMMemory>,
}

#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryImport>(),
            usize::from(offsets.size_of_vmmemory_import())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, definition),
            usize::from(offsets.vmmemory_import_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, handle),
            usize::from(offsets.vmmemory_import_handle())
        );
    }
}

/// The fields compiled code needs to access to utilize a
/// WebAssembly global variable imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub definition: NonNull<VMGlobalDefinition>,

    /// A handle to the `VMGlobal` that owns the global description.
    pub handle: InternalStoreHandle<VMGlobal>,
}

/// # Safety
/// This data is safe to share between threads because it is plain data that
/// the user is responsible for synchronizing.
unsafe impl Send for VMGlobalImport {}
/// # Safety
/// See the `Send` impl above.
unsafe impl Sync for VMGlobalImport {}

#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMGlobalImport>(),
            usize::from(offsets.size_of_vmglobal_import())
        );
        assert_eq!(
            offset_of!(VMGlobalImport, definition),
            usize::from(offsets.vmglobal_import_definition())
        );
    }
}

/// Do an unsynchronized, non-atomic `memory.copy` for the memory.
///
/// # Errors
///
/// Returns a `Trap` error when the source or destination ranges are out of
/// bounds.
///
/// # Safety
/// The memory is not copied atomically and is not synchronized: it's the
/// caller's responsibility to synchronize.
pub(crate) unsafe fn memory_copy(
    mem: &VMMemoryDefinition,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    unsafe {
        if src
            .checked_add(len)
            .is_none_or(|n| usize::try_from(n).unwrap() > mem.current_length)
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > mem.current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = usize::try_from(dst).unwrap();
        let src = usize::try_from(src).unwrap();

        // Bounds and casts are checked above; by this point we know the
        // ranges are in bounds.
        let dst = mem.base.add(dst);
        let src = mem.base.add(src);
        // `ptr::copy` has memmove semantics, so overlapping ranges are fine.
        ptr::copy(src, dst, len as usize);

        Ok(())
    }
}
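
// A minimal test-only sketch of the copy and its bounds check, assuming a
// VMMemoryDefinition built over a plain Vec<u8>; real definitions are owned
// by the store's memories.
#[cfg(test)]
mod test_memory_copy {
    use super::{VMMemoryDefinition, memory_copy};

    #[test]
    fn copy_within_bounds_and_oob_trap() {
        let mut buf = vec![0u8; 16];
        buf[0] = 7;
        let mem = VMMemoryDefinition {
            base: buf.as_mut_ptr(),
            current_length: buf.len(),
        };
        // An in-bounds copy succeeds and moves the byte.
        unsafe { memory_copy(&mem, 8, 0, 1) }.unwrap();
        assert_eq!(buf[8], 7);
        // Any range extending past `current_length` traps.
        assert!(unsafe { memory_copy(&mem, 12, 0, 8) }.is_err());
    }
}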

/// Perform the `memory.fill` operation for the memory in an unsynchronized,
/// non-atomic way.
///
/// # Errors
///
/// Returns a `Trap` error if the memory range is out of bounds.
///
/// # Safety
/// The memory is not filled atomically and is not synchronized: it's the
/// caller's responsibility to synchronize.
pub(crate) unsafe fn memory_fill(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
    len: u32,
) -> Result<(), Trap> {
    unsafe {
        if dst
            .checked_add(len)
            .is_none_or(|m| usize::try_from(m).unwrap() > mem.current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        // Per the spec, only the low byte of `val` is written.
        let val = val as u8;

        // Bounds and casts are checked above; by this point we know the
        // range is in bounds.
        let dst = mem.base.offset(dst);
        ptr::write_bytes(dst, val, len as usize);

        Ok(())
    }
}
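
// A minimal test-only sketch of the fill semantics over a Vec-backed
// VMMemoryDefinition (illustrative only).
#[cfg(test)]
mod test_memory_fill {
    use super::{VMMemoryDefinition, memory_fill};

    #[test]
    fn fill_writes_low_byte_and_checks_bounds() {
        let mut buf = vec![0u8; 8];
        let mem = VMMemoryDefinition {
            base: buf.as_mut_ptr(),
            current_length: buf.len(),
        };
        // Only the low byte of `val` is written: 0x1FF becomes 0xFF.
        unsafe { memory_fill(&mem, 0, 0x1FF, 4) }.unwrap();
        assert_eq!(&buf[..4], &[0xFF, 0xFF, 0xFF, 0xFF]);
        // Filling past the end of the memory traps.
        assert!(unsafe { memory_fill(&mem, 6, 0, 4) }.is_err());
    }
}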

/// Perform the `memory32.atomic.check32` operation for the memory.
/// Returns 0 if the 32-bit value at `dst` equals `val`, 1 otherwise.
///
/// # Errors
///
/// Returns a `Trap` error if the address is out of bounds or not
/// 32-bit aligned.
pub(crate) unsafe fn memory32_atomic_check32(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
) -> Result<u32, Trap> {
    unsafe {
        if usize::try_from(dst).unwrap() > mem.current_length {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        if dst & 0b11 != 0 {
            return Err(Trap::lib(TrapCode::UnalignedAtomic));
        }

        // Bounds, alignment, and casts are checked above.
        let dst = mem.base.offset(dst) as *mut u32;
        let atomic_dst = AtomicPtr::new(dst);
        let read_val = *atomic_dst.load(Ordering::Acquire);
        let ret = if read_val == val { 0 } else { 1 };
        Ok(ret)
    }
}

/// Perform the `memory32.atomic.check64` operation for the memory.
/// Returns 0 if the 64-bit value at `dst` equals `val`, 1 otherwise.
///
/// # Errors
///
/// Returns a `Trap` error if the address is out of bounds or not
/// 64-bit aligned.
pub(crate) unsafe fn memory32_atomic_check64(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u64,
) -> Result<u32, Trap> {
    unsafe {
        if usize::try_from(dst).unwrap() > mem.current_length {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        if dst & 0b111 != 0 {
            return Err(Trap::lib(TrapCode::UnalignedAtomic));
        }

        // Bounds, alignment, and casts are checked above.
        let dst = mem.base.offset(dst) as *mut u64;
        let atomic_dst = AtomicPtr::new(dst);
        let read_val = *atomic_dst.load(Ordering::Acquire);
        let ret = if read_val == val { 0 } else { 1 };
        Ok(ret)
    }
}
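
// A minimal test-only sketch of the alignment and equality behaviour of the
// 32-bit atomic check; the memory is backed by u32s so that offset 0 is
// 4-byte aligned (illustrative only).
#[cfg(test)]
mod test_memory32_atomic_check {
    use super::{VMMemoryDefinition, memory32_atomic_check32};

    #[test]
    fn aligned_compare_and_unaligned_trap() {
        let mut buf = vec![42u32, 0, 0, 0];
        let mem = VMMemoryDefinition {
            base: buf.as_mut_ptr() as *mut u8,
            current_length: buf.len() * 4,
        };
        // An equal value reports 0; a mismatch reports 1.
        assert_eq!(unsafe { memory32_atomic_check32(&mem, 0, 42) }.unwrap(), 0);
        assert_eq!(unsafe { memory32_atomic_check32(&mem, 0, 7) }.unwrap(), 1);
        // A destination that is not 4-byte aligned traps.
        assert!(unsafe { memory32_atomic_check32(&mem, 2, 0) }.is_err());
    }
}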

/// The fields compiled code needs to access to utilize a
/// WebAssembly table defined within the instance.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table data.
    pub base: *mut u8,

    /// The current number of elements in the table.
    pub current_elements: u32,
}

#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableDefinition>(),
            usize::from(offsets.size_of_vmtable_definition())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, base),
            usize::from(offsets.vmtable_definition_base())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, current_elements),
            usize::from(offsets.vmtable_definition_current_elements())
        );
    }
}

/// The storage for a WebAssembly global defined within the instance.
/// Globals are aligned to 16 bytes so that any value, including `v128`,
/// can be stored in place.
#[derive(Debug, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    /// The value of the global.
    pub val: RawValue,
}

#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use crate::{VMFuncRef, VMOffsets};
    use more_asserts::assert_ge;
    use std::mem::{align_of, size_of};
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_definition_alignment() {
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<VMFuncRef>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<[u8; 16]>());
    }

    #[test]
    fn check_vmglobal_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMGlobalDefinition>(),
            usize::from(offsets.size_of_vmglobal_local())
        );
    }

    #[test]
    fn check_vmglobal_begins_aligned() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }
}

impl VMGlobalDefinition {
    /// Construct a `VMGlobalDefinition` holding the default (zeroed) value.
    pub fn new() -> Self {
        Self {
            val: Default::default(),
        }
    }
}
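
// A minimal test-only sketch of writing and reading a global's storage;
// `RawValue` is an untyped union, so the caller must track the global's
// actual type (an `i32` here).
#[cfg(test)]
mod test_vmglobal_definition_value {
    use super::VMGlobalDefinition;
    use wasmer_types::RawValue;

    #[test]
    fn store_and_load_i32() {
        let mut global = VMGlobalDefinition::new();
        global.val = RawValue { i32: -7 };
        assert_eq!(unsafe { global.val.i32 }, -7);
    }
}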

/// A tag index shared across all modules in a store.
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
pub struct VMSharedTagIndex(u32);

impl VMSharedTagIndex {
    /// Create a new `VMSharedTagIndex` from a raw index.
    pub fn new(value: u32) -> Self {
        Self(value)
    }

    /// Return the underlying index.
    pub fn index(&self) -> u32 {
        self.0
    }
}

/// A hash of a function signature, used for cheap signature-compatibility
/// checks at indirect call sites.
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSignatureHash(u32);

impl VMSignatureHash {
    /// Create a new `VMSignatureHash` from a raw hash value.
    pub fn new(value: u32) -> Self {
        Self(value)
    }
}

/// The VM caller-checked "anyfunc" record, for caller-side signature
/// checking. It's "any function" because it can hold a function of any
/// type, and "caller checked" because the caller compares
/// `type_signature_hash` before making an indirect call.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    /// Pointer to the function body.
    pub func_ptr: *const VMFunctionBody,
    /// Hash of the function's signature, checked by the caller.
    pub type_signature_hash: VMSignatureHash,
    /// The function's context.
    pub vmctx: VMFunctionContext,
    /// Trampoline for host callers to invoke the function with a values
    /// buffer.
    pub call_trampoline: VMTrampoline,
}

/// Trampoline stored in null funcrefs; calling through a null funcref traps
/// before this is ever reached, so invoking it is a logic error.
unsafe extern "C" fn null_call_trampoline(
    _vmctx: *mut VMContext,
    _callee: *const VMFunctionBody,
    _values: *mut RawValue,
) {
    unreachable!("null funcref trampoline should never be invoked");
}

impl VMCallerCheckedAnyfunc {
    /// The null funcref, used to represent uninitialized table elements.
    pub fn null() -> Self {
        Self {
            func_ptr: ptr::null(),
            type_signature_hash: VMSignatureHash(0),
            vmctx: VMFunctionContext {
                host_env: ptr::null_mut(),
            },
            call_trampoline: null_call_trampoline,
        }
    }
}
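
// A minimal test-only sketch: null funcrefs compare equal and report a null
// context, which is how uninitialized table slots are represented.
#[cfg(test)]
mod test_null_funcref {
    use super::VMCallerCheckedAnyfunc;

    #[test]
    fn null_funcrefs_are_equal_and_null() {
        let a = VMCallerCheckedAnyfunc::null();
        let b = VMCallerCheckedAnyfunc::null();
        assert_eq!(a, b);
        assert!(a.func_ptr.is_null());
        assert!(a.vmctx.is_null());
    }
}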

impl PartialEq for VMCallerCheckedAnyfunc {
    fn eq(&self, other: &Self) -> bool {
        self.func_ptr == other.func_ptr
            && self.type_signature_hash == other.type_signature_hash
            && self.vmctx == other.vmctx
            && ptr::fn_addr_eq(self.call_trampoline, other.call_trampoline)
    }
}

impl Eq for VMCallerCheckedAnyfunc {}

impl Hash for VMCallerCheckedAnyfunc {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.func_ptr.hash(state);
        self.type_signature_hash.hash(state);
        self.vmctx.hash(state);
        ptr::hash(self.call_trampoline as *const (), state);
    }
}

#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
    use super::VMCallerCheckedAnyfunc;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmcaller_checked_anyfunc_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMCallerCheckedAnyfunc>(),
            usize::from(offsets.size_of_vmcaller_checked_anyfunc())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, func_ptr),
            usize::from(offsets.vmcaller_checked_anyfunc_func_ptr())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, type_signature_hash),
            usize::from(offsets.vmcaller_checked_anyfunc_signature_hash())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, vmctx),
            usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
        );
    }
}

/// An array that stores the addresses of the builtin functions that
/// compiled code reaches through indirect calls. `repr(C)` keeps the
/// layout predictable for that generated code.
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
    ptrs: [usize; Self::len()],
}

impl VMBuiltinFunctionsArray {
    /// The total number of builtin functions.
    pub const fn len() -> usize {
        VMBuiltinFunctionIndex::builtin_functions_total_number() as usize
    }

    /// Build the array, pointing every entry at its corresponding libcall.
    pub fn initialized() -> Self {
        use crate::libcalls::*;

        let mut ptrs = [0; Self::len()];

        ptrs[VMBuiltinFunctionIndex::get_memory32_grow_index().index() as usize] =
            wasmer_vm_memory32_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_grow_index().index() as usize] =
            wasmer_vm_imported_memory32_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory32_size_index().index() as usize] =
            wasmer_vm_memory32_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
            wasmer_vm_imported_memory32_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_copy_index().index() as usize] =
            wasmer_vm_table_copy as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_init_index().index() as usize] =
            wasmer_vm_table_init as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
            wasmer_vm_elem_drop as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
            wasmer_vm_memory32_copy as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
            wasmer_vm_imported_memory32_copy as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_fill_index().index() as usize] =
            wasmer_vm_memory32_fill as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_fill_index().index() as usize] =
            wasmer_vm_imported_memory32_fill as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_init_index().index() as usize] =
            wasmer_vm_memory32_init as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_data_drop_index().index() as usize] =
            wasmer_vm_data_drop as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_raise_trap_index().index() as usize] =
            wasmer_vm_raise_trap as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_size_index().index() as usize] =
            wasmer_vm_table_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_size_index().index() as usize] =
            wasmer_vm_imported_table_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_grow_index().index() as usize] =
            wasmer_vm_table_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_grow_index().index() as usize] =
            wasmer_vm_imported_table_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_get_index().index() as usize] =
            wasmer_vm_table_get as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_get_index().index() as usize] =
            wasmer_vm_imported_table_get as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_set_index().index() as usize] =
            wasmer_vm_table_set as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_set_index().index() as usize] =
            wasmer_vm_imported_table_set as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_func_ref_index().index() as usize] =
            wasmer_vm_func_ref as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_fill_index().index() as usize] =
            wasmer_vm_table_fill as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait32 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait32 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait64 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait64 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_memory32_atomic_notify as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_notify as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_debug_usize_index().index() as usize] =
            wasmer_vm_dbg_usize as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_debug_str_index().index() as usize] =
            wasmer_vm_dbg_str as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_personality2_index().index() as usize] =
            wasmer_eh_personality2 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_alloc_exception_index().index() as usize] =
            wasmer_vm_alloc_exception as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_throw_index().index() as usize] =
            wasmer_vm_throw as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_read_exnref_index().index() as usize] =
            wasmer_vm_read_exnref as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_exception_into_exnref_index().index() as usize] =
            wasmer_vm_exception_into_exnref as *const () as usize;

        // Every slot must have been filled in above.
        debug_assert!(ptrs.iter().cloned().all(|p| p != 0));

        Self { ptrs }
    }
}

/// The VM "context", which is pointed to by the `vmctx` arg in the
/// compiler. This has information about globals, memories, tables, and
/// other runtime state associated with the current instance.
///
/// The struct here is empty, as the sizes of these fields are dynamic, and
/// we can't describe them in Rust's type system. Sufficient memory is
/// allocated at runtime.
#[derive(Debug)]
#[repr(C, align(16))] // align 16 since globals are aligned to that and contained inside
pub struct VMContext {}

impl VMContext {
    /// Return a reference to the `Instance` that owns this `VMContext`.
    ///
    /// # Safety
    /// This is unsafe because it doesn't work on just any `VMContext`: it
    /// must be a `VMContext` allocated as part of an `Instance`.
    #[allow(clippy::cast_ptr_alignment)]
    #[inline]
    pub(crate) unsafe fn instance(&self) -> &Instance {
        unsafe {
            &*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset())
                as *const Instance)
        }
    }

    /// Return a mutable reference to the `Instance` that owns this
    /// `VMContext`.
    ///
    /// # Safety
    /// See [`VMContext::instance`].
    #[inline]
    pub(crate) unsafe fn instance_mut(&mut self) -> &mut Instance {
        unsafe {
            &mut *((self as *const Self as *mut u8).offset(-Instance::vmctx_offset())
                as *mut Instance)
        }
    }
}

/// The type of a trampoline, used to call a function through a values
/// buffer rather than with native arguments.
pub type VMTrampoline = unsafe extern "C" fn(
    *mut VMContext,        // callee vmctx
    *const VMFunctionBody, // function we're actually calling
    *mut RawValue,         // space for arguments and return values
);

/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory defined within the instance, namely the start address and the
/// current size in bytes.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address of the linear memory.
    pub base: *mut u8,

    /// The current logical size of this linear memory in bytes.
    pub current_length: usize,
}

/// # Safety
/// This data is safe to share between threads because it is plain data that
/// the user is responsible for synchronizing.
unsafe impl Send for VMMemoryDefinition {}
/// # Safety
/// See the `Send` impl above.
unsafe impl Sync for VMMemoryDefinition {}

#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryDefinition>(),
            usize::from(offsets.size_of_vmmemory_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, base),
            usize::from(offsets.vmmemory_definition_base())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, current_length),
            usize::from(offsets.vmmemory_definition_current_length())
        );
    }
}