//! This file declares `VMContext` and several related structs which contain
//! fields that compiled wasm code accesses directly.

use crate::VMFunctionBody;
use crate::VMTable;
use crate::global::VMGlobal;
use crate::instance::Instance;
use crate::memory::VMMemory;
use crate::store::InternalStoreHandle;
use crate::trap::{Trap, TrapCode};
use crate::{VMBuiltinFunctionIndex, VMFunction};
use std::convert::TryFrom;
use std::hash::{Hash, Hasher};
use std::ptr::{self, NonNull};
use std::sync::atomic::{AtomicPtr, Ordering};
use wasmer_types::RawValue;

/// Union representing the first parameter passed when calling a function.
///
/// It is either a pointer to the [`VMContext`] for functions defined within
/// the instance, or a pointer to an arbitrary host environment for host
/// functions.
#[derive(Copy, Clone, Eq)]
#[repr(C)]
pub union VMFunctionContext {
    /// Pointer to the [`VMContext`] for functions defined in the instance.
    pub vmctx: *mut VMContext,
    /// Pointer to the host environment for host functions.
    pub host_env: *mut std::ffi::c_void,
}

impl VMFunctionContext {
    /// Returns true if the underlying pointer is null.
    pub fn is_null(&self) -> bool {
        unsafe { self.host_env.is_null() }
    }
}

impl std::fmt::Debug for VMFunctionContext {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("VMFunctionContext")
            .field("vmctx_or_hostenv", unsafe { &self.host_env })
            .finish()
    }
}

impl std::cmp::PartialEq for VMFunctionContext {
    fn eq(&self, rhs: &Self) -> bool {
        unsafe { std::ptr::eq(self.host_env, rhs.host_env) }
    }
}

impl std::hash::Hash for VMFunctionContext {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        unsafe {
            self.vmctx.hash(state);
        }
    }
}

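// Illustrative sketch (not from the original source): a context built from a
// null `host_env` pointer should report `is_null`, and two such contexts
// compare equal under the pointer-based `PartialEq` above.
#[cfg(test)]
mod test_vmfunction_context {
    use super::VMFunctionContext;

    #[test]
    fn null_context_is_null_and_equal() {
        let a = VMFunctionContext {
            host_env: std::ptr::null_mut(),
        };
        let b = VMFunctionContext {
            host_env: std::ptr::null_mut(),
        };
        assert!(a.is_null());
        assert_eq!(a, b);
    }
}
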
/// An imported function.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: *const VMFunctionBody,

    /// The context (`VMContext` or host env) passed to the function body.
    pub environment: VMFunctionContext,

    /// Handle to the `VMFunction` in the context.
    pub handle: InternalStoreHandle<VMFunction>,

    pub include_m0_param: bool,
}

#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;
    use wasmer_types::VMOffsets;

    #[test]
    fn check_vmfunction_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMFunctionImport>(),
            usize::from(offsets.size_of_vmfunction_import())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, body),
            usize::from(offsets.vmfunction_import_body())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, environment),
            usize::from(offsets.vmfunction_import_vmctx())
        );
    }
}

/// The context that dynamic functions receive when called (rather than a
/// plain `vmctx`).
#[repr(C)]
pub struct VMDynamicFunctionContext<T> {
    /// The address of the inner dynamic function.
    pub address: *const VMFunctionBody,

    /// The context that the inner dynamic function will receive.
    pub ctx: T,
}

// Only `Send`/`Sync` when the wrapped context itself is.
unsafe impl<T: Sized + Send + Sync> Send for VMDynamicFunctionContext<T> {}
unsafe impl<T: Sized + Send + Sync> Sync for VMDynamicFunctionContext<T> {}

impl<T: Sized + Clone + Send + Sync> Clone for VMDynamicFunctionContext<T> {
    fn clone(&self) -> Self {
        Self {
            address: self.address,
            ctx: self.ctx.clone(),
        }
    }
}

#[cfg(test)]
mod test_vmdynamicfunction_import_context {
    use super::VMDynamicFunctionContext;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmdynamicfunction_import_context_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMDynamicFunctionContext<usize>>(),
            usize::from(offsets.size_of_vmdynamicfunction_import_context())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, address),
            usize::from(offsets.vmdynamicfunction_import_context_address())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, ctx),
            usize::from(offsets.vmdynamicfunction_import_context_ctx())
        );
    }
}

/// The kind of function: whether its native signature is static or dynamic.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
    /// A static function has a fixed native signature taking the context
    /// followed by the unpacked arguments.
    Static,

    /// A dynamic function takes the context and a slice of raw values, and
    /// produces its results dynamically.
    Dynamic,
}

/// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub definition: NonNull<VMTableDefinition>,

    /// Handle to the `VMTable` in the context.
    pub handle: InternalStoreHandle<VMTable>,
}

#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableImport>(),
            usize::from(offsets.size_of_vmtable_import())
        );
        assert_eq!(
            offset_of!(VMTableImport, definition),
            usize::from(offsets.vmtable_import_definition())
        );
    }
}

/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub definition: NonNull<VMMemoryDefinition>,

    /// Handle to the `VMMemory` in the context.
    pub handle: InternalStoreHandle<VMMemory>,
}

#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryImport>(),
            usize::from(offsets.size_of_vmmemory_import())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, definition),
            usize::from(offsets.vmmemory_import_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, handle),
            usize::from(offsets.vmmemory_import_handle())
        );
    }
}

/// The fields compiled code needs to access to utilize a WebAssembly global
/// variable imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub definition: NonNull<VMGlobalDefinition>,

    /// Handle to the `VMGlobal` in the context.
    pub handle: InternalStoreHandle<VMGlobal>,
}

/// # Safety
/// The global's storage is plain data; synchronizing concurrent access is the
/// responsibility of the embedder, as with the rest of this VM layer.
unsafe impl Send for VMGlobalImport {}
/// # Safety
/// See the `Send` impl above.
unsafe impl Sync for VMGlobalImport {}

#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMGlobalImport>(),
            usize::from(offsets.size_of_vmglobal_import())
        );
        assert_eq!(
            offset_of!(VMGlobalImport, definition),
            usize::from(offsets.vmglobal_import_definition())
        );
    }
}

/// Perform a `memory.copy` for the memory.
///
/// # Errors
///
/// Returns a `Trap` error when the source or destination ranges are out of
/// bounds.
///
/// # Safety
///
/// The memory is not copied atomically and is not synchronized: it's the
/// caller's responsibility to synchronize.
pub(crate) unsafe fn memory_copy(
    mem: &VMMemoryDefinition,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    unsafe {
        if src
            .checked_add(len)
            .is_none_or(|n| usize::try_from(n).unwrap() > mem.current_length)
            || dst
                .checked_add(len)
                .is_none_or(|m| usize::try_from(m).unwrap() > mem.current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = usize::try_from(dst).unwrap();
        let src = usize::try_from(src).unwrap();

        // Bounds are checked above, so the pointer arithmetic stays in range.
        let dst = mem.base.add(dst);
        let src = mem.base.add(src);
        // The source and destination may overlap, so use `ptr::copy`
        // (memmove semantics) rather than `copy_nonoverlapping`.
        ptr::copy(src, dst, len as usize);

        Ok(())
    }
}

/// Perform a `memory.fill` for the memory.
///
/// # Errors
///
/// Returns a `Trap` error when the destination range is out of bounds.
///
/// # Safety
///
/// The memory is not filled atomically and is not synchronized: it's the
/// caller's responsibility to synchronize.
pub(crate) unsafe fn memory_fill(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
    len: u32,
) -> Result<(), Trap> {
    unsafe {
        if dst
            .checked_add(len)
            .is_none_or(|m| usize::try_from(m).unwrap() > mem.current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        let val = val as u8;

        // Bounds are checked above, so the offset stays in range.
        let dst = mem.base.offset(dst);
        ptr::write_bytes(dst, val, len as usize);

        Ok(())
    }
}

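// Illustrative sketch (not part of the original crate): exercises the bounds
// checks of `memory_copy` and `memory_fill` against a plain byte buffer that
// stands in for a linear memory. The buffer contents and the offsets used
// below are arbitrary assumptions made for the example.
#[cfg(test)]
mod test_memory_copy_fill {
    use super::{VMMemoryDefinition, memory_copy, memory_fill};

    #[test]
    fn copy_and_fill_respect_bounds() {
        let mut buf = vec![1u8, 2, 3, 4, 0, 0, 0, 0];
        let mem = VMMemoryDefinition {
            base: buf.as_mut_ptr(),
            current_length: buf.len(),
        };

        unsafe {
            // In-bounds: copy bytes 0..4 into 4..8, then fill 0..4 with 0xAB.
            memory_copy(&mem, 4, 0, 4).unwrap();
            memory_fill(&mem, 0, 0xAB, 4).unwrap();

            // Ranges extending past `current_length` must trap without writing.
            assert!(memory_copy(&mem, 0, 0, 9).is_err());
            assert!(memory_fill(&mem, 0, 0, 9).is_err());
        }

        assert_eq!(&buf[0..4], &[0xAB, 0xAB, 0xAB, 0xAB]);
        assert_eq!(&buf[4..8], &[1, 2, 3, 4]);
    }
}
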
/// Compare the 32-bit value at offset `dst` in the memory against `val`.
///
/// Returns `Ok(0)` if the values are equal and `Ok(1)` otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when `dst` is out of bounds or not 4-byte aligned.
pub(crate) unsafe fn memory32_atomic_check32(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
) -> Result<u32, Trap> {
    unsafe {
        if usize::try_from(dst).unwrap() > mem.current_length {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        if dst & 0b11 != 0 {
            return Err(Trap::lib(TrapCode::UnalignedAtomic));
        }

        // Bounds and alignment are checked above.
        let dst = mem.base.offset(dst) as *mut u32;
        let atomic_dst = AtomicPtr::new(dst);
        let read_val = *atomic_dst.load(Ordering::Acquire);
        let ret = if read_val == val { 0 } else { 1 };
        Ok(ret)
    }
}

/// Compare the 64-bit value at offset `dst` in the memory against `val`.
///
/// Returns `Ok(0)` if the values are equal and `Ok(1)` otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when `dst` is out of bounds or not 8-byte aligned.
pub(crate) unsafe fn memory32_atomic_check64(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u64,
) -> Result<u32, Trap> {
    unsafe {
        if usize::try_from(dst).unwrap() > mem.current_length {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = isize::try_from(dst).unwrap();
        if dst & 0b111 != 0 {
            return Err(Trap::lib(TrapCode::UnalignedAtomic));
        }

        // Bounds and alignment are checked above.
        let dst = mem.base.offset(dst) as *mut u64;
        let atomic_dst = AtomicPtr::new(dst);
        let read_val = *atomic_dst.load(Ordering::Acquire);
        let ret = if read_val == val { 0 } else { 1 };
        Ok(ret)
    }
}

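// Illustrative sketch (not part of the original crate): the atomic check
// helper traps on out-of-bounds or unaligned offsets and otherwise compares
// the loaded value. A `Vec<u32>` backs the memory so the aligned load is
// well-defined; the specific values are arbitrary.
#[cfg(test)]
mod test_memory_atomic_check {
    use super::{VMMemoryDefinition, memory32_atomic_check32};

    #[test]
    fn check32_traps_on_bad_offsets_and_compares_values() {
        let mut buf = vec![7u32, 0];
        let mem = VMMemoryDefinition {
            base: buf.as_mut_ptr() as *mut u8,
            current_length: buf.len() * 4,
        };

        unsafe {
            // Aligned, in-bounds, matching value: returns 0.
            assert_eq!(memory32_atomic_check32(&mem, 0, 7).unwrap(), 0);
            // Aligned, in-bounds, non-matching value: returns 1.
            assert_eq!(memory32_atomic_check32(&mem, 0, 8).unwrap(), 1);
            // Offset 1 is not 4-byte aligned.
            assert!(memory32_atomic_check32(&mem, 1, 7).is_err());
            // Offset past `current_length` is out of bounds.
            assert!(memory32_atomic_check32(&mem, 64, 7).is_err());
        }
    }
}
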
/// The fields compiled code needs to access to utilize a WebAssembly table
/// defined within the instance.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table data.
    pub base: *mut u8,

    /// The current number of elements in the table.
    pub current_elements: u32,
}

#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableDefinition>(),
            usize::from(offsets.size_of_vmtable_definition())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, base),
            usize::from(offsets.vmtable_definition_base())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, current_elements),
            usize::from(offsets.vmtable_definition_current_elements())
        );
    }
}

/// The storage for a WebAssembly global defined within the instance.
///
/// The value is 16-byte aligned so that any WebAssembly value, including
/// `v128`, can be stored and accessed with aligned loads and stores.
#[derive(Debug, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    /// Raw value of the global.
    pub val: RawValue,
}

#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use crate::{VMFuncRef, VMOffsets};
    use more_asserts::assert_ge;
    use std::mem::{align_of, size_of};
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_definition_alignment() {
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<VMFuncRef>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<[u8; 16]>());
    }

    #[test]
    fn check_vmglobal_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<*const VMGlobalDefinition>(),
            usize::from(offsets.size_of_vmglobal_local())
        );
    }

    #[test]
    fn check_vmglobal_begins_aligned() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }
}

impl VMGlobalDefinition {
    /// Construct a `VMGlobalDefinition` with a default (zeroed) value.
    pub fn new() -> Self {
        Self {
            val: Default::default(),
        }
    }
}

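// Illustrative sketch (not part of the original crate): writing and reading a
// value through a global definition. It assumes `RawValue` exposes a public
// `i32` union field, as `wasmer_types::RawValue` does; reading a union field
// is unsafe and requires knowing which variant was stored.
#[cfg(test)]
mod test_vmglobal_definition_usage {
    use super::VMGlobalDefinition;
    use wasmer_types::RawValue;

    #[test]
    fn global_round_trips_an_i32() {
        let mut global = VMGlobalDefinition::new();
        global.val = RawValue { i32: 42 };
        assert_eq!(unsafe { global.val.i32 }, 42);
    }
}
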
/// An index identifying a WebAssembly tag shared across modules.
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
pub struct VMSharedTagIndex(u32);

impl VMSharedTagIndex {
    /// Create a new `VMSharedTagIndex`.
    pub fn new(value: u32) -> Self {
        Self(value)
    }

    /// Return the underlying index value.
    pub fn index(&self) -> u32 {
        self.0
    }
}

/// An index into the shared signature registry, usable for checking signatures
/// at indirect calls.
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedSignatureIndex(u32);

#[cfg(test)]
mod test_vmshared_signature_index {
    use super::VMSharedSignatureIndex;
    use std::mem::size_of;
    use wasmer_types::{ModuleInfo, TargetSharedSignatureIndex, VMOffsets};

    #[test]
    fn check_vmshared_signature_index() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            usize::from(offsets.size_of_vmshared_signature_index())
        );
    }

    #[test]
    fn check_target_shared_signature_index() {
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            size_of::<TargetSharedSignatureIndex>()
        );
    }
}

impl VMSharedSignatureIndex {
    /// Create a new `VMSharedSignatureIndex`.
    pub fn new(value: u32) -> Self {
        Self(value)
    }
}

impl Default for VMSharedSignatureIndex {
    fn default() -> Self {
        Self::new(u32::MAX)
    }
}

/// The VM caller-checked "anyfunc" record, for caller-side signature checking.
/// It consists of the actual function pointer, a signature id to be checked by
/// the caller, and the context to pass along.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    /// Function body.
    pub func_ptr: *const VMFunctionBody,
    /// Function signature id.
    pub type_index: VMSharedSignatureIndex,
    /// Function `VMContext` or host env.
    pub vmctx: VMFunctionContext,
    /// Address of the trampoline used to call this function with a dynamic
    /// argument list.
    pub call_trampoline: VMTrampoline,
}

impl PartialEq for VMCallerCheckedAnyfunc {
    fn eq(&self, other: &Self) -> bool {
        self.func_ptr == other.func_ptr
            && self.type_index == other.type_index
            && self.vmctx == other.vmctx
            && ptr::fn_addr_eq(self.call_trampoline, other.call_trampoline)
    }
}

impl Eq for VMCallerCheckedAnyfunc {}

impl Hash for VMCallerCheckedAnyfunc {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.func_ptr.hash(state);
        self.type_index.hash(state);
        self.vmctx.hash(state);
        ptr::hash(self.call_trampoline as *const (), state);
    }
}

#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
    use super::VMCallerCheckedAnyfunc;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmcaller_checked_anyfunc_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMCallerCheckedAnyfunc>(),
            usize::from(offsets.size_of_vmcaller_checked_anyfunc())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, func_ptr),
            usize::from(offsets.vmcaller_checked_anyfunc_func_ptr())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, type_index),
            usize::from(offsets.vmcaller_checked_anyfunc_type_index())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, vmctx),
            usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
        );
    }
}

/// An array that stores addresses of builtin functions. We translate code
/// to use indirect calls. This way, we don't have to patch the code.
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
    ptrs: [usize; Self::len()],
}

impl VMBuiltinFunctionsArray {
    /// The number of builtin functions.
    pub const fn len() -> usize {
        VMBuiltinFunctionIndex::builtin_functions_total_number() as usize
    }

    /// Construct the array, pointing each slot at its corresponding libcall.
    pub fn initialized() -> Self {
        use crate::libcalls::*;

        let mut ptrs = [0; Self::len()];

        ptrs[VMBuiltinFunctionIndex::get_memory32_grow_index().index() as usize] =
            wasmer_vm_memory32_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_grow_index().index() as usize] =
            wasmer_vm_imported_memory32_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory32_size_index().index() as usize] =
            wasmer_vm_memory32_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
            wasmer_vm_imported_memory32_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_copy_index().index() as usize] =
            wasmer_vm_table_copy as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_init_index().index() as usize] =
            wasmer_vm_table_init as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
            wasmer_vm_elem_drop as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
            wasmer_vm_memory32_copy as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
            wasmer_vm_imported_memory32_copy as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_fill_index().index() as usize] =
            wasmer_vm_memory32_fill as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_fill_index().index() as usize] =
            wasmer_vm_imported_memory32_fill as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_init_index().index() as usize] =
            wasmer_vm_memory32_init as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_data_drop_index().index() as usize] =
            wasmer_vm_data_drop as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_raise_trap_index().index() as usize] =
            wasmer_vm_raise_trap as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_size_index().index() as usize] =
            wasmer_vm_table_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_size_index().index() as usize] =
            wasmer_vm_imported_table_size as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_grow_index().index() as usize] =
            wasmer_vm_table_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_grow_index().index() as usize] =
            wasmer_vm_imported_table_grow as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_get_index().index() as usize] =
            wasmer_vm_table_get as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_get_index().index() as usize] =
            wasmer_vm_imported_table_get as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_set_index().index() as usize] =
            wasmer_vm_table_set as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_set_index().index() as usize] =
            wasmer_vm_imported_table_set as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_func_ref_index().index() as usize] =
            wasmer_vm_func_ref as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_fill_index().index() as usize] =
            wasmer_vm_table_fill as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait32 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait32 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait64 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait64 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_memory32_atomic_notify as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_notify as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_debug_usize_index().index() as usize] =
            wasmer_vm_dbg_usize as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_debug_str_index().index() as usize] =
            wasmer_vm_dbg_str as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_personality2_index().index() as usize] =
            wasmer_eh_personality2 as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_alloc_exception_index().index() as usize] =
            wasmer_vm_alloc_exception as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_throw_index().index() as usize] =
            wasmer_vm_throw as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_read_exnref_index().index() as usize] =
            wasmer_vm_read_exnref as *const () as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_exception_into_exnref_index().index() as usize] =
            wasmer_vm_exception_into_exnref as *const () as usize;

        // Every builtin must have been assigned a non-null address.
        debug_assert!(ptrs.iter().cloned().all(|p| p != 0));

        Self { ptrs }
    }
}

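// Illustrative sketch (not part of the original crate): after `initialized()`
// every builtin slot must hold a non-null function address, mirroring the
// `debug_assert!` in the constructor. The private `ptrs` field is reachable
// here because this test module is a child of the defining module.
#[cfg(test)]
mod test_vmbuiltin_functions_array {
    use super::VMBuiltinFunctionsArray;

    #[test]
    fn all_builtin_slots_are_populated() {
        let array = VMBuiltinFunctionsArray::initialized();
        assert_eq!(array.ptrs.len(), VMBuiltinFunctionsArray::len());
        assert!(array.ptrs.iter().all(|&p| p != 0));
    }
}
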
/// The VM "context", which is pointed to by the `vmctx` arg in the compiler.
/// This has information about globals, memories, tables, and other runtime
/// state associated with the current instance.
///
/// The struct here is empty, as the sizes of these fields are dynamic, and
/// we can't describe them in Rust's type system. Sufficient memory is
/// allocated at runtime.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {}

impl VMContext {
    /// Return a reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    ///
    /// This doesn't work on just any `VMContext`; it must be a `VMContext`
    /// allocated as part of an `Instance`.
    #[allow(clippy::cast_ptr_alignment)]
    #[inline]
    pub(crate) unsafe fn instance(&self) -> &Instance {
        unsafe {
            &*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset())
                as *const Instance)
        }
    }

    /// Return a mutable reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    ///
    /// Unsafe for the same reason as [`Self::instance`].
    #[inline]
    pub(crate) unsafe fn instance_mut(&mut self) -> &mut Instance {
        unsafe {
            &mut *((self as *const Self as *mut u8).offset(-Instance::vmctx_offset())
                as *mut Instance)
        }
    }
}

/// The type of a trampoline used to call a function with a dynamic argument list.
pub type VMTrampoline = unsafe extern "C" fn(
    *mut VMContext,        // callee vmctx
    *const VMFunctionBody, // function we're actually calling
    *mut RawValue,         // space for arguments and return values
);

/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory defined within the instance, namely the start address and the
/// current size in bytes.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address of the linear memory.
    pub base: *mut u8,

    /// The current logical size of this linear memory in bytes.
    pub current_length: usize,
}

/// # Safety
/// The pointer and length describe plain memory; synchronizing concurrent
/// access is the responsibility of the caller, as with the rest of this VM
/// layer.
unsafe impl Send for VMMemoryDefinition {}
/// # Safety
/// See the `Send` impl above.
unsafe impl Sync for VMMemoryDefinition {}

#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryDefinition>(),
            usize::from(offsets.size_of_vmmemory_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, base),
            usize::from(offsets.vmmemory_definition_base())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, current_length),
            usize::from(offsets.vmmemory_definition_current_length())
        );
    }
}