wasmer_vm/exception.rs

use backtrace::Backtrace;
use std::{cell::UnsafeCell, ptr::NonNull};
use wasmer_types::{RawValue, StoreId};

use crate::{StoreHandle, StoreObjects, VMTag, store::InternalStoreHandle};

/// Underlying object referenced by a `VMExceptionRef`.
#[derive(Debug)]
pub struct VMExceptionObj {
    tag: u32,
    payload: Box<UnsafeCell<[RawValue]>>,
    backtrace: Backtrace,
}

impl VMExceptionObj {
    /// Creates a new `VMExceptionObj` from the given tag and values; the tag is assumed
    /// to be from the same store as the `VMExceptionObj` itself.
    pub fn new(tag: InternalStoreHandle<VMTag>, payload: Box<[RawValue]>) -> Self {
        let payload = Box::into_raw(payload);
        let backtrace = Backtrace::new_unresolved();
        // SAFETY: [RawValue] and UnsafeCell<[RawValue]> have the same memory layout, and Box itself
        // does not enable any niche optimizations (of the kind that break Outer<UnsafeCell<T>>).
        Self {
            tag: tag.index() as u32,
            payload: unsafe { Box::from_raw(payload as *mut UnsafeCell<[RawValue]>) },
            backtrace,
        }
    }

    /// Creates a new `VMExceptionObj` from the given tag with all values initialized to
    /// zero; the tag is assumed to be from the same store as the `VMExceptionObj` itself.
    pub fn new_zeroed(ctx: &StoreObjects, tag: InternalStoreHandle<VMTag>) -> Self {
        let value_count = tag.get(ctx).signature.params().len();
        let values = Box::into_raw(vec![RawValue::default(); value_count].into_boxed_slice());
        let backtrace = Backtrace::new_unresolved();
        // SAFETY: [RawValue] and UnsafeCell<[RawValue]> have the same memory layout, and Box itself
        // does not enable any niche optimizations (of the kind that break Outer<UnsafeCell<T>>).
        Self {
            tag: tag.index() as u32,
            payload: unsafe { Box::from_raw(values as *mut UnsafeCell<[RawValue]>) },
            backtrace,
        }
    }

    /// Gets the raw store index of this exception's tag.
    #[cfg_attr(
        not(any(
            all(target_family = "windows", target_env = "gnu"),
            target_family = "unix",
        )),
        allow(unused)
    )]
    pub(crate) fn tag_index(&self) -> u32 {
        self.tag
    }

    /// Gets the tag of this exception.
    pub fn tag(&self) -> InternalStoreHandle<VMTag> {
        InternalStoreHandle::from_index(self.tag as usize).unwrap()
    }

    /// Gets the payload of this exception.
    pub fn payload(&self) -> NonNull<[RawValue]> {
        // SAFETY: UnsafeCell::get always returns a non-null pointer.
        unsafe { NonNull::new_unchecked(self.payload.get()) }
    }

    /// Gets the backtrace of this exception at the time it was constructed.
    pub fn backtrace(&self) -> &Backtrace {
        &self.backtrace
    }
}
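
/// Illustrative sketch (not part of the original module): one way a caller
/// might fill a slot of a zero-initialized payload through `payload()`. The
/// helper name and signature are assumptions for demonstration only.
#[allow(dead_code)]
fn example_write_first_payload_slot(exn: &VMExceptionObj, value: RawValue) {
    // SAFETY: writing through the `UnsafeCell`-backed payload is only sound
    // while no other reference to the payload slice is alive.
    unsafe {
        let mut payload = exn.payload();
        if let Some(slot) = payload.as_mut().first_mut() {
            *slot = value;
        }
    }
}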

// TODO: This is probably the place to do some reference-counting of exception objects.
/// Represents a reference to a [`VMExceptionObj`].
#[repr(transparent)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct VMExceptionRef(pub StoreHandle<VMExceptionObj>);

impl VMExceptionRef {
    /// Converts the [`VMExceptionRef`] into a `RawValue`.
    pub fn into_raw(self) -> RawValue {
        RawValue {
            exnref: self.to_u32_exnref(),
        }
    }

    /// Gets the raw u32 exnref value.
    pub fn to_u32_exnref(&self) -> u32 {
        self.0.internal_handle().index() as u32
    }

    /// Extracts a `VMExceptionRef` from a `RawValue`.
    ///
    /// # Safety
    /// `raw` must be a valid `VMExceptionRef` instance.
    pub unsafe fn from_raw(store_id: StoreId, raw: RawValue) -> Option<Self> {
        unsafe {
            InternalStoreHandle::from_index(raw.exnref as usize)
                .map(|handle| Self(StoreHandle::from_internal(store_id, handle)))
        }
    }
}
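
/// Illustrative sketch (not part of the original module): the intended round
/// trip between a `VMExceptionRef` and its `RawValue` representation. The
/// helper name and the `StoreHandle::new`/`store_id` calls are assumptions
/// about the surrounding store API, shown here for demonstration only.
#[allow(dead_code)]
fn example_exnref_round_trip(
    objects: &mut StoreObjects,
    tag: InternalStoreHandle<VMTag>,
) -> Option<VMExceptionRef> {
    // Build a zero-initialized exception object for the tag and register it in the store.
    let exn = VMExceptionObj::new_zeroed(objects, tag);
    let handle = StoreHandle::new(objects, exn);
    let store_id = handle.store_id();
    let exnref = VMExceptionRef(handle);
    // Lower to the raw representation, then recover the reference from it.
    let raw = exnref.into_raw();
    // SAFETY: `raw` was produced by `into_raw` on a reference from this store,
    // so it holds a valid exnref index for `store_id`.
    unsafe { VMExceptionRef::from_raw(store_id, raw) }
}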