1use std::marker::PhantomData;
2use wasmer_types::{Memory32, Memory64, MemorySize, ValueType};
3
4use crate::{
5 AsStoreRef, FromToNativeWasmType, MemoryAccessError, NativeWasmTypeInto, WasmRef, WasmSlice,
6 access::WasmRefAccess, view::MemoryView,
7};
8
/// Alias of [`WasmPtr`] whose offset is a 64-bit value (`Memory64` memories).
pub type WasmPtr64<T> = WasmPtr<T, Memory64>;
/// A typed pointer to a value in guest Wasm linear memory.
///
/// Stores only a byte offset; it must be combined with a `MemoryView`
/// (see `deref`/`read`/`write` below) to actually access memory.
///
/// `repr(transparent)`: the struct is layout-identical to its single
/// non-zero-sized field `offset`, which the `FromToNativeWasmType` impl
/// below relies on to pass the pointer as a plain native wasm value.
#[repr(transparent)]
pub struct WasmPtr<T, M: MemorySize = Memory32> {
    /// Byte offset into linear memory (`u32` for `Memory32`, `u64` for `Memory64`).
    offset: M::Offset,
    /// Marks the pointee type `T` without storing one.
    _phantom: PhantomData<T>,
}
63
64impl<T, M: MemorySize> WasmPtr<T, M> {
65 #[inline]
67 pub fn new(offset: M::Offset) -> Self {
68 Self {
69 offset,
70 _phantom: PhantomData,
71 }
72 }
73
74 #[inline]
76 pub fn offset(&self) -> M::Offset {
77 self.offset
78 }
79
80 #[inline]
82 pub fn cast<U>(self) -> WasmPtr<U, M> {
83 WasmPtr {
84 offset: self.offset,
85 _phantom: PhantomData,
86 }
87 }
88
89 #[inline]
91 pub fn null() -> Self {
92 Self::new(M::ZERO)
93 }
94
95 #[inline]
97 pub fn is_null(&self) -> bool {
98 self.offset.into() == 0
99 }
100
101 #[inline]
106 pub fn add_offset(self, offset: M::Offset) -> Result<Self, MemoryAccessError> {
107 let base = self.offset.into();
108 let index = offset.into();
109 let offset = index
110 .checked_mul(std::mem::size_of::<T>() as u64)
111 .ok_or(MemoryAccessError::Overflow)?;
112 let address = base
113 .checked_add(offset)
114 .ok_or(MemoryAccessError::Overflow)?;
115 let address = M::Offset::try_from(address).map_err(|_| MemoryAccessError::Overflow)?;
116 Ok(Self::new(address))
117 }
118
119 #[inline]
124 pub fn sub_offset(self, offset: M::Offset) -> Result<Self, MemoryAccessError> {
125 let base = self.offset.into();
126 let index = offset.into();
127 let offset = index
128 .checked_mul(std::mem::size_of::<T>() as u64)
129 .ok_or(MemoryAccessError::Overflow)?;
130 let address = base
131 .checked_sub(offset)
132 .ok_or(MemoryAccessError::Overflow)?;
133 let address = M::Offset::try_from(address).map_err(|_| MemoryAccessError::Overflow)?;
134 Ok(Self::new(address))
135 }
136}
137
138impl<T: ValueType, M: MemorySize> WasmPtr<T, M> {
139 #[inline]
142 pub fn deref<'a>(&self, view: &'a MemoryView) -> WasmRef<'a, T> {
143 WasmRef::new(view, self.offset.into())
144 }
145
146 #[inline]
148 pub fn read(&self, view: &MemoryView) -> Result<T, MemoryAccessError> {
149 self.deref(view).read()
150 }
151
152 #[inline]
154 pub fn write(&self, view: &MemoryView, val: T) -> Result<(), MemoryAccessError> {
155 self.deref(view).write(val)
156 }
157
158 #[inline]
164 pub fn slice<'a>(
165 &self,
166 view: &'a MemoryView,
167 len: M::Offset,
168 ) -> Result<WasmSlice<'a, T>, MemoryAccessError> {
169 WasmSlice::new(view, self.offset.into(), len.into())
170 }
171
172 #[inline]
177 pub fn read_until(
178 &self,
179 view: &MemoryView,
180 mut end: impl FnMut(&T) -> bool,
181 ) -> Result<Vec<T>, MemoryAccessError> {
182 let mut vec = Vec::new();
183 for i in 0u64.. {
184 let i = M::Offset::try_from(i).map_err(|_| MemoryAccessError::Overflow)?;
185 let val = self.add_offset(i)?.deref(view).read()?;
186 if end(&val) {
187 break;
188 }
189 vec.push(val);
190 }
191 Ok(vec)
192 }
193
194 #[inline]
196 pub fn access<'a>(
197 &self,
198 view: &'a MemoryView,
199 ) -> Result<WasmRefAccess<'a, T>, MemoryAccessError> {
200 self.deref(view).access()
201 }
202}
203
204impl<M: MemorySize> WasmPtr<u8, M> {
205 #[inline]
210 pub fn read_utf8_string(
211 &self,
212 view: &MemoryView,
213 len: M::Offset,
214 ) -> Result<String, MemoryAccessError> {
215 let vec = self.slice(view, len)?.read_to_vec()?;
216 Ok(String::from_utf8(vec)?)
217 }
218
219 #[inline]
224 pub fn read_utf8_string_with_nul(
225 &self,
226 view: &MemoryView,
227 ) -> Result<String, MemoryAccessError> {
228 let vec = self.read_until(view, |&byte| byte == 0)?;
229 Ok(String::from_utf8(vec)?)
230 }
231}
232
233unsafe impl<T: ValueType, M: MemorySize> FromToNativeWasmType for WasmPtr<T, M>
234where
235 <M as wasmer_types::MemorySize>::Native: NativeWasmTypeInto,
236{
237 type Native = M::Native;
238
239 fn to_native(self) -> Self::Native {
240 M::offset_to_native(self.offset)
241 }
242 fn from_native(n: Self::Native) -> Self {
243 Self {
244 offset: M::native_to_offset(n),
245 _phantom: PhantomData,
246 }
247 }
248 #[inline]
249 fn is_from_store(&self, _store: &impl AsStoreRef) -> bool {
250 true }
252}
253
// SAFETY: `WasmPtr` is `repr(transparent)` over `M::Offset` (see the struct
// definition), an integer type, so there are no padding bytes to zero —
// the empty body is intentional, not an omission.
unsafe impl<T: ValueType, M: MemorySize> ValueType for WasmPtr<T, M> {
    fn zero_padding_bytes(&self, _bytes: &mut [std::mem::MaybeUninit<u8>]) {}
}
257
258impl<T: ValueType, M: MemorySize> Clone for WasmPtr<T, M> {
259 fn clone(&self) -> Self {
260 *self
261 }
262}
263
// Marker impl: copying a `WasmPtr` copies only its offset field.
impl<T: ValueType, M: MemorySize> Copy for WasmPtr<T, M> {}
265
266impl<T: ValueType, M: MemorySize> PartialEq for WasmPtr<T, M> {
267 fn eq(&self, other: &Self) -> bool {
268 self.offset.into() == other.offset.into()
269 }
270}
271
// Offset equality is total (integer comparison), so `Eq` holds.
impl<T: ValueType, M: MemorySize> Eq for WasmPtr<T, M> {}
273
274impl<T: ValueType, M: MemorySize> std::fmt::Debug for WasmPtr<T, M> {
275 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
276 write!(f, "{}(@{})", std::any::type_name::<T>(), self.offset.into())
277 }
278}