//! An `Instance` contains all the runtime state used by execution of a
//! WebAssembly module (except its callstack and register state). A
//! `VMInstance` is a wrapper around `Instance` that manages how it is
//! allocated and deallocated.
mod allocator;
use crate::export::VMExtern;
use crate::imports::Imports;
use crate::store::{InternalStoreHandle, StoreObjects};
use crate::table::TableElement;
use crate::trap::{catch_traps, Trap, TrapCode};
use crate::vmcontext::{
memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTrampoline,
};
use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMFunctionBody};
use crate::{LinearMemory, NotifyLocation};
use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
pub use allocator::InstanceAllocator;
use memoffset::offset_of;
use more_asserts::assert_lt;
use std::alloc::Layout;
use std::cell::RefCell;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::fmt;
use std::mem;
use std::ptr::{self, NonNull};
use std::slice;
use std::sync::Arc;
use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
use wasmer_types::{
DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryError,
MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer, VMOffsets,
};
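/// A WebAssembly instance.
///
/// The type is dynamically sized: `vmctx` must be the last field so that
/// the variable-size `VMContext` data can trail the struct, hence the C
/// representation.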
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,
    /// Pointer to the object store of the context that owns this instance.
    context: *mut StoreObjects,
    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,
    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
    /// Passive elements in this instantiation. As `elem.drop`s happen,
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,
    /// Passive data segments from our module. As `data.drop`s happen,
    /// entries get removed.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,
    /// Func ref backing data for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
    /// Func ref pointers for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
    /// Additional context used by compiled WebAssembly code. This field
    /// must be last, since the `VMContext` data is dynamically sized.
    vmctx: VMContext,
}
impl fmt::Debug for Instance {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.debug_struct("Instance").finish()
}
}
#[allow(clippy::cast_ptr_alignment)]
impl Instance {
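    /// Helper to compute a typed pointer `offset` bytes past the start of
    /// this instance's `VMContext`.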
unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
(self.vmctx_ptr() as *mut u8)
.add(usize::try_from(offset).unwrap())
.cast()
}
fn module(&self) -> &Arc<ModuleInfo> {
&self.module
}
pub(crate) fn module_ref(&self) -> &ModuleInfo {
&self.module
}
fn context(&self) -> &StoreObjects {
unsafe { &*self.context }
}
fn context_mut(&mut self) -> &mut StoreObjects {
unsafe { &mut *self.context }
}
fn offsets(&self) -> &VMOffsets {
&self.offsets
}
fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
}
fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
let index = usize::try_from(index.as_u32()).unwrap();
unsafe { &*self.imported_functions_ptr().add(index) }
}
fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
}
fn imported_table(&self, index: TableIndex) -> &VMTableImport {
let index = usize::try_from(index.as_u32()).unwrap();
unsafe { &*self.imported_tables_ptr().add(index) }
}
fn imported_tables_ptr(&self) -> *mut VMTableImport {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
}
fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
let index = usize::try_from(index.as_u32()).unwrap();
unsafe { &*self.imported_memories_ptr().add(index) }
}
fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
}
fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
let index = usize::try_from(index.as_u32()).unwrap();
unsafe { &*self.imported_globals_ptr().add(index) }
}
fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
}
#[allow(dead_code)]
fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
unsafe { *self.table_ptr(index).as_ref() }
}
#[allow(dead_code)]
fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
unsafe {
*self.table_ptr(index).as_ptr() = *table;
}
}
fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
let index = usize::try_from(index.as_u32()).unwrap();
NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
}
fn tables_ptr(&self) -> *mut VMTableDefinition {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
}
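    /// Get the `VMMemoryDefinition` at `index`, local or imported.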
#[allow(dead_code)]
fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
if let Some(local_index) = self.module.local_memory_index(index) {
self.memory(local_index)
} else {
let import = self.imported_memory(index);
unsafe { *import.definition.as_ref() }
}
}
fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
unsafe { *self.memory_ptr(index).as_ref() }
}
#[allow(dead_code)]
fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
unsafe {
*self.memory_ptr(index).as_ptr() = *mem;
}
}
fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
let index = usize::try_from(index.as_u32()).unwrap();
NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
}
fn memories_ptr(&self) -> *mut VMMemoryDefinition {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
}
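    /// Get the `VMMemory` at `index`, local or imported.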
fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
if let Some(local_index) = self.module.local_memory_index(index) {
unsafe {
self.memories
.get(local_index)
.unwrap()
.get(self.context.as_ref().unwrap())
}
} else {
let import = self.imported_memory(index);
unsafe { import.handle.get(self.context.as_ref().unwrap()) }
}
}
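    /// Get a mutable reference to the `VMMemory` at `index`, local or
    /// imported.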
fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
if let Some(local_index) = self.module.local_memory_index(index) {
unsafe {
self.memories
.get_mut(local_index)
.unwrap()
.get_mut(self.context.as_mut().unwrap())
}
} else {
let import = self.imported_memory(index);
unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
}
}
fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
unsafe {
self.memories
.get_mut(local_index)
.unwrap()
.get_mut(self.context.as_mut().unwrap())
}
}
fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
unsafe { self.global_ptr(index).as_ref().clone() }
}
#[allow(dead_code)]
fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
unsafe {
*self.global_ptr(index).as_ptr() = global.clone();
}
}
fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
let index = usize::try_from(index.as_u32()).unwrap();
NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
}
fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
}
fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
}
fn vmctx(&self) -> &VMContext {
&self.vmctx
}
fn vmctx_ptr(&self) -> *mut VMContext {
self.vmctx() as *const VMContext as *mut VMContext
}
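    /// Invoke the WebAssembly start function of the instance, if one is
    /// present, catching any traps it raises.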
fn invoke_start_function(
&self,
config: &VMConfig,
trap_handler: Option<*const TrapHandlerFn<'static>>,
) -> Result<(), Trap> {
let start_index = match self.module.start_function {
Some(idx) => idx,
None => return Ok(()),
};
let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
Some(local_index) => {
let body = self
.functions
.get(local_index)
.expect("function index is out of bounds")
.0;
(
body as *const _,
VMFunctionContext {
vmctx: self.vmctx_ptr(),
},
)
}
None => {
assert_lt!(start_index.index(), self.module.num_imported_functions);
let import = self.imported_function(start_index);
(import.body, import.environment)
}
};
unsafe {
catch_traps(trap_handler, config, move || {
mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionContext)>(
callee_address,
)(callee_vmctx)
})
}
}
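    /// Return the offset of the `vmctx` field within an `Instance`; used to
    /// recover the `Instance` from a raw `VMContext` pointer.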
#[inline]
pub(crate) fn vmctx_offset() -> isize {
offset_of!(Self, vmctx) as isize
}
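    /// Recover the local table index of a `VMTableDefinition` that lives in
    /// this instance's `vmctx`.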
pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
let end: *const VMTableDefinition = table;
let index = LocalTableIndex::new(
(end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
);
assert_lt!(index.index(), self.tables.len());
index
}
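    /// Recover the local memory index of a `VMMemoryDefinition` that lives
    /// in this instance's `vmctx`.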
pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
let end: *const VMMemoryDefinition = memory;
let index = LocalMemoryIndex::new(
(end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
);
assert_lt!(index.index(), self.memories.len());
index
}
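    /// Grow the memory at `memory_index` by `delta` pages, returning the
    /// previous size in pages, or a `MemoryError` if growth fails.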
pub(crate) fn memory_grow<IntoPages>(
&mut self,
memory_index: LocalMemoryIndex,
delta: IntoPages,
) -> Result<Pages, MemoryError>
where
IntoPages: Into<Pages>,
{
let mem = *self
.memories
.get(memory_index)
.unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
mem.get_mut(self.context_mut()).grow(delta.into())
}
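    /// Grow an imported memory by `delta` pages, returning the previous
    /// size in pages.
    ///
    /// # Safety
    ///
    /// `memory_index` must refer to an imported memory of this instance;
    /// the index is not re-checked here.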
pub(crate) unsafe fn imported_memory_grow<IntoPages>(
&mut self,
memory_index: MemoryIndex,
delta: IntoPages,
) -> Result<Pages, MemoryError>
where
IntoPages: Into<Pages>,
{
let import = self.imported_memory(memory_index);
let mem = import.handle;
mem.get_mut(self.context_mut()).grow(delta.into())
}
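    /// Return the current size in pages of the memory at `memory_index`.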
pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
let mem = *self
.memories
.get(memory_index)
.unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
mem.get(self.context()).size()
}
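    /// Return the current size in pages of an imported memory.
    ///
    /// # Safety
    ///
    /// `memory_index` must refer to an imported memory of this instance.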
pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
let import = self.imported_memory(memory_index);
let mem = import.handle;
mem.get(self.context()).size()
}
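    /// Return the current number of elements in the table at `table_index`.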
pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
let table = self
.tables
.get(table_index)
.unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
table.get(self.context()).size()
}
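    /// Return the current number of elements in an imported table.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.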
pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
let import = self.imported_table(table_index);
let table = import.handle;
table.get(self.context()).size()
}
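    /// Grow the table at `table_index` by `delta` elements filled with
    /// `init_value`, returning the previous size, or `None` if the table
    /// cannot grow.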
pub(crate) fn table_grow(
&mut self,
table_index: LocalTableIndex,
delta: u32,
init_value: TableElement,
) -> Option<u32> {
let table = *self
.tables
.get(table_index)
.unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
table.get_mut(self.context_mut()).grow(delta, init_value)
}
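    /// Grow an imported table by `delta` elements filled with `init_value`.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.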
pub(crate) unsafe fn imported_table_grow(
&mut self,
table_index: TableIndex,
delta: u32,
init_value: TableElement,
) -> Option<u32> {
let import = self.imported_table(table_index);
let table = import.handle;
table.get_mut(self.context_mut()).grow(delta, init_value)
}
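    /// Get the element at `index` of the local table, or `None` if out of
    /// bounds.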
pub(crate) fn table_get(
&self,
table_index: LocalTableIndex,
index: u32,
) -> Option<TableElement> {
let table = self
.tables
.get(table_index)
.unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
table.get(self.context()).get(index)
}
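    /// Get the element at `index` of an imported table.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.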
pub(crate) unsafe fn imported_table_get(
&self,
table_index: TableIndex,
index: u32,
) -> Option<TableElement> {
let import = self.imported_table(table_index);
let table = import.handle;
table.get(self.context()).get(index)
}
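    /// Set the element at `index` of the local table, trapping if out of
    /// bounds.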
pub(crate) fn table_set(
&mut self,
table_index: LocalTableIndex,
index: u32,
val: TableElement,
) -> Result<(), Trap> {
let table = *self
.tables
.get(table_index)
.unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
table.get_mut(self.context_mut()).set(index, val)
}
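    /// Set the element at `index` of an imported table.
    ///
    /// # Safety
    ///
    /// `table_index` must refer to an imported table of this instance.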
pub(crate) unsafe fn imported_table_set(
&mut self,
table_index: TableIndex,
index: u32,
val: TableElement,
) -> Result<(), Trap> {
let import = self.imported_table(table_index);
let table = import.handle;
table.get_mut(self.context_mut()).set(index, val)
}
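    /// Get a `VMFuncRef` for the given function index, or `None` if the
    /// index is the reserved sentinel value.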
pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
if function_index == FunctionIndex::reserved_value() {
None
} else if let Some(local_function_index) = self.module.local_func_index(function_index) {
Some(VMFuncRef(NonNull::from(
&self.funcrefs[local_function_index],
)))
} else {
Some(VMFuncRef(self.imported_funcrefs[function_index]))
}
}
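    /// The `table.init` operation: copy `len` elements of the passive
    /// element segment `elem_index`, starting at `src`, into the table at
    /// `dst`, trapping if either range is out of bounds.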
pub(crate) fn table_init(
&mut self,
table_index: TableIndex,
elem_index: ElemIndex,
dst: u32,
src: u32,
len: u32,
) -> Result<(), Trap> {
let table = self.get_table_handle(table_index);
let table = unsafe { table.get_mut(&mut *self.context) };
let passive_elements = self.passive_elements.borrow();
let elem = passive_elements
.get(&elem_index)
.map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);
if src
.checked_add(len)
.map_or(true, |n| n as usize > elem.len())
|| dst.checked_add(len).map_or(true, |m| m > table.size())
{
return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
}
for (dst, src) in (dst..dst + len).zip(src..src + len) {
table
.set(dst, TableElement::FuncRef(elem[src as usize]))
.expect("should never panic because we already did the bounds check above");
}
Ok(())
}
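    /// The `table.fill` operation: set `len` elements starting at
    /// `start_index` to `item`, trapping if the range is out of bounds.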
pub(crate) fn table_fill(
&mut self,
table_index: TableIndex,
start_index: u32,
item: TableElement,
len: u32,
) -> Result<(), Trap> {
let table = self.get_table(table_index);
let table_size = table.size() as usize;
if start_index
.checked_add(len)
.map_or(true, |n| n as usize > table_size)
{
return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
}
for i in start_index..(start_index + len) {
table
.set(i, item.clone())
.expect("should never panic because we already did the bounds check above");
}
Ok(())
}
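    /// The `elem.drop` operation: drop the passive element segment so its
    /// memory can be reclaimed.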
pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
let mut passive_elements = self.passive_elements.borrow_mut();
passive_elements.remove(&elem_index);
}
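    /// The `memory.copy` operation for a locally-defined memory: copy `len`
    /// bytes from `src` to `dst`, trapping on out-of-bounds ranges.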
pub(crate) fn local_memory_copy(
&self,
memory_index: LocalMemoryIndex,
dst: u32,
src: u32,
len: u32,
) -> Result<(), Trap> {
let memory = self.memory(memory_index);
unsafe { memory_copy(&memory, dst, src, len) }
}
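    /// The `memory.copy` operation for an imported memory.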
pub(crate) fn imported_memory_copy(
&self,
memory_index: MemoryIndex,
dst: u32,
src: u32,
len: u32,
) -> Result<(), Trap> {
let import = self.imported_memory(memory_index);
let memory = unsafe { import.definition.as_ref() };
unsafe { memory_copy(memory, dst, src, len) }
}
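    /// The `memory.fill` operation for a locally-defined memory: set `len`
    /// bytes at `dst` to `val`, trapping on out-of-bounds ranges.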
pub(crate) fn local_memory_fill(
&self,
memory_index: LocalMemoryIndex,
dst: u32,
val: u32,
len: u32,
) -> Result<(), Trap> {
let memory = self.memory(memory_index);
unsafe { memory_fill(&memory, dst, val, len) }
}
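    /// The `memory.fill` operation for an imported memory.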
pub(crate) fn imported_memory_fill(
&self,
memory_index: MemoryIndex,
dst: u32,
val: u32,
len: u32,
) -> Result<(), Trap> {
let import = self.imported_memory(memory_index);
let memory = unsafe { import.definition.as_ref() };
unsafe { memory_fill(memory, dst, val, len) }
}
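    /// The `memory.init` operation: copy `len` bytes of the passive data
    /// segment `data_index`, starting at `src`, into the memory at `dst`,
    /// trapping if either range is out of bounds.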
pub(crate) fn memory_init(
&self,
memory_index: MemoryIndex,
data_index: DataIndex,
dst: u32,
src: u32,
len: u32,
) -> Result<(), Trap> {
let memory = self.get_vmmemory(memory_index);
let passive_data = self.passive_data.borrow();
let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);
let current_length = unsafe { memory.vmmemory().as_ref().current_length };
if src
.checked_add(len)
.map_or(true, |n| n as usize > data.len())
|| dst
.checked_add(len)
.map_or(true, |m| usize::try_from(m).unwrap() > current_length)
{
return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
}
let src_slice = &data[src as usize..(src + len) as usize];
unsafe { memory.initialize_with_data(dst as usize, src_slice) }
}
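    /// The `data.drop` operation: drop the passive data segment so its
    /// memory can be reclaimed.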
pub(crate) fn data_drop(&self, data_index: DataIndex) {
let mut passive_data = self.passive_data.borrow_mut();
passive_data.remove(&data_index);
}
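    /// Get a `VMTable` by index, resolving to either a local or an imported
    /// table.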
pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
if let Some(local_table_index) = self.module.local_table_index(table_index) {
self.get_local_table(local_table_index)
} else {
self.get_foreign_table(table_index)
}
}
pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
let table = self.tables[index];
table.get_mut(self.context_mut())
}
pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
let import = self.imported_table(index);
let table = import.handle;
table.get_mut(self.context_mut())
}
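    /// Get the store handle for a table, local or imported.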
pub(crate) fn get_table_handle(
&mut self,
table_index: TableIndex,
) -> InternalStoreHandle<VMTable> {
if let Some(local_table_index) = self.module.local_table_index(table_index) {
self.tables[local_table_index]
} else {
self.imported_table(table_index).handle
}
}
    /// Shared helper behind the `memory.atomic.wait{32,64}` paths: block on
    /// address `dst` until notified, or until `timeout` nanoseconds elapse
    /// (a negative `timeout` means wait forever).
    fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
        let location = NotifyLocation { address: dst };
        let timeout = if timeout < 0 {
            None
        } else {
            Some(std::time::Duration::from_nanos(timeout as u64))
        };
        match memory.do_wait(location, timeout) {
            Ok(count) => Ok(count),
            Err(_err) => {
                // `do_wait` failed (for example, too many waiters are queued
                // on this location); surface it as a trap.
                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
            }
        }
    }
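    /// The `memory.atomic.wait32` operation for a locally-defined memory:
    /// returns 0 if woken by a notify, 1 if the value at `dst` did not
    /// equal `val`, or 2 on timeout.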
pub(crate) fn local_memory_wait32(
&mut self,
memory_index: LocalMemoryIndex,
dst: u32,
val: u32,
timeout: i64,
) -> Result<u32, Trap> {
let memory = self.memory(memory_index);
let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
if let Ok(mut ret) = ret {
if ret == 0 {
let memory = self.get_local_vmmemory_mut(memory_index);
ret = Self::memory_wait(memory, dst, timeout)?;
}
Ok(ret)
} else {
ret
}
}
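    /// The `memory.atomic.wait32` operation for an imported memory.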
pub(crate) fn imported_memory_wait32(
&mut self,
memory_index: MemoryIndex,
dst: u32,
val: u32,
timeout: i64,
) -> Result<u32, Trap> {
let import = self.imported_memory(memory_index);
let memory = unsafe { import.definition.as_ref() };
let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
if let Ok(mut ret) = ret {
if ret == 0 {
let memory = self.get_vmmemory_mut(memory_index);
ret = Self::memory_wait(memory, dst, timeout)?;
}
Ok(ret)
} else {
ret
}
}
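    /// The `memory.atomic.wait64` operation for a locally-defined memory;
    /// same return convention as the 32-bit variant.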
pub(crate) fn local_memory_wait64(
&mut self,
memory_index: LocalMemoryIndex,
dst: u32,
val: u64,
timeout: i64,
) -> Result<u32, Trap> {
let memory = self.memory(memory_index);
let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
if let Ok(mut ret) = ret {
if ret == 0 {
let memory = self.get_local_vmmemory_mut(memory_index);
ret = Self::memory_wait(memory, dst, timeout)?;
}
Ok(ret)
} else {
ret
}
}
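    /// The `memory.atomic.wait64` operation for an imported memory.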
pub(crate) fn imported_memory_wait64(
&mut self,
memory_index: MemoryIndex,
dst: u32,
val: u64,
timeout: i64,
) -> Result<u32, Trap> {
let import = self.imported_memory(memory_index);
let memory = unsafe { import.definition.as_ref() };
let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
if let Ok(mut ret) = ret {
if ret == 0 {
let memory = self.get_vmmemory_mut(memory_index);
ret = Self::memory_wait(memory, dst, timeout)?;
}
Ok(ret)
} else {
ret
}
}
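    /// The `memory.atomic.notify` operation for a locally-defined memory:
    /// wake up to `count` waiters at `dst`, returning the number actually
    /// woken.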
pub(crate) fn local_memory_notify(
&mut self,
memory_index: LocalMemoryIndex,
dst: u32,
count: u32,
) -> Result<u32, Trap> {
let memory = self.get_local_vmmemory_mut(memory_index);
let location = NotifyLocation { address: dst };
Ok(memory.do_notify(location, count))
}
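    /// The `memory.atomic.notify` operation for an imported memory.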
pub(crate) fn imported_memory_notify(
&mut self,
memory_index: MemoryIndex,
dst: u32,
count: u32,
) -> Result<u32, Trap> {
let memory = self.get_vmmemory_mut(memory_index);
let location = NotifyLocation { address: dst };
Ok(memory.do_notify(location, count))
}
}
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public-facing facade around the private
/// `Instance`, providing the higher-level API.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary, since `vmctx` is
    /// dynamically sized).
    instance_layout: Layout,
    /// The `Instance` itself, allocated by an `InstanceAllocator` and
    /// deallocated in `Drop`.
    instance: NonNull<Instance>,
}
impl Drop for VMInstance {
fn drop(&mut self) {
let instance_ptr = self.instance.as_ptr();
unsafe {
instance_ptr.drop_in_place();
std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
}
}
}
impl VMInstance {
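    /// Create a new `VMInstance` backed by an `Instance` placed in the
    /// allocation owned by `allocator`, then initialize its trailing
    /// `vmctx` data from the given imports, signatures, and definitions.
    ///
    /// # Safety
    ///
    /// The caller must provide imports, memories, tables, globals, and
    /// signatures consistent with `module`; the `vmctx` region is written
    /// through raw pointers and nothing here is re-validated.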
#[allow(clippy::too_many_arguments)]
pub unsafe fn new(
allocator: InstanceAllocator,
module: Arc<ModuleInfo>,
context: &mut StoreObjects,
finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
imports: Imports,
vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
) -> Result<Self, Trap> {
let vmctx_globals = finished_globals
.values()
.map(|m| m.get(context).vmglobal())
.collect::<PrimaryMap<LocalGlobalIndex, _>>()
.into_boxed_slice();
let passive_data = RefCell::new(
module
.passive_data
.clone()
.into_iter()
.map(|(idx, bytes)| (idx, Arc::from(bytes)))
.collect::<HashMap<_, _>>(),
);
let handle = {
let offsets = allocator.offsets().clone();
let funcrefs = PrimaryMap::new().into_boxed_slice();
let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
let instance = Instance {
module,
context,
offsets,
memories: finished_memories,
tables: finished_tables,
globals: finished_globals,
functions: finished_functions,
function_call_trampolines: finished_function_call_trampolines,
passive_elements: Default::default(),
passive_data,
funcrefs,
imported_funcrefs,
vmctx: VMContext {},
};
let mut instance_handle = allocator.into_vminstance(instance);
{
let instance = instance_handle.instance_mut();
let vmctx_ptr = instance.vmctx_ptr();
(instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
&instance.module,
context,
&imports,
&instance.functions,
&vmshared_signatures,
&instance.function_call_trampolines,
vmctx_ptr,
);
}
instance_handle
};
let instance = handle.instance();
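        // From here on, initialize the trailing `vmctx` data through raw
        // pointers: shared signature ids, the four kinds of imports,
        // pointers to the global definitions, and the builtin-functions
        // array.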
ptr::copy(
vmshared_signatures.values().as_slice().as_ptr(),
instance.signature_ids_ptr(),
vmshared_signatures.len(),
);
ptr::copy(
imports.functions.values().as_slice().as_ptr(),
instance.imported_functions_ptr(),
imports.functions.len(),
);
ptr::copy(
imports.tables.values().as_slice().as_ptr(),
instance.imported_tables_ptr(),
imports.tables.len(),
);
ptr::copy(
imports.memories.values().as_slice().as_ptr(),
instance.imported_memories_ptr(),
imports.memories.len(),
);
ptr::copy(
imports.globals.values().as_slice().as_ptr(),
instance.imported_globals_ptr(),
imports.globals.len(),
);
ptr::copy(
vmctx_globals.values().as_slice().as_ptr(),
instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
vmctx_globals.len(),
);
ptr::write(
instance.builtin_functions_ptr(),
VMBuiltinFunctionsArray::initialized(),
);
initialize_passive_elements(instance);
initialize_globals(instance);
Ok(handle)
}
pub(crate) fn instance(&self) -> &Instance {
unsafe { self.instance.as_ref() }
}
pub(crate) fn instance_mut(&mut self) -> &mut Instance {
unsafe { self.instance.as_mut() }
}
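    /// Finish the instantiation started by `VMInstance::new`: initialize
    /// tables and memories from their initializers, then invoke the start
    /// function, if any.
    ///
    /// # Safety
    ///
    /// Only safe to call once, immediately after instantiation.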
pub unsafe fn finish_instantiation(
&mut self,
config: &VMConfig,
trap_handler: Option<*const TrapHandlerFn<'static>>,
data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
let instance = self.instance_mut();
initialize_tables(instance)?;
initialize_memories(instance, data_initializers)?;
instance.invoke_start_function(config, trap_handler)?;
Ok(())
}
pub fn vmctx(&self) -> &VMContext {
self.instance().vmctx()
}
pub fn vmctx_ptr(&self) -> *mut VMContext {
self.instance().vmctx_ptr()
}
pub fn vmoffsets(&self) -> &VMOffsets {
self.instance().offsets()
}
pub fn module(&self) -> &Arc<ModuleInfo> {
self.instance().module()
}
pub fn module_ref(&self) -> &ModuleInfo {
self.instance().module_ref()
}
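    /// Lookup an exported entity (function, table, memory, or global) by
    /// name.
    ///
    /// A minimal usage sketch; `vm_instance` is a hypothetical `VMInstance`
    /// built via `VMInstance::new` (note that `lookup` takes `&mut self`):
    ///
    /// ```ignore
    /// if let Some(VMExtern::Memory(handle)) = vm_instance.lookup("memory") {
    ///     // `handle` is an `InternalStoreHandle<VMMemory>` valid for the
    ///     // store this instance lives in.
    /// }
    /// ```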
pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
let export = *self.module_ref().exports.get(field)?;
Some(self.lookup_by_declaration(export))
}
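    /// Lookup an export by its `ExportIndex` declaration; for locally
    /// defined functions this creates a fresh store handle on the fly.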
pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
let instance = self.instance();
match export {
ExportIndex::Function(index) => {
let sig_index = &instance.module.functions[index];
let handle = if let Some(def_index) = instance.module.local_func_index(index) {
let signature = instance.module.signatures[*sig_index].clone();
let vm_function = VMFunction {
anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
&instance.funcrefs[def_index],
)),
signature,
kind: VMFunctionKind::Static,
host_data: Box::new(()),
};
InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
} else {
let import = instance.imported_function(index);
import.handle
};
VMExtern::Function(handle)
}
ExportIndex::Table(index) => {
let handle = if let Some(def_index) = instance.module.local_table_index(index) {
instance.tables[def_index]
} else {
let import = instance.imported_table(index);
import.handle
};
VMExtern::Table(handle)
}
ExportIndex::Memory(index) => {
let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
instance.memories[def_index]
} else {
let import = instance.imported_memory(index);
import.handle
};
VMExtern::Memory(handle)
}
ExportIndex::Global(index) => {
let handle = if let Some(def_index) = instance.module.local_global_index(index) {
instance.globals[def_index]
} else {
let import = instance.imported_global(index);
import.handle
};
VMExtern::Global(handle)
}
}
}
pub fn exports(&self) -> indexmap::map::Iter<String, ExportIndex> {
self.module().exports.iter()
}
pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
self.instance().memory_index(memory)
}
pub fn memory_grow<IntoPages>(
&mut self,
memory_index: LocalMemoryIndex,
delta: IntoPages,
) -> Result<Pages, MemoryError>
where
IntoPages: Into<Pages>,
{
self.instance_mut().memory_grow(memory_index, delta)
}
pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
self.instance().table_index(table)
}
pub fn table_grow(
&mut self,
table_index: LocalTableIndex,
delta: u32,
init_value: TableElement,
) -> Option<u32> {
self.instance_mut()
.table_grow(table_index, delta, init_value)
}
pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
self.instance().table_get(table_index, index)
}
pub fn table_set(
&mut self,
table_index: LocalTableIndex,
index: u32,
val: TableElement,
) -> Result<(), Trap> {
self.instance_mut().table_set(table_index, index, val)
}
pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
self.instance_mut().get_local_table(index)
}
}
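/// Compute the effective start offset of a memory data initializer, adding
/// the value of its global base, if any.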
fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
let mut start = init.location.offset;
if let Some(base) = init.location.base {
let val = unsafe {
if let Some(def_index) = instance.module.local_global_index(base) {
instance.global(def_index).val.u32
} else {
instance.imported_global(base).definition.as_ref().val.u32
}
};
start += usize::try_from(val).unwrap();
}
start
}
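/// Return a byte slice over the entire memory targeted by a data
/// initializer.
///
/// # Safety
///
/// The returned slice aliases the memory's backing storage; the caller must
/// ensure there is no concurrent access while it is held.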
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
unsafe fn get_memory_slice<'instance>(
init: &DataInitializer<'_>,
instance: &'instance Instance,
) -> &'instance mut [u8] {
let memory = if let Some(local_memory_index) = instance
.module
.local_memory_index(init.location.memory_index)
{
instance.memory(local_memory_index)
} else {
let import = instance.imported_memory(init.location.memory_index);
*import.definition.as_ref()
};
slice::from_raw_parts_mut(memory.base, memory.current_length)
}
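/// Compute the effective start offset of a table initializer, adding the
/// value of its global base, if any.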
fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
let mut start = init.offset;
if let Some(base) = init.base {
let val = unsafe {
if let Some(def_index) = instance.module.local_global_index(base) {
instance.global(def_index).val.u32
} else {
instance.imported_global(base).definition.as_ref().val.u32
}
};
start += usize::try_from(val).unwrap();
}
start
}
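/// Initialize the tables from the module's table initializers, trapping on
/// any out-of-bounds segment.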
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
let module = Arc::clone(&instance.module);
for init in &module.table_initializers {
let start = get_table_init_start(init, instance);
let table = instance.get_table_handle(init.table_index);
let table = unsafe { table.get_mut(&mut *instance.context) };
if start
.checked_add(init.elements.len())
.map_or(true, |end| end > table.size() as usize)
{
return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
}
if let wasmer_types::Type::FuncRef = table.ty().ty {
for (i, func_idx) in init.elements.iter().enumerate() {
let anyfunc = instance.func_ref(*func_idx);
table
.set(
u32::try_from(start + i).unwrap(),
TableElement::FuncRef(anyfunc),
)
.unwrap();
}
} else {
for i in 0..init.elements.len() {
table
.set(
u32::try_from(start + i).unwrap(),
TableElement::ExternRef(None),
)
.unwrap();
}
}
}
Ok(())
}
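/// Initialize the `Instance::passive_elements` map by resolving the
/// `ModuleInfo::passive_elements`' function indices into func refs.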
fn initialize_passive_elements(instance: &Instance) {
let mut passive_elements = instance.passive_elements.borrow_mut();
debug_assert!(
passive_elements.is_empty(),
"should only be called once, at initialization time"
);
passive_elements.extend(
instance
.module
.passive_elements
.iter()
.filter(|(_, segments)| !segments.is_empty())
.map(|(idx, segments)| {
(
*idx,
segments.iter().map(|s| instance.func_ref(*s)).collect(),
)
}),
);
}
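/// Initialize the memories from the provided data initializers, trapping on
/// any out-of-bounds segment.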
fn initialize_memories(
instance: &mut Instance,
data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
for init in data_initializers {
let memory = instance.get_vmmemory(init.location.memory_index);
let start = get_memory_init_start(init, instance);
unsafe {
let current_length = memory.vmmemory().as_ref().current_length;
if start
.checked_add(init.data.len())
.map_or(true, |end| end > current_length)
{
return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
}
memory.initialize_with_data(start, init.data)?;
}
}
Ok(())
}
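/// Initialize the globals from the module's global initializer expressions.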
fn initialize_globals(instance: &Instance) {
let module = Arc::clone(&instance.module);
for (index, initializer) in module.global_initializers.iter() {
unsafe {
let to = instance.global_ptr(index).as_ptr();
match initializer {
GlobalInit::I32Const(x) => (*to).val.i32 = *x,
GlobalInit::I64Const(x) => (*to).val.i64 = *x,
GlobalInit::F32Const(x) => (*to).val.f32 = *x,
GlobalInit::F64Const(x) => (*to).val.f64 = *x,
GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
GlobalInit::GetGlobal(x) => {
let from: VMGlobalDefinition =
if let Some(def_x) = module.local_global_index(*x) {
instance.global(def_x)
} else {
instance.imported_global(*x).definition.as_ref().clone()
};
*to = from;
}
GlobalInit::RefNullConst => (*to).val.funcref = 0,
GlobalInit::RefFunc(func_idx) => {
let funcref = instance.func_ref(*func_idx).unwrap();
(*to).val = funcref.into_raw();
}
}
}
}
}
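/// Build the `VMCallerCheckedAnyfunc` arrays for both locally-defined and
/// imported functions, giving every function a stable func-ref address.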
fn build_funcrefs(
module_info: &ModuleInfo,
ctx: &StoreObjects,
imports: &Imports,
finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
vmctx_ptr: *mut VMContext,
) -> (
BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
) {
let mut func_refs =
PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
for import in imports.functions.values() {
imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
}
for (local_index, func_ptr) in finished_functions.iter() {
let index = module_info.func_index(local_index);
let sig_index = module_info.functions[index];
let type_index = vmshared_signatures[sig_index];
let call_trampoline = function_call_trampolines[sig_index];
let anyfunc = VMCallerCheckedAnyfunc {
func_ptr: func_ptr.0,
type_index,
vmctx: VMFunctionContext { vmctx: vmctx_ptr },
call_trampoline,
};
func_refs.push(anyfunc);
}
(
func_refs.into_boxed_slice(),
imported_func_refs.into_boxed_slice(),
)
}