diff --git a/lib/environ/src/func_environ.rs b/lib/environ/src/func_environ.rs index 7d76e84997..42e1e10d58 100644 --- a/lib/environ/src/func_environ.rs +++ b/lib/environ/src/func_environ.rs @@ -2,7 +2,7 @@ use cast; use cranelift_codegen::cursor::FuncCursor; use cranelift_codegen::ir; use cranelift_codegen::ir::condcodes::*; -use cranelift_codegen::ir::immediates::{Offset32, Uimm64}; +use cranelift_codegen::ir::immediates::{Imm64, Offset32, Uimm64}; use cranelift_codegen::ir::types::*; use cranelift_codegen::ir::{ AbiParam, ArgumentPurpose, ExtFuncData, FuncRef, Function, InstBuilder, Signature, @@ -60,6 +60,8 @@ pub struct FuncEnvironment<'module_environment> { vmctx: Option, /// The Cranelift global holding the base address of the signature IDs vector. + /// TODO: Now that the bases are just offsets from vmctx rather than loads, we + /// can eliminate these base variables. signature_ids_base: Option, /// The Cranelift global holding the base address of the imported functions table. @@ -121,7 +123,7 @@ impl<'module_environment> FuncEnvironment<'module_environment> { imported_memory32_size_extfunc: None, memory_grow_extfunc: None, imported_memory_grow_extfunc: None, - offsets: VMOffsets::new(target_config.pointer_bytes()), + offsets: VMOffsets::new(target_config.pointer_bytes(), module), } } @@ -141,11 +143,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { self.imported_functions_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_imported_functions())), + offset: Imm64::new(self.offsets.vmctx_imported_functions()), global_type: pointer_type, - readonly: true, }); self.imported_functions_base = Some(new_base); new_base @@ -156,11 +157,10 @@ impl<'module_environment> 
FuncEnvironment<'module_environment> { self.imported_tables_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_imported_tables())), + offset: Imm64::new(self.offsets.vmctx_imported_tables()), global_type: pointer_type, - readonly: true, }); self.imported_tables_base = Some(new_base); new_base @@ -171,11 +171,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { self.imported_memories_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_imported_memories())), + offset: Imm64::new(self.offsets.vmctx_imported_memories()), global_type: pointer_type, - readonly: true, }); self.imported_memories_base = Some(new_base); new_base @@ -186,11 +185,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { self.imported_globals_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_imported_globals())), + offset: Imm64::new(self.offsets.vmctx_imported_globals()), global_type: pointer_type, - readonly: true, }); self.imported_globals_base = Some(new_base); new_base @@ -201,11 +199,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { self.tables_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = 
func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_tables())), + offset: Imm64::new(self.offsets.vmctx_tables()), global_type: pointer_type, - readonly: true, }); self.tables_base = Some(new_base); new_base @@ -216,11 +213,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { self.memories_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_memories())), + offset: Imm64::new(self.offsets.vmctx_memories()), global_type: pointer_type, - readonly: true, }); self.memories_base = Some(new_base); new_base @@ -231,11 +227,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { self.globals_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_globals())), + offset: Imm64::new(self.offsets.vmctx_globals()), global_type: pointer_type, - readonly: true, }); self.globals_base = Some(new_base); new_base @@ -246,11 +241,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { self.signature_ids_base.unwrap_or_else(|| { let pointer_type = self.pointer_type(); let vmctx = self.vmctx(func); - let new_base = func.create_global_value(ir::GlobalValueData::Load { + let new_base = func.create_global_value(ir::GlobalValueData::IAddImm { base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_signature_ids())), + offset: Imm64::new(self.offsets.vmctx_signature_ids()), global_type: pointer_type, - 
readonly: true, }); self.signature_ids_base = Some(new_base); new_base diff --git a/lib/environ/src/vmoffsets.rs b/lib/environ/src/vmoffsets.rs index 3fc1974de4..7757fc528e 100644 --- a/lib/environ/src/vmoffsets.rs +++ b/lib/environ/src/vmoffsets.rs @@ -2,21 +2,50 @@ //! module. use cranelift_codegen::ir; +use cranelift_entity::EntityRef; use cranelift_wasm::{ DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, GlobalIndex, MemoryIndex, - TableIndex, + SignatureIndex, TableIndex, }; +use module::Module; /// This class computes offsets to fields within `VMContext` and other /// related structs that JIT code accesses directly. pub struct VMOffsets { - pointer_size: u8, + /// The size in bytes of a pointer on the target. + pub pointer_size: u8, + /// The number of signature declarations in the module. + pub num_signature_ids: u64, + /// The number of imported functions in the module. + pub num_imported_functions: u64, + /// The number of imported tables in the module. + pub num_imported_tables: u64, + /// The number of imported memories in the module. + pub num_imported_memories: u64, + /// The number of imported globals in the module. + pub num_imported_globals: u64, + /// The number of defined tables in the module. + pub num_defined_tables: u64, + /// The number of defined memories in the module. + pub num_defined_memories: u64, + /// The number of defined globals in the module. + pub num_defined_globals: u64, } impl VMOffsets { /// Return a new `VMOffsets` instance, for a given pointer size. 
- pub fn new(pointer_size: u8) -> Self { - Self { pointer_size } + pub fn new(pointer_size: u8, module: &Module) -> Self { + Self { + pointer_size, + num_signature_ids: module.signatures.len() as u64, + num_imported_functions: module.imported_funcs.len() as u64, + num_imported_tables: module.imported_tables.len() as u64, + num_imported_memories: module.imported_memories.len() as u64, + num_imported_globals: module.imported_globals.len() as u64, + num_defined_tables: module.table_plans.len() as u64, + num_defined_memories: module.memory_plans.len() as u64, + num_defined_globals: module.globals.len() as u64, + } } } @@ -208,55 +237,76 @@ impl VMOffsets { /// Offsets for `VMContext`. impl VMOffsets { /// The offset of the `signature_ids` field. - pub fn vmctx_signature_ids(&self) -> u8 { - 0 * self.pointer_size + pub fn vmctx_signature_ids(&self) -> i64 { + 0 } /// The offset of the `tables` field. #[allow(clippy::erasing_op)] - pub fn vmctx_imported_functions(&self) -> u8 { - 1 * self.pointer_size + pub fn vmctx_imported_functions(&self) -> i64 { + self.vmctx_signature_ids() + + self.num_signature_ids as i64 * i64::from(self.size_of_vmshared_signature_index()) } /// The offset of the `tables` field. #[allow(clippy::identity_op)] - pub fn vmctx_imported_tables(&self) -> u8 { - 2 * self.pointer_size + pub fn vmctx_imported_tables(&self) -> i64 { + self.vmctx_imported_functions() + + self.num_imported_functions as i64 * i64::from(self.size_of_vmfunction_import()) } /// The offset of the `memories` field. - pub fn vmctx_imported_memories(&self) -> u8 { - 3 * self.pointer_size + pub fn vmctx_imported_memories(&self) -> i64 { + self.vmctx_imported_tables() + + self.num_imported_tables as i64 * i64::from(self.size_of_vmtable_import()) } /// The offset of the `globals` field. 
- pub fn vmctx_imported_globals(&self) -> u8 { - 4 * self.pointer_size + pub fn vmctx_imported_globals(&self) -> i64 { + self.vmctx_imported_memories() + + self.num_imported_memories as i64 * i64::from(self.size_of_vmmemory_import()) } /// The offset of the `tables` field. - pub fn vmctx_tables(&self) -> u8 { - 5 * self.pointer_size + pub fn vmctx_tables(&self) -> i64 { + self.vmctx_imported_globals() + + self.num_imported_globals as i64 * i64::from(self.size_of_vmglobal_import()) } /// The offset of the `memories` field. - pub fn vmctx_memories(&self) -> u8 { - 6 * self.pointer_size + pub fn vmctx_memories(&self) -> i64 { + self.vmctx_tables() + + self.num_defined_tables as i64 * i64::from(self.size_of_vmtable_definition()) } /// The offset of the `globals` field. - pub fn vmctx_globals(&self) -> u8 { - 7 * self.pointer_size + pub fn vmctx_globals(&self) -> i64 { + self.vmctx_memories() + + self.num_defined_memories as i64 * i64::from(self.size_of_vmmemory_definition()) } - /// Return the size of `VMContext`. + /// Return the size of the `VMContext` allocation. #[allow(dead_code)] - pub fn size_of_vmctx(&self) -> u8 { - 8 * self.pointer_size + pub fn size_of_vmctx(&self) -> i64 { + self.vmctx_globals() + + self.num_defined_globals as i64 * i64::from(self.size_of_vmglobal_definition()) + } + + /// Return the offset from the `signature_ids` pointer to `VMSharedSignatureId` index `index`. + pub fn index_vmshared_signature_id(&self, index: SignatureIndex) -> i32 { + assert!((index.index() as u64) < self.num_signature_ids); + cast::i32( + index + .as_u32() + .checked_mul(u32::from(self.size_of_vmshared_signature_index())) + .unwrap(), + ) + .unwrap() } /// Return the offset from the `imported_functions` pointer to `VMFunctionImport` index `index`. 
- fn index_vmfunction_import(&self, index: FuncIndex) -> i32 { + pub fn index_vmfunction_import(&self, index: FuncIndex) -> i32 { + assert!((index.index() as u64) < self.num_imported_functions); cast::i32( index .as_u32() @@ -267,7 +317,8 @@ impl VMOffsets { } /// Return the offset from the `imported_tables` pointer to `VMTableImport` index `index`. - fn index_vmtable_import(&self, index: TableIndex) -> i32 { + pub fn index_vmtable_import(&self, index: TableIndex) -> i32 { + assert!((index.index() as u64) < self.num_imported_tables); cast::i32( index .as_u32() @@ -277,19 +328,9 @@ impl VMOffsets { .unwrap() } - /// Return the offset from the `tables` pointer to `VMTableDefinition` index `index`. - fn index_vmtable_definition(&self, index: DefinedTableIndex) -> i32 { - cast::i32( - index - .as_u32() - .checked_mul(u32::from(self.size_of_vmtable_definition())) - .unwrap(), - ) - .unwrap() - } - /// Return the offset from the `imported_memories` pointer to `VMMemoryImport` index `index`. - fn index_vmmemory_import(&self, index: MemoryIndex) -> i32 { + pub fn index_vmmemory_import(&self, index: MemoryIndex) -> i32 { + assert!((index.index() as u64) < self.num_imported_memories); cast::i32( index .as_u32() @@ -299,8 +340,33 @@ impl VMOffsets { .unwrap() } + /// Return the offset from the `imported_globals` pointer to `VMGlobalImport` index `index`. + pub fn index_vmglobal_import(&self, index: GlobalIndex) -> i32 { + assert!((index.index() as u64) < self.num_imported_globals); + cast::i32( + index + .as_u32() + .checked_mul(u32::from(self.size_of_vmglobal_import())) + .unwrap(), + ) + .unwrap() + } + + /// Return the offset from the `tables` pointer to `VMTableDefinition` index `index`. 
+ pub fn index_vmtable_definition(&self, index: DefinedTableIndex) -> i32 { + assert!((index.index() as u64) < self.num_defined_tables); + cast::i32( + index + .as_u32() + .checked_mul(u32::from(self.size_of_vmtable_definition())) + .unwrap(), + ) + .unwrap() + } + /// Return the offset from the `memories` pointer to `VMMemoryDefinition` index `index`. - fn index_vmmemory_definition(&self, index: DefinedMemoryIndex) -> i32 { + pub fn index_vmmemory_definition(&self, index: DefinedMemoryIndex) -> i32 { + assert!((index.index() as u64) < self.num_defined_memories); cast::i32( index .as_u32() @@ -310,12 +376,14 @@ impl VMOffsets { .unwrap() } - /// Return the offset from the `imported_globals` pointer to `VMGlobalImport` index `index`. - fn index_vmglobal_import(&self, index: GlobalIndex) -> i32 { + /// Return the offset from the `globals` pointer to the `VMGlobalDefinition` + /// index `index`. + pub fn index_vmglobal_definition(&self, index: DefinedGlobalIndex) -> i32 { + assert!((index.index() as u64) < self.num_defined_globals); cast::i32( index .as_u32() - .checked_mul(u32::from(self.size_of_vmglobal_import())) + .checked_mul(u32::from(self.size_of_vmglobal_definition())) .unwrap(), ) .unwrap() @@ -400,16 +468,4 @@ impl VMOffsets { .checked_add(i32::from(self.vmglobal_import_from())) .unwrap() } - - /// Return the offset from the `globals` pointer to the `VMGlobalDefinition` - /// index `index`. 
- pub fn index_vmglobal_definition(&self, index: DefinedGlobalIndex) -> i32 { - cast::i32( - index - .as_u32() - .checked_mul(u32::from(self.size_of_vmglobal_definition())) - .unwrap(), - ) - .unwrap() - } } diff --git a/lib/jit/src/instantiate.rs b/lib/jit/src/instantiate.rs index 14a94c106d..02076877f5 100644 --- a/lib/jit/src/instantiate.rs +++ b/lib/jit/src/instantiate.rs @@ -159,7 +159,7 @@ impl CompiledModule { /// Note that if only one instance of this module is needed, it may be more /// efficient to call the top-level `instantiate`, since that avoids copying /// the data initializers. - pub fn instantiate(&mut self) -> Result, InstantiationError> { + pub fn instantiate(&mut self) -> Result { let data_initializers = self .data_initializers .iter() @@ -205,7 +205,7 @@ pub fn instantiate( compiler: &mut Compiler, data: &[u8], resolver: &mut Resolver, -) -> Result, SetupError> { +) -> Result { let raw = RawCompiledModule::new(compiler, data, resolver)?; Instance::new( diff --git a/lib/jit/src/namespace.rs b/lib/jit/src/namespace.rs index 93e4dce619..f4a0c004b5 100644 --- a/lib/jit/src/namespace.rs +++ b/lib/jit/src/namespace.rs @@ -7,7 +7,6 @@ use action::{ActionError, ActionOutcome, RuntimeValue}; use compiler::Compiler; use cranelift_entity::PrimaryMap; use resolver::Resolver; -use std::boxed::Box; use std::collections::HashMap; use std::string::String; use wasmtime_runtime::{Export, Instance}; @@ -26,7 +25,7 @@ pub struct Namespace { names: HashMap, /// The instances, available by index. - instances: PrimaryMap>, + instances: PrimaryMap, } impl Namespace { @@ -40,11 +39,7 @@ impl Namespace { /// Install a new `Instance` in this `Namespace`, optionally with the /// given name, and return its index. 
- pub fn instance( - &mut self, - instance_name: Option<&str>, - instance: Box, - ) -> InstanceIndex { + pub fn instance(&mut self, instance_name: Option<&str>, instance: Instance) -> InstanceIndex { let index = self.instances.push(instance); if let Some(instance_name) = instance_name { self.names.insert(instance_name.into(), index); diff --git a/lib/runtime/src/instance.rs b/lib/runtime/src/instance.rs index 1b08923f72..211f4f9be4 100644 --- a/lib/runtime/src/instance.rs +++ b/lib/runtime/src/instance.rs @@ -10,10 +10,12 @@ use cranelift_wasm::{ use export::Export; use imports::Imports; use memory::LinearMemory; +use mmap::Mmap; use signalhandlers::{wasmtime_init_eager, wasmtime_init_finish}; use std::rc::Rc; use std::slice; use std::string::String; +use std::{mem, ptr}; use table::Table; use traphandlers::wasmtime_call; use vmcontext::{ @@ -21,213 +23,248 @@ use vmcontext::{ VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, }; -use wasmtime_environ::{DataInitializer, Module}; +use wasmtime_environ::{DataInitializer, Module, VMOffsets}; + +fn signature_id( + vmctx: &VMContext, + offsets: &VMOffsets, + index: SignatureIndex, +) -> VMSharedSignatureIndex { + #[allow(clippy::cast_ptr_alignment)] + unsafe { + let ptr = (vmctx as *const VMContext as *const u8).add( + offsets.vmctx_signature_ids() as usize + + offsets.index_vmshared_signature_id(index) as usize, + ); + *(ptr as *const VMSharedSignatureIndex) + } +} + +fn imported_function<'vmctx>( + vmctx: &'vmctx VMContext, + offsets: &VMOffsets, + index: FuncIndex, +) -> &'vmctx VMFunctionImport { + #[allow(clippy::cast_ptr_alignment)] + unsafe { + let ptr = (vmctx as *const VMContext as *const u8).add( + offsets.vmctx_imported_functions() as usize + + offsets.index_vmfunction_import(index) as usize, + ); + &*(ptr as *const VMFunctionImport) + } +} + +/// The actual contents of an instance. 
+/// +/// `Instance` is just a handle containing a pointer to an `InstanceContents`, +/// which is specially allocated. +/// +/// This is repr(C) to ensure that the vmctx field is last. +#[repr(C)] +pub(crate) struct InstanceContents { + /// Offsets in the `vmctx` region. + offsets: VMOffsets, -/// The runtime state of an `Instance`. -#[derive(Debug)] -struct State { /// WebAssembly linear memory data. memories: BoxedSlice, /// WebAssembly table data. tables: BoxedSlice, - /// Function Signature IDs. - vmshared_signatures: BoxedSlice, - - /// Resolved imports. - vmctx_imports: Imports, - /// Pointers to functions in executable memory. finished_functions: BoxedSlice, - /// Table storage base address vector pointed to by vmctx. - vmctx_tables: BoxedSlice, - - /// Memory base address vector pointed to by vmctx. - vmctx_memories: BoxedSlice, - - /// WebAssembly global variable data. - vmctx_globals: BoxedSlice, - /// Context pointer used by compiled wasm code. vmctx: VMContext, } -impl State { +#[allow(clippy::cast_ptr_alignment)] +impl InstanceContents { + /// Return the indexed `VMSharedSignatureIndex`. + #[allow(dead_code)] + fn signature_id(&self, index: SignatureIndex) -> VMSharedSignatureIndex { + signature_id(&self.vmctx, &self.offsets, index) + } + + /// Return a pointer to the `VMSharedSignatureIndex`s. + fn signature_ids_ptr(&mut self) -> *mut VMSharedSignatureIndex { + unsafe { + (&mut self.vmctx as *mut VMContext as *mut u8) + .add(self.offsets.vmctx_signature_ids() as usize) + as *mut VMSharedSignatureIndex + } + } + /// Return the indexed `VMFunctionImport`. fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport { - assert!(index.index() < self.vmctx_imports.functions.len()); - unsafe { self.vmctx.imported_function(index) } + imported_function(&self.vmctx, &self.offsets, index) } - /// Return a reference to imported table `index`. + /// Return a pointer to the `VMFunctionImport`s. 
+ fn imported_functions_ptr(&mut self) -> *mut VMFunctionImport { + unsafe { + (&mut self.vmctx as *mut VMContext as *mut u8) + .add(self.offsets.vmctx_imported_functions() as usize) + as *mut VMFunctionImport + } + } + + /// Return the indexed `VMTableImport`. fn imported_table(&self, index: TableIndex) -> &VMTableImport { - assert!(index.index() < self.vmctx_imports.tables.len()); - unsafe { self.vmctx.imported_table(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *const u8).add( + self.offsets.vmctx_imported_tables() as usize + + self.offsets.index_vmtable_import(index) as usize, + ); + &*(ptr as *const VMTableImport) + } } - /// Return a reference to imported memory `index`. + /// Return a pointer to the `VMTableImport`s. + fn imported_tables_ptr(&mut self) -> *mut VMTableImport { + unsafe { + (&mut self.vmctx as *mut VMContext as *mut u8) + .add(self.offsets.vmctx_imported_tables() as usize) + as *mut VMTableImport + } + } + + /// Return the indexed `VMMemoryImport`. fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport { - assert!(index.index() < self.vmctx_imports.memories.len()); - unsafe { self.vmctx.imported_memory(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *const u8).add( + self.offsets.vmctx_imported_memories() as usize + + self.offsets.index_vmmemory_import(index) as usize, + ); + &*(ptr as *const VMMemoryImport) + } } - /// Return a reference to imported global `index`. + /// Return a pointer to the `VMMemoryImport`s. + fn imported_memories_ptr(&mut self) -> *mut VMMemoryImport { + unsafe { + (&mut self.vmctx as *mut VMContext as *mut u8) + .add(self.offsets.vmctx_imported_memories() as usize) + as *mut VMMemoryImport + } + } + + /// Return the indexed `VMGlobalImport`. 
fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport { - assert!(index.index() < self.vmctx_imports.globals.len()); - unsafe { self.vmctx.imported_global(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *const u8).add( + self.offsets.vmctx_imported_globals() as usize + + self.offsets.index_vmglobal_import(index) as usize, + ); + &*(ptr as *const VMGlobalImport) + } } - /// Return a reference to locally-defined table `index`. + /// Return a pointer to the `VMGlobalImport`s. + fn imported_globals_ptr(&mut self) -> *mut VMGlobalImport { + unsafe { + (&mut self.vmctx as *mut VMContext as *mut u8) + .add(self.offsets.vmctx_imported_globals() as usize) + as *mut VMGlobalImport + } + } + + /// Return the indexed `VMTableDefinition`. #[allow(dead_code)] fn table(&self, index: DefinedTableIndex) -> &VMTableDefinition { - assert!(index.index() < self.tables.len()); - unsafe { self.vmctx.table(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *const u8).add( + self.offsets.vmctx_tables() as usize + + self.offsets.index_vmtable_definition(index) as usize, + ); + &*(ptr as *const VMTableDefinition) + } } - /// Return a mutable reference to locally-defined table `index`. + /// Return the indexed `VMTableDefinition`. fn table_mut(&mut self, index: DefinedTableIndex) -> &mut VMTableDefinition { - assert!(index.index() < self.tables.len()); - unsafe { self.vmctx.table_mut(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *mut u8).add( + self.offsets.vmctx_tables() as usize + + self.offsets.index_vmtable_definition(index) as usize, + ); + &mut *(ptr as *mut VMTableDefinition) + } } - /// Return a reference to locally-defined linear memory `index`. + /// Return a pointer to the `VMTableDefinition`s. 
+ fn tables_ptr(&mut self) -> *mut VMTableDefinition { + unsafe { + (&self.vmctx as *const VMContext as *mut u8).add(self.offsets.vmctx_tables() as usize) + as *mut VMTableDefinition + } + } + + /// Return the indexed `VMMemoryDefinition`. fn memory(&self, index: DefinedMemoryIndex) -> &VMMemoryDefinition { - assert!(index.index() < self.memories.len()); - unsafe { self.vmctx.memory(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *const u8).add( + self.offsets.vmctx_memories() as usize + + self.offsets.index_vmmemory_definition(index) as usize, + ); + &*(ptr as *const VMMemoryDefinition) + } } - /// Return a mutable reference to locally-defined linear memory `index`. + /// Return the indexed `VMMemoryDefinition`. fn memory_mut(&mut self, index: DefinedMemoryIndex) -> &mut VMMemoryDefinition { - assert!(index.index() < self.memories.len()); - unsafe { self.vmctx.memory_mut(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *mut u8).add( + self.offsets.vmctx_memories() as usize + + self.offsets.index_vmmemory_definition(index) as usize, + ); + &mut *(ptr as *mut VMMemoryDefinition) + } } - /// Return a reference to locally-defined global variable `index`. + /// Return a pointer to the `VMMemoryDefinition`s. + fn memories_ptr(&mut self) -> *mut VMMemoryDefinition { + unsafe { + (&self.vmctx as *const VMContext as *mut u8).add(self.offsets.vmctx_memories() as usize) + as *mut VMMemoryDefinition + } + } + + /// Return the indexed `VMGlobalDefinition`. #[allow(dead_code)] fn global(&self, index: DefinedGlobalIndex) -> &VMGlobalDefinition { - assert!(index.index() < self.vmctx_globals.len()); - unsafe { self.vmctx.global(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *const u8).add( + self.offsets.vmctx_globals() as usize + + self.offsets.index_vmglobal_definition(index) as usize, + ); + &*(ptr as *const VMGlobalDefinition) + } } - /// Return a mutable reference to locally-defined global variable `index`. 
+ /// Return the indexed `VMGlobalDefinition`. fn global_mut(&mut self, index: DefinedGlobalIndex) -> &mut VMGlobalDefinition { - assert!(index.index() < self.vmctx_globals.len()); - unsafe { self.vmctx.global_mut(index) } + unsafe { + let ptr = (&self.vmctx as *const VMContext as *mut u8).add( + self.offsets.vmctx_globals() as usize + + self.offsets.index_vmglobal_definition(index) as usize, + ); + &mut *(ptr as *mut VMGlobalDefinition) + } } -} -/// An Instance of a WebAssemby module. -/// -/// Note that compiled wasm code passes around raw pointers to `Instance`, so -/// this shouldn't be moved. -#[derive(Debug)] -pub struct Instance { - /// The `Module` this `Instance` was instantiated from. - module: Rc, - - /// The runtime state of this instance. - state: State, -} - -impl Instance { - /// Create a new `Instance`. - pub fn new( - module: Rc, - finished_functions: BoxedSlice, - mut vmctx_imports: Imports, - data_initializers: &[DataInitializer], - mut vmshared_signatures: BoxedSlice, - ) -> Result, InstantiationError> { - let mut tables = create_tables(&module); - let mut memories = create_memories(&module)?; - - let mut vmctx_tables = tables - .values_mut() - .map(Table::vmtable) - .collect::>() - .into_boxed_slice(); - - let mut vmctx_memories = memories - .values_mut() - .map(LinearMemory::vmmemory) - .collect::>() - .into_boxed_slice(); - - let mut vmctx_globals = create_globals(&module); - - let vmctx_imported_functions_ptr = vmctx_imports - .functions - .values_mut() - .into_slice() - .as_mut_ptr(); - let vmctx_imported_tables_ptr = vmctx_imports.tables.values_mut().into_slice().as_mut_ptr(); - let vmctx_imported_memories_ptr = vmctx_imports - .memories - .values_mut() - .into_slice() - .as_mut_ptr(); - let vmctx_imported_globals_ptr = - vmctx_imports.globals.values_mut().into_slice().as_mut_ptr(); - let vmctx_tables_ptr = vmctx_tables.values_mut().into_slice().as_mut_ptr(); - let vmctx_memories_ptr = 
vmctx_memories.values_mut().into_slice().as_mut_ptr(); - let vmctx_globals_ptr = vmctx_globals.values_mut().into_slice().as_mut_ptr(); - let vmctx_shared_signatures_ptr = - vmshared_signatures.values_mut().into_slice().as_mut_ptr(); - - let mut result = Box::new(Self { - module, - state: State { - memories, - tables, - vmshared_signatures, - vmctx_imports, - finished_functions, - vmctx_tables, - vmctx_memories, - vmctx_globals, - vmctx: VMContext::new( - vmctx_imported_functions_ptr, - vmctx_imported_tables_ptr, - vmctx_imported_memories_ptr, - vmctx_imported_globals_ptr, - vmctx_tables_ptr, - vmctx_memories_ptr, - vmctx_globals_ptr, - vmctx_shared_signatures_ptr, - ), - }, - }); - - // Check initializer bounds before initializing anything. - check_table_init_bounds(&mut *result)?; - check_memory_init_bounds(&mut *result, data_initializers)?; - - // Apply the initializers. - initialize_tables(&mut *result)?; - initialize_memories(&mut *result, data_initializers)?; - initialize_globals(&mut *result); - - // Rather than writing inline assembly to jump to the code region, we use the fact that - // the Rust ABI for calling a function with no arguments and no return values matches the - // one of the generated code. Thanks to this, we can transmute the code region into a - // first-class Rust function and call it. - // Ensure that our signal handlers are ready for action. - // TODO: Move these calls out of `Instance`. - wasmtime_init_eager(); - wasmtime_init_finish(result.vmctx_mut()); - - // The WebAssembly spec specifies that the start function is - // invoked automatically at instantiation time. - result.invoke_start_function()?; - - Ok(result) + /// Return a pointer to the `VMGlobalDefinition`s. + fn globals_ptr(&mut self) -> *mut VMGlobalDefinition { + unsafe { + (&mut self.vmctx as *mut VMContext as *mut u8) + .add(self.offsets.vmctx_globals() as usize) as *mut VMGlobalDefinition + } } /// Return a reference to the vmctx used by compiled wasm code. 
pub fn vmctx(&self) -> &VMContext { - &self.state.vmctx + &self.vmctx } /// Return a raw pointer to the vmctx used by compiled wasm code. @@ -237,7 +274,7 @@ impl Instance { /// Return a mutable reference to the vmctx used by compiled wasm code. pub fn vmctx_mut(&mut self) -> &mut VMContext { - &mut self.state.vmctx + &mut self.vmctx } /// Return a mutable raw pointer to the vmctx used by compiled wasm code. @@ -245,67 +282,20 @@ impl Instance { self.vmctx_mut() } - /// Return the offset from the vmctx pointer to its containing Instance. - pub(crate) fn vmctx_offset() -> isize { - (offset_of!(Self, state) + offset_of!(State, vmctx)) as isize - } - - /// Grow memory by the specified amount of pages. - /// - /// Returns `None` if memory can't be grown by the specified amount - /// of pages. - pub fn memory_grow(&mut self, memory_index: DefinedMemoryIndex, delta: u32) -> Option { - let result = self - .state - .memories - .get_mut(memory_index) - .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) - .grow(delta); - - // Keep current the VMContext pointers used by compiled wasm code. - self.state.vmctx_memories[memory_index] = self.state.memories[memory_index].vmmemory(); - - result - } - - /// Returns the number of allocated wasm pages. - pub fn memory_size(&mut self, memory_index: DefinedMemoryIndex) -> u32 { - self.state - .memories - .get(memory_index) - .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) - .size() - } - - /// Test whether any of the objects inside this instance require signal - /// handlers to catch out of bounds accesses. - pub(crate) fn needs_signal_handlers(&self) -> bool { - self.state - .memories - .values() - .any(|memory| memory.needs_signal_handlers) - } - - /// Return the number of imported memories. - pub(crate) fn num_imported_memories(&self) -> usize { - self.state.vmctx_imports.memories.len() - } - /// Invoke the WebAssembly start function of the instance, if one is present. 
- fn invoke_start_function(&mut self) -> Result<(), InstantiationError> { - if let Some(start_index) = self.module.start_func { - let (callee_address, callee_vmctx) = match self.module.defined_func_index(start_index) { + fn invoke_start_function(&mut self, module: &Module) -> Result<(), InstantiationError> { + if let Some(start_index) = module.start_func { + let (callee_address, callee_vmctx) = match module.defined_func_index(start_index) { Some(defined_start_index) => { let body = *self - .state .finished_functions .get(defined_start_index) .expect("start function index is out of bounds"); (body, self.vmctx_mut() as *mut VMContext) } None => { - assert!(start_index.index() < self.module.imported_funcs.len()); - let import = self.state.imported_function(start_index); + assert!(start_index.index() < module.imported_funcs.len()); + let import = self.imported_function(start_index); (import.body, import.vmctx) } }; @@ -318,8 +308,307 @@ impl Instance { Ok(()) } + /// Return the offset from the vmctx pointer to its containing Instance. + pub(crate) fn vmctx_offset() -> isize { + offset_of!(Self, vmctx) as isize + } + + /// Return the table index for the given `VMTableDefinition`. + pub(crate) fn table_index(&self, table: &mut VMTableDefinition) -> DefinedTableIndex { + let offsets = &self.offsets; + let begin = unsafe { + (&self.vmctx as *const VMContext as *mut u8).add(offsets.vmctx_tables() as usize) + } as *mut VMTableDefinition; + let end: *mut VMTableDefinition = table; + // TODO: Use `offset_from` once it stabilizes. + let index = DefinedTableIndex::new( + (end as usize - begin as usize) / mem::size_of::(), + ); + assert!(index.index() < self.tables.len()); + index + } + + /// Return the memory index for the given `VMMemoryDefinition`. 
+ pub(crate) fn memory_index(&self, memory: &mut VMMemoryDefinition) -> DefinedMemoryIndex { + let offsets = &self.offsets; + let begin = unsafe { + (&self.vmctx as *const VMContext as *mut u8).add(offsets.vmctx_memories() as usize) + } as *mut VMMemoryDefinition; + let end: *mut VMMemoryDefinition = memory; + // TODO: Use `offset_from` once it stabilizes. + let index = DefinedMemoryIndex::new( + (end as usize - begin as usize) / mem::size_of::(), + ); + assert!(index.index() < self.memories.len()); + index + } + + /// Test whether any of the objects inside this instance require signal + /// handlers to catch out of bounds accesses. + pub(crate) fn needs_signal_handlers(&self) -> bool { + self.memories + .values() + .any(|memory| memory.needs_signal_handlers) + } + + /// Grow memory by the specified amount of pages. + /// + /// Returns `None` if memory can't be grown by the specified amount + /// of pages. + pub(crate) fn memory_grow( + &mut self, + memory_index: DefinedMemoryIndex, + delta: u32, + ) -> Option { + let result = self + .memories + .get_mut(memory_index) + .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) + .grow(delta); + + // Keep current the VMContext pointers used by compiled wasm code. + *self.memory_mut(memory_index) = self.memories[memory_index].vmmemory(); + + result + } + + /// Grow imported memory by the specified amount of pages. + /// + /// Returns `None` if memory can't be grown by the specified amount + /// of pages. + /// + /// TODO: This and `imported_memory_size` are currently unsafe because + /// they dereference the memory import's pointers. 
+ pub(crate) unsafe fn imported_memory_grow( + &mut self, + memory_index: MemoryIndex, + delta: u32, + ) -> Option { + let import = self.imported_memory(memory_index); + let foreign_instance_contents = (&mut *import.vmctx).instance_contents(); + let foreign_memory = &mut *import.from; + let foreign_index = foreign_instance_contents.memory_index(foreign_memory); + + foreign_instance_contents.memory_grow(foreign_index, delta) + } + + /// Returns the number of allocated wasm pages. + pub(crate) fn memory_size(&mut self, memory_index: DefinedMemoryIndex) -> u32 { + self.memories + .get(memory_index) + .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) + .size() + } + + /// Returns the number of allocated wasm pages in an imported memory. + pub(crate) unsafe fn imported_memory_size(&mut self, memory_index: MemoryIndex) -> u32 { + let import = self.imported_memory(memory_index); + let foreign_instance_contents = (&mut *import.vmctx).instance_contents(); + let foreign_memory = &mut *import.from; + let foreign_index = foreign_instance_contents.memory_index(foreign_memory); + + foreign_instance_contents.memory_size(foreign_index) + } +} + +/// A wrapper around an `Mmap` holding an `InstanceContents`. +struct MmapField { + /// The allocated contents. + mmap: Mmap, +} + +#[allow(clippy::cast_ptr_alignment)] +impl MmapField { + /// Return the contained contents. + fn contents(&self) -> &InstanceContents { + assert!(self.mmap.len() >= mem::size_of::()); + unsafe { &*(self.mmap.as_ptr() as *const InstanceContents) } + } + + /// Return the contained contents. + fn contents_mut(&mut self) -> &mut InstanceContents { + assert!(self.mmap.len() >= mem::size_of::()); + unsafe { &mut *(self.mmap.as_mut_ptr() as *mut InstanceContents) } + } +} + +impl Drop for MmapField { + fn drop(&mut self) { + // Drop the `InstanceContents`. 
+ assert!(self.mmap.len() >= mem::size_of::()); + mem::drop(mem::replace(self.contents_mut(), unsafe { mem::zeroed() })); + } +} + +/// An Instance of a WebAssembly module. +/// +/// Note that compiled wasm code passes around raw pointers to `Instance`, so +/// this shouldn't be moved. +pub struct Instance { + /// The `Module` this `Instance` was instantiated from. + module: Rc, + + /// The `Mmap` containing the contents of the instance. + mmap_field: MmapField, +} + +impl Instance { + /// Create a new `Instance`. + pub fn new( + module: Rc, + finished_functions: BoxedSlice, + imports: Imports, + data_initializers: &[DataInitializer], + vmshared_signatures: BoxedSlice, + ) -> Result { + let mut tables = create_tables(&module); + let mut memories = create_memories(&module)?; + + let vmctx_tables = tables + .values_mut() + .map(Table::vmtable) + .collect::>() + .into_boxed_slice(); + + let vmctx_memories = memories + .values_mut() + .map(LinearMemory::vmmemory) + .collect::>() + .into_boxed_slice(); + + let vmctx_globals = create_globals(&module); + + let offsets = VMOffsets { + pointer_size: mem::size_of::<*const u8>() as u8, + num_signature_ids: vmshared_signatures.len() as u64, + num_imported_functions: imports.functions.len() as u64, + num_imported_tables: imports.tables.len() as u64, + num_imported_memories: imports.memories.len() as u64, + num_imported_globals: imports.globals.len() as u64, + num_defined_tables: tables.len() as u64, + num_defined_memories: memories.len() as u64, + num_defined_globals: vmctx_globals.len() as u64, + }; + + let mut contents_mmap = Mmap::with_size( + mem::size_of::() + .checked_add(cast::usize(offsets.size_of_vmctx()).unwrap()) + .unwrap(), + ) + .map_err(InstantiationError::Resource)?; + + let contents = { + #[allow(clippy::cast_ptr_alignment)] + let contents_ptr = contents_mmap.as_mut_ptr() as *mut InstanceContents; + let contents = InstanceContents { + offsets, + memories, + tables, + finished_functions, + vmctx: VMContext {}, + 
}; + unsafe { + ptr::write(contents_ptr, contents); + &mut *contents_ptr + } + }; + + unsafe { + ptr::copy( + vmshared_signatures.values().as_slice().as_ptr(), + contents.signature_ids_ptr() as *mut VMSharedSignatureIndex, + vmshared_signatures.len(), + ); + ptr::copy( + imports.functions.values().as_slice().as_ptr(), + contents.imported_functions_ptr() as *mut VMFunctionImport, + imports.functions.len(), + ); + ptr::copy( + imports.tables.values().as_slice().as_ptr(), + contents.imported_tables_ptr() as *mut VMTableImport, + imports.tables.len(), + ); + ptr::copy( + imports.memories.values().as_slice().as_ptr(), + contents.imported_memories_ptr() as *mut VMMemoryImport, + imports.memories.len(), + ); + ptr::copy( + imports.globals.values().as_slice().as_ptr(), + contents.imported_globals_ptr() as *mut VMGlobalImport, + imports.globals.len(), + ); + ptr::copy( + vmctx_tables.values().as_slice().as_ptr(), + contents.tables_ptr() as *mut VMTableDefinition, + vmctx_tables.len(), + ); + ptr::copy( + vmctx_memories.values().as_slice().as_ptr(), + contents.memories_ptr() as *mut VMMemoryDefinition, + vmctx_memories.len(), + ); + ptr::copy( + vmctx_globals.values().as_slice().as_ptr(), + contents.globals_ptr() as *mut VMGlobalDefinition, + vmctx_globals.len(), + ); + } + + // Check initializer bounds before initializing anything. + check_table_init_bounds(&*module, contents)?; + check_memory_init_bounds(&*module, contents, data_initializers)?; + + // Apply the initializers. + initialize_tables(&*module, contents)?; + initialize_memories(&*module, contents, data_initializers)?; + initialize_globals(&*module, contents); + + // Rather than writing inline assembly to jump to the code region, we use the fact that + // the Rust ABI for calling a function with no arguments and no return values matches the + // one of the generated code. Thanks to this, we can transmute the code region into a + // first-class Rust function and call it. 
+ // Ensure that our signal handlers are ready for action. + // TODO: Move these calls out of `Instance`. + wasmtime_init_eager(); + wasmtime_init_finish(contents.vmctx_mut()); + + // The WebAssembly spec specifies that the start function is + // invoked automatically at instantiation time. + contents.invoke_start_function(&*module)?; + + Ok(Instance { + module, + mmap_field: MmapField { + mmap: contents_mmap, + }, + }) + } + + /// Return a reference to the vmctx used by compiled wasm code. + pub fn vmctx(&self) -> &VMContext { + &self.mmap_field.contents().vmctx() + } + + /// Return a raw pointer to the vmctx used by compiled wasm code. + pub fn vmctx_ptr(&self) -> *const VMContext { + self.mmap_field.contents().vmctx_ptr() + } + + /// Return a mutable reference to the vmctx used by compiled wasm code. + pub fn vmctx_mut(&mut self) -> &mut VMContext { + self.mmap_field.contents_mut().vmctx_mut() + } + + /// Return a mutable raw pointer to the vmctx used by compiled wasm code. + pub fn vmctx_mut_ptr(&mut self) -> *mut VMContext { + self.mmap_field.contents_mut().vmctx_mut_ptr() + } + /// Lookup an export with the given name. 
pub fn lookup(&mut self, field: &str) -> Option { + let contents = self.mmap_field.contents_mut(); if let Some(export) = self.module.exports.get(field) { Some(match export { wasmtime_environ::Export::Function(index) => { @@ -327,11 +616,11 @@ impl Instance { let (address, vmctx) = if let Some(def_index) = self.module.defined_func_index(*index) { ( - self.state.finished_functions[def_index], - &mut self.state.vmctx as *mut VMContext, + contents.finished_functions[def_index], + &mut contents.vmctx as *mut VMContext, ) } else { - let import = self.state.imported_function(*index); + let import = contents.imported_function(*index); (import.body, import.vmctx) }; Export::Function { @@ -344,11 +633,11 @@ impl Instance { let (definition, vmctx) = if let Some(def_index) = self.module.defined_table_index(*index) { ( - self.state.table_mut(def_index) as *mut VMTableDefinition, - &mut self.state.vmctx as *mut VMContext, + contents.table_mut(def_index) as *mut VMTableDefinition, + &mut contents.vmctx as *mut VMContext, ) } else { - let import = self.state.imported_table(*index); + let import = contents.imported_table(*index); (import.from, import.vmctx) }; Export::Table { @@ -361,11 +650,11 @@ impl Instance { let (definition, vmctx) = if let Some(def_index) = self.module.defined_memory_index(*index) { ( - self.state.memory_mut(def_index) as *mut VMMemoryDefinition, - &mut self.state.vmctx as *mut VMContext, + contents.memory_mut(def_index) as *mut VMMemoryDefinition, + &mut contents.vmctx as *mut VMContext, ) } else { - let import = self.state.imported_memory(*index); + let import = contents.imported_memory(*index); (import.from, import.vmctx) }; Export::Memory { @@ -376,9 +665,9 @@ impl Instance { } wasmtime_environ::Export::Global(index) => Export::Global { definition: if let Some(def_index) = self.module.defined_global_index(*index) { - self.state.global_mut(def_index) + contents.global_mut(def_index) } else { - self.state.imported_global(*index).from + 
contents.imported_global(*index).from }, global: self.module.globals[*index], }, @@ -398,30 +687,32 @@ impl Instance { } } -fn check_table_init_bounds(instance: &mut Instance) -> Result<(), InstantiationError> { - for init in &instance.module.table_elements { +fn check_table_init_bounds( + module: &Module, + contents: &mut InstanceContents, +) -> Result<(), InstantiationError> { + for init in &module.table_elements { // TODO: Refactor this. let mut start = init.offset; if let Some(base) = init.base { - let global = if let Some(def_index) = instance.module.defined_global_index(base) { - instance.state.global_mut(def_index) + let global = if let Some(def_index) = module.defined_global_index(base) { + contents.global_mut(def_index) } else { - instance.state.imported_global(base).from + contents.imported_global(base).from }; start += unsafe { *(&*global).as_u32() } as usize; } // TODO: Refactor this. - let slice = if let Some(defined_table_index) = - instance.module.defined_table_index(init.table_index) + let slice = if let Some(defined_table_index) = module.defined_table_index(init.table_index) { - instance.state.tables[defined_table_index].as_mut() + contents.tables[defined_table_index].as_mut() } else { - let import = &instance.state.vmctx_imports.tables[init.table_index]; - let foreign_instance = unsafe { (&mut *(import).vmctx).instance() }; + let import = contents.imported_table(init.table_index); + let foreign_contents = unsafe { (&mut *(import).vmctx).instance_contents() }; let foreign_table = unsafe { &mut *(import).from }; - let foreign_index = foreign_instance.vmctx().table_index(foreign_table); - foreign_instance.state.tables[foreign_index].as_mut() + let foreign_index = foreign_contents.table_index(foreign_table); + foreign_contents.tables[foreign_index].as_mut() }; if slice.get_mut(start..start + init.elements.len()).is_none() { @@ -435,33 +726,33 @@ fn check_table_init_bounds(instance: &mut Instance) -> Result<(), InstantiationE } fn 
check_memory_init_bounds( - instance: &mut Instance, + module: &Module, + contents: &mut InstanceContents, data_initializers: &[DataInitializer], ) -> Result<(), InstantiationError> { for init in data_initializers { // TODO: Refactor this. let mut start = init.location.offset; if let Some(base) = init.location.base { - let global = if let Some(def_index) = instance.module.defined_global_index(base) { - instance.state.global_mut(def_index) + let global = if let Some(def_index) = module.defined_global_index(base) { + contents.global_mut(def_index) } else { - instance.state.imported_global(base).from + contents.imported_global(base).from }; start += unsafe { *(&*global).as_u32() } as usize; } // TODO: Refactor this. - let memory = if let Some(defined_memory_index) = instance - .module - .defined_memory_index(init.location.memory_index) + let memory = if let Some(defined_memory_index) = + module.defined_memory_index(init.location.memory_index) { - instance.state.memory(defined_memory_index) + contents.memory(defined_memory_index) } else { - let import = &instance.state.vmctx_imports.memories[init.location.memory_index]; - let foreign_instance = unsafe { (&mut *(import).vmctx).instance() }; + let import = contents.imported_memory(init.location.memory_index); + let foreign_contents = unsafe { (&mut *(import).vmctx).instance_contents() }; let foreign_memory = unsafe { &mut *(import).from }; - let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory); - foreign_instance.state.memory(foreign_index) + let foreign_index = foreign_contents.memory_index(foreign_memory); + foreign_contents.memory(foreign_index) }; let mem_slice = unsafe { slice::from_raw_parts_mut(memory.base, memory.current_length) }; @@ -487,51 +778,50 @@ fn create_tables(module: &Module) -> BoxedSlice { } /// Initialize the table memory from the provided initializers. 
-fn initialize_tables(instance: &mut Instance) -> Result<(), InstantiationError> { - let vmctx: *mut VMContext = instance.vmctx_mut(); - for init in &instance.module.table_elements { +fn initialize_tables( + module: &Module, + contents: &mut InstanceContents, +) -> Result<(), InstantiationError> { + let vmctx: *mut VMContext = contents.vmctx_mut(); + for init in &module.table_elements { let mut start = init.offset; if let Some(base) = init.base { - let global = if let Some(def_index) = instance.module.defined_global_index(base) { - instance.state.global_mut(def_index) + let global = if let Some(def_index) = module.defined_global_index(base) { + contents.global_mut(def_index) } else { - instance.state.imported_global(base).from + contents.imported_global(base).from }; start += unsafe { *(&*global).as_i32() } as u32 as usize; } - let slice = if let Some(defined_table_index) = - instance.module.defined_table_index(init.table_index) + let slice = if let Some(defined_table_index) = module.defined_table_index(init.table_index) { - instance.state.tables[defined_table_index].as_mut() + contents.tables[defined_table_index].as_mut() } else { - let import = &instance.state.vmctx_imports.tables[init.table_index]; - let foreign_instance = unsafe { (&mut *(import).vmctx).instance() }; + let import = contents.imported_table(init.table_index); + let foreign_contents = unsafe { (&mut *(import).vmctx).instance_contents() }; let foreign_table = unsafe { &mut *(import).from }; - let foreign_index = foreign_instance.vmctx().table_index(foreign_table); - foreign_instance.state.tables[foreign_index].as_mut() + let foreign_index = foreign_contents.table_index(foreign_table); + foreign_contents.tables[foreign_index].as_mut() }; - if let Some(subslice) = slice.get_mut(start..start + init.elements.len()) { - for (i, func_idx) in init.elements.iter().enumerate() { - let callee_sig = instance.module.functions[*func_idx]; - let (callee_ptr, callee_vmctx) = - if let Some(index) = 
instance.module.defined_func_index(*func_idx) { - (instance.state.finished_functions[index], vmctx) - } else { - let imported_func = &instance.state.vmctx_imports.functions[*func_idx]; - (imported_func.body, imported_func.vmctx) - }; - let type_index = instance.state.vmshared_signatures[callee_sig]; - subslice[i] = VMCallerCheckedAnyfunc { - func_ptr: callee_ptr, - type_index, - vmctx: callee_vmctx, + + let subslice = &mut slice[start..start + init.elements.len()]; + for (i, func_idx) in init.elements.iter().enumerate() { + let callee_sig = module.functions[*func_idx]; + let (callee_ptr, callee_vmctx) = + if let Some(index) = module.defined_func_index(*func_idx) { + (contents.finished_functions[index], vmctx) + } else { + let imported_func = + imported_function(&contents.vmctx, &contents.offsets, *func_idx); + (imported_func.body, imported_func.vmctx) }; - } - } else { - return Err(InstantiationError::Link(LinkError( - "elements segment does not fit".to_owned(), - ))); + let type_index = signature_id(&contents.vmctx, &contents.offsets, callee_sig); + subslice[i] = VMCallerCheckedAnyfunc { + func_ptr: callee_ptr, + type_index, + vmctx: callee_vmctx, + }; } } @@ -553,40 +843,35 @@ fn create_memories( /// Initialize the table memory from the provided initializers. 
fn initialize_memories( - instance: &mut Instance, + module: &Module, + contents: &mut InstanceContents, data_initializers: &[DataInitializer], ) -> Result<(), InstantiationError> { for init in data_initializers { let mut start = init.location.offset; if let Some(base) = init.location.base { - let global = if let Some(def_index) = instance.module.defined_global_index(base) { - instance.state.global_mut(def_index) + let global = if let Some(def_index) = module.defined_global_index(base) { + contents.global_mut(def_index) } else { - instance.state.imported_global(base).from + contents.imported_global(base).from }; start += unsafe { *(&*global).as_i32() } as u32 as usize; } - let memory = if let Some(defined_memory_index) = instance - .module - .defined_memory_index(init.location.memory_index) + let memory = if let Some(defined_memory_index) = + module.defined_memory_index(init.location.memory_index) { - instance.state.memory(defined_memory_index) + contents.memory(defined_memory_index) } else { - let import = &instance.state.vmctx_imports.memories[init.location.memory_index]; - let foreign_instance = unsafe { (&mut *(import).vmctx).instance() }; + let import = contents.imported_memory(init.location.memory_index); + let foreign_contents = unsafe { (&mut *(import).vmctx).instance_contents() }; let foreign_memory = unsafe { &mut *(import).from }; - let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory); - foreign_instance.state.memory(foreign_index) + let foreign_index = foreign_contents.memory_index(foreign_memory); + foreign_contents.memory(foreign_index) }; let mem_slice = unsafe { slice::from_raw_parts_mut(memory.base, memory.current_length) }; - if let Some(to_init) = mem_slice.get_mut(start..start + init.data.len()) { - to_init.copy_from_slice(init.data); - } else { - return Err(InstantiationError::Link(LinkError( - "data segment does not fit".to_owned(), - ))); - } + let to_init = &mut mem_slice[start..start + init.data.len()]; + 
to_init.copy_from_slice(init.data); } Ok(()) @@ -605,21 +890,21 @@ fn create_globals(module: &Module) -> BoxedSlice *unsafe { (*to).as_i32_mut() } = x, GlobalInit::I64Const(x) => *unsafe { (*to).as_i64_mut() } = x, GlobalInit::F32Const(x) => *unsafe { (*to).as_f32_bits_mut() } = x, GlobalInit::F64Const(x) => *unsafe { (*to).as_f64_bits_mut() } = x, GlobalInit::GetGlobal(x) => { - let from = if let Some(def_x) = instance.module.defined_global_index(x) { - instance.state.global_mut(def_x) + let from = if let Some(def_x) = module.defined_global_index(x) { + contents.global_mut(def_x) } else { - instance.state.imported_global(x).from + contents.imported_global(x).from }; unsafe { *to = *from }; } diff --git a/lib/runtime/src/libcalls.rs b/lib/runtime/src/libcalls.rs index c71e799be4..410d4727cc 100644 --- a/lib/runtime/src/libcalls.rs +++ b/lib/runtime/src/libcalls.rs @@ -94,10 +94,10 @@ pub unsafe extern "C" fn wasmtime_memory32_grow( memory_index: u32, vmctx: *mut VMContext, ) -> u32 { - let instance = (&mut *vmctx).instance(); + let instance_contents = (&mut *vmctx).instance_contents(); let memory_index = DefinedMemoryIndex::from_u32(memory_index); - instance + instance_contents .memory_grow(memory_index, delta) .unwrap_or(u32::max_value()) } @@ -109,30 +109,21 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_grow( memory_index: u32, vmctx: *mut VMContext, ) -> u32 { - let instance = (&mut *vmctx).instance(); - assert!( - (memory_index as usize) < instance.num_imported_memories(), - "imported memory index for memory.grow out of bounds" - ); - + let instance_contents = (&mut *vmctx).instance_contents(); let memory_index = MemoryIndex::from_u32(memory_index); - let import = instance.vmctx().imported_memory(memory_index); - let foreign_instance = (&mut *import.vmctx).instance(); - let foreign_memory = &mut *import.from; - let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory); - foreign_instance - .memory_grow(foreign_index, delta) + 
instance_contents + .imported_memory_grow(memory_index, delta) .unwrap_or(u32::max_value()) } /// Implementation of memory.size for locally-defined 32-bit memories. #[no_mangle] pub unsafe extern "C" fn wasmtime_memory32_size(memory_index: u32, vmctx: *mut VMContext) -> u32 { - let instance = (&mut *vmctx).instance(); + let instance_contents = (&mut *vmctx).instance_contents(); let memory_index = DefinedMemoryIndex::from_u32(memory_index); - instance.memory_size(memory_index) + instance_contents.memory_size(memory_index) } /// Implementation of memory.size for imported 32-bit memories. @@ -141,17 +132,8 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_size( memory_index: u32, vmctx: *mut VMContext, ) -> u32 { - let instance = (&mut *vmctx).instance(); - assert!( - (memory_index as usize) < instance.num_imported_memories(), - "imported memory index for memory.grow out of bounds" - ); - + let instance_contents = (&mut *vmctx).instance_contents(); let memory_index = MemoryIndex::from_u32(memory_index); - let import = instance.vmctx().imported_memory(memory_index); - let foreign_instance = (&mut *import.vmctx).instance(); - let foreign_memory = &mut *import.from; - let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory); - foreign_instance.memory_size(foreign_index) + instance_contents.imported_memory_size(memory_index) } diff --git a/lib/runtime/src/signalhandlers.rs b/lib/runtime/src/signalhandlers.rs index c98ef66c0b..cf5201bb24 100644 --- a/lib/runtime/src/signalhandlers.rs +++ b/lib/runtime/src/signalhandlers.rs @@ -94,9 +94,9 @@ pub extern "C" fn wasmtime_init_finish(vmctx: &mut VMContext) { }) } - let instance = unsafe { vmctx.instance() }; + let instance_contents = unsafe { vmctx.instance_contents() }; let have_signal_handlers = TRAP_CONTEXT.with(|cx| cx.borrow().haveSignalHandlers); - if !have_signal_handlers && instance.needs_signal_handlers() { + if !have_signal_handlers && instance_contents.needs_signal_handlers() { panic!("failed to 
install signal handlers"); } } diff --git a/lib/runtime/src/vmcontext.rs b/lib/runtime/src/vmcontext.rs index 4b94f8cff2..06392cf9d1 100644 --- a/lib/runtime/src/vmcontext.rs +++ b/lib/runtime/src/vmcontext.rs @@ -1,13 +1,8 @@ //! This file declares `VMContext` and several related structs which contain //! fields that compiled wasm code accesses directly. -use cranelift_entity::EntityRef; -use cranelift_wasm::{ - DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, GlobalIndex, MemoryIndex, - TableIndex, -}; -use instance::Instance; -use std::{mem, ptr, u32}; +use instance::InstanceContents; +use std::{ptr, u32}; /// An imported function. #[derive(Debug, Copy, Clone)] @@ -24,11 +19,12 @@ pub struct VMFunctionImport { mod test_vmfunction_import { use super::VMFunctionImport; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmfunction_import_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmfunction_import()) @@ -78,11 +74,12 @@ pub struct VMTableImport { mod test_vmtable_import { use super::VMTableImport; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmtable_import_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmtable_import()) @@ -114,11 +111,12 @@ pub struct VMMemoryImport { mod test_vmmemory_import { use super::VMMemoryImport; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmmemory_import_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = 
Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmmemory_import()) @@ -147,11 +145,12 @@ pub struct VMGlobalImport { mod test_vmglobal_import { use super::VMGlobalImport; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmglobal_import_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmglobal_import()) @@ -180,11 +179,12 @@ pub struct VMMemoryDefinition { mod test_vmmemory_definition { use super::VMMemoryDefinition; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmmemory_definition_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmmemory_definition()) @@ -222,11 +222,12 @@ pub struct VMTableDefinition { mod test_vmtable_definition { use super::VMTableDefinition; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmtable_definition_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmtable_definition()) @@ -257,7 +258,7 @@ pub struct VMGlobalDefinition { mod test_vmglobal_definition { use super::VMGlobalDefinition; use std::mem::{align_of, size_of}; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmglobal_definition_alignment() { @@ -269,7 +270,8 @@ mod test_vmglobal_definition { #[test] fn 
check_vmglobal_definition_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmglobal_definition()) @@ -390,11 +392,12 @@ pub struct VMSharedSignatureIndex(u32); mod test_vmshared_signature_index { use super::VMSharedSignatureIndex; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmshared_signature_index() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmshared_signature_index()) @@ -425,11 +428,12 @@ pub struct VMCallerCheckedAnyfunc { mod test_vmcaller_checked_anyfunc { use super::VMCallerCheckedAnyfunc; use std::mem::size_of; - use wasmtime_environ::VMOffsets; + use wasmtime_environ::{Module, VMOffsets}; #[test] fn check_vmcaller_checked_anyfunc_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + let module = Module::new(); + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module); assert_eq!( size_of::(), usize::from(offsets.size_of_vmcaller_checked_anyfunc()) @@ -460,186 +464,23 @@ impl Default for VMCallerCheckedAnyfunc { } /// The VM "context", which is pointed to by the `vmctx` arg in Cranelift. -/// This has pointers to the globals, memories, tables, and other runtime +/// This has information about globals, memories, tables, and other runtime /// state associated with the current instance. /// -/// TODO: The number of memories, globals, tables, and signature IDs does -/// not change dynamically, and pointer arrays are not indexed dynamically, -/// so these fields could all be contiguously allocated. 
+/// The struct here is empty, as the sizes of these fields are dynamic, and +/// we can't describe them in Rust's type system. Sufficient memory is +/// allocated at runtime. +/// +/// TODO: We could move the globals into the `vmctx` allocation too. #[derive(Debug)] #[repr(C)] -pub struct VMContext { - /// Signature identifiers for signature-checking indirect calls. - signature_ids: *mut VMSharedSignatureIndex, - - /// A pointer to an array of `*const VMFunctionBody` instances, indexed by `FuncIndex`. - imported_functions: *const VMFunctionImport, - - /// A pointer to an array of `VMTableImport` instances, indexed by `TableIndex`. - imported_tables: *const VMTableImport, - - /// A pointer to an array of `VMMemoryImport` instances, indexed by `MemoryIndex`. - imported_memories: *const VMMemoryImport, - - /// A pointer to an array of `VMGlobalImport` instances, indexed by `GlobalIndex`. - imported_globals: *const VMGlobalImport, - - /// A pointer to an array of locally-defined `VMTableDefinition` instances, - /// indexed by `DefinedTableIndex`. - tables: *mut VMTableDefinition, - - /// A pointer to an array of locally-defined `VMMemoryDefinition` instances, - /// indexed by `DefinedMemoryIndex`. - memories: *mut VMMemoryDefinition, - - /// A pointer to an array of locally-defined `VMGlobalDefinition` instances, - /// indexed by `DefinedGlobalIndex`. - globals: *mut VMGlobalDefinition, - // If more elements are added here, remember to add offset_of tests below! 
-} - -#[cfg(test)] -mod test { - use super::VMContext; - use std::mem::size_of; - use wasmtime_environ::VMOffsets; - - #[test] - fn check_vmctx_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); - assert_eq!(size_of::(), usize::from(offsets.size_of_vmctx())); - assert_eq!( - offset_of!(VMContext, signature_ids), - usize::from(offsets.vmctx_signature_ids()) - ); - assert_eq!( - offset_of!(VMContext, imported_functions), - usize::from(offsets.vmctx_imported_functions()) - ); - assert_eq!( - offset_of!(VMContext, imported_tables), - usize::from(offsets.vmctx_imported_tables()) - ); - assert_eq!( - offset_of!(VMContext, imported_memories), - usize::from(offsets.vmctx_imported_memories()) - ); - assert_eq!( - offset_of!(VMContext, imported_globals), - usize::from(offsets.vmctx_imported_globals()) - ); - assert_eq!( - offset_of!(VMContext, tables), - usize::from(offsets.vmctx_tables()) - ); - assert_eq!( - offset_of!(VMContext, memories), - usize::from(offsets.vmctx_memories()) - ); - assert_eq!( - offset_of!(VMContext, globals), - usize::from(offsets.vmctx_globals()) - ); - } -} +pub struct VMContext {} impl VMContext { - /// Create a new `VMContext` instance. - pub fn new( - imported_functions: *const VMFunctionImport, - imported_tables: *const VMTableImport, - imported_memories: *const VMMemoryImport, - imported_globals: *const VMGlobalImport, - tables: *mut VMTableDefinition, - memories: *mut VMMemoryDefinition, - globals: *mut VMGlobalDefinition, - signature_ids: *mut VMSharedSignatureIndex, - ) -> Self { - Self { - imported_functions, - imported_tables, - imported_memories, - imported_globals, - tables, - memories, - globals, - signature_ids, - } - } - - /// Return a reference to imported function `index`. - pub unsafe fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport { - &*self.imported_functions.add(index.index()) - } - - /// Return a reference to imported table `index`. 
- pub unsafe fn imported_table(&self, index: TableIndex) -> &VMTableImport { - &*self.imported_tables.add(index.index()) - } - - /// Return a reference to imported memory `index`. - pub unsafe fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport { - &*self.imported_memories.add(index.index()) - } - - /// Return a reference to imported global `index`. - pub unsafe fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport { - &*self.imported_globals.add(index.index()) - } - - /// Return a reference to locally-defined table `index`. - pub unsafe fn table(&self, index: DefinedTableIndex) -> &VMTableDefinition { - &*self.tables.add(index.index()) - } - - /// Return a mutable reference to locally-defined table `index`. - pub unsafe fn table_mut(&mut self, index: DefinedTableIndex) -> &mut VMTableDefinition { - &mut *self.tables.add(index.index()) - } - - /// Return a reference to locally-defined linear memory `index`. - pub unsafe fn memory(&self, index: DefinedMemoryIndex) -> &VMMemoryDefinition { - &*self.memories.add(index.index()) - } - - /// Return a mutable reference to locally-defined linear memory `index`. - pub unsafe fn memory_mut(&mut self, index: DefinedMemoryIndex) -> &mut VMMemoryDefinition { - &mut *self.memories.add(index.index()) - } - - /// Return a reference to locally-defined global variable `index`. - pub unsafe fn global(&self, index: DefinedGlobalIndex) -> &VMGlobalDefinition { - &*self.globals.add(index.index()) - } - - /// Return a mutable reference to locally-defined global variable `index`. - pub unsafe fn global_mut(&mut self, index: DefinedGlobalIndex) -> &mut VMGlobalDefinition { - &mut *self.globals.add(index.index()) - } - /// Return a mutable reference to the associated `Instance`. 
#[allow(clippy::cast_ptr_alignment)] - pub unsafe fn instance(&mut self) -> &mut Instance { - &mut *((self as *mut Self as *mut u8).offset(-Instance::vmctx_offset()) as *mut Instance) - } - - /// Return the table index for the given `VMTableDefinition`. - pub fn table_index(&self, table: &mut VMTableDefinition) -> DefinedTableIndex { - // TODO: Use `offset_from` once it stablizes. - let begin = self.tables; - let end: *mut VMTableDefinition = table; - DefinedTableIndex::new( - (end as usize - begin as usize) / mem::size_of::(), - ) - } - - /// Return the memory index for the given `VMMemoryDefinition`. - pub fn memory_index(&self, memory: &mut VMMemoryDefinition) -> DefinedMemoryIndex { - // TODO: Use `offset_from` once it stablizes. - let begin = self.memories; - let end: *mut VMMemoryDefinition = memory; - DefinedMemoryIndex::new( - (end as usize - begin as usize) / mem::size_of::(), - ) + pub(crate) unsafe fn instance_contents(&mut self) -> &mut InstanceContents { + &mut *((self as *mut Self as *mut u8).offset(-InstanceContents::vmctx_offset()) + as *mut InstanceContents) } } diff --git a/lib/wast/src/spectest.rs b/lib/wast/src/spectest.rs index 6929c37b87..759ba4be74 100644 --- a/lib/wast/src/spectest.rs +++ b/lib/wast/src/spectest.rs @@ -45,7 +45,7 @@ extern "C" fn spectest_print_f64_f64(x: f64, y: f64) { /// Return an instance implementing the "spectest" interface used in the /// spec testsuite. 
-pub fn instantiate_spectest() -> Result, InstantiationError> { +pub fn instantiate_spectest() -> Result { let call_conv = isa::CallConv::triple_default(&HOST); let pointer_type = types::Type::triple_pointer_type(&HOST); let mut module = Module::new(); diff --git a/lib/wast/src/wast.rs b/lib/wast/src/wast.rs index 4e00c2deaf..aeab7bfeb6 100644 --- a/lib/wast/src/wast.rs +++ b/lib/wast/src/wast.rs @@ -111,7 +111,7 @@ impl WastContext { } } - fn instantiate(&mut self, module: ModuleBinary) -> Result, SetupError> { + fn instantiate(&mut self, module: ModuleBinary) -> Result { let data = module.into_vec(); self.validate(&data).map_err(SetupError::Validate)?;