Implement the remaining valid spec tests.

And lots of other miscellaneous changes. Rename InstanceWorld to
InstancePlus and reorganize its contents. This still isn't a great name,
but at least now it has a clear purpose.
This commit is contained in:
Dan Gohman
2018-12-11 17:12:33 -08:00
parent 6dd39dee6a
commit 3f24098edc
34 changed files with 1572 additions and 1262 deletions

89
lib/runtime/src/export.rs Normal file
View File

@@ -0,0 +1,89 @@
use cranelift_codegen::ir;
use cranelift_wasm::Global;
use vmcontext::{
VMContext, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition, VMTableDefinition,
};
use wasmtime_environ::{MemoryPlan, TablePlan};
/// The value of an export passed from one instance to another.
pub enum Export {
    /// A function export value.
    Function {
        /// The address of the native-code function.
        address: *const VMFunctionBody,
        /// The function signature declaration, used for compatibility checking.
        signature: ir::Signature,
        /// Pointer to the containing VMContext.
        vmctx: *mut VMContext,
    },
    /// A table export value.
    Table {
        /// The address of the table descriptor.
        address: *mut VMTableDefinition,
        /// Pointer to the containing VMContext.
        vmctx: *mut VMContext,
        /// The table declaration, used for compatibility checking.
        table: TablePlan,
    },
    /// A memory export value.
    Memory {
        /// The address of the memory descriptor.
        address: *mut VMMemoryDefinition,
        /// Pointer to the containing VMContext.
        vmctx: *mut VMContext,
        /// The memory declaration, used for compatibility checking.
        memory: MemoryPlan,
    },
    /// A global export value.
    Global {
        /// The address of the global storage.
        address: *mut VMGlobalDefinition,
        /// The global declaration, used for compatibility checking.
        global: Global,
    },
}
impl Export {
    /// Build a function export value from a native-code address, its
    /// signature, and the owning context.
    pub fn function(
        address: *const VMFunctionBody,
        signature: ir::Signature,
        vmctx: *mut VMContext,
    ) -> Self {
        Export::Function { address, signature, vmctx }
    }

    /// Build a table export value from a table descriptor address, the owning
    /// context, and the table declaration.
    pub fn table(address: *mut VMTableDefinition, vmctx: *mut VMContext, table: TablePlan) -> Self {
        Export::Table { address, vmctx, table }
    }

    /// Build a memory export value from a memory descriptor address, the
    /// owning context, and the memory declaration.
    pub fn memory(
        address: *mut VMMemoryDefinition,
        vmctx: *mut VMContext,
        memory: MemoryPlan,
    ) -> Self {
        Export::Memory { address, vmctx, memory }
    }

    /// Build a global export value from a storage address and the global
    /// declaration.
    pub fn global(address: *mut VMGlobalDefinition, global: Global) -> Self {
        Export::Global { address, global }
    }
}

View File

@@ -1,12 +1,12 @@
use cranelift_entity::{BoxedSlice, PrimaryMap};
use cranelift_wasm::{FuncIndex, GlobalIndex, MemoryIndex, TableIndex};
use vmcontext::{VMFunctionBody, VMGlobalImport, VMMemoryImport, VMTableImport};
use vmcontext::{VMFunctionImport, VMGlobalImport, VMMemoryImport, VMTableImport};
/// Resolved import pointers.
#[derive(Debug)]
pub struct Imports {
/// Resolved addresses for imported functions.
pub functions: BoxedSlice<FuncIndex, *const VMFunctionBody>,
pub functions: BoxedSlice<FuncIndex, VMFunctionImport>,
/// Resolved addresses for imported tables.
pub tables: BoxedSlice<TableIndex, VMTableImport>,
@@ -21,7 +21,7 @@ pub struct Imports {
impl Imports {
/// Construct a new `Imports` instance.
pub fn new(
function_imports: PrimaryMap<FuncIndex, *const VMFunctionBody>,
function_imports: PrimaryMap<FuncIndex, VMFunctionImport>,
table_imports: PrimaryMap<TableIndex, VMTableImport>,
memory_imports: PrimaryMap<MemoryIndex, VMMemoryImport>,
global_imports: PrimaryMap<GlobalIndex, VMGlobalImport>,

View File

@@ -4,13 +4,18 @@
use cranelift_entity::EntityRef;
use cranelift_entity::{BoxedSlice, PrimaryMap};
use cranelift_wasm::{
DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex,
DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, GlobalInit,
};
use export::Export;
use imports::Imports;
use memory::LinearMemory;
use sig_registry::SignatureRegistry;
use signalhandlers::{wasmtime_init_eager, wasmtime_init_finish};
use std::rc::Rc;
use std::slice;
use std::string::String;
use table::Table;
use traphandlers::wasmtime_call;
use vmcontext::{
VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition,
VMTableDefinition,
@@ -20,6 +25,9 @@ use wasmtime_environ::{DataInitializer, Module};
/// An Instance of a WebAssemby module.
#[derive(Debug)]
pub struct Instance {
/// The `Module` this `Instance` was instantiated from.
module: Rc<Module>,
/// WebAssembly linear memory data.
memories: BoxedSlice<DefinedMemoryIndex, LinearMemory>,
@@ -33,6 +41,9 @@ pub struct Instance {
/// Resolved imports.
vmctx_imports: Imports,
/// Pointers to functions in executable memory.
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
/// Table storage base address vector pointed to by vmctx.
vmctx_tables: BoxedSlice<DefinedTableIndex, VMTableDefinition>,
@@ -49,19 +60,20 @@ pub struct Instance {
impl Instance {
/// Create a new `Instance`.
pub fn new(
module: &Module,
finished_functions: &BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
module: Rc<Module>,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
mut vmctx_imports: Imports,
data_initializers: &[DataInitializer],
) -> Result<Self, String> {
let mut sig_registry = instantiate_signatures(module);
let mut memories = instantiate_memories(module, data_initializers)?;
let mut tables = instantiate_tables(
module,
finished_functions,
&vmctx_imports.functions,
&mut sig_registry,
);
data_initializers: Vec<DataInitializer>,
) -> Result<Box<Self>, InstantiationError> {
let mut sig_registry = create_and_initialize_signatures(&module);
let mut tables = create_tables(&module);
let mut memories = create_memories(&module)?;
let mut vmctx_tables = tables
.values_mut()
.map(Table::vmtable)
.collect::<PrimaryMap<DefinedTableIndex, _>>()
.into_boxed_slice();
let mut vmctx_memories = memories
.values_mut()
@@ -69,13 +81,7 @@ impl Instance {
.collect::<PrimaryMap<DefinedMemoryIndex, _>>()
.into_boxed_slice();
let mut vmctx_globals = instantiate_globals(module);
let mut vmctx_tables = tables
.values_mut()
.map(Table::vmtable)
.collect::<PrimaryMap<DefinedTableIndex, _>>()
.into_boxed_slice();
let mut vmctx_globals = create_globals(&module);
let vmctx_imported_functions_ptr = vmctx_imports
.functions
@@ -90,19 +96,21 @@ impl Instance {
.as_mut_ptr();
let vmctx_imported_globals_ptr =
vmctx_imports.globals.values_mut().into_slice().as_mut_ptr();
let vmctx_tables_ptr = vmctx_tables.values_mut().into_slice().as_mut_ptr();
let vmctx_memories_ptr = vmctx_memories.values_mut().into_slice().as_mut_ptr();
let vmctx_globals_ptr = vmctx_globals.values_mut().into_slice().as_mut_ptr();
let vmctx_tables_ptr = vmctx_tables.values_mut().into_slice().as_mut_ptr();
let vmctx_shared_signatures_ptr = sig_registry.vmshared_signatures();
Ok(Self {
let mut result = Box::new(Self {
module,
memories,
tables,
sig_registry,
vmctx_imports,
finished_functions,
vmctx_tables,
vmctx_memories,
vmctx_globals,
vmctx_tables,
vmctx: VMContext::new(
vmctx_imported_functions_ptr,
vmctx_imported_tables_ptr,
@@ -113,7 +121,31 @@ impl Instance {
vmctx_globals_ptr,
vmctx_shared_signatures_ptr,
),
})
});
// Check initializer bounds before initializing anything.
check_table_init_bounds(&mut *result)?;
check_memory_init_bounds(&mut *result, &data_initializers)?;
// Apply the initializers.
initialize_tables(&mut *result)?;
initialize_memories(&mut *result, data_initializers)?;
initialize_globals(&mut *result);
// Rather than writing inline assembly to jump to the code region, we use the fact that
// the Rust ABI for calling a function with no arguments and no return values matches the one
// of the generated code. Thanks to this, we can transmute the code region into a first-class
// Rust function and call it.
// Ensure that our signal handlers are ready for action.
// TODO: Move these calls out of `Instance`.
wasmtime_init_eager();
wasmtime_init_finish(result.vmctx_mut());
// The WebAssembly spec specifies that the start function is
// invoked automatically at instantiation time.
result.invoke_start_function()?;
Ok(result)
}
/// Return a reference to the vmctx used by JIT code.
@@ -121,11 +153,21 @@ impl Instance {
&self.vmctx
}
/// Return a raw pointer to the vmctx used by JIT code.
pub fn vmctx_ptr(&self) -> *const VMContext {
self.vmctx()
}
/// Return a mutable reference to the vmctx used by JIT code.
pub fn vmctx_mut(&mut self) -> &mut VMContext {
&mut self.vmctx
}
/// Return a mutable raw pointer to the vmctx used by JIT code.
pub fn vmctx_mut_ptr(&mut self) -> *mut VMContext {
self.vmctx_mut()
}
/// Return the offset from the vmctx pointer to its containing Instance.
pub(crate) fn vmctx_offset() -> isize {
offset_of!(Self, vmctx) as isize
@@ -166,11 +208,195 @@ impl Instance {
/// Return the number of imported memories.
pub(crate) fn num_imported_memories(&self) -> usize {
self.vmctx_imports.functions.len()
self.vmctx_imports.memories.len()
}
    /// Invoke the WebAssembly start function of the instance, if one is present.
    ///
    /// Returns `Err(InstantiationError::StartTrap)` if the start function traps.
    fn invoke_start_function(&mut self) -> Result<(), InstantiationError> {
        if let Some(start_index) = self.module.start_func {
            // The start function may be locally defined or imported; resolve
            // both the code address and the vmctx that owns the function.
            let (callee_address, callee_vmctx) = match self.module.defined_func_index(start_index) {
                Some(defined_start_index) => {
                    // Locally defined: use this instance's compiled body and
                    // this instance's own vmctx.
                    let body = self
                        .finished_functions
                        .get(defined_start_index)
                        .expect("start function index is out of bounds")
                        .clone();
                    (body, self.vmctx_mut() as *mut VMContext)
                }
                None => {
                    // Imported: call through the import's (body, vmctx) pair.
                    assert!(start_index.index() < self.module.imported_funcs.len());
                    let import = unsafe { self.vmctx.imported_function(start_index) };
                    (import.body, import.vmctx)
                }
            };

            // Make the call, translating any wasm trap into a StartTrap error.
            unsafe { wasmtime_call(callee_address, callee_vmctx) }
                .map_err(InstantiationError::StartTrap)?;
        }
        Ok(())
    }
    /// Lookup an export with the given name.
    ///
    /// Returns `None` if the module declares no export named `field`.
    /// For each kind of export, the returned `Export` carries raw pointers
    /// into either this instance (for locally-defined entities) or into the
    /// instance the entity was imported from.
    pub fn lookup(&mut self, field: &str) -> Option<Export> {
        if let Some(export) = self.module.exports.get(field) {
            Some(match export {
                wasmtime_environ::Export::Function(index) => {
                    // Exported function: pair its code address with the vmctx
                    // that owns it, plus its signature for compatibility checks.
                    let signature = self.module.signatures[self.module.functions[*index]].clone();
                    let (address, vmctx) =
                        if let Some(def_index) = self.module.defined_func_index(*index) {
                            (
                                self.finished_functions[def_index],
                                &mut self.vmctx as *mut VMContext,
                            )
                        } else {
                            // Re-exported import: forward the import's pointers.
                            let import = unsafe { self.vmctx.imported_function(*index) };
                            (import.body, import.vmctx)
                        };
                    Export::Function {
                        address,
                        signature,
                        vmctx,
                    }
                }
                wasmtime_environ::Export::Table(index) => {
                    // Exported table: hand out the table descriptor address.
                    let (address, vmctx) = if let Some(def_index) =
                        self.module.defined_table_index(*index)
                    {
                        (
                            unsafe { self.vmctx.table_mut(def_index) } as *mut VMTableDefinition,
                            &mut self.vmctx as *mut VMContext,
                        )
                    } else {
                        let import = unsafe { self.vmctx.imported_table(*index) };
                        (import.from, import.vmctx)
                    };
                    Export::Table {
                        address,
                        vmctx,
                        table: self.module.table_plans[*index].clone(),
                    }
                }
                wasmtime_environ::Export::Memory(index) => {
                    // Exported memory: hand out the memory descriptor address.
                    let (address, vmctx) = if let Some(def_index) =
                        self.module.defined_memory_index(*index)
                    {
                        (
                            unsafe { self.vmctx.memory_mut(def_index) } as *mut VMMemoryDefinition,
                            &mut self.vmctx as *mut VMContext,
                        )
                    } else {
                        let import = unsafe { self.vmctx.imported_memory(*index) };
                        (import.from, import.vmctx)
                    };
                    Export::Memory {
                        address,
                        vmctx,
                        memory: self.module.memory_plans[*index].clone(),
                    }
                }
                // Exported global: only the storage address and declaration
                // are needed; globals carry no owning vmctx in `Export`.
                wasmtime_environ::Export::Global(index) => Export::Global {
                    address: if let Some(def_index) = self.module.defined_global_index(*index) {
                        unsafe { self.vmctx.global_mut(def_index) }
                    } else {
                        unsafe { self.vmctx.imported_global(*index).from }
                    },
                    global: self.module.globals[*index].clone(),
                },
            })
        } else {
            None
        }
    }
    /// Lookup an export with the given name. This takes an immutable reference,
    /// and the result is an `Export` that can only be used to read, not write.
    /// This requirement is not enforced in the type system, so this function is
    /// unsafe.
    pub unsafe fn lookup_immutable(&self, field: &str) -> Option<Export> {
        // NOTE(review): this casts away the shared borrow to reuse `lookup`;
        // soundness relies on the caller never writing through the returned
        // pointers, as the doc comment above states.
        let temporary_mut = &mut *(self as *const Instance as *mut Instance);
        temporary_mut.lookup(field)
    }
}
}
fn instantiate_signatures(module: &Module) -> SignatureRegistry {
/// Verify that every table-element initializer fits within the bounds of the
/// table it targets (which may live in this instance or in the instance the
/// table was imported from). Returns a `Link` error if any segment is out of
/// bounds. No table is modified here.
fn check_table_init_bounds(instance: &mut Instance) -> Result<(), InstantiationError> {
    for init in &instance.module.table_elements {
        // TODO: Refactor this.
        // Compute the effective start offset: a constant offset plus, if
        // present, the current value of the base global (treated as i32).
        let mut start = init.offset;
        if let Some(base) = init.base {
            let global = if let Some(def_index) = instance.module.defined_global_index(base) {
                unsafe { instance.vmctx.global_mut(def_index) }
            } else {
                unsafe { instance.vmctx.imported_global(base).from }
            };
            start += unsafe { *(&*global).as_i32() } as u32 as usize;
        }

        // TODO: Refactor this.
        // Resolve the target table's element slice, chasing an import back to
        // its defining (foreign) instance when necessary.
        let slice = if let Some(defined_table_index) =
            instance.module.defined_table_index(init.table_index)
        {
            instance.tables[defined_table_index].as_mut()
        } else {
            let import = &instance.vmctx_imports.tables[init.table_index];
            let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
            let foreign_table = unsafe { &mut *(import).from };
            let foreign_index = foreign_instance.vmctx().table_index(foreign_table);
            foreign_instance.tables[foreign_index].as_mut()
        };

        // `get_mut` returns `None` when the range exceeds the table's length.
        if slice.get_mut(start..start + init.elements.len()).is_none() {
            return Err(InstantiationError::Link(
                "elements segment does not fit".to_owned(),
            ));
        }
    }

    Ok(())
}
/// Verify that every data initializer fits within the current length of the
/// linear memory it targets (which may live in this instance or in the
/// instance the memory was imported from). Returns a `Link` error if any
/// segment is out of bounds. No memory is modified here.
fn check_memory_init_bounds(
    instance: &mut Instance,
    data_initializers: &[DataInitializer],
) -> Result<(), InstantiationError> {
    for init in data_initializers {
        // TODO: Refactor this.
        // Compute the effective start offset: a constant offset plus, if
        // present, the current value of the base global (treated as i32).
        let mut start = init.offset;
        if let Some(base) = init.base {
            let global = if let Some(def_index) = instance.module.defined_global_index(base) {
                unsafe { instance.vmctx.global_mut(def_index) }
            } else {
                unsafe { instance.vmctx.imported_global(base).from }
            };
            start += unsafe { *(&*global).as_i32() } as u32 as usize;
        }

        // TODO: Refactor this.
        // Resolve the target memory's descriptor, chasing an import back to
        // its defining (foreign) instance when necessary.
        let memory = if let Some(defined_memory_index) =
            instance.module.defined_memory_index(init.memory_index)
        {
            unsafe { instance.vmctx.memory(defined_memory_index) }
        } else {
            let import = &instance.vmctx_imports.memories[init.memory_index];
            let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
            let foreign_memory = unsafe { &mut *(import).from };
            let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory);
            unsafe { foreign_instance.vmctx.memory(foreign_index) }
        };

        // View the memory as a byte slice of its current length and perform a
        // bounds check on the segment's range.
        let mem_slice = unsafe { slice::from_raw_parts_mut(memory.base, memory.current_length) };
        if mem_slice.get_mut(start..start + init.data.len()).is_none() {
            return Err(InstantiationError::Link(
                "data segment does not fit".to_owned(),
            ));
        }
    }

    Ok(())
}
fn create_and_initialize_signatures(module: &Module) -> SignatureRegistry {
let mut sig_registry = SignatureRegistry::new();
for (sig_index, sig) in module.signatures.iter() {
sig_registry.register(sig_index, sig);
@@ -179,78 +405,169 @@ fn instantiate_signatures(module: &Module) -> SignatureRegistry {
}
/// Allocate memory for just the tables of the current module.
fn instantiate_tables(
module: &Module,
finished_functions: &BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imported_functions: &BoxedSlice<FuncIndex, *const VMFunctionBody>,
sig_registry: &mut SignatureRegistry,
) -> BoxedSlice<DefinedTableIndex, Table> {
let num_imports = module.imported_memories.len();
fn create_tables(module: &Module) -> BoxedSlice<DefinedTableIndex, Table> {
let num_imports = module.imported_tables.len();
let mut tables: PrimaryMap<DefinedTableIndex, _> =
PrimaryMap::with_capacity(module.table_plans.len() - num_imports);
for table in &module.table_plans.values().as_slice()[num_imports..] {
tables.push(Table::new(table));
}
for init in &module.table_elements {
debug_assert!(init.base.is_none(), "globalvar base not supported yet");
let defined_table_index = module
.defined_table_index(init.table_index)
.expect("Initializers for imported tables not supported yet");
let slice = tables[defined_table_index].as_mut();
let subslice = &mut slice[init.offset..init.offset + init.elements.len()];
for (i, func_idx) in init.elements.iter().enumerate() {
let callee_sig = module.functions[*func_idx];
let func_ptr = if let Some(index) = module.defined_func_index(*func_idx) {
finished_functions[index]
} else {
imported_functions[*func_idx]
};
let type_index = sig_registry.lookup(callee_sig);
subslice[i] = VMCallerCheckedAnyfunc {
func_ptr,
type_index,
};
}
}
tables.into_boxed_slice()
}
/// Initialize the table memory from the provided initializers.
///
/// For each element segment, resolves the target table (local or imported)
/// and writes a `VMCallerCheckedAnyfunc` — code pointer, shared signature
/// index, and owning vmctx — for every referenced function.
fn initialize_tables(instance: &mut Instance) -> Result<(), InstantiationError> {
    // Raw pointer to this instance's vmctx, used as the owning context for
    // locally-defined callees.
    let vmctx: *mut VMContext = instance.vmctx_mut();
    for init in &instance.module.table_elements {
        // Effective start offset: constant offset plus, if present, the
        // current value of the base global (treated as i32).
        let mut start = init.offset;
        if let Some(base) = init.base {
            let global = if let Some(def_index) = instance.module.defined_global_index(base) {
                unsafe { instance.vmctx.global_mut(def_index) }
            } else {
                unsafe { instance.vmctx.imported_global(base).from }
            };
            start += unsafe { *(&*global).as_i32() } as u32 as usize;
        }

        // Resolve the target table's element slice, chasing an import back to
        // its defining (foreign) instance when necessary.
        let slice = if let Some(defined_table_index) =
            instance.module.defined_table_index(init.table_index)
        {
            instance.tables[defined_table_index].as_mut()
        } else {
            let import = &instance.vmctx_imports.tables[init.table_index];
            let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
            let foreign_table = unsafe { &mut *(import).from };
            let foreign_index = foreign_instance.vmctx().table_index(foreign_table);
            foreign_instance.tables[foreign_index].as_mut()
        };

        if let Some(subslice) = slice.get_mut(start..start + init.elements.len()) {
            for (i, func_idx) in init.elements.iter().enumerate() {
                let callee_sig = instance.module.functions[*func_idx];
                // Locally-defined callees use this instance's vmctx; imported
                // callees carry their own (body, vmctx) pair.
                let (callee_ptr, callee_vmctx) =
                    if let Some(index) = instance.module.defined_func_index(*func_idx) {
                        (instance.finished_functions[index], vmctx)
                    } else {
                        let imported_func = &instance.vmctx_imports.functions[*func_idx];
                        (imported_func.body, imported_func.vmctx)
                    };
                let type_index = instance.sig_registry.lookup(callee_sig);
                subslice[i] = VMCallerCheckedAnyfunc {
                    func_ptr: callee_ptr,
                    type_index,
                    vmctx: callee_vmctx,
                };
            }
        } else {
            return Err(InstantiationError::Link(
                "elements segment does not fit".to_owned(),
            ));
        }
    }

    Ok(())
}
/// Allocate memory for just the memories of the current module.
fn instantiate_memories(
fn create_memories(
module: &Module,
data_initializers: &[DataInitializer],
) -> Result<BoxedSlice<DefinedMemoryIndex, LinearMemory>, String> {
) -> Result<BoxedSlice<DefinedMemoryIndex, LinearMemory>, InstantiationError> {
let num_imports = module.imported_memories.len();
let mut memories: PrimaryMap<DefinedMemoryIndex, _> =
PrimaryMap::with_capacity(module.memory_plans.len() - num_imports);
for plan in &module.memory_plans.values().as_slice()[num_imports..] {
memories.push(LinearMemory::new(&plan)?);
memories.push(LinearMemory::new(&plan).map_err(InstantiationError::Resource)?);
}
for init in data_initializers {
debug_assert!(init.base.is_none(), "globalvar base not supported yet");
let defined_memory_index = module
.defined_memory_index(init.memory_index)
.expect("Initializers for imported memories not supported yet");
let mem_mut = memories[defined_memory_index].as_mut();
let to_init = &mut mem_mut[init.offset..init.offset + init.data.len()];
to_init.copy_from_slice(init.data);
}
Ok(memories.into_boxed_slice())
}
/// Initialize the linear memories from the provided data initializers.
///
/// For each data segment, resolves the target memory (local or imported) and
/// copies the segment bytes into place, returning a `Link` error if a segment
/// does not fit.
fn initialize_memories(
    instance: &mut Instance,
    data_initializers: Vec<DataInitializer>,
) -> Result<(), InstantiationError> {
    for init in data_initializers {
        // Effective start offset: constant offset plus, if present, the
        // current value of the base global (treated as i32).
        let mut start = init.offset;
        if let Some(base) = init.base {
            let global = if let Some(def_index) = instance.module.defined_global_index(base) {
                unsafe { instance.vmctx.global_mut(def_index) }
            } else {
                unsafe { instance.vmctx.imported_global(base).from }
            };
            start += unsafe { *(&*global).as_i32() } as u32 as usize;
        }

        // Resolve the target memory's descriptor, chasing an import back to
        // its defining (foreign) instance when necessary.
        let memory = if let Some(defined_memory_index) =
            instance.module.defined_memory_index(init.memory_index)
        {
            unsafe { instance.vmctx.memory(defined_memory_index) }
        } else {
            let import = &instance.vmctx_imports.memories[init.memory_index];
            let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
            let foreign_memory = unsafe { &mut *(import).from };
            let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory);
            unsafe { foreign_instance.vmctx.memory(foreign_index) }
        };

        // View the memory as a byte slice of its current length, then copy
        // the segment in if the range is within bounds.
        let mem_slice = unsafe { slice::from_raw_parts_mut(memory.base, memory.current_length) };
        if let Some(to_init) = mem_slice.get_mut(start..start + init.data.len()) {
            to_init.copy_from_slice(init.data);
        } else {
            return Err(InstantiationError::Link(
                "data segment does not fit".to_owned(),
            ));
        }
    }

    Ok(())
}
/// Allocate memory for just the globals of the current module,
/// without any initializers applied yet.
fn instantiate_globals(module: &Module) -> BoxedSlice<DefinedGlobalIndex, VMGlobalDefinition> {
/// with initializers applied.
fn create_globals(module: &Module) -> BoxedSlice<DefinedGlobalIndex, VMGlobalDefinition> {
let num_imports = module.imported_globals.len();
let mut vmctx_globals = PrimaryMap::with_capacity(module.globals.len() - num_imports);
for global in &module.globals.values().as_slice()[num_imports..] {
vmctx_globals.push(VMGlobalDefinition::new(global));
for _ in &module.globals.values().as_slice()[num_imports..] {
vmctx_globals.push(VMGlobalDefinition::new());
}
vmctx_globals.into_boxed_slice()
}
/// Apply the module's global initializers to this instance's locally-defined
/// globals (imported globals are skipped; they are initialized by their
/// defining instance).
fn initialize_globals(instance: &mut Instance) {
    let num_imports = instance.module.imported_globals.len();
    for (index, global) in instance.module.globals.iter().skip(num_imports) {
        // All globals past the imports are locally defined, so this lookup
        // cannot fail.
        let def_index = instance.module.defined_global_index(index).unwrap();
        let to: *mut VMGlobalDefinition = unsafe { instance.vmctx.global_mut(def_index) };
        match global.initializer {
            GlobalInit::I32Const(x) => *unsafe { (*to).as_i32_mut() } = x,
            GlobalInit::I64Const(x) => *unsafe { (*to).as_i64_mut() } = x,
            GlobalInit::F32Const(x) => *unsafe { (*to).as_f32_bits_mut() } = x,
            GlobalInit::F64Const(x) => *unsafe { (*to).as_f64_bits_mut() } = x,
            GlobalInit::GetGlobal(x) => {
                // Copy the current value of another (local or imported) global.
                let from = if let Some(def_x) = instance.module.defined_global_index(x) {
                    unsafe { instance.vmctx.global_mut(def_x) }
                } else {
                    unsafe { instance.vmctx.imported_global(x).from }
                };
                unsafe { *to = *from };
            }
            // `Import` initializers only appear on imported globals, which
            // were skipped above.
            GlobalInit::Import => panic!("locally-defined global initialized as import"),
        }
    }
}
/// An error while instantiating a module.
#[derive(Fail, Debug)]
pub enum InstantiationError {
    /// Insufficient resources available for execution.
    #[fail(display = "Insufficient resources: {}", _0)]
    Resource(String),

    /// A linking error occurred (e.g. an initializer segment out of bounds).
    #[fail(display = "Link error: {}", _0)]
    Link(String),

    /// A trap occurred while invoking the start function.
    #[fail(display = "Trap occurred while invoking start function: {}", _0)]
    StartTrap(String),
}

View File

@@ -39,7 +39,11 @@ extern crate libc;
#[macro_use]
extern crate memoffset;
extern crate cast;
extern crate failure;
#[macro_use]
extern crate failure_derive;
mod export;
mod imports;
mod instance;
mod memory;
@@ -52,14 +56,15 @@ mod vmcontext;
pub mod libcalls;
pub use export::Export;
pub use imports::Imports;
pub use instance::Instance;
pub use instance::{Instance, InstantiationError};
pub use mmap::Mmap;
pub use signalhandlers::{wasmtime_init_eager, wasmtime_init_finish};
pub use traphandlers::wasmtime_call_trampoline;
pub use traphandlers::{wasmtime_call, wasmtime_call_trampoline};
pub use vmcontext::{
VMContext, VMFunctionBody, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
VMMemoryImport, VMTableDefinition, VMTableImport,
VMContext, VMFunctionBody, VMFunctionImport, VMGlobalDefinition, VMGlobalImport,
VMMemoryDefinition, VMMemoryImport, VMTableDefinition, VMTableImport,
};
#[cfg(not(feature = "std"))]

View File

@@ -116,7 +116,7 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_grow(
);
let memory_index = MemoryIndex::from_u32(memory_index);
let import = instance.vmctx_mut().imported_memory_mut(memory_index);
let import = instance.vmctx().imported_memory(memory_index);
let foreign_instance = (&mut *import.vmctx).instance();
let foreign_memory = &mut *import.from;
let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory);
@@ -148,7 +148,7 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_size(
);
let memory_index = MemoryIndex::from_u32(memory_index);
let import = instance.vmctx_mut().imported_memory_mut(memory_index);
let import = instance.vmctx().imported_memory(memory_index);
let foreign_instance = (&mut *import.vmctx).instance();
let foreign_memory = &mut *import.from;
let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory);

View File

@@ -65,14 +65,16 @@ impl LinearMemory {
let mmap = Mmap::with_size(request_bytes)?;
// Make the unmapped and offset-guard pages inaccessible.
unsafe {
region::protect(
mmap.as_ptr().add(mapped_bytes),
inaccessible_bytes,
region::Protection::None,
)
if request_bytes != 0 {
unsafe {
region::protect(
mmap.as_ptr().add(mapped_bytes),
inaccessible_bytes,
region::Protection::None,
)
}
.expect("unable to make memory inaccessible");
}
.expect("unable to make memory inaccessible");
Ok(Self {
mmap,
@@ -150,19 +152,7 @@ impl LinearMemory {
pub fn vmmemory(&mut self) -> VMMemoryDefinition {
VMMemoryDefinition {
base: self.mmap.as_mut_ptr(),
current_length: self.mmap.len(),
current_length: self.current as usize * WASM_PAGE_SIZE as usize,
}
}
}
impl AsRef<[u8]> for LinearMemory {
fn as_ref(&self) -> &[u8] {
self.mmap.as_slice()
}
}
impl AsMut<[u8]> for LinearMemory {
fn as_mut(&mut self) -> &mut [u8] {
self.mmap.as_mut_slice()
}
}

View File

@@ -34,6 +34,12 @@ impl Mmap {
/// suitably sized and aligned for memory protection.
#[cfg(not(target_os = "windows"))]
pub fn with_size(size: usize) -> Result<Self, String> {
// Mmap may return EINVAL if the size is zero, so just
// special-case that.
if size == 0 {
return Ok(Self::new());
}
let page_size = region::page::size();
let alloc_size = round_up_to_page_size(size, page_size);
let ptr = unsafe {

View File

@@ -107,3 +107,27 @@ pub unsafe extern "C" fn wasmtime_call_trampoline(
Ok(())
})
}
/// Call the wasm function pointed to by `callee`, which has no arguments or
/// return values.
///
/// Returns `Err` with a formatted message if the wasm code traps; trap
/// recovery is implemented with setjmp/longjmp via the thread-local
/// `JMP_BUFS` stack.
#[no_mangle]
pub unsafe extern "C" fn wasmtime_call(
    callee: *const VMFunctionBody,
    vmctx: *mut VMContext,
) -> Result<(), String> {
    // In case wasm code calls Rust that panics and unwinds past this point,
    // ensure that JMP_BUFS is unwound to its incoming state.
    let _guard = ScopeGuard::new();

    // NOTE(review): this transmute assumes the generated code for a
    // no-argument, no-result wasm function is callable via the Rust ABI as
    // `fn(*mut VMContext)`.
    let func: fn(*mut VMContext) = mem::transmute(callee);
    JMP_BUFS.with(|bufs| {
        let mut buf = mem::uninitialized();
        // A non-zero return from setjmp means a trap handler longjmp'd back
        // here; report the trap's pc.
        if setjmp(&mut buf) != 0 {
            return TRAP_DATA.with(|data| Err(format!("wasm trap at {:?}", data.get().pc)));
        }
        bufs.borrow_mut().push(buf);
        func(vmctx);
        Ok(())
    })
}

View File

@@ -3,12 +3,47 @@
use cranelift_entity::EntityRef;
use cranelift_wasm::{
DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, Global, GlobalIndex,
GlobalInit, MemoryIndex, TableIndex,
DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, GlobalIndex, MemoryIndex,
TableIndex,
};
use instance::Instance;
use std::{mem, ptr, u32};
/// An imported function.
///
/// `repr(C)` because JIT-compiled code accesses these fields at offsets
/// computed by `wasmtime_environ::VMOffsets`.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: *const VMFunctionBody,

    /// A pointer to the VMContext that owns the function.
    pub vmctx: *mut VMContext,
}
// Verify that the Rust layout of `VMFunctionImport` matches the offsets that
// `wasmtime_environ::VMOffsets` computes for JIT-generated code.
#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use std::mem::size_of;
    use wasmtime_environ::VMOffsets;

    #[test]
    fn check_vmfunction_import_offsets() {
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8);
        assert_eq!(
            size_of::<VMFunctionImport>(),
            usize::from(offsets.size_of_vmfunction_import())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, body),
            usize::from(offsets.vmfunction_import_body())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, vmctx),
            usize::from(offsets.vmfunction_import_vmctx())
        );
    }
}
/// A placeholder byte-sized type which is just used to provide some amount of type
/// safety when dealing with pointers to JIT-compiled function bodies. Note that it's
/// deliberately not Copy, as we shouldn't be carelessly copying function body bytes
@@ -244,17 +279,8 @@ mod test_vmglobal_definition {
impl VMGlobalDefinition {
/// Construct a `VMGlobalDefinition`.
pub fn new(global: &Global) -> Self {
let mut result = Self { storage: [0; 8] };
match global.initializer {
GlobalInit::I32Const(x) => *unsafe { result.as_i32_mut() } = x,
GlobalInit::I64Const(x) => *unsafe { result.as_i64_mut() } = x,
GlobalInit::F32Const(x) => *unsafe { result.as_f32_bits_mut() } = x,
GlobalInit::F64Const(x) => *unsafe { result.as_f64_bits_mut() } = x,
GlobalInit::GetGlobal(_x) => unimplemented!("globals init with get_global"),
GlobalInit::Import => panic!("attempting to initialize imported global"),
}
result
pub fn new() -> Self {
Self { storage: [0; 8] }
}
/// Return a reference to the value as an i32.
@@ -366,6 +392,7 @@ impl VMSharedSignatureIndex {
pub struct VMCallerCheckedAnyfunc {
pub func_ptr: *const VMFunctionBody,
pub type_index: VMSharedSignatureIndex,
pub vmctx: *mut VMContext,
// If more elements are added here, remember to add offset_of tests below!
}
@@ -390,6 +417,10 @@ mod test_vmcaller_checked_anyfunc {
offset_of!(VMCallerCheckedAnyfunc, type_index),
usize::from(offsets.vmcaller_checked_anyfunc_type_index())
);
assert_eq!(
offset_of!(VMCallerCheckedAnyfunc, vmctx),
usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
);
}
}
@@ -398,6 +429,7 @@ impl Default for VMCallerCheckedAnyfunc {
Self {
func_ptr: ptr::null_mut(),
type_index: VMSharedSignatureIndex::new(u32::MAX),
vmctx: ptr::null_mut(),
}
}
}
@@ -413,16 +445,16 @@ impl Default for VMCallerCheckedAnyfunc {
#[repr(C)]
pub struct VMContext {
/// A pointer to an array of `*const VMFunctionBody` instances, indexed by `FuncIndex`.
imported_functions: *const *const VMFunctionBody,
imported_functions: *const VMFunctionImport,
/// A pointer to an array of `VMTableImport` instances, indexed by `TableIndex`.
imported_tables: *mut VMTableImport,
imported_tables: *const VMTableImport,
/// A pointer to an array of `VMMemoryImport` instances, indexed by `MemoryIndex`.
imported_memories: *mut VMMemoryImport,
imported_memories: *const VMMemoryImport,
/// A pointer to an array of `VMGlobalImport` instances, indexed by `GlobalIndex`.
imported_globals: *mut VMGlobalImport,
imported_globals: *const VMGlobalImport,
/// A pointer to an array of locally-defined `VMTableDefinition` instances,
/// indexed by `DefinedTableIndex`.
@@ -473,10 +505,10 @@ mod test {
impl VMContext {
/// Create a new `VMContext` instance.
pub fn new(
imported_functions: *const *const VMFunctionBody,
imported_tables: *mut VMTableImport,
imported_memories: *mut VMMemoryImport,
imported_globals: *mut VMGlobalImport,
imported_functions: *const VMFunctionImport,
imported_tables: *const VMTableImport,
imported_memories: *const VMMemoryImport,
imported_globals: *const VMGlobalImport,
tables: *mut VMTableDefinition,
memories: *mut VMMemoryDefinition,
globals: *mut VMGlobalDefinition,
@@ -495,8 +527,8 @@ impl VMContext {
}
/// Return a reference to imported function `index`.
pub unsafe fn imported_function(&self, index: FuncIndex) -> *const VMFunctionBody {
*self.imported_functions.add(index.index())
pub unsafe fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport {
&*self.imported_functions.add(index.index())
}
/// Return a reference to imported table `index`.
@@ -504,31 +536,16 @@ impl VMContext {
&*self.imported_tables.add(index.index())
}
/// Return a mutable reference to imported table `index`.
pub unsafe fn imported_table_mut(&mut self, index: TableIndex) -> &mut VMTableImport {
&mut *self.imported_tables.add(index.index())
}
/// Return a reference to imported memory `index`.
pub unsafe fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
&*self.imported_memories.add(index.index())
}
/// Return a mutable reference to imported memory `index`.
pub unsafe fn imported_memory_mut(&mut self, index: MemoryIndex) -> &mut VMMemoryImport {
&mut *self.imported_memories.add(index.index())
}
/// Return a reference to imported global `index`.
pub unsafe fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
&*self.imported_globals.add(index.index())
}
/// Return a mutable reference to imported global `index`.
pub unsafe fn imported_global_mut(&mut self, index: GlobalIndex) -> &mut VMGlobalImport {
&mut *self.imported_globals.add(index.index())
}
/// Return a reference to locally-defined table `index`.
pub unsafe fn table(&self, index: DefinedTableIndex) -> &VMTableDefinition {
&*self.tables.add(index.index())
@@ -565,6 +582,16 @@ impl VMContext {
&mut *((self as *mut Self as *mut u8).offset(-Instance::vmctx_offset()) as *mut Instance)
}
    /// Return the table index for the given `VMTableDefinition`.
    ///
    /// `table` must point into this context's locally-defined tables array;
    /// the index is recovered from its byte offset from the array base.
    pub fn table_index(&self, table: &mut VMTableDefinition) -> DefinedTableIndex {
        // TODO: Use `offset_from` once it stabilizes.
        let begin = self.tables;
        let end: *mut VMTableDefinition = table;
        DefinedTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        )
    }
/// Return the memory index for the given `VMMemoryDefinition`.
pub fn memory_index(&self, memory: &mut VMMemoryDefinition) -> DefinedMemoryIndex {
// TODO: Use `offset_from` once it stablizes.