Implement the remaining valid spec tests.

Along with many other miscellaneous changes, rename InstanceWorld to
InstancePlus and reorganize its contents. InstancePlus still isn't a great
name, but at least it now has a clear purpose.
Dan Gohman
2018-12-11 17:12:33 -08:00
parent 6dd39dee6a
commit 3f24098edc
34 changed files with 1572 additions and 1262 deletions
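
The key structural change in the instance code below is that instantiation now runs in distinct phases: allocate tables, memories, and globals; check every initializer's bounds; apply the initializers; then invoke the start function. The following is a minimal, self-contained sketch of that check-then-apply pattern for data segments, assuming a plain byte slice as the memory; the Segment type and apply_segments function are illustrative stand-ins that loosely mirror check_memory_init_bounds and initialize_memories in the diff, not actual wasmtime-runtime APIs.

// Illustrative sketch only: validate every segment's bounds before writing
// any of them, so a failing segment can't leave memory partially initialized.
struct Segment {
    offset: usize,
    data: Vec<u8>,
}

fn apply_segments(memory: &mut [u8], segments: &[Segment]) -> Result<(), String> {
    // Phase 1: check bounds for all segments before initializing anything.
    for seg in segments {
        let end = seg
            .offset
            .checked_add(seg.data.len())
            .ok_or_else(|| "data segment does not fit".to_owned())?;
        if end > memory.len() {
            return Err("data segment does not fit".to_owned());
        }
    }
    // Phase 2: apply the segments; every range is now known to be in bounds.
    for seg in segments {
        memory[seg.offset..seg.offset + seg.data.len()].copy_from_slice(&seg.data);
    }
    Ok(())
}

fn main() {
    let mut mem = vec![0u8; 8];
    assert!(apply_segments(&mut mem, &[Segment { offset: 2, data: vec![1, 2, 3] }]).is_ok());
    assert_eq!(&mem[2..5], &[1, 2, 3]);
    // An out-of-range segment is rejected before any bytes are written.
    assert!(apply_segments(&mut mem, &[Segment { offset: 7, data: vec![9, 9] }]).is_err());
}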


@@ -4,13 +4,18 @@
use cranelift_entity::EntityRef;
use cranelift_entity::{BoxedSlice, PrimaryMap};
use cranelift_wasm::{
DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex,
DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, GlobalInit,
};
use export::Export;
use imports::Imports;
use memory::LinearMemory;
use sig_registry::SignatureRegistry;
use signalhandlers::{wasmtime_init_eager, wasmtime_init_finish};
use std::rc::Rc;
use std::slice;
use std::string::String;
use table::Table;
use traphandlers::wasmtime_call;
use vmcontext::{
VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition,
VMTableDefinition,
@@ -20,6 +25,9 @@ use wasmtime_environ::{DataInitializer, Module};
/// An Instance of a WebAssembly module.
#[derive(Debug)]
pub struct Instance {
/// The `Module` this `Instance` was instantiated from.
module: Rc<Module>,
/// WebAssembly linear memory data.
memories: BoxedSlice<DefinedMemoryIndex, LinearMemory>,
@@ -33,6 +41,9 @@ pub struct Instance {
/// Resolved imports.
vmctx_imports: Imports,
/// Pointers to functions in executable memory.
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
/// Table storage base address vector pointed to by vmctx.
vmctx_tables: BoxedSlice<DefinedTableIndex, VMTableDefinition>,
@@ -49,19 +60,20 @@ pub struct Instance {
impl Instance {
/// Create a new `Instance`.
pub fn new(
module: &Module,
finished_functions: &BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
module: Rc<Module>,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
mut vmctx_imports: Imports,
data_initializers: &[DataInitializer],
) -> Result<Self, String> {
let mut sig_registry = instantiate_signatures(module);
let mut memories = instantiate_memories(module, data_initializers)?;
let mut tables = instantiate_tables(
module,
finished_functions,
&vmctx_imports.functions,
&mut sig_registry,
);
data_initializers: Vec<DataInitializer>,
) -> Result<Box<Self>, InstantiationError> {
let mut sig_registry = create_and_initialize_signatures(&module);
let mut tables = create_tables(&module);
let mut memories = create_memories(&module)?;
let mut vmctx_tables = tables
.values_mut()
.map(Table::vmtable)
.collect::<PrimaryMap<DefinedTableIndex, _>>()
.into_boxed_slice();
let mut vmctx_memories = memories
.values_mut()
@@ -69,13 +81,7 @@ impl Instance {
.collect::<PrimaryMap<DefinedMemoryIndex, _>>()
.into_boxed_slice();
let mut vmctx_globals = instantiate_globals(module);
let mut vmctx_tables = tables
.values_mut()
.map(Table::vmtable)
.collect::<PrimaryMap<DefinedTableIndex, _>>()
.into_boxed_slice();
let mut vmctx_globals = create_globals(&module);
let vmctx_imported_functions_ptr = vmctx_imports
.functions
@@ -90,19 +96,21 @@ impl Instance {
.as_mut_ptr();
let vmctx_imported_globals_ptr =
vmctx_imports.globals.values_mut().into_slice().as_mut_ptr();
let vmctx_tables_ptr = vmctx_tables.values_mut().into_slice().as_mut_ptr();
let vmctx_memories_ptr = vmctx_memories.values_mut().into_slice().as_mut_ptr();
let vmctx_globals_ptr = vmctx_globals.values_mut().into_slice().as_mut_ptr();
let vmctx_tables_ptr = vmctx_tables.values_mut().into_slice().as_mut_ptr();
let vmctx_shared_signatures_ptr = sig_registry.vmshared_signatures();
Ok(Self {
let mut result = Box::new(Self {
module,
memories,
tables,
sig_registry,
vmctx_imports,
finished_functions,
vmctx_tables,
vmctx_memories,
vmctx_globals,
vmctx_tables,
vmctx: VMContext::new(
vmctx_imported_functions_ptr,
vmctx_imported_tables_ptr,
@@ -113,7 +121,31 @@ impl Instance {
vmctx_globals_ptr,
vmctx_shared_signatures_ptr,
),
})
});
// Check initializer bounds before initializing anything.
check_table_init_bounds(&mut *result)?;
check_memory_init_bounds(&mut *result, &data_initializers)?;
// Apply the initializers.
initialize_tables(&mut *result)?;
initialize_memories(&mut *result, data_initializers)?;
initialize_globals(&mut *result);
// Rather than writing inline assembly to jump to the code region, we use the fact that
// the Rust ABI for calling a function with no arguments and no return values matches that
// of the generated code. Thanks to this, we can transmute the code region into a first-class
// Rust function and call it.
// Ensure that our signal handlers are ready for action.
// TODO: Move these calls out of `Instance`.
wasmtime_init_eager();
wasmtime_init_finish(result.vmctx_mut());
// The WebAssembly spec specifies that the start function is
// invoked automatically at instantiation time.
result.invoke_start_function()?;
Ok(result)
}
/// Return a reference to the vmctx used by JIT code.
@@ -121,11 +153,21 @@ impl Instance {
&self.vmctx
}
/// Return a raw pointer to the vmctx used by JIT code.
pub fn vmctx_ptr(&self) -> *const VMContext {
self.vmctx()
}
/// Return a mutable reference to the vmctx used by JIT code.
pub fn vmctx_mut(&mut self) -> &mut VMContext {
&mut self.vmctx
}
/// Return a mutable raw pointer to the vmctx used by JIT code.
pub fn vmctx_mut_ptr(&mut self) -> *mut VMContext {
self.vmctx_mut()
}
/// Return the offset from the vmctx pointer to its containing Instance.
pub(crate) fn vmctx_offset() -> isize {
offset_of!(Self, vmctx) as isize
@@ -166,11 +208,195 @@ impl Instance {
/// Return the number of imported memories.
pub(crate) fn num_imported_memories(&self) -> usize {
self.vmctx_imports.functions.len()
self.vmctx_imports.memories.len()
}
/// Invoke the WebAssembly start function of the instance, if one is present.
fn invoke_start_function(&mut self) -> Result<(), InstantiationError> {
if let Some(start_index) = self.module.start_func {
let (callee_address, callee_vmctx) = match self.module.defined_func_index(start_index) {
Some(defined_start_index) => {
let body = self
.finished_functions
.get(defined_start_index)
.expect("start function index is out of bounds")
.clone();
(body, self.vmctx_mut() as *mut VMContext)
}
None => {
assert!(start_index.index() < self.module.imported_funcs.len());
let import = unsafe { self.vmctx.imported_function(start_index) };
(import.body, import.vmctx)
}
};
// Make the call.
unsafe { wasmtime_call(callee_address, callee_vmctx) }
.map_err(InstantiationError::StartTrap)?;
}
Ok(())
}
/// Lookup an export with the given name.
pub fn lookup(&mut self, field: &str) -> Option<Export> {
if let Some(export) = self.module.exports.get(field) {
Some(match export {
wasmtime_environ::Export::Function(index) => {
let signature = self.module.signatures[self.module.functions[*index]].clone();
let (address, vmctx) =
if let Some(def_index) = self.module.defined_func_index(*index) {
(
self.finished_functions[def_index],
&mut self.vmctx as *mut VMContext,
)
} else {
let import = unsafe { self.vmctx.imported_function(*index) };
(import.body, import.vmctx)
};
Export::Function {
address,
signature,
vmctx,
}
}
wasmtime_environ::Export::Table(index) => {
let (address, vmctx) = if let Some(def_index) =
self.module.defined_table_index(*index)
{
(
unsafe { self.vmctx.table_mut(def_index) } as *mut VMTableDefinition,
&mut self.vmctx as *mut VMContext,
)
} else {
let import = unsafe { self.vmctx.imported_table(*index) };
(import.from, import.vmctx)
};
Export::Table {
address,
vmctx,
table: self.module.table_plans[*index].clone(),
}
}
wasmtime_environ::Export::Memory(index) => {
let (address, vmctx) = if let Some(def_index) =
self.module.defined_memory_index(*index)
{
(
unsafe { self.vmctx.memory_mut(def_index) } as *mut VMMemoryDefinition,
&mut self.vmctx as *mut VMContext,
)
} else {
let import = unsafe { self.vmctx.imported_memory(*index) };
(import.from, import.vmctx)
};
Export::Memory {
address,
vmctx,
memory: self.module.memory_plans[*index].clone(),
}
}
wasmtime_environ::Export::Global(index) => Export::Global {
address: if let Some(def_index) = self.module.defined_global_index(*index) {
unsafe { self.vmctx.global_mut(def_index) }
} else {
unsafe { self.vmctx.imported_global(*index).from }
},
global: self.module.globals[*index].clone(),
},
})
} else {
None
}
}
/// Lookup an export with the given name. This takes an immutable reference,
/// and the result is an `Export` that can only be used to read, not write.
/// This requirement is not enforced in the type system, so this function is
/// unsafe.
pub unsafe fn lookup_immutable(&self, field: &str) -> Option<Export> {
let temporary_mut = &mut *(self as *const Instance as *mut Instance);
temporary_mut.lookup(field)
}
}
fn instantiate_signatures(module: &Module) -> SignatureRegistry {
fn check_table_init_bounds(instance: &mut Instance) -> Result<(), InstantiationError> {
for init in &instance.module.table_elements {
// TODO: Refactor this.
let mut start = init.offset;
if let Some(base) = init.base {
let global = if let Some(def_index) = instance.module.defined_global_index(base) {
unsafe { instance.vmctx.global_mut(def_index) }
} else {
unsafe { instance.vmctx.imported_global(base).from }
};
start += unsafe { *(&*global).as_i32() } as u32 as usize;
}
// TODO: Refactor this.
let slice = if let Some(defined_table_index) =
instance.module.defined_table_index(init.table_index)
{
instance.tables[defined_table_index].as_mut()
} else {
let import = &instance.vmctx_imports.tables[init.table_index];
let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
let foreign_table = unsafe { &mut *(import).from };
let foreign_index = foreign_instance.vmctx().table_index(foreign_table);
foreign_instance.tables[foreign_index].as_mut()
};
if slice.get_mut(start..start + init.elements.len()).is_none() {
return Err(InstantiationError::Link(
"elements segment does not fit".to_owned(),
));
}
}
Ok(())
}
fn check_memory_init_bounds(
instance: &mut Instance,
data_initializers: &[DataInitializer],
) -> Result<(), InstantiationError> {
for init in data_initializers {
// TODO: Refactor this.
let mut start = init.offset;
if let Some(base) = init.base {
let global = if let Some(def_index) = instance.module.defined_global_index(base) {
unsafe { instance.vmctx.global_mut(def_index) }
} else {
unsafe { instance.vmctx.imported_global(base).from }
};
start += unsafe { *(&*global).as_i32() } as u32 as usize;
}
// TODO: Refactor this.
let memory = if let Some(defined_memory_index) =
instance.module.defined_memory_index(init.memory_index)
{
unsafe { instance.vmctx.memory(defined_memory_index) }
} else {
let import = &instance.vmctx_imports.memories[init.memory_index];
let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
let foreign_memory = unsafe { &mut *(import).from };
let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory);
unsafe { foreign_instance.vmctx.memory(foreign_index) }
};
let mem_slice = unsafe { slice::from_raw_parts_mut(memory.base, memory.current_length) };
if mem_slice.get_mut(start..start + init.data.len()).is_none() {
return Err(InstantiationError::Link(
"data segment does not fit".to_owned(),
));
}
}
Ok(())
}
fn create_and_initialize_signatures(module: &Module) -> SignatureRegistry {
let mut sig_registry = SignatureRegistry::new();
for (sig_index, sig) in module.signatures.iter() {
sig_registry.register(sig_index, sig);
@@ -179,78 +405,169 @@ fn instantiate_signatures(module: &Module) -> SignatureRegistry {
}
/// Allocate memory for just the tables of the current module.
fn instantiate_tables(
module: &Module,
finished_functions: &BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imported_functions: &BoxedSlice<FuncIndex, *const VMFunctionBody>,
sig_registry: &mut SignatureRegistry,
) -> BoxedSlice<DefinedTableIndex, Table> {
let num_imports = module.imported_memories.len();
fn create_tables(module: &Module) -> BoxedSlice<DefinedTableIndex, Table> {
let num_imports = module.imported_tables.len();
let mut tables: PrimaryMap<DefinedTableIndex, _> =
PrimaryMap::with_capacity(module.table_plans.len() - num_imports);
for table in &module.table_plans.values().as_slice()[num_imports..] {
tables.push(Table::new(table));
}
for init in &module.table_elements {
debug_assert!(init.base.is_none(), "globalvar base not supported yet");
let defined_table_index = module
.defined_table_index(init.table_index)
.expect("Initializers for imported tables not supported yet");
let slice = tables[defined_table_index].as_mut();
let subslice = &mut slice[init.offset..init.offset + init.elements.len()];
for (i, func_idx) in init.elements.iter().enumerate() {
let callee_sig = module.functions[*func_idx];
let func_ptr = if let Some(index) = module.defined_func_index(*func_idx) {
finished_functions[index]
} else {
imported_functions[*func_idx]
};
let type_index = sig_registry.lookup(callee_sig);
subslice[i] = VMCallerCheckedAnyfunc {
func_ptr,
type_index,
};
}
}
tables.into_boxed_slice()
}
/// Initialize the table memory from the provided initializers.
fn initialize_tables(instance: &mut Instance) -> Result<(), InstantiationError> {
let vmctx: *mut VMContext = instance.vmctx_mut();
for init in &instance.module.table_elements {
let mut start = init.offset;
if let Some(base) = init.base {
let global = if let Some(def_index) = instance.module.defined_global_index(base) {
unsafe { instance.vmctx.global_mut(def_index) }
} else {
unsafe { instance.vmctx.imported_global(base).from }
};
start += unsafe { *(&*global).as_i32() } as u32 as usize;
}
let slice = if let Some(defined_table_index) =
instance.module.defined_table_index(init.table_index)
{
instance.tables[defined_table_index].as_mut()
} else {
let import = &instance.vmctx_imports.tables[init.table_index];
let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
let foreign_table = unsafe { &mut *(import).from };
let foreign_index = foreign_instance.vmctx().table_index(foreign_table);
foreign_instance.tables[foreign_index].as_mut()
};
if let Some(subslice) = slice.get_mut(start..start + init.elements.len()) {
for (i, func_idx) in init.elements.iter().enumerate() {
let callee_sig = instance.module.functions[*func_idx];
let (callee_ptr, callee_vmctx) =
if let Some(index) = instance.module.defined_func_index(*func_idx) {
(instance.finished_functions[index], vmctx)
} else {
let imported_func = &instance.vmctx_imports.functions[*func_idx];
(imported_func.body, imported_func.vmctx)
};
let type_index = instance.sig_registry.lookup(callee_sig);
subslice[i] = VMCallerCheckedAnyfunc {
func_ptr: callee_ptr,
type_index,
vmctx: callee_vmctx,
};
}
} else {
return Err(InstantiationError::Link(
"elements segment does not fit".to_owned(),
));
}
}
Ok(())
}
/// Allocate memory for just the memories of the current module.
fn instantiate_memories(
fn create_memories(
module: &Module,
data_initializers: &[DataInitializer],
) -> Result<BoxedSlice<DefinedMemoryIndex, LinearMemory>, String> {
) -> Result<BoxedSlice<DefinedMemoryIndex, LinearMemory>, InstantiationError> {
let num_imports = module.imported_memories.len();
let mut memories: PrimaryMap<DefinedMemoryIndex, _> =
PrimaryMap::with_capacity(module.memory_plans.len() - num_imports);
for plan in &module.memory_plans.values().as_slice()[num_imports..] {
memories.push(LinearMemory::new(&plan)?);
memories.push(LinearMemory::new(&plan).map_err(InstantiationError::Resource)?);
}
for init in data_initializers {
debug_assert!(init.base.is_none(), "globalvar base not supported yet");
let defined_memory_index = module
.defined_memory_index(init.memory_index)
.expect("Initializers for imported memories not supported yet");
let mem_mut = memories[defined_memory_index].as_mut();
let to_init = &mut mem_mut[init.offset..init.offset + init.data.len()];
to_init.copy_from_slice(init.data);
}
Ok(memories.into_boxed_slice())
}
/// Initialize the linear memories from the provided data initializers.
fn initialize_memories(
instance: &mut Instance,
data_initializers: Vec<DataInitializer>,
) -> Result<(), InstantiationError> {
for init in data_initializers {
let mut start = init.offset;
if let Some(base) = init.base {
let global = if let Some(def_index) = instance.module.defined_global_index(base) {
unsafe { instance.vmctx.global_mut(def_index) }
} else {
unsafe { instance.vmctx.imported_global(base).from }
};
start += unsafe { *(&*global).as_i32() } as u32 as usize;
}
let memory = if let Some(defined_memory_index) =
instance.module.defined_memory_index(init.memory_index)
{
unsafe { instance.vmctx.memory(defined_memory_index) }
} else {
let import = &instance.vmctx_imports.memories[init.memory_index];
let foreign_instance = unsafe { (&mut *(import).vmctx).instance() };
let foreign_memory = unsafe { &mut *(import).from };
let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory);
unsafe { foreign_instance.vmctx.memory(foreign_index) }
};
let mem_slice = unsafe { slice::from_raw_parts_mut(memory.base, memory.current_length) };
if let Some(to_init) = mem_slice.get_mut(start..start + init.data.len()) {
to_init.copy_from_slice(init.data);
} else {
return Err(InstantiationError::Link(
"data segment does not fit".to_owned(),
));
}
}
Ok(())
}
/// Allocate memory for just the globals of the current module,
/// without any initializers applied yet.
fn instantiate_globals(module: &Module) -> BoxedSlice<DefinedGlobalIndex, VMGlobalDefinition> {
/// with initializers applied.
fn create_globals(module: &Module) -> BoxedSlice<DefinedGlobalIndex, VMGlobalDefinition> {
let num_imports = module.imported_globals.len();
let mut vmctx_globals = PrimaryMap::with_capacity(module.globals.len() - num_imports);
for global in &module.globals.values().as_slice()[num_imports..] {
vmctx_globals.push(VMGlobalDefinition::new(global));
for _ in &module.globals.values().as_slice()[num_imports..] {
vmctx_globals.push(VMGlobalDefinition::new());
}
vmctx_globals.into_boxed_slice()
}
fn initialize_globals(instance: &mut Instance) {
let num_imports = instance.module.imported_globals.len();
for (index, global) in instance.module.globals.iter().skip(num_imports) {
let def_index = instance.module.defined_global_index(index).unwrap();
let to: *mut VMGlobalDefinition = unsafe { instance.vmctx.global_mut(def_index) };
match global.initializer {
GlobalInit::I32Const(x) => *unsafe { (*to).as_i32_mut() } = x,
GlobalInit::I64Const(x) => *unsafe { (*to).as_i64_mut() } = x,
GlobalInit::F32Const(x) => *unsafe { (*to).as_f32_bits_mut() } = x,
GlobalInit::F64Const(x) => *unsafe { (*to).as_f64_bits_mut() } = x,
GlobalInit::GetGlobal(x) => {
let from = if let Some(def_x) = instance.module.defined_global_index(x) {
unsafe { instance.vmctx.global_mut(def_x) }
} else {
unsafe { instance.vmctx.imported_global(x).from }
};
unsafe { *to = *from };
}
GlobalInit::Import => panic!("locally-defined global initialized as import"),
}
}
}
/// An error while instantiating a module.
#[derive(Fail, Debug)]
pub enum InstantiationError {
/// Insufficient resources available for execution.
#[fail(display = "Insufficient resources: {}", _0)]
Resource(String),
/// A wasm linking error occurred.
#[fail(display = "Link error: {}", _0)]
Link(String),
/// A trap occurred while invoking the start function.
#[fail(display = "Trap occurred while invoking start function: {}", _0)]
StartTrap(String),
}