diff --git a/Cargo.toml b/Cargo.toml index 833842b23b..ab271724b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ cranelift-native = { git = "https://github.com/sunfishcode/cranelift.git", branc cranelift-entity = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } cranelift-wasm = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } wasmtime-environ = { path = "lib/environ" } +wasmtime-runtime = { path = "lib/runtime" } wasmtime-execute = { path = "lib/execute" } wasmtime-obj = { path = "lib/obj" } wasmtime-wast = { path = "lib/wast" } diff --git a/lib/environ/Cargo.toml b/lib/environ/Cargo.toml index fd711229cb..ea22ee484e 100644 --- a/lib/environ/Cargo.toml +++ b/lib/environ/Cargo.toml @@ -14,8 +14,8 @@ cranelift-codegen = { git = "https://github.com/sunfishcode/cranelift.git", bran cranelift-entity = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } cranelift-wasm = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } cast = { version = "0.2.2", default-features = false } -failure = "0.1.3" -failure_derive = "0.1.3" +failure = { version = "0.1.3", default-features = false } +failure_derive = { version = "0.1.3", default-features = false } [features] default = ["std"] diff --git a/lib/environ/src/compilation.rs b/lib/environ/src/compilation.rs index c540f9a4f7..e4f53f8042 100644 --- a/lib/environ/src/compilation.rs +++ b/lib/environ/src/compilation.rs @@ -6,9 +6,12 @@ use cranelift_codegen::ir; use cranelift_codegen::ir::ExternalName; use cranelift_codegen::isa; use cranelift_codegen::{CodegenError, Context}; -use cranelift_entity::{EntityRef, PrimaryMap}; +use cranelift_entity::PrimaryMap; use cranelift_wasm::{DefinedFuncIndex, FuncIndex, FuncTranslator, WasmError}; -use environ::{get_func_name, get_memory_grow_name, get_memory_size_name, FuncEnvironment}; +use func_environ::{ + get_func_name, get_imported_memory32_grow_name, get_imported_memory32_size_name, + get_memory32_grow_name, get_memory32_size_name, FuncEnvironment, +}; use module::Module; use std::vec::Vec; @@ -49,13 +52,17 @@ impl binemit::RelocSink for RelocSink { name: &ExternalName, addend: binemit::Addend, ) { - let reloc_target = if *name == get_memory_grow_name() { - RelocationTarget::MemoryGrow - } else if *name == get_memory_size_name() { - RelocationTarget::MemorySize + let reloc_target = if *name == get_memory32_grow_name() { + RelocationTarget::Memory32Grow + } else if *name == get_imported_memory32_grow_name() { + RelocationTarget::ImportedMemory32Grow + } else if *name == get_memory32_size_name() { + RelocationTarget::Memory32Size + } else if *name == get_imported_memory32_size_name() { + RelocationTarget::ImportedMemory32Size } else if let ExternalName::User { namespace, index } = *name { debug_assert!(namespace == 0); - RelocationTarget::UserFunc(FuncIndex::new(index as usize)) + RelocationTarget::UserFunc(FuncIndex::from_u32(index)) } else if let ExternalName::LibCall(libcall) = *name { RelocationTarget::LibCall(libcall) } else { @@ -107,10 +114,14 @@ pub enum RelocationTarget { UserFunc(FuncIndex), /// A compiler-generated libcall. LibCall(ir::LibCall), - /// Function for growing the default memory by the specified amount of pages. - MemoryGrow, - /// Function for query current size of the default linear memory. - MemorySize, + /// Function for growing a locally-defined 32-bit memory by the specified amount of pages. 
+ Memory32Grow, + /// Function for growing an imported 32-bit memory by the specified amount of pages. + ImportedMemory32Grow, + /// Function for query current size of a locally-defined 32-bit linear memory. + Memory32Size, + /// Function for query current size of an imported 32-bit linear memory. + ImportedMemory32Size, } /// Relocations to apply to function bodies. diff --git a/lib/environ/src/environ.rs b/lib/environ/src/environ.rs deleted file mode 100644 index 6af9bc2f81..0000000000 --- a/lib/environ/src/environ.rs +++ /dev/null @@ -1,721 +0,0 @@ -use cast; -use cranelift_codegen::cursor::FuncCursor; -use cranelift_codegen::ir; -use cranelift_codegen::ir::condcodes::*; -use cranelift_codegen::ir::immediates::{Offset32, Uimm64}; -use cranelift_codegen::ir::types::*; -use cranelift_codegen::ir::{ - AbiParam, ArgumentPurpose, ExtFuncData, FuncRef, Function, InstBuilder, Signature, -}; -use cranelift_codegen::isa; -use cranelift_entity::EntityRef; -use cranelift_wasm::{ - self, translate_module, FuncIndex, Global, GlobalIndex, GlobalVariable, Memory, MemoryIndex, - SignatureIndex, Table, TableIndex, WasmResult, -}; -use module::{ - DataInitializer, Export, LazyContents, MemoryPlan, MemoryStyle, Module, TableElements, - TablePlan, TableStyle, -}; -use std::clone::Clone; -use std::string::String; -use std::vec::Vec; -use tunables::Tunables; -use vmoffsets::VMOffsets; -use WASM_PAGE_SIZE; - -/// Compute a `ir::ExternalName` for a given wasm function index. -pub fn get_func_name(func_index: FuncIndex) -> ir::ExternalName { - ir::ExternalName::user(0, func_index.as_u32()) -} - -/// Compute a `ir::ExternalName` for the `memory.grow` libcall. -pub fn get_memory_grow_name() -> ir::ExternalName { - ir::ExternalName::user(1, 0) -} - -/// Compute a `ir::ExternalName` for the `memory.size` libcall. -pub fn get_memory_size_name() -> ir::ExternalName { - ir::ExternalName::user(1, 1) -} - -/// Object containing the standalone environment information. To be passed after creation as -/// argument to `compile_module`. -pub struct ModuleEnvironment<'data, 'module> { - /// Compilation setting flags. - isa: &'module isa::TargetIsa, - - /// Module information. - module: &'module mut Module, - - /// References to information to be decoded later. - lazy: LazyContents<'data>, - - /// Tunable parameters. - tunables: Tunables, -} - -impl<'data, 'module> ModuleEnvironment<'data, 'module> { - /// Allocates the enironment data structures with the given isa. - pub fn new( - isa: &'module isa::TargetIsa, - module: &'module mut Module, - tunables: Tunables, - ) -> Self { - Self { - isa, - module, - lazy: LazyContents::new(), - tunables, - } - } - - fn pointer_type(&self) -> ir::Type { - self.isa.frontend_config().pointer_type() - } - - /// Translate the given wasm module data using this environment. This consumes the - /// `ModuleEnvironment` with its mutable reference to the `Module` and produces a - /// `ModuleTranslation` with an immutable reference to the `Module` (which has - /// become fully populated). - pub fn translate(mut self, data: &'data [u8]) -> WasmResult> { - translate_module(data, &mut self)?; - - Ok(ModuleTranslation { - isa: self.isa, - module: self.module, - lazy: self.lazy, - tunables: self.tunables, - }) - } -} - -/// The FuncEnvironment implementation for use by the `ModuleEnvironment`. -pub struct FuncEnvironment<'module_environment> { - /// Compilation setting flags. - isa: &'module_environment isa::TargetIsa, - - /// The module-level environment which this function-level environment belongs to. 
- module: &'module_environment Module, - - /// The Cranelift global holding the vmctx address. - vmctx: Option, - - /// The Cranelift global holding the base address of the memories vector. - memories_base: Option, - - /// The Cranelift global holding the base address of the tables vector. - tables_base: Option, - - /// The Cranelift global holding the base address of the globals vector. - globals_base: Option, - - /// The Cranelift global holding the base address of the signature IDs vector. - signature_ids_base: Option, - - /// The external function declaration for implementing wasm's `memory.size`. - memory_size_extfunc: Option, - - /// The external function declaration for implementing wasm's `memory.grow`. - memory_grow_extfunc: Option, - - /// Offsets to struct fields accessed by JIT code. - offsets: VMOffsets, -} - -impl<'module_environment> FuncEnvironment<'module_environment> { - pub fn new( - isa: &'module_environment isa::TargetIsa, - module: &'module_environment Module, - ) -> Self { - Self { - isa, - module, - vmctx: None, - memories_base: None, - tables_base: None, - globals_base: None, - signature_ids_base: None, - memory_size_extfunc: None, - memory_grow_extfunc: None, - offsets: VMOffsets::new(isa.pointer_bytes()), - } - } - - /// Transform the call argument list in preparation for making a call. - fn get_real_call_args(func: &Function, call_args: &[ir::Value]) -> Vec { - let mut real_call_args = Vec::with_capacity(call_args.len() + 1); - real_call_args.extend_from_slice(call_args); - real_call_args.push(func.special_param(ArgumentPurpose::VMContext).unwrap()); - real_call_args - } - - fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue { - self.vmctx.unwrap_or_else(|| { - let vmctx = func.create_global_value(ir::GlobalValueData::VMContext); - self.vmctx = Some(vmctx); - vmctx - }) - } -} - -/// This trait is useful for `translate_module` because it tells how to translate -/// enironment-dependent wasm instructions. These functions should not be called by the user. -impl<'data, 'module> cranelift_wasm::ModuleEnvironment<'data> - for ModuleEnvironment<'data, 'module> -{ - fn target_config(&self) -> isa::TargetFrontendConfig { - self.isa.frontend_config() - } - - fn declare_signature(&mut self, sig: &ir::Signature) { - let sig = translate_signature(sig.clone(), self.pointer_type()); - // TODO: Deduplicate signatures. 
- self.module.signatures.push(sig); - } - - fn get_signature(&self, sig_index: SignatureIndex) -> &ir::Signature { - &self.module.signatures[sig_index] - } - - fn declare_func_import(&mut self, sig_index: SignatureIndex, module: &str, field: &str) { - debug_assert_eq!( - self.module.functions.len(), - self.module.imported_funcs.len(), - "Imported functions must be declared first" - ); - self.module.functions.push(sig_index); - - self.module - .imported_funcs - .push((String::from(module), String::from(field))); - } - - fn get_num_func_imports(&self) -> usize { - self.module.imported_funcs.len() - } - - fn declare_func_type(&mut self, sig_index: SignatureIndex) { - self.module.functions.push(sig_index); - } - - fn get_func_type(&self, func_index: FuncIndex) -> SignatureIndex { - self.module.functions[func_index] - } - - fn declare_global_import(&mut self, global: Global, module: &str, field: &str) { - debug_assert_eq!( - self.module.globals.len(), - self.module.imported_globals.len(), - "Imported globals must be declared first" - ); - self.module.globals.push(global); - - self.module - .imported_globals - .push((String::from(module), String::from(field))); - } - - fn declare_global(&mut self, global: Global) { - self.module.globals.push(global); - } - - fn get_global(&self, global_index: GlobalIndex) -> &Global { - &self.module.globals[global_index] - } - - fn declare_table_import(&mut self, table: Table, module: &str, field: &str) { - debug_assert_eq!( - self.module.table_plans.len(), - self.module.imported_tables.len(), - "Imported tables must be declared first" - ); - let plan = TablePlan::for_table(table, &self.tunables); - self.module.table_plans.push(plan); - - self.module - .imported_tables - .push((String::from(module), String::from(field))); - } - - fn declare_table(&mut self, table: Table) { - let plan = TablePlan::for_table(table, &self.tunables); - self.module.table_plans.push(plan); - } - - fn declare_table_elements( - &mut self, - table_index: TableIndex, - base: Option, - offset: usize, - elements: Vec, - ) { - self.module.table_elements.push(TableElements { - table_index, - base, - offset, - elements, - }); - } - - fn declare_memory_import(&mut self, memory: Memory, module: &str, field: &str) { - debug_assert_eq!( - self.module.memory_plans.len(), - self.module.imported_memories.len(), - "Imported memories must be declared first" - ); - let plan = MemoryPlan::for_memory(memory, &self.tunables); - self.module.memory_plans.push(plan); - - self.module - .imported_memories - .push((String::from(module), String::from(field))); - } - - fn declare_memory(&mut self, memory: Memory) { - let plan = MemoryPlan::for_memory(memory, &self.tunables); - self.module.memory_plans.push(plan); - } - - fn declare_data_initialization( - &mut self, - memory_index: MemoryIndex, - base: Option, - offset: usize, - data: &'data [u8], - ) { - self.lazy.data_initializers.push(DataInitializer { - memory_index, - base, - offset, - data, - }); - } - - fn declare_func_export(&mut self, func_index: FuncIndex, name: &str) { - self.module - .exports - .insert(String::from(name), Export::Function(func_index)); - } - - fn declare_table_export(&mut self, table_index: TableIndex, name: &str) { - self.module - .exports - .insert(String::from(name), Export::Table(table_index)); - } - - fn declare_memory_export(&mut self, memory_index: MemoryIndex, name: &str) { - self.module - .exports - .insert(String::from(name), Export::Memory(memory_index)); - } - - fn declare_global_export(&mut self, global_index: GlobalIndex, 
name: &str) { - self.module - .exports - .insert(String::from(name), Export::Global(global_index)); - } - - fn declare_start_func(&mut self, func_index: FuncIndex) { - debug_assert!(self.module.start_func.is_none()); - self.module.start_func = Some(func_index); - } - - fn define_function_body(&mut self, body_bytes: &'data [u8]) -> WasmResult<()> { - self.lazy.function_body_inputs.push(body_bytes); - Ok(()) - } -} - -impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'module_environment> { - fn target_config(&self) -> isa::TargetFrontendConfig { - self.isa.frontend_config() - } - - fn make_global(&mut self, func: &mut ir::Function, index: GlobalIndex) -> GlobalVariable { - let pointer_type = self.pointer_type(); - - let vmctx = self.vmctx(func); - let mut globals_base = self.globals_base.unwrap_or_else(|| { - let new_base = func.create_global_value(ir::GlobalValueData::Load { - base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_globals())), - global_type: pointer_type, - readonly: true, - }); - self.globals_base = Some(new_base); - new_base - }); - let mut offset = self.offsets.index_vmglobal(index.as_u32()); - - // For imported memories, the `VMGlobal` array contains a pointer to - // the `VMGlobalDefinition` rather than containing the `VMGlobalDefinition` - // inline, so we do an extra indirection. - if self.module.is_imported_global(index) { - globals_base = func.create_global_value(ir::GlobalValueData::Load { - base: globals_base, - offset: Offset32::new(self.offsets.index_vmglobal_import_from(index.as_u32())), - global_type: pointer_type, - readonly: true, - }); - offset = self.offsets.index_vmglobal(0); - } - - GlobalVariable::Memory { - gv: globals_base, - offset: offset.into(), - ty: self.module.globals[index].ty, - } - } - - fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> ir::Heap { - let pointer_type = self.pointer_type(); - - let vmctx = self.vmctx(func); - let mut memories_base = self.memories_base.unwrap_or_else(|| { - let new_base = func.create_global_value(ir::GlobalValueData::Load { - base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_memories())), - global_type: pointer_type, - readonly: true, - }); - self.memories_base = Some(new_base); - new_base - }); - let mut base_offset = self.offsets.index_vmmemory_definition_base(index.as_u32()); - let mut current_length_offset = self - .offsets - .index_vmmemory_definition_current_length(index.as_u32()); - - // For imported memories, the `VMMemory` array contains a pointer to - // the `VMMemoryDefinition` rather than containing the `VMMemoryDefinition` - // inline, so we do an extra indirection. - if self.module.is_imported_memory(index) { - memories_base = func.create_global_value(ir::GlobalValueData::Load { - base: memories_base, - offset: Offset32::new(self.offsets.index_vmmemory_import_from(index.as_u32())), - global_type: pointer_type, - readonly: true, - }); - base_offset = self.offsets.index_vmmemory_definition_base(0); - current_length_offset = self.offsets.index_vmmemory_definition_current_length(0); - } - - // If we have a declared maximum, we can make this a "static" heap, which is - // allocated up front and never moved. 
- let (offset_guard_size, heap_style, readonly_base) = match self.module.memory_plans[index] { - MemoryPlan { - memory: _, - style: MemoryStyle::Dynamic, - offset_guard_size, - } => { - let heap_bound = func.create_global_value(ir::GlobalValueData::Load { - base: memories_base, - offset: Offset32::new(current_length_offset), - global_type: I32, - readonly: false, - }); - ( - Uimm64::new(offset_guard_size), - ir::HeapStyle::Dynamic { - bound_gv: heap_bound, - }, - false, - ) - } - MemoryPlan { - memory: _, - style: MemoryStyle::Static { bound }, - offset_guard_size, - } => ( - Uimm64::new(offset_guard_size), - ir::HeapStyle::Static { - bound: Uimm64::new(u64::from(bound) * u64::from(WASM_PAGE_SIZE)), - }, - true, - ), - }; - - let heap_base = func.create_global_value(ir::GlobalValueData::Load { - base: memories_base, - offset: Offset32::new(base_offset), - global_type: pointer_type, - readonly: readonly_base, - }); - func.create_heap(ir::HeapData { - base: heap_base, - min_size: 0.into(), - offset_guard_size, - style: heap_style, - index_type: I32, - }) - } - - fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> ir::Table { - let pointer_type = self.pointer_type(); - - let vmctx = self.vmctx(func); - let mut tables_base = self.tables_base.unwrap_or_else(|| { - let new_base = func.create_global_value(ir::GlobalValueData::Load { - base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_tables())), - global_type: pointer_type, - readonly: true, - }); - self.tables_base = Some(new_base); - new_base - }); - let mut base_offset = self.offsets.index_vmtable_definition_base(index.as_u32()); - let mut current_elements_offset = self - .offsets - .index_vmtable_definition_current_elements(index.as_u32()); - - // For imported tables, the `VMTable` array contains a pointer to - // the `VMTableDefinition` rather than containing the `VMTableDefinition` - // inline, so we do an extra indirection. 
- if self.module.is_imported_table(index) { - tables_base = func.create_global_value(ir::GlobalValueData::Load { - base: tables_base, - offset: Offset32::new(self.offsets.index_vmtable_import_from(index.as_u32())), - global_type: pointer_type, - readonly: true, - }); - base_offset = self.offsets.index_vmtable_definition_base(0); - current_elements_offset = self.offsets.index_vmtable_definition_current_elements(0); - } - - let base_gv = func.create_global_value(ir::GlobalValueData::Load { - base: tables_base, - offset: Offset32::new(base_offset), - global_type: pointer_type, - readonly: false, - }); - let bound_gv = func.create_global_value(ir::GlobalValueData::Load { - base: tables_base, - offset: Offset32::new(current_elements_offset), - global_type: I32, - readonly: false, - }); - - let element_size = match self.module.table_plans[index].style { - TableStyle::CallerChecksSignature => { - u64::from(self.offsets.size_of_vmcaller_checked_anyfunc()) - } - }; - - func.create_table(ir::TableData { - base_gv, - min_size: Uimm64::new(0), - bound_gv, - element_size: Uimm64::new(element_size), - index_type: I32, - }) - } - - fn make_indirect_sig(&mut self, func: &mut ir::Function, index: SignatureIndex) -> ir::SigRef { - func.import_signature(self.module.signatures[index].clone()) - } - - fn make_direct_func(&mut self, func: &mut ir::Function, index: FuncIndex) -> ir::FuncRef { - let sigidx = self.module.functions[index]; - let signature = func.import_signature(self.module.signatures[sigidx].clone()); - let name = get_func_name(index); - // We currently allocate all code segments independently, so nothing - // is colocated. - let colocated = false; - func.import_function(ir::ExtFuncData { - name, - signature, - colocated, - }) - } - - fn translate_call_indirect( - &mut self, - mut pos: FuncCursor, - table_index: TableIndex, - table: ir::Table, - sig_index: SignatureIndex, - sig_ref: ir::SigRef, - callee: ir::Value, - call_args: &[ir::Value], - ) -> WasmResult { - let pointer_type = self.pointer_type(); - - let table_entry_addr = pos.ins().table_addr(pointer_type, table, callee, 0); - - // Dereference table_entry_addr to get the function address. - let mut mem_flags = ir::MemFlags::new(); - mem_flags.set_notrap(); - mem_flags.set_aligned(); - let func_addr = pos.ins().load( - pointer_type, - mem_flags, - table_entry_addr, - i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()), - ); - - // If necessary, check the signature. - match self.module.table_plans[table_index].style { - TableStyle::CallerChecksSignature => { - let sig_id_size = self.offsets.size_of_vmsignature_id(); - let sig_id_type = Type::int(u16::from(sig_id_size) * 8).unwrap(); - - let vmctx = self.vmctx(pos.func); - let signature_ids_base = self.signature_ids_base.unwrap_or_else(|| { - let new_base = pos.func.create_global_value(ir::GlobalValueData::Load { - base: vmctx, - offset: Offset32::new(i32::from(self.offsets.vmctx_signature_ids())), - global_type: pointer_type, - readonly: true, - }); - self.signature_ids_base = Some(new_base); - new_base - }); - let sig_ids = pos.ins().global_value(pointer_type, signature_ids_base); - - // Load the caller ID. - // TODO: Factor this out into a MemFlags constructor, as it's used a lot. 
- let mut mem_flags = ir::MemFlags::new(); - mem_flags.set_notrap(); - mem_flags.set_aligned(); - let caller_sig_id = pos.ins().load( - sig_id_type, - mem_flags, - sig_ids, - cast::i32( - sig_index - .as_u32() - .checked_mul(u32::from(sig_id_size)) - .unwrap(), - ) - .unwrap(), - ); - - // Load the callee ID. - let mut mem_flags = ir::MemFlags::new(); - mem_flags.set_notrap(); - mem_flags.set_aligned(); - let callee_sig_id = pos.ins().load( - sig_id_type, - mem_flags, - table_entry_addr, - i32::from(self.offsets.vmcaller_checked_anyfunc_type_id()), - ); - - // Check that they match. - let cmp = pos.ins().icmp(IntCC::Equal, callee_sig_id, caller_sig_id); - pos.ins().trapz(cmp, ir::TrapCode::BadSignature); - } - } - - let real_call_args = FuncEnvironment::get_real_call_args(pos.func, call_args); - Ok(pos.ins().call_indirect(sig_ref, func_addr, &real_call_args)) - } - - fn translate_call( - &mut self, - mut pos: FuncCursor, - _callee_index: FuncIndex, - callee: ir::FuncRef, - call_args: &[ir::Value], - ) -> WasmResult { - let real_call_args = FuncEnvironment::get_real_call_args(pos.func, call_args); - Ok(pos.ins().call(callee, &real_call_args)) - } - - fn translate_memory_grow( - &mut self, - mut pos: FuncCursor, - index: MemoryIndex, - _heap: ir::Heap, - val: ir::Value, - ) -> WasmResult { - let memory_grow_func = self.memory_grow_extfunc.unwrap_or_else(|| { - let sig_ref = pos.func.import_signature(Signature { - params: vec![ - AbiParam::new(I32), - AbiParam::new(I32), - AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext), - ], - returns: vec![AbiParam::new(I32)], - call_conv: self.isa.frontend_config().default_call_conv, - }); - // We currently allocate all code segments independently, so nothing - // is colocated. - let colocated = false; - pos.func.import_function(ExtFuncData { - name: get_memory_grow_name(), - signature: sig_ref, - colocated, - }) - }); - self.memory_grow_extfunc = Some(memory_grow_func); - let memory_index = pos.ins().iconst(I32, index.index() as i64); - let vmctx = pos.func.special_param(ArgumentPurpose::VMContext).unwrap(); - let call_inst = pos - .ins() - .call(memory_grow_func, &[val, memory_index, vmctx]); - Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap()) - } - - fn translate_memory_size( - &mut self, - mut pos: FuncCursor, - index: MemoryIndex, - _heap: ir::Heap, - ) -> WasmResult { - let memory_size_func = self.memory_size_extfunc.unwrap_or_else(|| { - let sig_ref = pos.func.import_signature(Signature { - params: vec![ - AbiParam::new(I32), - AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext), - ], - returns: vec![AbiParam::new(I32)], - call_conv: self.isa.frontend_config().default_call_conv, - }); - // We currently allocate all code segments independently, so nothing - // is colocated. - let colocated = false; - pos.func.import_function(ExtFuncData { - name: get_memory_size_name(), - signature: sig_ref, - colocated, - }) - }); - self.memory_size_extfunc = Some(memory_size_func); - let memory_index = pos.ins().iconst(I32, index.index() as i64); - let vmctx = pos.func.special_param(ArgumentPurpose::VMContext).unwrap(); - let call_inst = pos.ins().call(memory_size_func, &[memory_index, vmctx]); - Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap()) - } -} - -/// The result of translating via `ModuleEnvironment`. -pub struct ModuleTranslation<'data, 'module> { - /// Compilation setting flags. - pub isa: &'module isa::TargetIsa, - - /// Module information. 
- pub module: &'module Module, - - /// Pointers into the raw data buffer. - pub lazy: LazyContents<'data>, - - /// Tunable parameters. - pub tunables: Tunables, -} - -impl<'data, 'module> ModuleTranslation<'data, 'module> { - /// Return a new `FuncEnvironment` for translating a function. - pub fn func_env(&self) -> FuncEnvironment { - FuncEnvironment::new(self.isa, &self.module) - } -} - -/// Add environment-specific function parameters. -pub fn translate_signature(mut sig: ir::Signature, pointer_type: ir::Type) -> ir::Signature { - sig.params - .push(AbiParam::special(pointer_type, ArgumentPurpose::VMContext)); - sig -} diff --git a/lib/environ/src/func_environ.rs b/lib/environ/src/func_environ.rs new file mode 100644 index 0000000000..27297734eb --- /dev/null +++ b/lib/environ/src/func_environ.rs @@ -0,0 +1,660 @@ +use cast; +use cranelift_codegen::cursor::FuncCursor; +use cranelift_codegen::ir; +use cranelift_codegen::ir::condcodes::*; +use cranelift_codegen::ir::immediates::{Offset32, Uimm64}; +use cranelift_codegen::ir::types::*; +use cranelift_codegen::ir::{ + AbiParam, ArgumentPurpose, ExtFuncData, FuncRef, Function, InstBuilder, Signature, +}; +use cranelift_codegen::isa; +use cranelift_entity::EntityRef; +use cranelift_wasm::{ + self, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, GlobalIndex, + GlobalVariable, MemoryIndex, SignatureIndex, TableIndex, WasmResult, +}; +use module::{MemoryPlan, MemoryStyle, Module, TableStyle}; +use std::clone::Clone; +use std::vec::Vec; +use vmoffsets::VMOffsets; +use WASM_PAGE_SIZE; + +/// Compute an `ir::ExternalName` for a given wasm function index. +pub fn get_func_name(func_index: FuncIndex) -> ir::ExternalName { + ir::ExternalName::user(0, func_index.as_u32()) +} + +/// Compute an `ir::ExternalName` for the `memory.grow` libcall for +/// 32-bit locally-defined memories. +pub fn get_memory32_grow_name() -> ir::ExternalName { + ir::ExternalName::user(1, 0) +} + +/// Compute an `ir::ExternalName` for the `memory.grow` libcall for +/// 32-bit imported memories. +pub fn get_imported_memory32_grow_name() -> ir::ExternalName { + ir::ExternalName::user(1, 1) +} + +/// Compute an `ir::ExternalName` for the `memory.size` libcall for +/// 32-bit locally-defined memories. +pub fn get_memory32_size_name() -> ir::ExternalName { + ir::ExternalName::user(1, 2) +} + +/// Compute an `ir::ExternalName` for the `memory.size` libcall for +/// 32-bit imported memories. +pub fn get_imported_memory32_size_name() -> ir::ExternalName { + ir::ExternalName::user(1, 3) +} + +/// The FuncEnvironment implementation for use by the `ModuleEnvironment`. +pub struct FuncEnvironment<'module_environment> { + /// Compilation setting flags. + isa: &'module_environment isa::TargetIsa, + + /// The module-level environment which this function-level environment belongs to. + module: &'module_environment Module, + + /// The Cranelift global holding the vmctx address. + vmctx: Option, + + /// The Cranelift global holding the base address of the imported functions table. + imported_functions_base: Option, + + /// The Cranelift global holding the base address of the imported tables table. + imported_tables_base: Option, + + /// The Cranelift global holding the base address of the imported memories table. + imported_memories_base: Option, + + /// The Cranelift global holding the base address of the imported globals table. + imported_globals_base: Option, + + /// The Cranelift global holding the base address of the tables vector. 
+ tables_base: Option, + + /// The Cranelift global holding the base address of the memories vector. + memories_base: Option, + + /// The Cranelift global holding the base address of the globals vector. + globals_base: Option, + + /// The Cranelift global holding the base address of the signature IDs vector. + signature_ids_base: Option, + + /// The external function declaration for implementing wasm's `memory.size` + /// for locally-defined 32-bit memories. + memory32_size_extfunc: Option, + + /// The external function declaration for implementing wasm's `memory.size` + /// for imported 32-bit memories. + imported_memory32_size_extfunc: Option, + + /// The external function declaration for implementing wasm's `memory.grow` + /// for locally-defined memories. + memory_grow_extfunc: Option, + + /// The external function declaration for implementing wasm's `memory.grow` + /// for imported memories. + imported_memory_grow_extfunc: Option, + + /// Offsets to struct fields accessed by JIT code. + offsets: VMOffsets, +} + +impl<'module_environment> FuncEnvironment<'module_environment> { + pub fn new( + isa: &'module_environment isa::TargetIsa, + module: &'module_environment Module, + ) -> Self { + Self { + isa, + module, + vmctx: None, + imported_functions_base: None, + imported_tables_base: None, + imported_memories_base: None, + imported_globals_base: None, + tables_base: None, + memories_base: None, + globals_base: None, + signature_ids_base: None, + memory32_size_extfunc: None, + imported_memory32_size_extfunc: None, + memory_grow_extfunc: None, + imported_memory_grow_extfunc: None, + offsets: VMOffsets::new(isa.pointer_bytes()), + } + } + + fn pointer_type(&self) -> ir::Type { + self.isa.frontend_config().pointer_type() + } + + /// Transform the call argument list in preparation for making a call. 
+ fn get_real_call_args(func: &Function, call_args: &[ir::Value]) -> Vec { + let mut real_call_args = Vec::with_capacity(call_args.len() + 1); + real_call_args.extend_from_slice(call_args); + real_call_args.push(func.special_param(ArgumentPurpose::VMContext).unwrap()); + real_call_args + } + + fn vmctx(&mut self, func: &mut Function) -> ir::GlobalValue { + self.vmctx.unwrap_or_else(|| { + let vmctx = func.create_global_value(ir::GlobalValueData::VMContext); + self.vmctx = Some(vmctx); + vmctx + }) + } + + fn get_imported_functions_base(&mut self, func: &mut Function) -> ir::GlobalValue { + self.imported_functions_base.unwrap_or_else(|| { + let pointer_type = self.pointer_type(); + let vmctx = self.vmctx(func); + let new_base = func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_imported_functions())), + global_type: pointer_type, + readonly: true, + }); + self.imported_functions_base = Some(new_base); + new_base + }) + } + + fn get_imported_tables_base(&mut self, func: &mut Function) -> ir::GlobalValue { + self.imported_tables_base.unwrap_or_else(|| { + let pointer_type = self.pointer_type(); + let vmctx = self.vmctx(func); + let new_base = func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_imported_tables())), + global_type: pointer_type, + readonly: true, + }); + self.imported_tables_base = Some(new_base); + new_base + }) + } + + fn get_imported_memories_base(&mut self, func: &mut Function) -> ir::GlobalValue { + self.imported_memories_base.unwrap_or_else(|| { + let pointer_type = self.pointer_type(); + let vmctx = self.vmctx(func); + let new_base = func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_imported_memories())), + global_type: pointer_type, + readonly: true, + }); + self.imported_memories_base = Some(new_base); + new_base + }) + } + + fn get_imported_globals_base(&mut self, func: &mut Function) -> ir::GlobalValue { + self.imported_globals_base.unwrap_or_else(|| { + let pointer_type = self.pointer_type(); + let vmctx = self.vmctx(func); + let new_base = func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_imported_globals())), + global_type: pointer_type, + readonly: true, + }); + self.imported_globals_base = Some(new_base); + new_base + }) + } + + fn get_tables_base(&mut self, func: &mut Function) -> ir::GlobalValue { + self.tables_base.unwrap_or_else(|| { + let pointer_type = self.pointer_type(); + let vmctx = self.vmctx(func); + let new_base = func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_tables())), + global_type: pointer_type, + readonly: true, + }); + self.tables_base = Some(new_base); + new_base + }) + } + + fn get_memories_base(&mut self, func: &mut Function) -> ir::GlobalValue { + self.memories_base.unwrap_or_else(|| { + let pointer_type = self.pointer_type(); + let vmctx = self.vmctx(func); + let new_base = func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_memories())), + global_type: pointer_type, + readonly: true, + }); + self.memories_base = Some(new_base); + new_base + }) + } + + fn get_globals_base(&mut self, func: &mut Function) -> ir::GlobalValue { + self.globals_base.unwrap_or_else(|| { + let pointer_type = self.pointer_type(); + let vmctx = 
self.vmctx(func); + let new_base = func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_globals())), + global_type: pointer_type, + readonly: true, + }); + self.globals_base = Some(new_base); + new_base + }) + } + + fn get_memory_grow_sig(&self, func: &mut Function) -> ir::SigRef { + func.import_signature(Signature { + params: vec![ + AbiParam::new(I32), + AbiParam::new(I32), + AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext), + ], + returns: vec![AbiParam::new(I32)], + call_conv: self.isa.frontend_config().default_call_conv, + }) + } + + /// Return the memory.grow function to call for the given index, along with the + /// translated index value to pass to it. + fn get_memory_grow_func( + &mut self, + func: &mut Function, + index: MemoryIndex, + ) -> (FuncRef, usize) { + if self.module.is_imported_memory(index) { + let extfunc = self.imported_memory_grow_extfunc.unwrap_or_else(|| { + let sig_ref = self.get_memory_grow_sig(func); + func.import_function(ExtFuncData { + name: get_imported_memory32_grow_name(), + signature: sig_ref, + // We currently allocate all code segments independently, so nothing + // is colocated. + colocated: false, + }) + }); + self.imported_memory_grow_extfunc = Some(extfunc); + (extfunc, index.index()) + } else { + let extfunc = self.memory_grow_extfunc.unwrap_or_else(|| { + let sig_ref = self.get_memory_grow_sig(func); + func.import_function(ExtFuncData { + name: get_memory32_grow_name(), + signature: sig_ref, + // We currently allocate all code segments independently, so nothing + // is colocated. + colocated: false, + }) + }); + self.memory_grow_extfunc = Some(extfunc); + ( + extfunc, + self.module.defined_memory_index(index).unwrap().index(), + ) + } + } + + fn get_memory32_size_sig(&self, func: &mut Function) -> ir::SigRef { + func.import_signature(Signature { + params: vec![ + AbiParam::new(I32), + AbiParam::special(self.pointer_type(), ArgumentPurpose::VMContext), + ], + returns: vec![AbiParam::new(I32)], + call_conv: self.isa.frontend_config().default_call_conv, + }) + } + + /// Return the memory.size function to call for the given index, along with the + /// translated index value to pass to it. + fn get_memory_size_func( + &mut self, + func: &mut Function, + index: MemoryIndex, + ) -> (FuncRef, usize) { + if self.module.is_imported_memory(index) { + let extfunc = self.imported_memory32_size_extfunc.unwrap_or_else(|| { + let sig_ref = self.get_memory32_size_sig(func); + func.import_function(ExtFuncData { + name: get_imported_memory32_size_name(), + signature: sig_ref, + // We currently allocate all code segments independently, so nothing + // is colocated. + colocated: false, + }) + }); + self.imported_memory32_size_extfunc = Some(extfunc); + (extfunc, index.index()) + } else { + let extfunc = self.memory32_size_extfunc.unwrap_or_else(|| { + let sig_ref = self.get_memory32_size_sig(func); + func.import_function(ExtFuncData { + name: get_memory32_size_name(), + signature: sig_ref, + // We currently allocate all code segments independently, so nothing + // is colocated. 
+ colocated: false, + }) + }); + self.memory32_size_extfunc = Some(extfunc); + ( + extfunc, + self.module.defined_memory_index(index).unwrap().index(), + ) + } + } +} + +impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'module_environment> { + fn target_config(&self) -> isa::TargetFrontendConfig { + self.isa.frontend_config() + } + + fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> ir::Table { + let pointer_type = self.pointer_type(); + + let (table, def_index) = if let Some(def_index) = self.module.defined_table_index(index) { + let table = self.get_tables_base(func); + (table, def_index) + } else { + let imported_tables_base = self.get_imported_tables_base(func); + let from_offset = self.offsets.index_vmtable_import_from(index); + let table = func.create_global_value(ir::GlobalValueData::Load { + base: imported_tables_base, + offset: Offset32::new(from_offset), + global_type: pointer_type, + readonly: true, + }); + (table, DefinedTableIndex::new(0)) + }; + let base_offset = self.offsets.index_vmtable_definition_base(def_index); + let current_elements_offset = self + .offsets + .index_vmtable_definition_current_elements(def_index); + + let base_gv = func.create_global_value(ir::GlobalValueData::Load { + base: table, + offset: Offset32::new(base_offset), + global_type: pointer_type, + readonly: false, + }); + let bound_gv = func.create_global_value(ir::GlobalValueData::Load { + base: table, + offset: Offset32::new(current_elements_offset), + global_type: self.offsets.type_of_vmtable_definition_current_elements(), + readonly: false, + }); + + let element_size = match self.module.table_plans[index].style { + TableStyle::CallerChecksSignature => { + u64::from(self.offsets.size_of_vmcaller_checked_anyfunc()) + } + }; + + func.create_table(ir::TableData { + base_gv, + min_size: Uimm64::new(0), + bound_gv, + element_size: Uimm64::new(element_size), + index_type: I32, + }) + } + + fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> ir::Heap { + let pointer_type = self.pointer_type(); + + let (memory, def_index) = if let Some(def_index) = self.module.defined_memory_index(index) { + let memory = self.get_memories_base(func); + (memory, def_index) + } else { + let imported_memories_base = self.get_imported_memories_base(func); + let from_offset = self.offsets.index_vmmemory_import_from(index); + let memory = func.create_global_value(ir::GlobalValueData::Load { + base: imported_memories_base, + offset: Offset32::new(from_offset), + global_type: pointer_type, + readonly: true, + }); + (memory, DefinedMemoryIndex::new(0)) + }; + let base_offset = self.offsets.index_vmmemory_definition_base(def_index); + let current_length_offset = self + .offsets + .index_vmmemory_definition_current_length(def_index); + + // If we have a declared maximum, we can make this a "static" heap, which is + // allocated up front and never moved. 
+ let (offset_guard_size, heap_style, readonly_base) = match self.module.memory_plans[index] { + MemoryPlan { + memory: _, + style: MemoryStyle::Dynamic, + offset_guard_size, + } => { + let heap_bound = func.create_global_value(ir::GlobalValueData::Load { + base: memory, + offset: Offset32::new(current_length_offset), + global_type: self.offsets.type_of_vmmemory_definition_current_length(), + readonly: false, + }); + ( + Uimm64::new(offset_guard_size), + ir::HeapStyle::Dynamic { + bound_gv: heap_bound, + }, + false, + ) + } + MemoryPlan { + memory: _, + style: MemoryStyle::Static { bound }, + offset_guard_size, + } => ( + Uimm64::new(offset_guard_size), + ir::HeapStyle::Static { + bound: Uimm64::new(u64::from(bound) * u64::from(WASM_PAGE_SIZE)), + }, + true, + ), + }; + + let heap_base = func.create_global_value(ir::GlobalValueData::Load { + base: memory, + offset: Offset32::new(base_offset), + global_type: pointer_type, + readonly: readonly_base, + }); + func.create_heap(ir::HeapData { + base: heap_base, + min_size: 0.into(), + offset_guard_size, + style: heap_style, + index_type: I32, + }) + } + + fn make_global(&mut self, func: &mut ir::Function, index: GlobalIndex) -> GlobalVariable { + let pointer_type = self.pointer_type(); + + let (global, def_index) = if let Some(def_index) = self.module.defined_global_index(index) { + let global = self.get_globals_base(func); + (global, def_index) + } else { + let imported_globals_base = self.get_imported_globals_base(func); + let from_offset = self.offsets.index_vmglobal_import_from(index); + let global = func.create_global_value(ir::GlobalValueData::Load { + base: imported_globals_base, + offset: Offset32::new(from_offset), + global_type: pointer_type, + readonly: true, + }); + (global, DefinedGlobalIndex::new(0)) + }; + let offset = self.offsets.index_vmglobal_definition(def_index); + + GlobalVariable::Memory { + gv: global, + offset: offset.into(), + ty: self.module.globals[index].ty, + } + } + + fn make_indirect_sig(&mut self, func: &mut ir::Function, index: SignatureIndex) -> ir::SigRef { + func.import_signature(self.module.signatures[index].clone()) + } + + fn make_direct_func(&mut self, func: &mut ir::Function, index: FuncIndex) -> ir::FuncRef { + let sigidx = self.module.functions[index]; + let signature = func.import_signature(self.module.signatures[sigidx].clone()); + let name = get_func_name(index); + func.import_function(ir::ExtFuncData { + name, + signature, + // We currently allocate all code segments independently, so nothing + // is colocated. + colocated: false, + }) + } + + fn translate_call_indirect( + &mut self, + mut pos: FuncCursor, + table_index: TableIndex, + table: ir::Table, + sig_index: SignatureIndex, + sig_ref: ir::SigRef, + callee: ir::Value, + call_args: &[ir::Value], + ) -> WasmResult { + let pointer_type = self.pointer_type(); + + let table_entry_addr = pos.ins().table_addr(pointer_type, table, callee, 0); + + // Dereference table_entry_addr to get the function address. + let mem_flags = ir::MemFlags::trusted(); + let func_addr = pos.ins().load( + pointer_type, + mem_flags, + table_entry_addr, + i32::from(self.offsets.vmcaller_checked_anyfunc_func_ptr()), + ); + + // If necessary, check the signature. 
+ match self.module.table_plans[table_index].style { + TableStyle::CallerChecksSignature => { + let sig_id_size = self.offsets.size_of_vmshared_signature_index(); + let sig_id_type = Type::int(u16::from(sig_id_size) * 8).unwrap(); + + let vmctx = self.vmctx(pos.func); + let signature_ids_base = self.signature_ids_base.unwrap_or_else(|| { + let new_base = pos.func.create_global_value(ir::GlobalValueData::Load { + base: vmctx, + offset: Offset32::new(i32::from(self.offsets.vmctx_signature_ids())), + global_type: pointer_type, + readonly: true, + }); + self.signature_ids_base = Some(new_base); + new_base + }); + let sig_ids = pos.ins().global_value(pointer_type, signature_ids_base); + + // Load the caller ID. + let mem_flags = ir::MemFlags::trusted(); + let caller_sig_id = pos.ins().load( + sig_id_type, + mem_flags, + sig_ids, + cast::i32( + sig_index + .as_u32() + .checked_mul(u32::from(sig_id_size)) + .unwrap(), + ) + .unwrap(), + ); + + // Load the callee ID. + let mem_flags = ir::MemFlags::trusted(); + let callee_sig_id = pos.ins().load( + sig_id_type, + mem_flags, + table_entry_addr, + i32::from(self.offsets.vmcaller_checked_anyfunc_type_index()), + ); + + // Check that they match. + let cmp = pos.ins().icmp(IntCC::Equal, callee_sig_id, caller_sig_id); + pos.ins().trapz(cmp, ir::TrapCode::BadSignature); + } + } + + let real_call_args = FuncEnvironment::get_real_call_args(pos.func, call_args); + Ok(pos.ins().call_indirect(sig_ref, func_addr, &real_call_args)) + } + + fn translate_call( + &mut self, + mut pos: FuncCursor, + callee_index: FuncIndex, + callee: ir::FuncRef, + call_args: &[ir::Value], + ) -> WasmResult { + let real_call_args = FuncEnvironment::get_real_call_args(pos.func, call_args); + + // Handle direct calls to locally-defined functions. + if !self.module.is_imported_function(callee_index) { + return Ok(pos.ins().call(callee, &real_call_args)); + } + + // Handle direct calls to imported functions. We use an indirect call + // so that we don't have to patch the code at runtime. 
+ let pointer_type = self.pointer_type(); + let sig_ref = pos.func.dfg.ext_funcs[callee].signature; + let imported_functions_base = self.get_imported_functions_base(&mut pos.func); + let base = pos + .ins() + .global_value(pointer_type, imported_functions_base); + let offset = self.offsets.index_vmfunction_body_import(callee_index); + let mem_flags = ir::MemFlags::trusted(); + let func_addr = pos.ins().load(pointer_type, mem_flags, base, offset); + Ok(pos.ins().call_indirect(sig_ref, func_addr, &real_call_args)) + } + + fn translate_memory_grow( + &mut self, + mut pos: FuncCursor, + index: MemoryIndex, + _heap: ir::Heap, + val: ir::Value, + ) -> WasmResult { + let (memory_grow_func, index_arg) = self.get_memory_grow_func(&mut pos.func, index); + let memory_index = pos.ins().iconst(I32, index_arg as i64); + let vmctx = pos.func.special_param(ArgumentPurpose::VMContext).unwrap(); + let call_inst = pos + .ins() + .call(memory_grow_func, &[val, memory_index, vmctx]); + Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap()) + } + + fn translate_memory_size( + &mut self, + mut pos: FuncCursor, + index: MemoryIndex, + _heap: ir::Heap, + ) -> WasmResult { + let (memory_size_func, index_arg) = self.get_memory_size_func(&mut pos.func, index); + let memory_index = pos.ins().iconst(I32, index_arg as i64); + let vmctx = pos.func.special_param(ArgumentPurpose::VMContext).unwrap(); + let call_inst = pos.ins().call(memory_size_func, &[memory_index, vmctx]); + Ok(*pos.func.dfg.inst_results(call_inst).first().unwrap()) + } +} diff --git a/lib/environ/src/lib.rs b/lib/environ/src/lib.rs index dc797f5c82..84d3ec9243 100644 --- a/lib/environ/src/lib.rs +++ b/lib/environ/src/lib.rs @@ -39,18 +39,19 @@ extern crate failure; extern crate failure_derive; mod compilation; -mod environ; +mod func_environ; mod module; +mod module_environ; mod tunables; mod vmoffsets; pub use compilation::{ compile_module, Compilation, CompileError, RelocSink, Relocation, RelocationTarget, Relocations, }; -pub use environ::{translate_signature, ModuleEnvironment, ModuleTranslation}; pub use module::{ DataInitializer, Export, MemoryPlan, MemoryStyle, Module, TableElements, TablePlan, TableStyle, }; +pub use module_environ::{translate_signature, ModuleEnvironment, ModuleTranslation}; pub use tunables::Tunables; pub use vmoffsets::VMOffsets; diff --git a/lib/environ/src/module.rs b/lib/environ/src/module.rs index 19c60bc2a7..d0d911006d 100644 --- a/lib/environ/src/module.rs +++ b/lib/environ/src/module.rs @@ -3,8 +3,8 @@ use cranelift_codegen::ir; use cranelift_entity::{EntityRef, PrimaryMap}; use cranelift_wasm::{ - DefinedFuncIndex, FuncIndex, Global, GlobalIndex, Memory, MemoryIndex, SignatureIndex, Table, - TableIndex, + DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, Global, + GlobalIndex, Memory, MemoryIndex, SignatureIndex, Table, TableIndex, }; use std::cmp; use std::collections::HashMap; @@ -142,12 +142,12 @@ pub struct Module { /// Names of imported tables. pub imported_tables: PrimaryMap, - /// Names of imported globals. - pub imported_globals: PrimaryMap, - /// Names of imported memories. pub imported_memories: PrimaryMap, + /// Names of imported globals. + pub imported_globals: PrimaryMap, + /// Types of functions, imported and local. 
pub functions: PrimaryMap, @@ -176,9 +176,9 @@ impl Module { Self { signatures: PrimaryMap::new(), imported_funcs: PrimaryMap::new(), + imported_tables: PrimaryMap::new(), imported_memories: PrimaryMap::new(), imported_globals: PrimaryMap::new(), - imported_tables: PrimaryMap::new(), functions: PrimaryMap::new(), table_plans: PrimaryMap::new(), memory_plans: PrimaryMap::new(), @@ -211,20 +211,71 @@ impl Module { index.index() < self.imported_funcs.len() } + /// Convert a `DefinedTableIndex` into a `TableIndex`. + pub fn table_index(&self, defined_table: DefinedTableIndex) -> TableIndex { + TableIndex::new(self.imported_tables.len() + defined_table.index()) + } + + /// Convert a `TableIndex` into a `DefinedTableIndex`. Returns None if the + /// index is an imported table. + pub fn defined_table_index(&self, table: TableIndex) -> Option { + if table.index() < self.imported_tables.len() { + None + } else { + Some(DefinedTableIndex::new( + table.index() - self.imported_tables.len(), + )) + } + } + /// Test whether the given table index is for an imported table. pub fn is_imported_table(&self, index: TableIndex) -> bool { index.index() < self.imported_tables.len() } - /// Test whether the given global index is for an imported global. - pub fn is_imported_global(&self, index: GlobalIndex) -> bool { - index.index() < self.imported_globals.len() + /// Convert a `DefinedMemoryIndex` into a `MemoryIndex`. + pub fn memory_index(&self, defined_memory: DefinedMemoryIndex) -> MemoryIndex { + MemoryIndex::new(self.imported_memories.len() + defined_memory.index()) + } + + /// Convert a `MemoryIndex` into a `DefinedMemoryIndex`. Returns None if the + /// index is an imported memory. + pub fn defined_memory_index(&self, memory: MemoryIndex) -> Option { + if memory.index() < self.imported_memories.len() { + None + } else { + Some(DefinedMemoryIndex::new( + memory.index() - self.imported_memories.len(), + )) + } } /// Test whether the given memory index is for an imported memory. pub fn is_imported_memory(&self, index: MemoryIndex) -> bool { index.index() < self.imported_memories.len() } + + /// Convert a `DefinedGlobalIndex` into a `GlobalIndex`. + pub fn global_index(&self, defined_global: DefinedGlobalIndex) -> GlobalIndex { + GlobalIndex::new(self.imported_globals.len() + defined_global.index()) + } + + /// Convert a `GlobalIndex` into a `DefinedGlobalIndex`. Returns None if the + /// index is an imported global. + pub fn defined_global_index(&self, global: GlobalIndex) -> Option { + if global.index() < self.imported_globals.len() { + None + } else { + Some(DefinedGlobalIndex::new( + global.index() - self.imported_globals.len(), + )) + } + } + + /// Test whether the given global index is for an imported global. + pub fn is_imported_global(&self, index: GlobalIndex) -> bool { + index.index() < self.imported_globals.len() + } } /// A data initializer for linear memory. 
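The index-conversion helpers added to `Module` above all encode the same layout: within each entity class, imported entities occupy the low indices and locally-defined entities follow, so converting between the combined index space and the defined-only index space is just adding or subtracting the import count. A minimal, self-contained sketch of that arithmetic follows; it is illustrative only and not part of the patch, and `MemoryIdx`, `DefinedMemoryIdx`, and `ModuleIndexSpaces` are hypothetical stand-ins for the `cranelift_entity` index types and the real `Module` fields.

/// Combined (imported + defined) memory index; a stand-in for `MemoryIndex`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct MemoryIdx(usize);

/// Index into the locally-defined memories only; a stand-in for `DefinedMemoryIndex`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct DefinedMemoryIdx(usize);

struct ModuleIndexSpaces {
    /// Imports always occupy the low indices of each entity index space.
    num_imported_memories: usize,
}

impl ModuleIndexSpaces {
    /// Combined index -> defined index: defined entities start right after the
    /// imports, so subtract the import count; imports themselves map to `None`.
    fn defined_memory_index(&self, memory: MemoryIdx) -> Option<DefinedMemoryIdx> {
        if memory.0 < self.num_imported_memories {
            None
        } else {
            Some(DefinedMemoryIdx(memory.0 - self.num_imported_memories))
        }
    }

    /// Defined index -> combined index: add the import count back.
    fn memory_index(&self, defined: DefinedMemoryIdx) -> MemoryIdx {
        MemoryIdx(self.num_imported_memories + defined.0)
    }
}

fn main() {
    // Two imported memories: combined indices 0 and 1 are imports, and index 2
    // is the module's first locally-defined memory.
    let m = ModuleIndexSpaces { num_imported_memories: 2 };
    assert_eq!(m.defined_memory_index(MemoryIdx(1)), None);
    assert_eq!(m.defined_memory_index(MemoryIdx(2)), Some(DefinedMemoryIdx(0)));
    assert_eq!(m.memory_index(DefinedMemoryIdx(0)), MemoryIdx(2));
}

The same subtraction shows up in `FuncEnvironment::get_memory_grow_func` and `get_memory_size_func` above, which pass a defined index to the local-memory libcalls but the original combined index to the imported-memory ones.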
diff --git a/lib/environ/src/module_environ.rs b/lib/environ/src/module_environ.rs new file mode 100644 index 0000000000..a5ec4230a3 --- /dev/null +++ b/lib/environ/src/module_environ.rs @@ -0,0 +1,261 @@ +use cranelift_codegen::ir; +use cranelift_codegen::ir::{AbiParam, ArgumentPurpose}; +use cranelift_codegen::isa; +use cranelift_wasm::{ + self, translate_module, FuncIndex, Global, GlobalIndex, Memory, MemoryIndex, SignatureIndex, + Table, TableIndex, WasmResult, +}; +use func_environ::FuncEnvironment; +use module::{DataInitializer, Export, LazyContents, MemoryPlan, Module, TableElements, TablePlan}; +use std::clone::Clone; +use std::string::String; +use std::vec::Vec; +use tunables::Tunables; + +/// Object containing the standalone environment information. To be passed after creation as +/// argument to `compile_module`. +pub struct ModuleEnvironment<'data, 'module> { + /// Compilation setting flags. + isa: &'module isa::TargetIsa, + + /// Module information. + module: &'module mut Module, + + /// References to information to be decoded later. + lazy: LazyContents<'data>, + + /// Tunable parameters. + tunables: Tunables, +} + +impl<'data, 'module> ModuleEnvironment<'data, 'module> { + /// Allocates the enironment data structures with the given isa. + pub fn new( + isa: &'module isa::TargetIsa, + module: &'module mut Module, + tunables: Tunables, + ) -> Self { + Self { + isa, + module, + lazy: LazyContents::new(), + tunables, + } + } + + fn pointer_type(&self) -> ir::Type { + self.isa.frontend_config().pointer_type() + } + + /// Translate the given wasm module data using this environment. This consumes the + /// `ModuleEnvironment` with its mutable reference to the `Module` and produces a + /// `ModuleTranslation` with an immutable reference to the `Module` (which has + /// become fully populated). + pub fn translate(mut self, data: &'data [u8]) -> WasmResult> { + translate_module(data, &mut self)?; + + Ok(ModuleTranslation { + isa: self.isa, + module: self.module, + lazy: self.lazy, + tunables: self.tunables, + }) + } +} + +/// This trait is useful for `translate_module` because it tells how to translate +/// enironment-dependent wasm instructions. These functions should not be called by the user. +impl<'data, 'module> cranelift_wasm::ModuleEnvironment<'data> + for ModuleEnvironment<'data, 'module> +{ + fn target_config(&self) -> isa::TargetFrontendConfig { + self.isa.frontend_config() + } + + fn declare_signature(&mut self, sig: &ir::Signature) { + let sig = translate_signature(sig.clone(), self.pointer_type()); + // TODO: Deduplicate signatures. 
+ self.module.signatures.push(sig); + } + + fn get_signature(&self, sig_index: SignatureIndex) -> &ir::Signature { + &self.module.signatures[sig_index] + } + + fn declare_func_import(&mut self, sig_index: SignatureIndex, module: &str, field: &str) { + debug_assert_eq!( + self.module.functions.len(), + self.module.imported_funcs.len(), + "Imported functions must be declared first" + ); + self.module.functions.push(sig_index); + + self.module + .imported_funcs + .push((String::from(module), String::from(field))); + } + + fn get_num_func_imports(&self) -> usize { + self.module.imported_funcs.len() + } + + fn declare_func_type(&mut self, sig_index: SignatureIndex) { + self.module.functions.push(sig_index); + } + + fn get_func_type(&self, func_index: FuncIndex) -> SignatureIndex { + self.module.functions[func_index] + } + + fn declare_global_import(&mut self, global: Global, module: &str, field: &str) { + debug_assert_eq!( + self.module.globals.len(), + self.module.imported_globals.len(), + "Imported globals must be declared first" + ); + self.module.globals.push(global); + + self.module + .imported_globals + .push((String::from(module), String::from(field))); + } + + fn declare_global(&mut self, global: Global) { + self.module.globals.push(global); + } + + fn get_global(&self, global_index: GlobalIndex) -> &Global { + &self.module.globals[global_index] + } + + fn declare_table_import(&mut self, table: Table, module: &str, field: &str) { + debug_assert_eq!( + self.module.table_plans.len(), + self.module.imported_tables.len(), + "Imported tables must be declared first" + ); + let plan = TablePlan::for_table(table, &self.tunables); + self.module.table_plans.push(plan); + + self.module + .imported_tables + .push((String::from(module), String::from(field))); + } + + fn declare_table(&mut self, table: Table) { + let plan = TablePlan::for_table(table, &self.tunables); + self.module.table_plans.push(plan); + } + + fn declare_table_elements( + &mut self, + table_index: TableIndex, + base: Option, + offset: usize, + elements: Vec, + ) { + self.module.table_elements.push(TableElements { + table_index, + base, + offset, + elements, + }); + } + + fn declare_memory_import(&mut self, memory: Memory, module: &str, field: &str) { + debug_assert_eq!( + self.module.memory_plans.len(), + self.module.imported_memories.len(), + "Imported memories must be declared first" + ); + let plan = MemoryPlan::for_memory(memory, &self.tunables); + self.module.memory_plans.push(plan); + + self.module + .imported_memories + .push((String::from(module), String::from(field))); + } + + fn declare_memory(&mut self, memory: Memory) { + let plan = MemoryPlan::for_memory(memory, &self.tunables); + self.module.memory_plans.push(plan); + } + + fn declare_data_initialization( + &mut self, + memory_index: MemoryIndex, + base: Option, + offset: usize, + data: &'data [u8], + ) { + self.lazy.data_initializers.push(DataInitializer { + memory_index, + base, + offset, + data, + }); + } + + fn declare_func_export(&mut self, func_index: FuncIndex, name: &str) { + self.module + .exports + .insert(String::from(name), Export::Function(func_index)); + } + + fn declare_table_export(&mut self, table_index: TableIndex, name: &str) { + self.module + .exports + .insert(String::from(name), Export::Table(table_index)); + } + + fn declare_memory_export(&mut self, memory_index: MemoryIndex, name: &str) { + self.module + .exports + .insert(String::from(name), Export::Memory(memory_index)); + } + + fn declare_global_export(&mut self, global_index: GlobalIndex, 
name: &str) { + self.module + .exports + .insert(String::from(name), Export::Global(global_index)); + } + + fn declare_start_func(&mut self, func_index: FuncIndex) { + debug_assert!(self.module.start_func.is_none()); + self.module.start_func = Some(func_index); + } + + fn define_function_body(&mut self, body_bytes: &'data [u8]) -> WasmResult<()> { + self.lazy.function_body_inputs.push(body_bytes); + Ok(()) + } +} + +/// The result of translating via `ModuleEnvironment`. +pub struct ModuleTranslation<'data, 'module> { + /// Compilation setting flags. + pub isa: &'module isa::TargetIsa, + + /// Module information. + pub module: &'module Module, + + /// Pointers into the raw data buffer. + pub lazy: LazyContents<'data>, + + /// Tunable parameters. + pub tunables: Tunables, +} + +impl<'data, 'module> ModuleTranslation<'data, 'module> { + /// Return a new `FuncEnvironment` for translating a function. + pub fn func_env(&self) -> FuncEnvironment { + FuncEnvironment::new(self.isa, &self.module) + } +} + +/// Add environment-specific function parameters. +pub fn translate_signature(mut sig: ir::Signature, pointer_type: ir::Type) -> ir::Signature { + sig.params + .push(AbiParam::special(pointer_type, ArgumentPurpose::VMContext)); + sig +} diff --git a/lib/environ/src/vmoffsets.rs b/lib/environ/src/vmoffsets.rs index 18c0e644ab..f80428d33a 100644 --- a/lib/environ/src/vmoffsets.rs +++ b/lib/environ/src/vmoffsets.rs @@ -1,6 +1,12 @@ //! Offsets and sizes of various structs in wasmtime-execute's vmcontext //! module. +use cranelift_codegen::ir; +use cranelift_wasm::{ + DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, GlobalIndex, MemoryIndex, + TableIndex, +}; + /// This class computes offsets to fields within `VMContext` and other /// related structs that JIT code accesses directly. pub struct VMOffsets { @@ -14,7 +20,86 @@ impl VMOffsets { } } -/// Offsets for `wasmtime_execute::VMMemoryDefinition`. +/// Offsets for `*const VMFunctionBody`. +impl VMOffsets { + /// The size of the `current_elements` field. + #[allow(clippy::identity_op)] + pub fn size_of_vmfunction_body_ptr(&self) -> u8 { + 1 * self.pointer_size + } +} + +/// Offsets for `VMTableImport`. +impl VMOffsets { + /// The offset of the `from` field. + #[allow(clippy::erasing_op)] + pub fn vmtable_import_from(&self) -> u8 { + 0 * self.pointer_size + } + + /// The offset of the `vmctx` field. + #[allow(clippy::identity_op)] + pub fn vmtable_import_vmctx(&self) -> u8 { + 1 * self.pointer_size + } + + /// Return the size of `VMTableImport`. + pub fn size_of_vmtable_import(&self) -> u8 { + 2 * self.pointer_size + } +} + +/// Offsets for `VMTableDefinition`. +impl VMOffsets { + /// The offset of the `base` field. + #[allow(clippy::erasing_op)] + pub fn vmtable_definition_base(&self) -> u8 { + 0 * self.pointer_size + } + + /// The offset of the `current_elements` field. + #[allow(clippy::identity_op)] + pub fn vmtable_definition_current_elements(&self) -> u8 { + 1 * self.pointer_size + } + + /// The size of the `current_elements` field. + pub fn size_of_vmtable_definition_current_elements(&self) -> u8 { + 4 + } + + /// Return the size of `VMTableDefinition`. + pub fn size_of_vmtable_definition(&self) -> u8 { + 2 * self.pointer_size + } + + /// The type of the `current_elements` field. + pub fn type_of_vmtable_definition_current_elements(&self) -> ir::Type { + ir::Type::int(u16::from(self.size_of_vmtable_definition_current_elements()) * 8).unwrap() + } +} + +/// Offsets for `VMMemoryImport`. 
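Stepping back to the `translate_signature` helper added above: it is how every wasm signature picks up the trailing VMContext pointer that the VMOffsets accessors in this file are designed around. A minimal sketch of its effect, assuming a 64-bit target; the concrete value types and calling convention here are only for illustration:

use cranelift_codegen::ir::{types, AbiParam, ArgumentPurpose, Signature};
use cranelift_codegen::isa::CallConv;

fn translate_signature_example() {
    // A wasm signature (i32, i32) -> i32 ...
    let mut sig = Signature::new(CallConv::SystemV);
    sig.params.push(AbiParam::new(types::I32));
    sig.params.push(AbiParam::new(types::I32));
    sig.returns.push(AbiParam::new(types::I32));

    // ... gains a trailing, specially-marked VMContext pointer parameter,
    // which JIT code uses to reach the imported and defined tables,
    // memories and globals laid out by the offsets in this file.
    sig.params
        .push(AbiParam::special(types::I64, ArgumentPurpose::VMContext));
    assert_eq!(sig.params.len(), 3);
}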
+impl VMOffsets { + /// The offset of the `from` field. + #[allow(clippy::erasing_op)] + pub fn vmmemory_import_from(&self) -> u8 { + 0 * self.pointer_size + } + + /// The offset of the `vmctx` field. + #[allow(clippy::identity_op)] + pub fn vmmemory_import_vmctx(&self) -> u8 { + 1 * self.pointer_size + } + + /// Return the size of `VMMemoryImport`. + pub fn size_of_vmmemory_import(&self) -> u8 { + 2 * self.pointer_size + } +} + +/// Offsets for `VMMemoryDefinition`. impl VMOffsets { /// The offset of the `base` field. #[allow(clippy::erasing_op)] @@ -28,44 +113,23 @@ impl VMOffsets { 1 * self.pointer_size } + /// The size of the `current_length` field. + pub fn size_of_vmmemory_definition_current_length(&self) -> u8 { + 4 + } + /// Return the size of `VMMemoryDefinition`. pub fn size_of_vmmemory_definition(&self) -> u8 { 2 * self.pointer_size } -} -/// Offsets for `wasmtime_execute::VMMemoryImport`. -impl VMOffsets { - /// The offset of the `from` field. - #[allow(clippy::erasing_op)] - pub fn vmmemory_import_from(&self) -> u8 { - 0 * self.pointer_size - } - - /// Return the size of `VMMemoryImport`. - #[allow(clippy::identity_op)] - pub fn size_of_vmmemory_import(&self) -> u8 { - 1 * self.pointer_size + /// The type of the `current_length` field. + pub fn type_of_vmmemory_definition_current_length(&self) -> ir::Type { + ir::Type::int(u16::from(self.size_of_vmmemory_definition_current_length()) * 8).unwrap() } } -/// Offsets for `wasmtime_execute::VMMemory`. -impl VMOffsets { - /// Return the size of `VMMemory`. - pub fn size_of_vmmemory(&self) -> u8 { - 2 * self.pointer_size - } -} - -/// Offsets for `wasmtime_execute::VMGlobalDefinition`. -impl VMOffsets { - /// Return the size of `VMGlobalDefinition`. - pub fn size_of_vmglobal_definition(&self) -> u8 { - 8 - } -} - -/// Offsets for `wasmtime_execute::VMGlobalImport`. +/// Offsets for `VMGlobalImport`. impl VMOffsets { /// The offset of the `from` field. #[allow(clippy::erasing_op)] @@ -80,67 +144,23 @@ impl VMOffsets { } } -/// Offsets for `wasmtime_execute::VMGlobal`. +/// Offsets for `VMGlobalDefinition`. impl VMOffsets { - /// Return the size of `VMGlobal`. - pub fn size_of_vmglobal(&self) -> u8 { - assert!(self.size_of_vmglobal_import() <= self.size_of_vmglobal_definition()); - self.size_of_vmglobal_definition() + /// Return the size of `VMGlobalDefinition`. + pub fn size_of_vmglobal_definition(&self) -> u8 { + 8 } } -/// Offsets for `wasmtime_execute::VMTableDefinition`. +/// Offsets for `VMSharedSignatureIndex`. impl VMOffsets { - /// The offset of the `base` field. - #[allow(clippy::erasing_op)] - pub fn vmtable_definition_base(&self) -> u8 { - 0 * self.pointer_size - } - - /// The offset of the `current_elements` field. - #[allow(clippy::identity_op)] - pub fn vmtable_definition_current_elements(&self) -> u8 { - 1 * self.pointer_size - } - - /// Return the size of `VMTableDefinition`. - pub fn size_of_vmtable_definition(&self) -> u8 { - 2 * self.pointer_size - } -} - -/// Offsets for `wasmtime_execute::VMTableImport`. -impl VMOffsets { - /// The offset of the `from` field. - #[allow(clippy::erasing_op)] - pub fn vmtable_import_from(&self) -> u8 { - 0 * self.pointer_size - } - - /// Return the size of `VMTableImport`. - #[allow(clippy::identity_op)] - pub fn size_of_vmtable_import(&self) -> u8 { - 1 * self.pointer_size - } -} - -/// Offsets for `wasmtime_execute::VMTable`. -impl VMOffsets { - /// Return the size of `VMTable`. 
- pub fn size_of_vmtable(&self) -> u8 { - 2 * self.pointer_size - } -} - -/// Offsets for `wasmtime_execute::VMSignatureId`. -impl VMOffsets { - /// Return the size of `VMSignatureId`. - pub fn size_of_vmsignature_id(&self) -> u8 { + /// Return the size of `VMSharedSignatureIndex`. + pub fn size_of_vmshared_signature_index(&self) -> u8 { 4 } } -/// Offsets for `wasmtime_execute::VMCallerCheckedAnyfunc`. +/// Offsets for `VMCallerCheckedAnyfunc`. impl VMOffsets { /// The offset of the `func_ptr` field. #[allow(clippy::erasing_op)] @@ -148,131 +168,208 @@ impl VMOffsets { 0 * self.pointer_size } - /// The offset of the `type_id` field. + /// The offset of the `type_index` field. #[allow(clippy::identity_op)] - pub fn vmcaller_checked_anyfunc_type_id(&self) -> u8 { + pub fn vmcaller_checked_anyfunc_type_index(&self) -> u8 { 1 * self.pointer_size } - /// Return the size of `VMTable`. + /// Return the size of `VMCallerCheckedAnyfunc`. pub fn size_of_vmcaller_checked_anyfunc(&self) -> u8 { 2 * self.pointer_size } } -/// Offsets for `wasmtime_execute::VMContext`. +/// Offsets for `VMContext`. impl VMOffsets { - /// The offset of the `memories` field. + /// The offset of the `tables` field. #[allow(clippy::erasing_op)] - pub fn vmctx_memories(&self) -> u8 { + pub fn vmctx_imported_functions(&self) -> u8 { 0 * self.pointer_size } - /// The offset of the `globals` field. + /// The offset of the `tables` field. #[allow(clippy::identity_op)] - pub fn vmctx_globals(&self) -> u8 { + pub fn vmctx_imported_tables(&self) -> u8 { 1 * self.pointer_size } + /// The offset of the `memories` field. + pub fn vmctx_imported_memories(&self) -> u8 { + 2 * self.pointer_size + } + + /// The offset of the `globals` field. + pub fn vmctx_imported_globals(&self) -> u8 { + 3 * self.pointer_size + } + /// The offset of the `tables` field. pub fn vmctx_tables(&self) -> u8 { - 2 * self.pointer_size + 4 * self.pointer_size + } + + /// The offset of the `memories` field. + pub fn vmctx_memories(&self) -> u8 { + 5 * self.pointer_size + } + + /// The offset of the `globals` field. + pub fn vmctx_globals(&self) -> u8 { + 6 * self.pointer_size } /// The offset of the `signature_ids` field. pub fn vmctx_signature_ids(&self) -> u8 { - 3 * self.pointer_size + 7 * self.pointer_size } /// Return the size of `VMContext`. #[allow(dead_code)] pub fn size_of_vmctx(&self) -> u8 { - 4 * self.pointer_size + 8 * self.pointer_size } - /// Return the offset from the `memories` pointer to `VMMemory` index `index`. - pub fn index_vmmemory(&self, index: u32) -> i32 { + /// Return the offset from the `imported_tables` pointer to `VMTableImport` index `index`. + fn index_vmtable_import(&self, index: TableIndex) -> i32 { cast::i32( index - .checked_mul(u32::from(self.size_of_vmmemory())) + .as_u32() + .checked_mul(u32::from(self.size_of_vmtable_import())) .unwrap(), ) .unwrap() } - /// Return the offset from the `globals` pointer to `VMGlobal` index `index`. - pub fn index_vmglobal(&self, index: u32) -> i32 { + /// Return the offset from the `tables` pointer to `VMTableDefinition` index `index`. + fn index_vmtable_definition(&self, index: DefinedTableIndex) -> i32 { cast::i32( index - .checked_mul(u32::from(self.size_of_vmglobal())) + .as_u32() + .checked_mul(u32::from(self.size_of_vmtable_definition())) .unwrap(), ) .unwrap() } - /// Return the offset from the `tables` pointer to `VMTable` index `index`. - pub fn index_vmtable(&self, index: u32) -> i32 { + /// Return the offset from the `imported_memories` pointer to `VMMemoryImport` index `index`. 
+ fn index_vmmemory_import(&self, index: MemoryIndex) -> i32 { cast::i32( index - .checked_mul(u32::from(self.size_of_vmtable())) + .as_u32() + .checked_mul(u32::from(self.size_of_vmmemory_import())) .unwrap(), ) .unwrap() } + /// Return the offset from the `memories` pointer to `VMMemoryDefinition` index `index`. + fn index_vmmemory_definition(&self, index: DefinedMemoryIndex) -> i32 { + cast::i32( + index + .as_u32() + .checked_mul(u32::from(self.size_of_vmmemory_definition())) + .unwrap(), + ) + .unwrap() + } + + /// Return the offset from the `imported_globals` pointer to `VMGlobalImport` index `index`. + fn index_vmglobal_import(&self, index: GlobalIndex) -> i32 { + cast::i32( + index + .as_u32() + .checked_mul(u32::from(self.size_of_vmglobal_import())) + .unwrap(), + ) + .unwrap() + } + + /// Return the offset from the `imported_functions` pointer to the + /// `*const VMFunctionBody` index `index`. + pub fn index_vmfunction_body_import(&self, index: FuncIndex) -> i32 { + cast::i32( + index + .as_u32() + .checked_mul(u32::from(self.size_of_vmfunction_body_ptr())) + .unwrap(), + ) + .unwrap() + } + + /// Return the offset from the `tables` pointer to the `from` field in + /// `VMTableImport` index `index`. + pub fn index_vmtable_import_from(&self, index: TableIndex) -> i32 { + self.index_vmtable_import(index) + .checked_add(i32::from(self.vmtable_import_from())) + .unwrap() + } + + /// Return the offset from the `tables` pointer to the `base` field in + /// `VMTableDefinition` index `index`. + pub fn index_vmtable_definition_base(&self, index: DefinedTableIndex) -> i32 { + self.index_vmtable_definition(index) + .checked_add(i32::from(self.vmtable_definition_base())) + .unwrap() + } + + /// Return the offset from the `tables` pointer to the `current_elements` field in + /// `VMTableDefinition` index `index`. + pub fn index_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> i32 { + self.index_vmtable_definition(index) + .checked_add(i32::from(self.vmtable_definition_current_elements())) + .unwrap() + } + + /// Return the offset from the `memories` pointer to the `from` field in + /// `VMMemoryImport` index `index`. + pub fn index_vmmemory_import_from(&self, index: MemoryIndex) -> i32 { + self.index_vmmemory_import(index) + .checked_add(i32::from(self.vmmemory_import_from())) + .unwrap() + } + + /// Return the offset from the `memories` pointer to the `vmctx` field in + /// `VMMemoryImport` index `index`. + pub fn index_vmmemory_import_vmctx(&self, index: MemoryIndex) -> i32 { + self.index_vmmemory_import(index) + .checked_add(i32::from(self.vmmemory_import_vmctx())) + .unwrap() + } + /// Return the offset from the `memories` pointer to the `base` field in - /// `VMMemory` index `index`. - pub fn index_vmmemory_definition_base(&self, index: u32) -> i32 { - self.index_vmmemory(index) + /// `VMMemoryDefinition` index `index`. + pub fn index_vmmemory_definition_base(&self, index: DefinedMemoryIndex) -> i32 { + self.index_vmmemory_definition(index) .checked_add(i32::from(self.vmmemory_definition_base())) .unwrap() } /// Return the offset from the `memories` pointer to the `current_length` field in /// `VMMemoryDefinition` index `index`. 
- pub fn index_vmmemory_definition_current_length(&self, index: u32) -> i32 { - self.index_vmmemory(index) + pub fn index_vmmemory_definition_current_length(&self, index: DefinedMemoryIndex) -> i32 { + self.index_vmmemory_definition(index) .checked_add(i32::from(self.vmmemory_definition_current_length())) .unwrap() } - /// Return the offset from the `memories` pointer to the `from` field in - /// `VMMemoryImport` index `index`. - pub fn index_vmmemory_import_from(&self, index: u32) -> i32 { - self.index_vmmemory(index) - .checked_add(i32::from(self.vmmemory_import_from())) - .unwrap() - } - - /// Return the offset from the `globals` pointer to the `from` field in - /// `VMGlobal` index `index`. - pub fn index_vmglobal_import_from(&self, index: u32) -> i32 { - self.index_vmglobal(index) + /// Return the offset from the `imported_globals` pointer to the `from` field in + /// `VMGlobalImport` index `index`. + pub fn index_vmglobal_import_from(&self, index: GlobalIndex) -> i32 { + self.index_vmglobal_import(index) .checked_add(i32::from(self.vmglobal_import_from())) .unwrap() } - /// Return the offset from the `tables` pointer to the `base` field in - /// `VMTable` index `index`. - pub fn index_vmtable_definition_base(&self, index: u32) -> i32 { - self.index_vmtable(index) - .checked_add(i32::from(self.vmtable_definition_base())) - .unwrap() - } - - /// Return the offset from the `tables` pointer to the `current_elements` field in - /// `VMTable` index `index`. - pub fn index_vmtable_definition_current_elements(&self, index: u32) -> i32 { - self.index_vmtable(index) - .checked_add(i32::from(self.vmtable_definition_current_elements())) - .unwrap() - } - - /// Return the offset from the `tables` pointer to the `from` field in - /// `VMTableImport` index `index`. - pub fn index_vmtable_import_from(&self, index: u32) -> i32 { - self.index_vmtable(index) - .checked_add(i32::from(self.vmtable_import_from())) - .unwrap() + /// Return the offset from the `globals` pointer to the `VMGlobalDefinition` + /// index `index`. + pub fn index_vmglobal_definition(&self, index: DefinedGlobalIndex) -> i32 { + cast::i32( + index + .as_u32() + .checked_mul(u32::from(self.size_of_vmglobal_definition())) + .unwrap(), + ) + .unwrap() } } diff --git a/lib/execute/Cargo.toml b/lib/execute/Cargo.toml index ef9c9de2af..04ccc51617 100644 --- a/lib/execute/Cargo.toml +++ b/lib/execute/Cargo.toml @@ -15,14 +15,10 @@ cranelift-entity = { git = "https://github.com/sunfishcode/cranelift.git", branc cranelift-wasm = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } cranelift-frontend = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } wasmtime-environ = { path = "../environ" } +wasmtime-runtime = { path = "../runtime" } region = "1.0.0" -lazy_static = "1.2.0" -libc = { version = "0.2.44", default-features = false } -errno = "0.2.4" -memoffset = "0.2.1" -cast = { version = "0.2.2", default-features = false } -failure = "0.1.3" -failure_derive = "0.1.3" +failure = { version = "0.1.3", default-features = false } +failure_derive = { version = "0.1.3", default-features = false } [build-dependencies] cmake = "0.1.35" diff --git a/lib/execute/src/code.rs b/lib/execute/src/code.rs index 26e003db8d..6461474cf3 100644 --- a/lib/execute/src/code.rs +++ b/lib/execute/src/code.rs @@ -1,12 +1,10 @@ //! Memory management for executable code. 
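To make the reworked offset arithmetic in vmoffsets.rs above concrete, here is a rough worked example of the VMContext layout the new accessors describe, assuming an 8-byte pointer; the concrete numbers are only this illustration:

// Illustrative only: mirrors the arithmetic of the new VMOffsets accessors
// for a 64-bit target (pointer_size = 8).
fn vmctx_layout_example() {
    let pointer_size: u32 = 8;

    // VMContext now begins with the four import arrays, followed by the
    // definitions of locally-defined tables, memories and globals, then the
    // signature ids: imported_functions @ 0, imported_tables @ 8,
    // imported_memories @ 16, imported_globals @ 24, tables @ 32,
    // memories @ 40, globals @ 48, signature_ids @ 56.
    assert_eq!(8 * pointer_size, 64); // size_of_vmctx

    // Within `memories`, a VMMemoryDefinition is (base, current_length),
    // i.e. two pointer-sized fields, so the `current_length` field of
    // defined memory index 2 lives at
    // index * size_of_vmmemory_definition + pointer_size.
    let index = 2;
    let current_length_offset = index * (2 * pointer_size) + pointer_size;
    assert_eq!(current_length_offset, 40);
}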
-use mmap::Mmap; use region; -use std::cmp; -use std::mem; use std::string::String; use std::vec::Vec; -use vmcontext::VMFunctionBody; +use std::{cmp, mem}; +use wasmtime_runtime::{Mmap, VMFunctionBody}; /// Memory manager for executable code. pub struct Code { @@ -47,7 +45,7 @@ impl Code { } /// Convert mut a slice from u8 to VMFunctionBody. - fn as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] { + fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] { let byte_ptr: *mut [u8] = slice; let body_ptr = byte_ptr as *mut [VMFunctionBody]; unsafe { &mut *body_ptr } @@ -62,7 +60,7 @@ impl Code { ) -> Result<&mut [VMFunctionBody], String> { let new = self.allocate(slice.len())?; new.copy_from_slice(slice); - Ok(Self::as_mut_vmfunc_slice(new)) + Ok(Self::view_as_mut_vmfunc_slice(new)) } /// Make all allocated memory executable. diff --git a/lib/execute/src/export.rs b/lib/execute/src/export.rs index 0639e70251..d1e1f4e78f 100644 --- a/lib/execute/src/export.rs +++ b/lib/execute/src/export.rs @@ -1,22 +1,29 @@ use cranelift_codegen::ir; use cranelift_wasm::Global; -use vmcontext::{VMFunctionBody, VMGlobal, VMMemory, VMTable}; use wasmtime_environ::{MemoryPlan, TablePlan}; +use wasmtime_runtime::{ + VMContext, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition, VMTableDefinition, +}; + +/// An exported function. +pub struct FunctionExport { + /// The address of the native-code function. + pub address: *const VMFunctionBody, + /// The function signature declaration, used for compatibilty checking. + pub signature: ir::Signature, +} /// The value of an export passed from one instance to another. -pub enum ExportValue { +pub enum Export { /// A function export value. - Function { - /// The address of the native-code function. - address: *const VMFunctionBody, - /// The function signature declaration, used for compatibilty checking. - signature: ir::Signature, - }, + Function(FunctionExport), /// A table export value. Table { /// The address of the table descriptor. - address: *mut VMTable, + address: *mut VMTableDefinition, + /// Pointer to the containing VMContext. + vmctx: *mut VMContext, /// The table declaration, used for compatibilty checking. table: TablePlan, }, @@ -24,7 +31,9 @@ pub enum ExportValue { /// A memory export value. Memory { /// The address of the memory descriptor. - address: *mut VMMemory, + address: *mut VMMemoryDefinition, + /// Pointer to the containing VMContext. + vmctx: *mut VMContext, /// The memory declaration, used for compatibilty checking. memory: MemoryPlan, }, @@ -32,45 +41,57 @@ pub enum ExportValue { /// A global export value. Global { /// The address of the global storage. - address: *mut VMGlobal, + address: *mut VMGlobalDefinition, /// The global declaration, used for compatibilty checking. global: Global, }, } -impl ExportValue { +impl Export { /// Construct a function export value. pub fn function(address: *const VMFunctionBody, signature: ir::Signature) -> Self { - ExportValue::Function { address, signature } + Export::Function(FunctionExport { address, signature }) } /// Construct a table export value. - pub fn table(address: *mut VMTable, table: TablePlan) -> Self { - ExportValue::Table { address, table } + pub fn table(address: *mut VMTableDefinition, vmctx: *mut VMContext, table: TablePlan) -> Self { + Export::Table { + address, + vmctx, + table, + } } /// Construct a memory export value. 
- pub fn memory(address: *mut VMMemory, memory: MemoryPlan) -> Self { - ExportValue::Memory { address, memory } + pub fn memory( + address: *mut VMMemoryDefinition, + vmctx: *mut VMContext, + memory: MemoryPlan, + ) -> Self { + Export::Memory { + address, + vmctx, + memory, + } } /// Construct a global export value. - pub fn global(address: *mut VMGlobal, global: Global) -> Self { - ExportValue::Global { address, global } + pub fn global(address: *mut VMGlobalDefinition, global: Global) -> Self { + Export::Global { address, global } } } /// Import resolver connects imports with available exported values. pub trait Resolver { /// Resolve the given module/field combo. - fn resolve(&mut self, module: &str, field: &str) -> Option; + fn resolve(&mut self, module: &str, field: &str) -> Option; } /// `Resolver` implementation that always resolves to `None`. pub struct NullResolver {} impl Resolver for NullResolver { - fn resolve(&mut self, _module: &str, _field: &str) -> Option { + fn resolve(&mut self, _module: &str, _field: &str) -> Option { None } } diff --git a/lib/execute/src/get.rs b/lib/execute/src/get.rs deleted file mode 100644 index 0626321d65..0000000000 --- a/lib/execute/src/get.rs +++ /dev/null @@ -1,65 +0,0 @@ -//! Support for reading the value of a wasm global from outside the module. - -use action::{ActionError, RuntimeValue}; -use cranelift_codegen::ir; -use cranelift_entity::EntityRef; -use cranelift_wasm::GlobalIndex; -use instance::Instance; -use wasmtime_environ::{Export, Module}; - -/// Reads the value of the named global variable in `module`. -pub fn get( - module: &Module, - instance: &mut Instance, - global_name: &str, -) -> Result { - let global_index = match module.exports.get(global_name) { - Some(Export::Global(index)) => *index, - Some(_) => { - return Err(ActionError::Kind(format!( - "exported item \"{}\" is not a global", - global_name - ))) - } - None => { - return Err(ActionError::Field(format!( - "no export named \"{}\"", - global_name - ))) - } - }; - - get_by_index(module, instance, global_index) -} - -/// Reads the value of the indexed global variable in `module`. -pub fn get_by_index( - module: &Module, - instance: &mut Instance, - global_index: GlobalIndex, -) -> Result { - unsafe { - let vmctx = &mut *instance.vmctx(); - let vmglobal = vmctx.global(global_index); - let definition = vmglobal.get_definition(module.is_imported_global(global_index)); - Ok( - match module - .globals - .get(global_index) - .ok_or_else(|| ActionError::Index(global_index.index() as u64))? - .ty - { - ir::types::I32 => RuntimeValue::I32(*definition.as_i32()), - ir::types::I64 => RuntimeValue::I64(*definition.as_i64()), - ir::types::F32 => RuntimeValue::F32(*definition.as_f32_bits()), - ir::types::F64 => RuntimeValue::F64(*definition.as_f64_bits()), - other => { - return Err(ActionError::Type(format!( - "global with type {} not supported", - other - ))) - } - }, - ) - } -} diff --git a/lib/execute/src/imports.rs b/lib/execute/src/imports.rs deleted file mode 100644 index 8fd06f63b5..0000000000 --- a/lib/execute/src/imports.rs +++ /dev/null @@ -1,30 +0,0 @@ -use cranelift_entity::PrimaryMap; -use cranelift_wasm::{FuncIndex, GlobalIndex, MemoryIndex, TableIndex}; -use vmcontext::{VMFunctionBody, VMGlobal, VMMemory, VMTable}; - -/// Resolved import pointers. -#[derive(Debug)] -pub struct Imports { - /// Resolved addresses for imported functions. - pub functions: PrimaryMap, - - /// Resolved addresses for imported tables. 
- pub tables: PrimaryMap, - - /// Resolved addresses for imported globals. - pub globals: PrimaryMap, - - /// Resolved addresses for imported memories. - pub memories: PrimaryMap, -} - -impl Imports { - pub fn new() -> Self { - Self { - functions: PrimaryMap::new(), - tables: PrimaryMap::new(), - globals: PrimaryMap::new(), - memories: PrimaryMap::new(), - } - } -} diff --git a/lib/execute/src/instance.rs b/lib/execute/src/instance.rs deleted file mode 100644 index bd9ed33dc7..0000000000 --- a/lib/execute/src/instance.rs +++ /dev/null @@ -1,235 +0,0 @@ -//! An `Instance` contains all the runtime state used by execution of a wasm -//! module. - -use cranelift_entity::EntityRef; -use cranelift_entity::PrimaryMap; -use cranelift_wasm::{DefinedFuncIndex, FuncIndex, GlobalIndex, MemoryIndex, TableIndex}; -use imports::Imports; -use memory::LinearMemory; -use sig_registry::SignatureRegistry; -use std::ptr; -use std::slice; -use std::string::String; -use table::Table; -use vmcontext::{VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMGlobal, VMMemory, VMTable}; -use wasmtime_environ::{DataInitializer, Module}; - -/// An Instance of a WebAssemby module. -#[derive(Debug)] -pub struct Instance { - /// WebAssembly linear memory data. - memories: PrimaryMap, - - /// WebAssembly table data. - tables: PrimaryMap, - - /// Function Signature IDs. - /// FIXME: This should be shared across instances rather than per-Instance. - sig_registry: SignatureRegistry, - - /// Memory base address vector pointed to by vmctx. - vmctx_memories: PrimaryMap, - - /// WebAssembly global variable data. - vmctx_globals: PrimaryMap, - - /// Table storage base address vector pointed to by vmctx. - vmctx_tables: PrimaryMap, - - /// Pointer values for resolved imports. - imports: Imports, - - /// Pointers to functions in executable memory. - allocated_functions: PrimaryMap, - - /// Context pointer used by JIT code. - vmctx: VMContext, -} - -impl Instance { - /// Create a new `Instance`. In order to complete instantiation, call - /// `invoke_start_function`. `allocated_functions` holds the function bodies - /// which have been placed in executable memory. - pub fn new( - module: &Module, - allocated_functions: PrimaryMap, - data_initializers: &[DataInitializer], - imports: Imports, - ) -> Result { - let mut sig_registry = instantiate_signatures(module); - let mut memories = instantiate_memories(module, data_initializers)?; - let mut tables = instantiate_tables(module, &allocated_functions, &mut sig_registry); - - let mut vmctx_memories = memories - .values_mut() - .map(LinearMemory::vmmemory) - .collect::>(); - - let mut vmctx_globals = instantiate_globals(module); - - let mut vmctx_tables = tables - .values_mut() - .map(Table::vmtable) - .collect::>(); - - let vmctx_memories_ptr = vmctx_memories.values_mut().into_slice().as_mut_ptr(); - let vmctx_globals_ptr = vmctx_globals.values_mut().into_slice().as_mut_ptr(); - let vmctx_tables_ptr = vmctx_tables.values_mut().into_slice().as_mut_ptr(); - let signature_ids_ptr = sig_registry.vmsignature_ids(); - - Ok(Self { - memories, - tables, - sig_registry, - vmctx_memories, - vmctx_globals, - vmctx_tables, - imports, - allocated_functions, - vmctx: VMContext::new( - vmctx_memories_ptr, - vmctx_globals_ptr, - vmctx_tables_ptr, - signature_ids_ptr, - ), - }) - } - - /// Return the vmctx pointer to be passed into JIT code. - pub fn vmctx(&mut self) -> &mut VMContext { - &mut self.vmctx - } - - /// Return the offset from the vmctx pointer to its containing Instance. 
- pub(crate) fn vmctx_offset() -> isize { - offset_of!(Self, vmctx) as isize - } - - /// Return the pointer to executable memory for the given function index. - pub(crate) fn get_allocated_function( - &self, - index: DefinedFuncIndex, - ) -> Option<&[VMFunctionBody]> { - self.allocated_functions - .get(index) - .map(|(ptr, len)| unsafe { slice::from_raw_parts(*ptr, *len) }) - } - - /// Return the pointer to executable memory for the given function index. - pub(crate) fn get_imported_function(&self, index: FuncIndex) -> Option<*const VMFunctionBody> { - self.imports.functions.get(index).cloned() - } - - /// Grow memory by the specified amount of pages. - /// - /// Returns `None` if memory can't be grown by the specified amount - /// of pages. - pub fn memory_grow(&mut self, memory_index: MemoryIndex, delta: u32) -> Option { - let result = self - .memories - .get_mut(memory_index) - .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) - .grow(delta); - - // Keep current the VMContext pointers used by JIT code. - self.vmctx_memories[memory_index] = self.memories[memory_index].vmmemory(); - - result - } - - /// Returns the number of allocated wasm pages. - pub fn memory_size(&mut self, memory_index: MemoryIndex) -> u32 { - self.memories - .get(memory_index) - .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) - .size() - } - - /// Returns a slice of the contents of allocated linear memory. - pub fn inspect_memory(&self, memory_index: MemoryIndex, address: usize, len: usize) -> &[u8] { - &self - .memories - .get(memory_index) - .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) - .as_ref()[address..address + len] - } - - /// Shows the value of a global variable. - pub fn inspect_global(&self, global_index: GlobalIndex) -> &VMGlobal { - &self.vmctx_globals[global_index] - } -} - -fn instantiate_signatures(module: &Module) -> SignatureRegistry { - let mut sig_registry = SignatureRegistry::new(); - for (sig_index, sig) in module.signatures.iter() { - sig_registry.register(sig_index, sig); - } - sig_registry -} - -/// Allocate memory for just the memories of the current module. -fn instantiate_memories( - module: &Module, - data_initializers: &[DataInitializer], -) -> Result, String> { - let mut memories = PrimaryMap::with_capacity(module.memory_plans.len()); - for plan in module.memory_plans.values() { - memories.push(LinearMemory::new(&plan)?); - } - - for init in data_initializers { - debug_assert!(init.base.is_none(), "globalvar base not supported yet"); - let mem_mut = memories[init.memory_index].as_mut(); - let to_init = &mut mem_mut[init.offset..init.offset + init.data.len()]; - to_init.copy_from_slice(init.data); - } - - Ok(memories) -} - -/// Allocate memory for just the tables of the current module. 
-fn instantiate_tables( - module: &Module, - allocated_functions: &PrimaryMap, - sig_registry: &mut SignatureRegistry, -) -> PrimaryMap { - let mut tables = PrimaryMap::with_capacity(module.table_plans.len()); - for table in module.table_plans.values() { - tables.push(Table::new(table)); - } - - for init in &module.table_elements { - debug_assert!(init.base.is_none(), "globalvar base not supported yet"); - let slice = tables[init.table_index].as_mut(); - let subslice = &mut slice[init.offset..init.offset + init.elements.len()]; - for (i, func_idx) in init.elements.iter().enumerate() { - let callee_sig = module.functions[*func_idx]; - let func_ptr = allocated_functions[module - .defined_func_index(*func_idx) - .expect("table element initializer with imported function not supported yet")] - .0; - let type_id = sig_registry.lookup(callee_sig); - subslice[i] = VMCallerCheckedAnyfunc { func_ptr, type_id }; - } - } - - tables -} - -/// Allocate memory for just the globals of the current module, -/// without any initializers applied yet. -fn instantiate_globals(module: &Module) -> PrimaryMap { - let mut vmctx_globals = PrimaryMap::with_capacity(module.globals.len()); - - for (index, global) in module.globals.iter() { - if module.is_imported_global(index) { - // FIXME: get the actual import - vmctx_globals.push(VMGlobal::import(ptr::null_mut())); - } else { - vmctx_globals.push(VMGlobal::definition(global)); - } - } - - vmctx_globals -} diff --git a/lib/execute/src/invoke.rs b/lib/execute/src/invoke.rs deleted file mode 100644 index 1db922f253..0000000000 --- a/lib/execute/src/invoke.rs +++ /dev/null @@ -1,232 +0,0 @@ -//! Support for invoking wasm functions from outside a wasm module. - -use action::{ActionError, ActionOutcome, RuntimeValue}; -use code::Code; -use cranelift_codegen::ir::InstBuilder; -use cranelift_codegen::{binemit, ir, isa, Context}; -use cranelift_entity::EntityRef; -use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; -use cranelift_wasm::FuncIndex; -use instance::Instance; -use signalhandlers::{ensure_eager_signal_handlers, ensure_full_signal_handlers, TrapContext}; -use std::mem; -use std::ptr; -use std::vec::Vec; -use traphandlers::call_wasm; -use vmcontext::{VMContext, VMFunctionBody}; -use wasmtime_environ::{CompileError, Export, Module, RelocSink}; - -/// Calls the given named function, passing its return values and returning -/// its results. -pub fn invoke( - code: &mut Code, - isa: &isa::TargetIsa, - module: &Module, - instance: &mut Instance, - function: &str, - args: &[RuntimeValue], -) -> Result { - let fn_index = match module.exports.get(function) { - Some(Export::Function(index)) => *index, - Some(_) => { - return Err(ActionError::Kind(format!( - "exported item \"{}\" is not a function", - function - ))) - } - None => { - return Err(ActionError::Field(format!( - "no export named \"{}\"", - function - ))) - } - }; - - invoke_by_index(code, isa, module, instance, fn_index, args) -} - -/// Invoke the WebAssembly start function of the instance, if one is present. -pub fn invoke_start_function( - code: &mut Code, - isa: &isa::TargetIsa, - module: &Module, - instance: &mut Instance, -) -> Result { - if let Some(start_index) = module.start_func { - invoke_by_index(code, isa, module, instance, start_index, &[]) - } else { - // No start function, just return nothing. - Ok(ActionOutcome::Returned { values: vec![] }) - } -} - -/// Calls the given indexed function, passing its return values and returning -/// its results. 
-pub fn invoke_by_index( - code: &mut Code, - isa: &isa::TargetIsa, - module: &Module, - instance: &mut Instance, - fn_index: FuncIndex, - args: &[RuntimeValue], -) -> Result { - let exec_code_buf = match module.defined_func_index(fn_index) { - Some(def_fn_index) => instance - .get_allocated_function(def_fn_index) - .ok_or_else(|| ActionError::Index(def_fn_index.index() as u64))? - .as_ptr(), - None => instance - .get_imported_function(fn_index) - .ok_or_else(|| ActionError::Index(fn_index.index() as u64))?, - }; - - let sig = &module.signatures[module.functions[fn_index]]; - - // TODO: Move this out to be done once per thread rather than per call. - let mut traps = TrapContext { - triedToInstallSignalHandlers: false, - haveSignalHandlers: false, - }; - - // Rather than writing inline assembly to jump to the code region, we use the fact that - // the Rust ABI for calling a function with no arguments and no return values matches the one - // of the generated code. Thanks to this, we can transmute the code region into a first-class - // Rust function and call it. - // Ensure that our signal handlers are ready for action. - ensure_eager_signal_handlers(); - ensure_full_signal_handlers(&mut traps); - if !traps.haveSignalHandlers { - return Err(ActionError::Resource( - "failed to install signal handlers".to_string(), - )); - } - - call_through_wrapper(code, isa, exec_code_buf, instance, args, &sig) -} - -fn call_through_wrapper( - code: &mut Code, - isa: &isa::TargetIsa, - callee: *const VMFunctionBody, - instance: &mut Instance, - args: &[RuntimeValue], - sig: &ir::Signature, -) -> Result { - let vmctx = instance.vmctx() as *mut VMContext; - - for (index, value) in args.iter().enumerate() { - assert_eq!(value.value_type(), sig.params[index].value_type); - } - - let wrapper_sig = ir::Signature::new(isa.frontend_config().default_call_conv); - let mut context = Context::new(); - context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig); - - let value_size = 8; - let mut results_vec = Vec::new(); - results_vec.resize(sig.returns.len(), 0i64); - - let mut fn_builder_ctx = FunctionBuilderContext::new(); - { - let mut builder = FunctionBuilder::new(&mut context.func, &mut fn_builder_ctx); - let block0 = builder.create_ebb(); - - builder.append_ebb_params_for_function_params(block0); - - builder.switch_to_block(block0); - builder.seal_block(block0); - - let mut callee_args = Vec::new(); - let pointer_type = isa.pointer_type(); - - let callee_value = builder.ins().iconst(pointer_type, callee as i64); - - for value in args { - match value { - RuntimeValue::I32(i) => { - callee_args.push(builder.ins().iconst(ir::types::I32, i64::from(*i))) - } - RuntimeValue::I64(i) => callee_args.push(builder.ins().iconst(ir::types::I64, *i)), - RuntimeValue::F32(i) => callee_args.push( - builder - .ins() - .f32const(ir::immediates::Ieee32::with_bits(*i)), - ), - RuntimeValue::F64(i) => callee_args.push( - builder - .ins() - .f64const(ir::immediates::Ieee64::with_bits(*i)), - ), - } - } - - let vmctx_value = builder.ins().iconst(pointer_type, vmctx as i64); - callee_args.push(vmctx_value); - - let new_sig = builder.import_signature(sig.clone()); - - // TODO: It's possible to make this a direct call. We just need Cranelift - // to support functions declared with an immediate integer address. 
- let call = builder - .ins() - .call_indirect(new_sig, callee_value, &callee_args); - - let results = builder.func.dfg.inst_results(call).to_vec(); - - let results_vec_value = builder - .ins() - .iconst(pointer_type, results_vec.as_ptr() as i64); - - let mut mflags = ir::MemFlags::new(); - mflags.set_notrap(); - mflags.set_aligned(); - for (i, r) in results.iter().enumerate() { - builder - .ins() - .store(mflags, *r, results_vec_value, (i * value_size) as i32); - } - - builder.ins().return_(&[]); - } - - let mut code_buf: Vec = Vec::new(); - let mut reloc_sink = RelocSink::new(); - let mut trap_sink = binemit::NullTrapSink {}; - context - .compile_and_emit(isa, &mut code_buf, &mut reloc_sink, &mut trap_sink) - .map_err(|error| ActionError::Compile(CompileError::Codegen(error)))?; - assert!(reloc_sink.func_relocs.is_empty()); - - let exec_code_buf = code - .allocate_copy_of_byte_slice(&code_buf) - .map_err(ActionError::Resource)? - .as_ptr(); - code.publish(); - - let func: fn() = unsafe { mem::transmute(exec_code_buf) }; - - Ok(match call_wasm(func) { - Ok(()) => { - let mut values = Vec::with_capacity(sig.returns.len()); - - for (index, abi_param) in sig.returns.iter().enumerate() { - let v = unsafe { - let ptr = results_vec.as_ptr().add(index * value_size); - - match abi_param.value_type { - ir::types::I32 => RuntimeValue::I32(ptr::read(ptr as *const i32)), - ir::types::I64 => RuntimeValue::I64(ptr::read(ptr as *const i64)), - ir::types::F32 => RuntimeValue::F32(ptr::read(ptr as *const u32)), - ir::types::F64 => RuntimeValue::F64(ptr::read(ptr as *const u64)), - other => panic!("unsupported value type {:?}", other), - } - }; - - values.push(v); - } - - ActionOutcome::Returned { values } - } - Err(message) => ActionOutcome::Trapped { message }, - }) -} diff --git a/lib/execute/src/lib.rs b/lib/execute/src/lib.rs index eb38b39a7c..052041599c 100644 --- a/lib/execute/src/lib.rs +++ b/lib/execute/src/lib.rs @@ -28,18 +28,12 @@ extern crate cranelift_codegen; extern crate cranelift_entity; extern crate cranelift_frontend; extern crate cranelift_wasm; -extern crate errno; extern crate region; extern crate wasmtime_environ; +extern crate wasmtime_runtime; #[cfg(not(feature = "std"))] #[macro_use] extern crate alloc; -#[macro_use] -extern crate lazy_static; -extern crate libc; -#[macro_use] -extern crate memoffset; -extern crate cast; extern crate failure; #[macro_use] extern crate failure_derive; @@ -47,30 +41,13 @@ extern crate failure_derive; mod action; mod code; mod export; -mod get; -mod imports; -mod instance; -mod invoke; -mod libcalls; mod link; -mod memory; -mod mmap; -mod sig_registry; -mod signalhandlers; -mod table; -mod traphandlers; -mod vmcontext; mod world; pub use action::{ActionError, ActionOutcome, RuntimeValue}; pub use code::Code; -pub use export::{ExportValue, NullResolver, Resolver}; -pub use get::{get, get_by_index}; -pub use instance::Instance; -pub use invoke::{invoke, invoke_by_index, invoke_start_function}; +pub use export::{Export, NullResolver, Resolver}; pub use link::link_module; -pub use traphandlers::{call_wasm, LookupCodeSegment, RecordTrap, Unwind}; -pub use vmcontext::{VMContext, VMFunctionBody, VMGlobal, VMMemory, VMTable}; pub use world::InstanceWorld; #[cfg(not(feature = "std"))] diff --git a/lib/execute/src/libcalls.rs b/lib/execute/src/libcalls.rs deleted file mode 100644 index 08f7c4e78c..0000000000 --- a/lib/execute/src/libcalls.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! Runtime library calls. Note that the JIT may sometimes perform these inline -//! 
rather than calling them, particularly when CPUs have special instructions -//! which compute them directly. - -pub extern "C" fn wasmtime_f32_ceil(x: f32) -> f32 { - x.ceil() -} - -pub extern "C" fn wasmtime_f32_floor(x: f32) -> f32 { - x.floor() -} - -pub extern "C" fn wasmtime_f32_trunc(x: f32) -> f32 { - x.trunc() -} - -#[allow(clippy::float_arithmetic, clippy::float_cmp)] -pub extern "C" fn wasmtime_f32_nearest(x: f32) -> f32 { - // Rust doesn't have a nearest function, so do it manually. - if x == 0.0 { - // Preserve the sign of zero. - x - } else { - // Nearest is either ceil or floor depending on which is nearest or even. - let u = x.ceil(); - let d = x.floor(); - let um = (x - u).abs(); - let dm = (x - d).abs(); - if um < dm - || (um == dm && { - let h = u / 2.; - h.floor() == h - }) - { - u - } else { - d - } - } -} - -pub extern "C" fn wasmtime_f64_ceil(x: f64) -> f64 { - x.ceil() -} - -pub extern "C" fn wasmtime_f64_floor(x: f64) -> f64 { - x.floor() -} - -pub extern "C" fn wasmtime_f64_trunc(x: f64) -> f64 { - x.trunc() -} - -#[allow(clippy::float_arithmetic, clippy::float_cmp)] -pub extern "C" fn wasmtime_f64_nearest(x: f64) -> f64 { - // Rust doesn't have a nearest function, so do it manually. - if x == 0.0 { - // Preserve the sign of zero. - x - } else { - // Nearest is either ceil or floor depending on which is nearest or even. - let u = x.ceil(); - let d = x.floor(); - let um = (x - u).abs(); - let dm = (x - d).abs(); - if um < dm - || (um == dm && { - let h = u / 2.; - h.floor() == h - }) - { - u - } else { - d - } - } -} diff --git a/lib/execute/src/link.rs b/lib/execute/src/link.rs index 7bb89dfe9e..724847ac81 100644 --- a/lib/execute/src/link.rs +++ b/lib/execute/src/link.rs @@ -1,18 +1,16 @@ use cranelift_codegen::binemit::Reloc; -use cranelift_entity::{EntityRef, PrimaryMap}; -use cranelift_wasm::{ - DefinedFuncIndex, Global, GlobalInit, Memory, MemoryIndex, Table, TableElementType, -}; -use export::{ExportValue, Resolver}; -use imports::Imports; +use cranelift_entity::PrimaryMap; +use cranelift_wasm::{DefinedFuncIndex, Global, GlobalInit, Memory, Table, TableElementType}; +use export::{Export, FunctionExport, Resolver}; use std::ptr::write_unaligned; +use std::string::String; use std::vec::Vec; -use vmcontext::VMContext; -use vmcontext::{VMFunctionBody, VMGlobal, VMMemory, VMTable}; use wasmtime_environ::{ MemoryPlan, MemoryStyle, Module, Relocation, RelocationTarget, Relocations, TablePlan, TableStyle, }; +use wasmtime_runtime::libcalls; +use wasmtime_runtime::{Imports, VMFunctionBody, VMGlobalImport, VMMemoryImport, VMTableImport}; /// A link error, such as incompatible or unmatched imports/exports. #[derive(Fail, Debug)] @@ -22,29 +20,28 @@ pub struct LinkError(String); /// Links a module that has been compiled with `compiled_module` in `wasmtime-environ`. 
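`link_module` below drives import resolution through the `Resolver` trait declared in export.rs above. Here is a minimal sketch of a resolver that only supplies host functions; the `HostFuncs` map, and how its code pointers and signatures are produced, are assumptions of this sketch rather than anything this patch provides:

use std::collections::HashMap;
use std::string::String;

use cranelift_codegen::ir;
use export::{Export, Resolver};
use wasmtime_runtime::VMFunctionBody;

// Hypothetical embedder-side resolver: every (module, field) pair it knows
// about resolves to a host function export; everything else is unresolved.
struct HostFuncs {
    funcs: HashMap<(String, String), (*const VMFunctionBody, ir::Signature)>,
}

impl Resolver for HostFuncs {
    fn resolve(&mut self, module: &str, field: &str) -> Option<Export> {
        self.funcs
            .get(&(String::from(module), String::from(field)))
            .map(|(address, signature)| Export::function(*address, signature.clone()))
    }
}

A resolver along these lines would be populated by the embedder before calling `InstanceWorld::new`, which passes it straight through to `link_module`.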
pub fn link_module( module: &Module, - allocated_functions: &PrimaryMap, + allocated_functions: &PrimaryMap, relocations: Relocations, resolver: &mut Resolver, ) -> Result { - let mut imports = Imports::new(); - + let mut function_imports = PrimaryMap::with_capacity(module.imported_funcs.len()); for (index, (ref module_name, ref field)) in module.imported_funcs.iter() { match resolver.resolve(module_name, field) { Some(export_value) => match export_value { - ExportValue::Function { address, signature } => { + Export::Function(FunctionExport { address, signature }) => { let import_signature = &module.signatures[module.functions[index]]; if signature != *import_signature { + // TODO: If the difference is in the calling convention, + // we could emit a wrapper function to fix it up. return Err(LinkError( format!("{}/{}: exported function with signature {} incompatible with function import with signature {}", module_name, field, signature, import_signature) )); } - imports.functions.push(address); + function_imports.push(address); } - ExportValue::Table { .. } - | ExportValue::Memory { .. } - | ExportValue::Global { .. } => { + Export::Table { .. } | Export::Memory { .. } | Export::Global { .. } => { return Err(LinkError(format!( "{}/{}: export not compatible with function import", module_name, field @@ -60,41 +57,15 @@ pub fn link_module( } } - for (index, (ref module_name, ref field)) in module.imported_globals.iter() { - match resolver.resolve(module_name, field) { - Some(export_value) => match export_value { - ExportValue::Global { address, global } => { - let imported_global = module.globals[index]; - if !is_global_compatible(&global, &imported_global) { - return Err(LinkError(format!( - "{}/{}: exported global incompatible with global import", - module_name, field - ))); - } - imports.globals.push(address as *mut VMGlobal); - } - ExportValue::Table { .. } - | ExportValue::Memory { .. } - | ExportValue::Function { .. } => { - return Err(LinkError(format!( - "{}/{}: exported global incompatible with global import", - module_name, field - ))); - } - }, - None => { - return Err(LinkError(format!( - "no provided import global for {}/{}", - module_name, field - ))) - } - } - } - + let mut table_imports = PrimaryMap::with_capacity(module.imported_tables.len()); for (index, (ref module_name, ref field)) in module.imported_tables.iter() { match resolver.resolve(module_name, field) { Some(export_value) => match export_value { - ExportValue::Table { address, table } => { + Export::Table { + address, + vmctx, + table, + } => { let import_table = &module.table_plans[index]; if !is_table_compatible(&table, import_table) { return Err(LinkError(format!( @@ -102,11 +73,12 @@ pub fn link_module( module_name, field, ))); } - imports.tables.push(address as *mut VMTable); + table_imports.push(VMTableImport { + from: address, + vmctx, + }); } - ExportValue::Global { .. } - | ExportValue::Memory { .. } - | ExportValue::Function { .. } => { + Export::Global { .. } | Export::Memory { .. } | Export::Function { .. 
} => { return Err(LinkError(format!( "{}/{}: export not compatible with table import", module_name, field @@ -122,10 +94,15 @@ pub fn link_module( } } + let mut memory_imports = PrimaryMap::with_capacity(module.imported_memories.len()); for (index, (ref module_name, ref field)) in module.imported_memories.iter() { match resolver.resolve(module_name, field) { Some(export_value) => match export_value { - ExportValue::Memory { address, memory } => { + Export::Memory { + address, + vmctx, + memory, + } => { let import_memory = &module.memory_plans[index]; if is_memory_compatible(&memory, import_memory) { return Err(LinkError(format!( @@ -133,11 +110,12 @@ pub fn link_module( module_name, field ))); } - imports.memories.push(address as *mut VMMemory); + memory_imports.push(VMMemoryImport { + from: address, + vmctx, + }); } - ExportValue::Table { .. } - | ExportValue::Global { .. } - | ExportValue::Function { .. } => { + Export::Table { .. } | Export::Global { .. } | Export::Function { .. } => { return Err(LinkError(format!( "{}/{}: export not compatible with memory import", module_name, field @@ -153,6 +131,43 @@ pub fn link_module( } } + let mut global_imports = PrimaryMap::with_capacity(module.imported_globals.len()); + for (index, (ref module_name, ref field)) in module.imported_globals.iter() { + match resolver.resolve(module_name, field) { + Some(export_value) => match export_value { + Export::Global { address, global } => { + let imported_global = module.globals[index]; + if !is_global_compatible(&global, &imported_global) { + return Err(LinkError(format!( + "{}/{}: exported global incompatible with global import", + module_name, field + ))); + } + global_imports.push(VMGlobalImport { from: address }); + } + Export::Table { .. } | Export::Memory { .. } | Export::Function { .. } => { + return Err(LinkError(format!( + "{}/{}: exported global incompatible with global import", + module_name, field + ))); + } + }, + None => { + return Err(LinkError(format!( + "no provided import global for {}/{}", + module_name, field + ))) + } + } + } + + let imports = Imports::new( + function_imports, + table_imports, + memory_imports, + global_imports, + ); + // Apply relocations, now that we have virtual addresses for everything. relocate(&imports, allocated_functions, relocations, &module); @@ -277,22 +292,27 @@ fn is_memory_compatible(exported: &MemoryPlan, imported: &MemoryPlan) -> bool { /// Performs the relocations inside the function bytecode, provided the necessary metadata. 
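`relocate` below walks the relocation records and patches each resolved address into the function bodies; the bodies of its match arms are unchanged context not shown in this hunk. Roughly, an 8-byte absolute relocation is applied as in the sketch here, where the `offset` and `addend` names are assumed from `wasmtime_environ::Relocation`:

use std::ptr::write_unaligned;

// Rough sketch of one 8-byte absolute relocation: the resolved target
// address, plus the relocation's addend, is written unaligned into the
// function body at the relocation's code offset.
unsafe fn apply_abs8(body: *mut u8, offset: u32, target_func_address: usize, addend: i64) {
    let reloc_address = body.add(offset as usize) as *mut u64;
    let reloc_value = (target_func_address as u64).wrapping_add(addend as u64);
    write_unaligned(reloc_address, reloc_value);
}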
fn relocate( imports: &Imports, - allocated_functions: &PrimaryMap, + allocated_functions: &PrimaryMap, relocations: PrimaryMap>, module: &Module, ) { for (i, function_relocs) in relocations.into_iter() { for r in function_relocs { + use self::libcalls::*; let target_func_address: usize = match r.reloc_target { RelocationTarget::UserFunc(index) => match module.defined_func_index(index) { - Some(f) => allocated_functions[f].0 as usize, + Some(f) => { + let fatptr: *const [VMFunctionBody] = allocated_functions[f]; + fatptr as *const VMFunctionBody as usize + } None => imports.functions[index] as usize, }, - RelocationTarget::MemoryGrow => wasmtime_memory_grow as usize, - RelocationTarget::MemorySize => wasmtime_memory_size as usize, + RelocationTarget::Memory32Grow => wasmtime_memory32_grow as usize, + RelocationTarget::Memory32Size => wasmtime_memory32_size as usize, + RelocationTarget::ImportedMemory32Grow => wasmtime_imported_memory32_grow as usize, + RelocationTarget::ImportedMemory32Size => wasmtime_imported_memory32_size as usize, RelocationTarget::LibCall(libcall) => { use cranelift_codegen::ir::LibCall::*; - use libcalls::*; match libcall { CeilF32 => wasmtime_f32_ceil as usize, FloorF32 => wasmtime_f32_floor as usize, @@ -308,7 +328,8 @@ fn relocate( } }; - let body = allocated_functions[i].0; + let fatptr: *const [VMFunctionBody] = allocated_functions[i]; + let body = fatptr as *const VMFunctionBody; match r.reloc { #[cfg(target_pointer_width = "64")] Reloc::Abs8 => unsafe { @@ -340,21 +361,3 @@ fn relocate( extern "C" { pub fn __rust_probestack(); } - -/// The implementation of memory.grow. -extern "C" fn wasmtime_memory_grow(size: u32, memory_index: u32, vmctx: *mut VMContext) -> u32 { - let instance = unsafe { (&mut *vmctx).instance() }; - let memory_index = MemoryIndex::new(memory_index as usize); - - instance - .memory_grow(memory_index, size) - .unwrap_or(u32::max_value()) -} - -/// The implementation of memory.size. 
-extern "C" fn wasmtime_memory_size(memory_index: u32, vmctx: *mut VMContext) -> u32 { - let instance = unsafe { (&mut *vmctx).instance() }; - let memory_index = MemoryIndex::new(memory_index as usize); - - instance.memory_size(memory_index) -} diff --git a/lib/execute/src/world.rs b/lib/execute/src/world.rs index e8f9c9c2c4..2a14858c1f 100644 --- a/lib/execute/src/world.rs +++ b/lib/execute/src/world.rs @@ -1,17 +1,30 @@ use action::{ActionError, ActionOutcome, RuntimeValue}; use code::Code; -use cranelift_codegen::isa; -use cranelift_entity::PrimaryMap; -use cranelift_wasm::{DefinedFuncIndex, GlobalIndex, MemoryIndex}; +use cranelift_codegen::ir::InstBuilder; +use cranelift_codegen::Context; +use cranelift_codegen::{binemit, ir, isa}; +use cranelift_entity::{BoxedSlice, EntityRef, PrimaryMap}; +use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; +use cranelift_wasm::{ + DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, + GlobalIndex, MemoryIndex, TableIndex, +}; use export::Resolver; -use get::get; -use instance::Instance; -use invoke::{invoke, invoke_start_function}; use link::link_module; -use std::str; -use vmcontext::{VMFunctionBody, VMGlobal}; +use std::cmp::max; +use std::collections::HashMap; +use std::slice; +use std::string::String; +use std::vec::Vec; +use std::{mem, ptr}; use wasmtime_environ::{ - compile_module, Compilation, CompileError, Module, ModuleEnvironment, Tunables, + compile_module, Compilation, CompileError, Export, Module, ModuleEnvironment, RelocSink, + Tunables, +}; +use wasmtime_runtime::{ + wasmtime_call_trampoline, wasmtime_init_eager, wasmtime_init_finish, Instance, VMContext, + VMFunctionBody, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, + VMTableDefinition, VMTableImport, }; /// A module, an instance of that module, and accompanying compilation artifacts. @@ -20,10 +33,19 @@ use wasmtime_environ::{ pub struct InstanceWorld { module: Module, instance: Instance, + + /// Pointers to functions in executable memory. + finished_functions: BoxedSlice, + + /// Trampolines for calling into JIT code. + trampolines: TrampolinePark, } impl InstanceWorld { /// Create a new `InstanceWorld` by compiling the wasm module in `data` and instatiating it. + /// + /// `finished_functions` holds the function bodies + /// which have been placed in executable memory and linked. pub fn new( code: &mut Code, isa: &isa::TargetIsa, @@ -33,57 +55,133 @@ impl InstanceWorld { let mut module = Module::new(); // TODO: Allow the tunables to be overridden. let tunables = Tunables::default(); - let instance = { - // TODO: Untie this. 
- let ((mut compilation, relocations), lazy_data_initializers) = { - let (lazy_function_body_inputs, lazy_data_initializers) = { - let environ = ModuleEnvironment::new(isa, &mut module, tunables); + let (lazy_function_body_inputs, lazy_data_initializers) = { + let environ = ModuleEnvironment::new(isa, &mut module, tunables); - let translation = environ - .translate(&data) - .map_err(|error| ActionError::Compile(CompileError::Wasm(error)))?; + let translation = environ + .translate(&data) + .map_err(|error| ActionError::Compile(CompileError::Wasm(error)))?; - ( - translation.lazy.function_body_inputs, - translation.lazy.data_initializers, - ) - }; - - ( - compile_module(&module, &lazy_function_body_inputs, isa) - .map_err(ActionError::Compile)?, - lazy_data_initializers, - ) - }; - - let allocated_functions = - allocate_functions(code, compilation).map_err(ActionError::Resource)?; - - let resolved = link_module(&module, &allocated_functions, relocations, resolver) - .map_err(ActionError::Link)?; - - let mut instance = Instance::new( - &module, - allocated_functions, - &lazy_data_initializers, - resolved, + ( + translation.lazy.function_body_inputs, + translation.lazy.data_initializers, ) - .map_err(ActionError::Resource)?; - - // The WebAssembly spec specifies that the start function is - // invoked automatically at instantiation time. - match invoke_start_function(code, isa, &module, &mut instance)? { - ActionOutcome::Returned { .. } => {} - ActionOutcome::Trapped { message } => { - // Instantiation fails if the start function traps. - return Err(ActionError::Start(message)); - } - } - - instance }; - Ok(Self { module, instance }) + let (compilation, relocations) = compile_module(&module, &lazy_function_body_inputs, isa) + .map_err(ActionError::Compile)?; + + let allocated_functions = + allocate_functions(code, compilation).map_err(ActionError::Resource)?; + + let imports = link_module(&module, &allocated_functions, relocations, resolver) + .map_err(ActionError::Link)?; + + let finished_functions: BoxedSlice = + allocated_functions + .into_iter() + .map(|(_index, allocated)| { + let fatptr: *const [VMFunctionBody] = *allocated; + fatptr as *const VMFunctionBody + }) + .collect::>() + .into_boxed_slice(); + + let instance = Instance::new( + &module, + &finished_functions, + imports, + &lazy_data_initializers, + ) + .map_err(ActionError::Resource)?; + + let fn_builder_ctx = FunctionBuilderContext::new(); + + let mut result = Self { + module, + instance, + finished_functions, + trampolines: TrampolinePark { + memo: HashMap::new(), + fn_builder_ctx, + }, + }; + + // The WebAssembly spec specifies that the start function is + // invoked automatically at instantiation time. + match result.invoke_start_function(code, isa)? { + ActionOutcome::Returned { .. } => {} + ActionOutcome::Trapped { message } => { + // Instantiation fails if the start function traps. + return Err(ActionError::Start(message)); + } + } + + Ok(result) + } + + fn get_imported_function(&self, index: FuncIndex) -> Option<*const VMFunctionBody> { + if index.index() < self.module.imported_funcs.len() { + Some(unsafe { self.instance.vmctx().imported_function(index) }) + } else { + None + } + } + + // TODO: Add an accessor for table elements. 
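These accessors, and `invoke_by_index` below, all follow the same convention: each index space lists imports first, then locally-defined entries. A small illustration of that split for functions, with counts made up for the example:

use cranelift_wasm::{DefinedFuncIndex, FuncIndex};

// With 2 imported functions, FuncIndex 0 and 1 resolve through the
// imported_functions array in the VMContext, while FuncIndex 2 is the
// first locally-defined function, i.e. DefinedFuncIndex 0 in
// finished_functions.
fn index_space_example() {
    let num_imported_funcs = 2;
    let func_index = FuncIndex::from_u32(2);
    let defined = if func_index.as_u32() >= num_imported_funcs {
        Some(DefinedFuncIndex::from_u32(
            func_index.as_u32() - num_imported_funcs,
        ))
    } else {
        None
    };
    assert_eq!(defined, Some(DefinedFuncIndex::from_u32(0)));
}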
+ #[allow(dead_code)] + fn get_imported_table(&self, index: TableIndex) -> Option<&VMTableImport> { + if index.index() < self.module.imported_tables.len() { + Some(unsafe { self.instance.vmctx().imported_table(index) }) + } else { + None + } + } + + fn get_imported_memory(&self, index: MemoryIndex) -> Option<&VMMemoryImport> { + if index.index() < self.module.imported_memories.len() { + Some(unsafe { self.instance.vmctx().imported_memory(index) }) + } else { + None + } + } + + fn get_imported_global(&self, index: GlobalIndex) -> Option<&VMGlobalImport> { + if index.index() < self.module.imported_globals.len() { + Some(unsafe { self.instance.vmctx().imported_global(index) }) + } else { + None + } + } + + fn get_finished_function(&self, index: DefinedFuncIndex) -> Option<*const VMFunctionBody> { + self.finished_functions.get(index).cloned() + } + + // TODO: Add an accessor for table elements. + #[allow(dead_code)] + fn get_defined_table(&self, index: DefinedTableIndex) -> Option<&VMTableDefinition> { + if self.module.table_index(index).index() < self.module.table_plans.len() { + Some(unsafe { self.instance.vmctx().table(index) }) + } else { + None + } + } + + fn get_defined_memory(&self, index: DefinedMemoryIndex) -> Option<&VMMemoryDefinition> { + if self.module.memory_index(index).index() < self.module.memory_plans.len() { + Some(unsafe { self.instance.vmctx().memory(index) }) + } else { + None + } + } + + fn get_defined_global(&self, index: DefinedGlobalIndex) -> Option<&VMGlobalDefinition> { + if self.module.global_index(index).index() < self.module.globals.len() { + Some(unsafe { self.instance.vmctx().global(index) }) + } else { + None + } } /// Invoke a function in this `InstanceWorld` by name. @@ -94,40 +192,362 @@ impl InstanceWorld { function_name: &str, args: &[RuntimeValue], ) -> Result { - invoke( - code, - isa, - &self.module, - &mut self.instance, - &function_name, - args, - ) + let fn_index = match self.module.exports.get(function_name) { + Some(Export::Function(index)) => *index, + Some(_) => { + return Err(ActionError::Kind(format!( + "exported item \"{}\" is not a function", + function_name + ))) + } + None => { + return Err(ActionError::Field(format!( + "no export named \"{}\"", + function_name + ))) + } + }; + + self.invoke_by_index(code, isa, fn_index, args) + } + + /// Invoke the WebAssembly start function of the instance, if one is present. + fn invoke_start_function( + &mut self, + code: &mut Code, + isa: &isa::TargetIsa, + ) -> Result { + if let Some(start_index) = self.module.start_func { + self.invoke_by_index(code, isa, start_index, &[]) + } else { + // No start function, just return nothing. + Ok(ActionOutcome::Returned { values: vec![] }) + } + } + + /// Calls the given indexed function, passing its return values and returning + /// its results. + fn invoke_by_index( + &mut self, + code: &mut Code, + isa: &isa::TargetIsa, + fn_index: FuncIndex, + args: &[RuntimeValue], + ) -> Result { + let callee_address = match self.module.defined_func_index(fn_index) { + Some(def_fn_index) => self + .get_finished_function(def_fn_index) + .ok_or_else(|| ActionError::Index(def_fn_index.index() as u64))?, + None => self + .get_imported_function(fn_index) + .ok_or_else(|| ActionError::Index(fn_index.index() as u64))?, + }; + + // Rather than writing inline assembly to jump to the code region, we use the fact that + // the Rust ABI for calling a function with no arguments and no return values matches the one + // of the generated code. 
Thanks to this, we can transmute the code region into a first-class + // Rust function and call it. + // Ensure that our signal handlers are ready for action. + wasmtime_init_eager(); + wasmtime_init_finish(self.instance.vmctx_mut()); + + let signature = &self.module.signatures[self.module.functions[fn_index]]; + let vmctx: *mut VMContext = self.instance.vmctx_mut(); + + for (index, value) in args.iter().enumerate() { + assert_eq!(value.value_type(), signature.params[index].value_type); + } + + // TODO: Support values larger than u64. + let mut values_vec: Vec = Vec::new(); + let value_size = mem::size_of::(); + values_vec.resize(max(signature.params.len(), signature.returns.len()), 0u64); + + // Store the argument values into `values_vec`. + for (index, arg) in args.iter().enumerate() { + unsafe { + let ptr = values_vec.as_mut_ptr().add(index); + + match arg { + RuntimeValue::I32(x) => ptr::write(ptr as *mut i32, *x), + RuntimeValue::I64(x) => ptr::write(ptr as *mut i64, *x), + RuntimeValue::F32(x) => ptr::write(ptr as *mut u32, *x), + RuntimeValue::F64(x) => ptr::write(ptr as *mut u64, *x), + } + } + } + + // Store the vmctx value into `values_vec`. + unsafe { + let ptr = values_vec.as_mut_ptr().add(args.len()); + ptr::write(ptr as *mut usize, vmctx as usize) + } + + // Get the trampoline to call for this function. + let exec_code_buf = + self.trampolines + .get(code, isa, callee_address, &signature, value_size)?; + + // Make all JIT code produced thus far executable. + code.publish(); + + // Call the trampoline. + if let Err(message) = unsafe { + wasmtime_call_trampoline( + exec_code_buf, + values_vec.as_mut_ptr() as *mut u8, + self.instance.vmctx_mut(), + ) + } { + return Ok(ActionOutcome::Trapped { message }); + } + + // Load the return values out of `values_vec`. + let values = signature + .returns + .iter() + .enumerate() + .map(|(index, abi_param)| unsafe { + let ptr = values_vec.as_ptr().add(index); + + match abi_param.value_type { + ir::types::I32 => RuntimeValue::I32(ptr::read(ptr as *const i32)), + ir::types::I64 => RuntimeValue::I64(ptr::read(ptr as *const i64)), + ir::types::F32 => RuntimeValue::F32(ptr::read(ptr as *const u32)), + ir::types::F64 => RuntimeValue::F64(ptr::read(ptr as *const u64)), + other => panic!("unsupported value type {:?}", other), + } + }) + .collect(); + + Ok(ActionOutcome::Returned { values }) } /// Read a global in this `InstanceWorld` by name. - pub fn get(&mut self, global_name: &str) -> Result { - get(&self.module, &mut self.instance, global_name) + pub fn get(&self, global_name: &str) -> Result { + let global_index = match self.module.exports.get(global_name) { + Some(Export::Global(index)) => *index, + Some(_) => { + return Err(ActionError::Kind(format!( + "exported item \"{}\" is not a global", + global_name + ))) + } + None => { + return Err(ActionError::Field(format!( + "no export named \"{}\"", + global_name + ))) + } + }; + + self.get_by_index(global_index) + } + + /// Reads the value of the indexed global variable in `module`. + pub fn get_by_index(&self, global_index: GlobalIndex) -> Result { + let global_address = match self.module.defined_global_index(global_index) { + Some(def_global_index) => self + .get_defined_global(def_global_index) + .ok_or_else(|| ActionError::Index(def_global_index.index() as u64))?, + None => { + let from: *const VMGlobalDefinition = self + .get_imported_global(global_index) + .ok_or_else(|| ActionError::Index(global_index.index() as u64))? 
+ .from; + from + } + }; + let global_def = unsafe { &*global_address }; + + unsafe { + Ok( + match self + .module + .globals + .get(global_index) + .ok_or_else(|| ActionError::Index(global_index.index() as u64))? + .ty + { + ir::types::I32 => RuntimeValue::I32(*global_def.as_i32()), + ir::types::I64 => RuntimeValue::I64(*global_def.as_i64()), + ir::types::F32 => RuntimeValue::F32(*global_def.as_f32_bits()), + ir::types::F64 => RuntimeValue::F64(*global_def.as_f64_bits()), + other => { + return Err(ActionError::Type(format!( + "global with type {} not supported", + other + ))) + } + }, + ) + } } /// Returns a slice of the contents of allocated linear memory. - pub fn inspect_memory(&self, memory_index: MemoryIndex, address: usize, len: usize) -> &[u8] { - self.instance.inspect_memory(memory_index, address, len) - } + pub fn inspect_memory( + &self, + memory_index: MemoryIndex, + address: usize, + len: usize, + ) -> Result<&[u8], ActionError> { + let memory_address = match self.module.defined_memory_index(memory_index) { + Some(def_memory_index) => self + .get_defined_memory(def_memory_index) + .ok_or_else(|| ActionError::Index(def_memory_index.index() as u64))?, + None => { + let from: *const VMMemoryDefinition = self + .get_imported_memory(memory_index) + .ok_or_else(|| ActionError::Index(memory_index.index() as u64))? + .from; + from + } + }; + let memory_def = unsafe { &*memory_address }; - /// Shows the value of a global variable. - pub fn inspect_global(&self, global_index: GlobalIndex) -> &VMGlobal { - self.instance.inspect_global(global_index) + Ok(unsafe { + &slice::from_raw_parts(memory_def.base, memory_def.current_length) + [address..address + len] + }) } } fn allocate_functions( code: &mut Code, compilation: Compilation, -) -> Result, String> { +) -> Result, String> { let mut result = PrimaryMap::with_capacity(compilation.functions.len()); for (_, body) in compilation.functions.into_iter() { - let slice = code.allocate_copy_of_byte_slice(body)?; - result.push((slice.as_mut_ptr(), slice.len())); + let fatptr: *mut [VMFunctionBody] = code.allocate_copy_of_byte_slice(body)?; + result.push(fatptr); } Ok(result) } + +struct TrampolinePark { + /// Memorized per-function trampolines. + memo: HashMap<*const VMFunctionBody, *const VMFunctionBody>, + + /// The `FunctionBuilderContext`, shared between function compilations. + fn_builder_ctx: FunctionBuilderContext, +} + +impl TrampolinePark { + fn get( + &mut self, + code: &mut Code, + isa: &isa::TargetIsa, + callee_address: *const VMFunctionBody, + signature: &ir::Signature, + value_size: usize, + ) -> Result<*const VMFunctionBody, ActionError> { + use std::collections::hash_map::Entry::{Occupied, Vacant}; + Ok(match self.memo.entry(callee_address) { + Occupied(entry) => *entry.get(), + Vacant(entry) => { + let body = make_trampoline( + &mut self.fn_builder_ctx, + code, + isa, + callee_address, + signature, + value_size, + )?; + entry.insert(body); + body + } + }) + } +} + +fn make_trampoline( + fn_builder_ctx: &mut FunctionBuilderContext, + code: &mut Code, + isa: &isa::TargetIsa, + callee_address: *const VMFunctionBody, + signature: &ir::Signature, + value_size: usize, +) -> Result<*const VMFunctionBody, ActionError> { + let pointer_type = isa.pointer_type(); + let mut wrapper_sig = ir::Signature::new(isa.frontend_config().default_call_conv); + + // Add the `values_vec` parameter. + wrapper_sig.params.push(ir::AbiParam::new(pointer_type)); + // Add the `vmctx` parameter. 
+ wrapper_sig.params.push(ir::AbiParam::special( + pointer_type, + ir::ArgumentPurpose::VMContext, + )); + + let mut context = Context::new(); + context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig); + + { + let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx); + let block0 = builder.create_ebb(); + + builder.append_ebb_params_for_function_params(block0); + builder.switch_to_block(block0); + builder.seal_block(block0); + + let mut callee_args = Vec::new(); + let pointer_type = isa.pointer_type(); + + let (values_vec_ptr_val, vmctx_ptr_val) = { + let params = builder.func.dfg.ebb_params(block0); + (params[0], params[1]) + }; + + // Load the argument values out of `values_vec`. + let mflags = ir::MemFlags::trusted(); + for (i, r) in signature.params.iter().enumerate() { + let value = match r.purpose { + ir::ArgumentPurpose::Normal => builder.ins().load( + r.value_type, + mflags, + values_vec_ptr_val, + (i * value_size) as i32, + ), + ir::ArgumentPurpose::VMContext => vmctx_ptr_val, + other => panic!("unsupported argument purpose {}", other), + }; + callee_args.push(value); + } + + let new_sig = builder.import_signature(signature.clone()); + + // TODO: It's possible to make this a direct call. We just need Cranelift + // to support functions declared with an immediate integer address. + // ExternalName::Absolute(u64). Let's do it. + let callee_value = builder.ins().iconst(pointer_type, callee_address as i64); + let call = builder + .ins() + .call_indirect(new_sig, callee_value, &callee_args); + + let results = builder.func.dfg.inst_results(call).to_vec(); + + // Store the return values into `values_vec`. + let mflags = ir::MemFlags::trusted(); + for (i, r) in results.iter().enumerate() { + builder + .ins() + .store(mflags, *r, values_vec_ptr_val, (i * value_size) as i32); + } + + builder.ins().return_(&[]); + builder.finalize() + } + + let mut code_buf: Vec = Vec::new(); + let mut reloc_sink = RelocSink::new(); + let mut trap_sink = binemit::NullTrapSink {}; + context + .compile_and_emit(isa, &mut code_buf, &mut reloc_sink, &mut trap_sink) + .map_err(|error| ActionError::Compile(CompileError::Codegen(error)))?; + assert!(reloc_sink.func_relocs.is_empty()); + + Ok(code + .allocate_copy_of_byte_slice(&code_buf) + .map_err(ActionError::Resource)? 
+ .as_ptr()) +} diff --git a/lib/runtime/Cargo.toml b/lib/runtime/Cargo.toml new file mode 100644 index 0000000000..6c329b3991 --- /dev/null +++ b/lib/runtime/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "wasmtime-runtime" +version = "0.0.0" +authors = ["The Cranelift Project Developers"] +publish = false +description = "Runtime library support for Wasmtime" +categories = ["wasm"] +repository = "https://github.com/CraneStation/wasmtime" +license = "Apache-2.0 WITH LLVM-exception" +readme = "README.md" + +[dependencies] +cranelift-codegen = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } +cranelift-entity = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } +cranelift-wasm = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } +wasmtime-environ = { path = "../environ" } +region = "1.0.0" +lazy_static = "1.2.0" +libc = { version = "0.2.44", default-features = false } +errno = "0.2.4" +memoffset = "0.2.1" +cast = { version = "0.2.2", default-features = false } +failure = { version = "0.1.3", default-features = false } +failure_derive = { version = "0.1.3", default-features = false } + +[build-dependencies] +cmake = "0.1.35" +bindgen = "0.44.0" +regex = "1.0.6" + +[features] +default = ["std"] +std = ["cranelift-codegen/std", "cranelift-wasm/std"] +core = ["cranelift-codegen/core", "cranelift-wasm/core", "wasmtime-environ/core"] + +[badges] +maintenance = { status = "experimental" } +travis-ci = { repository = "CraneStation/wasmtime" } diff --git a/lib/runtime/LICENSE b/lib/runtime/LICENSE new file mode 100644 index 0000000000..f9d81955f4 --- /dev/null +++ b/lib/runtime/LICENSE @@ -0,0 +1,220 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +--- LLVM Exceptions to the Apache 2.0 License ---- + +As an exception, if, as a result of your compiling your source code, portions +of this Software are embedded into an Object form of such source code, you +may redistribute such embedded portions in such Object form without complying +with the conditions of Sections 4(a), 4(b) and 4(d) of the License. + +In addition, if you combine or link compiled forms of this Software with +software that is licensed under the GPLv2 ("Combined Software") and if a +court of competent jurisdiction determines that the patent provision (Section +3), the indemnity provision (Section 9) or other Section of the License +conflicts with the conditions of the GPLv2, you may retroactively and +prospectively choose to deem waived or otherwise exclude such Section(s) of +the License, but only in their entirety and only with respect to the Combined +Software. 
+ diff --git a/lib/runtime/README.md b/lib/runtime/README.md new file mode 100644 index 0000000000..560835bb0e --- /dev/null +++ b/lib/runtime/README.md @@ -0,0 +1,7 @@ +This is the `wasmtime-runtime` crate, which contains wasm runtime library +support, supporting the wasm ABI used by [`wasmtime-environ`], +[`wasmtime-execute`], and [`wasmtime-obj`]. + +[`wasmtime-environ`]: https://crates.io/crates/wasmtime-environ +[`wasmtime-execute`]: https://crates.io/crates/wasmtime-execute +[`wasmtime-obj`]: https://crates.io/crates/wasmtime-obj diff --git a/lib/execute/build.rs b/lib/runtime/build.rs similarity index 100% rename from lib/execute/build.rs rename to lib/runtime/build.rs diff --git a/lib/execute/signalhandlers/CMakeLists.txt b/lib/runtime/signalhandlers/CMakeLists.txt similarity index 100% rename from lib/execute/signalhandlers/CMakeLists.txt rename to lib/runtime/signalhandlers/CMakeLists.txt diff --git a/lib/execute/signalhandlers/SignalHandlers.cpp b/lib/runtime/signalhandlers/SignalHandlers.cpp similarity index 99% rename from lib/execute/signalhandlers/SignalHandlers.cpp rename to lib/runtime/signalhandlers/SignalHandlers.cpp index eb3608d5f6..ac06d74051 100644 --- a/lib/execute/signalhandlers/SignalHandlers.cpp +++ b/lib/runtime/signalhandlers/SignalHandlers.cpp @@ -702,7 +702,7 @@ EnsureEagerSignalHandlers() } #elif defined(USE_APPLE_MACH_PORTS) - // All the Mach setup in EnsureLazyProcessSignalHandlers. + // All the Mach setup in EnsureDarwinMachPorts. #else // SA_ONSTACK allows us to handle signals on an alternate stack, so that // the handler can run in response to running out of stack space on the diff --git a/lib/execute/signalhandlers/SignalHandlers.h b/lib/runtime/signalhandlers/SignalHandlers.h similarity index 100% rename from lib/execute/signalhandlers/SignalHandlers.h rename to lib/runtime/signalhandlers/SignalHandlers.h diff --git a/lib/runtime/src/imports.rs b/lib/runtime/src/imports.rs new file mode 100644 index 0000000000..363edac7a8 --- /dev/null +++ b/lib/runtime/src/imports.rs @@ -0,0 +1,46 @@ +use cranelift_entity::{BoxedSlice, PrimaryMap}; +use cranelift_wasm::{FuncIndex, GlobalIndex, MemoryIndex, TableIndex}; +use vmcontext::{VMFunctionBody, VMGlobalImport, VMMemoryImport, VMTableImport}; + +/// Resolved import pointers. +#[derive(Debug)] +pub struct Imports { + /// Resolved addresses for imported functions. + pub functions: BoxedSlice, + + /// Resolved addresses for imported tables. + pub tables: BoxedSlice, + + /// Resolved addresses for imported memories. + pub memories: BoxedSlice, + + /// Resolved addresses for imported globals. + pub globals: BoxedSlice, +} + +impl Imports { + /// Construct a new `Imports` instance. + pub fn new( + function_imports: PrimaryMap, + table_imports: PrimaryMap, + memory_imports: PrimaryMap, + global_imports: PrimaryMap, + ) -> Self { + Self { + functions: function_imports.into_boxed_slice(), + tables: table_imports.into_boxed_slice(), + memories: memory_imports.into_boxed_slice(), + globals: global_imports.into_boxed_slice(), + } + } + + /// Construct a new `Imports` instance with no imports. + pub fn none() -> Self { + Self { + functions: PrimaryMap::new().into_boxed_slice(), + tables: PrimaryMap::new().into_boxed_slice(), + memories: PrimaryMap::new().into_boxed_slice(), + globals: PrimaryMap::new().into_boxed_slice(), + } + } +} diff --git a/lib/runtime/src/instance.rs b/lib/runtime/src/instance.rs new file mode 100644 index 0000000000..046e62681b --- /dev/null +++ b/lib/runtime/src/instance.rs @@ -0,0 +1,256 @@ +//! 
An `Instance` contains all the runtime state used by execution of a wasm +//! module. + +use cranelift_entity::EntityRef; +use cranelift_entity::{BoxedSlice, PrimaryMap}; +use cranelift_wasm::{ + DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, +}; +use imports::Imports; +use memory::LinearMemory; +use sig_registry::SignatureRegistry; +use std::string::String; +use table::Table; +use vmcontext::{ + VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition, + VMTableDefinition, +}; +use wasmtime_environ::{DataInitializer, Module}; + +/// An Instance of a WebAssemby module. +#[derive(Debug)] +pub struct Instance { + /// WebAssembly linear memory data. + memories: BoxedSlice, + + /// WebAssembly table data. + tables: BoxedSlice, + + /// Function Signature IDs. + /// FIXME: This should be shared across instances rather than per-Instance. + sig_registry: SignatureRegistry, + + /// Resolved imports. + vmctx_imports: Imports, + + /// Table storage base address vector pointed to by vmctx. + vmctx_tables: BoxedSlice, + + /// Memory base address vector pointed to by vmctx. + vmctx_memories: BoxedSlice, + + /// WebAssembly global variable data. + vmctx_globals: BoxedSlice, + + /// Context pointer used by JIT code. + vmctx: VMContext, +} + +impl Instance { + /// Create a new `Instance`. + pub fn new( + module: &Module, + finished_functions: &BoxedSlice, + mut vmctx_imports: Imports, + data_initializers: &[DataInitializer], + ) -> Result { + let mut sig_registry = instantiate_signatures(module); + let mut memories = instantiate_memories(module, data_initializers)?; + let mut tables = instantiate_tables( + module, + finished_functions, + &vmctx_imports.functions, + &mut sig_registry, + ); + + let mut vmctx_memories = memories + .values_mut() + .map(LinearMemory::vmmemory) + .collect::>() + .into_boxed_slice(); + + let mut vmctx_globals = instantiate_globals(module); + + let mut vmctx_tables = tables + .values_mut() + .map(Table::vmtable) + .collect::>() + .into_boxed_slice(); + + let vmctx_imported_functions_ptr = vmctx_imports + .functions + .values_mut() + .into_slice() + .as_mut_ptr(); + let vmctx_imported_tables_ptr = vmctx_imports.tables.values_mut().into_slice().as_mut_ptr(); + let vmctx_imported_memories_ptr = vmctx_imports + .memories + .values_mut() + .into_slice() + .as_mut_ptr(); + let vmctx_imported_globals_ptr = + vmctx_imports.globals.values_mut().into_slice().as_mut_ptr(); + let vmctx_memories_ptr = vmctx_memories.values_mut().into_slice().as_mut_ptr(); + let vmctx_globals_ptr = vmctx_globals.values_mut().into_slice().as_mut_ptr(); + let vmctx_tables_ptr = vmctx_tables.values_mut().into_slice().as_mut_ptr(); + let vmctx_shared_signatures_ptr = sig_registry.vmshared_signatures(); + + Ok(Self { + memories, + tables, + sig_registry, + vmctx_imports, + vmctx_memories, + vmctx_globals, + vmctx_tables, + vmctx: VMContext::new( + vmctx_imported_functions_ptr, + vmctx_imported_tables_ptr, + vmctx_imported_memories_ptr, + vmctx_imported_globals_ptr, + vmctx_tables_ptr, + vmctx_memories_ptr, + vmctx_globals_ptr, + vmctx_shared_signatures_ptr, + ), + }) + } + + /// Return a reference to the vmctx used by JIT code. + pub fn vmctx(&self) -> &VMContext { + &self.vmctx + } + + /// Return a mutable reference to the vmctx used by JIT code. + pub fn vmctx_mut(&mut self) -> &mut VMContext { + &mut self.vmctx + } + + /// Return the offset from the vmctx pointer to its containing Instance. 
+ pub(crate) fn vmctx_offset() -> isize { + offset_of!(Self, vmctx) as isize + } + + /// Grow memory by the specified amount of pages. + /// + /// Returns `None` if memory can't be grown by the specified amount + /// of pages. + pub fn memory_grow(&mut self, memory_index: DefinedMemoryIndex, delta: u32) -> Option { + let result = self + .memories + .get_mut(memory_index) + .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) + .grow(delta); + + // Keep current the VMContext pointers used by JIT code. + self.vmctx_memories[memory_index] = self.memories[memory_index].vmmemory(); + + result + } + + /// Returns the number of allocated wasm pages. + pub fn memory_size(&mut self, memory_index: DefinedMemoryIndex) -> u32 { + self.memories + .get(memory_index) + .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index())) + .size() + } + + /// Test whether any of the objects inside this instance require signal + /// handlers to catch out of bounds accesses. + pub(crate) fn needs_signal_handlers(&self) -> bool { + self.memories + .values() + .any(|memory| memory.needs_signal_handlers) + } + + /// Return the number of imported memories. + pub(crate) fn num_imported_memories(&self) -> usize { + self.vmctx_imports.functions.len() + } +} + +fn instantiate_signatures(module: &Module) -> SignatureRegistry { + let mut sig_registry = SignatureRegistry::new(); + for (sig_index, sig) in module.signatures.iter() { + sig_registry.register(sig_index, sig); + } + sig_registry +} + +/// Allocate memory for just the tables of the current module. +fn instantiate_tables( + module: &Module, + finished_functions: &BoxedSlice, + imported_functions: &BoxedSlice, + sig_registry: &mut SignatureRegistry, +) -> BoxedSlice { + let num_imports = module.imported_memories.len(); + let mut tables: PrimaryMap = + PrimaryMap::with_capacity(module.table_plans.len() - num_imports); + for table in &module.table_plans.values().as_slice()[num_imports..] { + tables.push(Table::new(table)); + } + + for init in &module.table_elements { + debug_assert!(init.base.is_none(), "globalvar base not supported yet"); + let defined_table_index = module + .defined_table_index(init.table_index) + .expect("Initializers for imported tables not supported yet"); + let slice = tables[defined_table_index].as_mut(); + let subslice = &mut slice[init.offset..init.offset + init.elements.len()]; + for (i, func_idx) in init.elements.iter().enumerate() { + let callee_sig = module.functions[*func_idx]; + let func_ptr = if let Some(index) = module.defined_func_index(*func_idx) { + finished_functions[index] + } else { + imported_functions[*func_idx] + }; + let type_index = sig_registry.lookup(callee_sig); + subslice[i] = VMCallerCheckedAnyfunc { + func_ptr, + type_index, + }; + } + } + + tables.into_boxed_slice() +} + +/// Allocate memory for just the memories of the current module. +fn instantiate_memories( + module: &Module, + data_initializers: &[DataInitializer], +) -> Result, String> { + let num_imports = module.imported_memories.len(); + let mut memories: PrimaryMap = + PrimaryMap::with_capacity(module.memory_plans.len() - num_imports); + for plan in &module.memory_plans.values().as_slice()[num_imports..] 
{ + memories.push(LinearMemory::new(&plan)?); + } + + for init in data_initializers { + debug_assert!(init.base.is_none(), "globalvar base not supported yet"); + let defined_memory_index = module + .defined_memory_index(init.memory_index) + .expect("Initializers for imported memories not supported yet"); + let mem_mut = memories[defined_memory_index].as_mut(); + let to_init = &mut mem_mut[init.offset..init.offset + init.data.len()]; + to_init.copy_from_slice(init.data); + } + + Ok(memories.into_boxed_slice()) +} + +/// Allocate memory for just the globals of the current module, +/// without any initializers applied yet. +fn instantiate_globals(module: &Module) -> BoxedSlice { + let num_imports = module.imported_globals.len(); + let mut vmctx_globals = PrimaryMap::with_capacity(module.globals.len() - num_imports); + + for global in &module.globals.values().as_slice()[num_imports..] { + vmctx_globals.push(VMGlobalDefinition::new(global)); + } + + vmctx_globals.into_boxed_slice() +} diff --git a/lib/runtime/src/lib.rs b/lib/runtime/src/lib.rs new file mode 100644 index 0000000000..efe2746465 --- /dev/null +++ b/lib/runtime/src/lib.rs @@ -0,0 +1,70 @@ +//! Runtime library support for Wasmtime. + +#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)] +#![warn(unused_import_braces)] +#![cfg_attr(feature = "std", deny(unstable_features))] +#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] +#![cfg_attr( + feature = "cargo-clippy", + allow(clippy::new_without_default, clippy::new_without_default_derive) +)] +#![cfg_attr( + feature = "cargo-clippy", + warn( + clippy::float_arithmetic, + clippy::mut_mut, + clippy::nonminimal_bool, + clippy::option_map_unwrap_or, + clippy::option_map_unwrap_or_else, + clippy::print_stdout, + clippy::unicode_not_nfc, + clippy::use_self + ) +)] +#![cfg_attr(not(feature = "std"), no_std)] +#![cfg_attr(not(feature = "std"), feature(alloc))] + +extern crate cranelift_codegen; +extern crate cranelift_entity; +extern crate cranelift_wasm; +extern crate errno; +extern crate region; +extern crate wasmtime_environ; +#[cfg(not(feature = "std"))] +#[macro_use] +extern crate alloc; +#[macro_use] +extern crate lazy_static; +extern crate libc; +#[macro_use] +extern crate memoffset; +extern crate cast; + +mod imports; +mod instance; +mod memory; +mod mmap; +mod sig_registry; +mod signalhandlers; +mod table; +mod traphandlers; +mod vmcontext; + +pub mod libcalls; + +pub use imports::Imports; +pub use instance::Instance; +pub use mmap::Mmap; +pub use signalhandlers::{wasmtime_init_eager, wasmtime_init_finish}; +pub use traphandlers::wasmtime_call_trampoline; +pub use vmcontext::{ + VMContext, VMFunctionBody, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, + VMMemoryImport, VMTableDefinition, VMTableImport, +}; + +#[cfg(not(feature = "std"))] +mod std { + pub use alloc::{string, vec}; + pub use core::*; + pub use core::{i32, str, u32}; +} diff --git a/lib/runtime/src/libcalls.rs b/lib/runtime/src/libcalls.rs new file mode 100644 index 0000000000..789cbd0c73 --- /dev/null +++ b/lib/runtime/src/libcalls.rs @@ -0,0 +1,157 @@ +//! Runtime library calls. Note that the JIT may sometimes perform these inline +//! rather than calling them, particularly when CPUs have special instructions +//! which compute them directly. 
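By way of illustration, here is a minimal standalone sketch of the semantics the float libcalls below implement; it assumes only that the `wasmtime-runtime` crate declared above builds and exposes `pub mod libcalls` as in its lib.rs. The `*_nearest` calls round to the nearest integer with ties going to the even value, preserving the sign of zero, which is what the manual ceil/floor comparison in `wasmtime_f32_nearest` computes.

extern crate wasmtime_runtime;

use wasmtime_runtime::libcalls;

fn main() {
    // Ties round to the even integer (WebAssembly's roundTiesToEven).
    assert_eq!(libcalls::wasmtime_f32_nearest(2.5), 2.0);
    assert_eq!(libcalls::wasmtime_f32_nearest(3.5), 4.0);

    // The sign of zero is preserved: -0.5 rounds to -0.0, not +0.0.
    assert!(libcalls::wasmtime_f64_nearest(-0.5).is_sign_negative());

    // Non-ties round to the nearest integer; trunc rounds toward zero.
    assert_eq!(libcalls::wasmtime_f64_nearest(0.75), 1.0);
    assert_eq!(libcalls::wasmtime_f32_trunc(-1.7), -1.0);
}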
+ +use cranelift_wasm::{DefinedMemoryIndex, MemoryIndex}; +use vmcontext::VMContext; + +/// Implementation of f32.ceil +pub extern "C" fn wasmtime_f32_ceil(x: f32) -> f32 { + x.ceil() +} + +/// Implementation of f32.floor +pub extern "C" fn wasmtime_f32_floor(x: f32) -> f32 { + x.floor() +} + +/// Implementation of f32.trunc +pub extern "C" fn wasmtime_f32_trunc(x: f32) -> f32 { + x.trunc() +} + +/// Implementation of f32.nearest +#[allow(clippy::float_arithmetic, clippy::float_cmp)] +pub extern "C" fn wasmtime_f32_nearest(x: f32) -> f32 { + // Rust doesn't have a nearest function, so do it manually. + if x == 0.0 { + // Preserve the sign of zero. + x + } else { + // Nearest is either ceil or floor depending on which is nearest or even. + let u = x.ceil(); + let d = x.floor(); + let um = (x - u).abs(); + let dm = (x - d).abs(); + if um < dm + || (um == dm && { + let h = u / 2.; + h.floor() == h + }) + { + u + } else { + d + } + } +} + +/// Implementation of f64.ceil +pub extern "C" fn wasmtime_f64_ceil(x: f64) -> f64 { + x.ceil() +} + +/// Implementation of f64.floor +pub extern "C" fn wasmtime_f64_floor(x: f64) -> f64 { + x.floor() +} + +/// Implementation of f64.trunc +pub extern "C" fn wasmtime_f64_trunc(x: f64) -> f64 { + x.trunc() +} + +/// Implementation of f64.nearest +#[allow(clippy::float_arithmetic, clippy::float_cmp)] +pub extern "C" fn wasmtime_f64_nearest(x: f64) -> f64 { + // Rust doesn't have a nearest function, so do it manually. + if x == 0.0 { + // Preserve the sign of zero. + x + } else { + // Nearest is either ceil or floor depending on which is nearest or even. + let u = x.ceil(); + let d = x.floor(); + let um = (x - u).abs(); + let dm = (x - d).abs(); + if um < dm + || (um == dm && { + let h = u / 2.; + h.floor() == h + }) + { + u + } else { + d + } + } +} + +/// Implementation of memory.grow for locally-defined 32-bit memories. +#[no_mangle] +pub unsafe extern "C" fn wasmtime_memory32_grow( + delta: u32, + memory_index: u32, + vmctx: *mut VMContext, +) -> u32 { + let instance = (&mut *vmctx).instance(); + let memory_index = DefinedMemoryIndex::from_u32(memory_index); + + instance + .memory_grow(memory_index, delta) + .unwrap_or(u32::max_value()) +} + +/// Implementation of memory.grow for imported 32-bit memories. +#[no_mangle] +pub unsafe extern "C" fn wasmtime_imported_memory32_grow( + delta: u32, + memory_index: u32, + vmctx: *mut VMContext, +) -> u32 { + let instance = (&mut *vmctx).instance(); + assert!( + (memory_index as usize) < instance.num_imported_memories(), + "imported memory index for memory.grow out of bounds" + ); + + let memory_index = MemoryIndex::from_u32(memory_index); + let import = instance.vmctx_mut().imported_memory_mut(memory_index); + let foreign_instance = (&mut *import.vmctx).instance(); + let foreign_memory = &mut *import.from; + let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory); + + foreign_instance + .memory_grow(foreign_index, delta) + .unwrap_or(u32::max_value()) +} + +/// Implementation of memory.size for locally-defined 32-bit memories. +#[no_mangle] +pub unsafe extern "C" fn wasmtime_memory32_size(memory_index: u32, vmctx: *mut VMContext) -> u32 { + let instance = (&mut *vmctx).instance(); + let memory_index = DefinedMemoryIndex::from_u32(memory_index); + + instance.memory_size(memory_index) +} + +/// Implementation of memory.size for imported 32-bit memories. 
+#[no_mangle]
+pub unsafe extern "C" fn wasmtime_imported_memory32_size(
+    memory_index: u32,
+    vmctx: *mut VMContext,
+) -> u32 {
+    let instance = (&mut *vmctx).instance();
+    assert!(
+        (memory_index as usize) < instance.num_imported_memories(),
+        "imported memory index for memory.size out of bounds"
+    );
+
+    let memory_index = MemoryIndex::from_u32(memory_index);
+    let import = instance.vmctx_mut().imported_memory_mut(memory_index);
+    let foreign_instance = (&mut *import.vmctx).instance();
+    let foreign_memory = &mut *import.from;
+    let foreign_index = foreign_instance.vmctx().memory_index(foreign_memory);
+
+    foreign_instance.memory_size(foreign_index)
+}
diff --git a/lib/execute/src/memory.rs b/lib/runtime/src/memory.rs
similarity index 77%
rename from lib/execute/src/memory.rs
rename to lib/runtime/src/memory.rs
index 3c71cdfc1d..e84e649b9b 100644
--- a/lib/execute/src/memory.rs
+++ b/lib/runtime/src/memory.rs
@@ -5,20 +5,32 @@
 use mmap::Mmap;
 use region;
 use std::string::String;
-use vmcontext::VMMemory;
+use vmcontext::VMMemoryDefinition;
 use wasmtime_environ::{MemoryPlan, MemoryStyle, WASM_MAX_PAGES, WASM_PAGE_SIZE};
 
 /// A linear memory instance.
 #[derive(Debug)]
 pub struct LinearMemory {
+    // The underlying allocation.
     mmap: Mmap,
+
+    // The current logical size in wasm pages of this linear memory.
     current: u32,
+
+    // The optional maximum size in wasm pages of this linear memory.
     maximum: Option<u32>,
+
+    // Size in bytes of extra guard pages after the end to optimize loads and stores with
+    // constant offsets.
     offset_guard_size: usize,
+
+    // Records whether we're using a bounds-checking strategy which requires
+    // handlers to catch trapping accesses.
+    pub(crate) needs_signal_handlers: bool,
 }
 
 impl LinearMemory {
-    /// Create a new linear memory instance with specified minimum and maximum number of pages.
+    /// Create a new linear memory instance with specified minimum and maximum number of wasm pages.
     pub fn new(plan: &MemoryPlan) -> Result<Self, String> {
         // `maximum` cannot be set to more than `65536` pages.
         assert!(plan.memory.minimum <= WASM_MAX_PAGES);
@@ -26,6 +38,15 @@ impl LinearMemory {
 
         let offset_guard_bytes = plan.offset_guard_size as usize;
 
+        // If we have an offset guard, or if we're doing the static memory
+        // allocation strategy, we need signal handlers to catch out of bounds
+        // accesses.
+        let needs_signal_handlers = offset_guard_bytes > 0
+            || match plan.style {
+                MemoryStyle::Dynamic => false,
+                MemoryStyle::Static { .. } => true,
+            };
+
         let minimum_pages = match plan.style {
             MemoryStyle::Dynamic => plan.memory.minimum,
             MemoryStyle::Static { bound } => {
@@ -58,6 +79,7 @@ impl LinearMemory {
             current: plan.memory.minimum,
             maximum: plan.memory.maximum,
             offset_guard_size: offset_guard_bytes,
+            needs_signal_handlers,
         })
     }
 
@@ -66,10 +88,10 @@ impl LinearMemory {
         self.current
     }
 
-    /// Grow memory by the specified amount of pages.
+    /// Grow memory by the specified amount of wasm pages.
     ///
     /// Returns `None` if memory can't be grown by the specified amount
-    /// of pages.
+    /// of wasm pages.
     pub fn grow(&mut self, delta: u32) -> Option<u32> {
         let new_pages = match self.current.checked_add(delta) {
             Some(new_pages) => new_pages,
@@ -124,9 +146,12 @@ impl LinearMemory {
         Some(prev_pages)
     }
 
-    /// Return a `VMMemory` for exposing the memory to JIT code.
-    pub fn vmmemory(&mut self) -> VMMemory {
-        VMMemory::definition(self.mmap.as_mut_ptr(), self.mmap.len())
+    /// Return a `VMMemoryDefinition` for exposing the memory to JIT code.
+ pub fn vmmemory(&mut self) -> VMMemoryDefinition { + VMMemoryDefinition { + base: self.mmap.as_mut_ptr(), + current_length: self.mmap.len(), + } } } diff --git a/lib/execute/src/mmap.rs b/lib/runtime/src/mmap.rs similarity index 88% rename from lib/execute/src/mmap.rs rename to lib/runtime/src/mmap.rs index 29fb3ca75e..15bda5b5fe 100644 --- a/lib/execute/src/mmap.rs +++ b/lib/runtime/src/mmap.rs @@ -22,6 +22,7 @@ pub struct Mmap { } impl Mmap { + /// Construct a new empty instance of `Mmap`. pub fn new() -> Self { Self { ptr: ptr::null_mut(), @@ -55,6 +56,8 @@ impl Mmap { } } + /// Create a new `Mmap` pointing to at least `size` bytes of memory, + /// suitably sized and aligned for memory protection. #[cfg(target_os = "windows")] pub fn with_size(size: usize) -> Result { use winapi::um::memoryapi::VirtualAlloc; @@ -81,22 +84,27 @@ impl Mmap { } } + /// Return the allocated memory as a slice of u8. pub fn as_slice(&self) -> &[u8] { unsafe { slice::from_raw_parts(self.ptr, self.len) } } + /// Return the allocated memory as a mutable slice of u8. pub fn as_mut_slice(&mut self) -> &mut [u8] { unsafe { slice::from_raw_parts_mut(self.ptr, self.len) } } + /// Return the allocated memory as a pointer to u8. pub fn as_ptr(&self) -> *const u8 { self.ptr } + /// Return the allocated memory as a mutable pointer to u8. pub fn as_mut_ptr(&mut self) -> *mut u8 { self.ptr } + /// Return the lengthof the allocated memory. pub fn len(&self) -> usize { self.len } diff --git a/lib/execute/src/sig_registry.rs b/lib/runtime/src/sig_registry.rs similarity index 59% rename from lib/execute/src/sig_registry.rs rename to lib/runtime/src/sig_registry.rs index 18f624e8f5..ba1febda30 100644 --- a/lib/execute/src/sig_registry.rs +++ b/lib/runtime/src/sig_registry.rs @@ -6,47 +6,50 @@ use cranelift_codegen::ir; use cranelift_entity::PrimaryMap; use cranelift_wasm::SignatureIndex; use std::collections::{hash_map, HashMap}; -use vmcontext::VMSignatureId; +use vmcontext::VMSharedSignatureIndex; #[derive(Debug)] pub struct SignatureRegistry { - signature_hash: HashMap, - signature_ids: PrimaryMap, + signature_hash: HashMap, + shared_signatures: PrimaryMap, } impl SignatureRegistry { pub fn new() -> Self { Self { signature_hash: HashMap::new(), - signature_ids: PrimaryMap::new(), + shared_signatures: PrimaryMap::new(), } } - pub fn vmsignature_ids(&mut self) -> *mut VMSignatureId { - self.signature_ids.values_mut().into_slice().as_mut_ptr() + pub fn vmshared_signatures(&mut self) -> *mut VMSharedSignatureIndex { + self.shared_signatures + .values_mut() + .into_slice() + .as_mut_ptr() } /// Register the given signature. pub fn register(&mut self, sig_index: SignatureIndex, sig: &ir::Signature) { // TODO: Refactor this interface so that we're not passing in redundant // information. - debug_assert_eq!(sig_index.index(), self.signature_ids.len()); + debug_assert_eq!(sig_index.index(), self.shared_signatures.len()); use cranelift_entity::EntityRef; let len = self.signature_hash.len(); let sig_id = match self.signature_hash.entry(sig.clone()) { hash_map::Entry::Occupied(entry) => *entry.get(), hash_map::Entry::Vacant(entry) => { - let sig_id = cast::u32(len).unwrap(); + let sig_id = VMSharedSignatureIndex::new(cast::u32(len).unwrap()); entry.insert(sig_id); sig_id } }; - self.signature_ids.push(sig_id); + self.shared_signatures.push(sig_id); } /// Return the identifying runtime index for the given signature. 
- pub fn lookup(&mut self, sig_index: SignatureIndex) -> VMSignatureId { - self.signature_ids[sig_index] + pub fn lookup(&mut self, sig_index: SignatureIndex) -> VMSharedSignatureIndex { + self.shared_signatures[sig_index] } } diff --git a/lib/execute/src/signalhandlers.rs b/lib/runtime/src/signalhandlers.rs similarity index 62% rename from lib/execute/src/signalhandlers.rs rename to lib/runtime/src/signalhandlers.rs index 9ce155c8ff..cfbdb7714c 100644 --- a/lib/execute/src/signalhandlers.rs +++ b/lib/runtime/src/signalhandlers.rs @@ -5,7 +5,9 @@ #![allow(non_snake_case)] use std::borrow::{Borrow, BorrowMut}; +use std::cell::RefCell; use std::sync::RwLock; +use vmcontext::VMContext; include!(concat!(env!("OUT_DIR"), "/signalhandlers.rs")); @@ -36,7 +38,8 @@ lazy_static! { /// called at the end of the startup process, after other handlers have been /// installed. This function can thus be called multiple times, having no effect /// after the first call. -pub fn ensure_eager_signal_handlers() { +#[no_mangle] +pub extern "C" fn wasmtime_init_eager() { let mut locked = EAGER_INSTALL_STATE.write().unwrap(); let state = locked.borrow_mut(); @@ -54,6 +57,49 @@ pub fn ensure_eager_signal_handlers() { state.success = true; } +thread_local! { + static TRAP_CONTEXT: RefCell = RefCell::new(TrapContext { triedToInstallSignalHandlers: false, haveSignalHandlers: false }); +} + +/// Assuming `EnsureEagerProcessSignalHandlers` has already been called, +/// this function performs the full installation of signal handlers which must +/// be performed per-thread. This operation may incur some overhead and +/// so should be done only when needed to use wasm. +#[no_mangle] +pub extern "C" fn wasmtime_init_finish(vmctx: &mut VMContext) { + if !TRAP_CONTEXT.with(|cx| cx.borrow().triedToInstallSignalHandlers) { + TRAP_CONTEXT.with(|cx| { + cx.borrow_mut().triedToInstallSignalHandlers = true; + assert!(!cx.borrow().haveSignalHandlers); + }); + + { + let locked = EAGER_INSTALL_STATE.read().unwrap(); + let state = locked.borrow(); + assert!( + state.tried, + "call wasmtime_init_eager before calling wasmtime_init_finish" + ); + if !state.success { + return; + } + } + + #[cfg(any(target_os = "macos", target_os = "ios"))] + ensure_darwin_mach_ports(); + + TRAP_CONTEXT.with(|cx| { + cx.borrow_mut().haveSignalHandlers = true; + }) + } + + let instance = unsafe { vmctx.instance() }; + let have_signal_handlers = TRAP_CONTEXT.with(|cx| cx.borrow().haveSignalHandlers); + if !have_signal_handlers && instance.needs_signal_handlers() { + panic!("failed to install signal handlers"); + } +} + #[cfg(any(target_os = "macos", target_os = "ios"))] fn ensure_darwin_mach_ports() { let mut locked = LAZY_INSTALL_STATE.write().unwrap(); @@ -72,30 +118,3 @@ fn ensure_darwin_mach_ports() { state.success = true; } - -/// Assuming `EnsureEagerProcessSignalHandlers` has already been called, -/// this function performs the full installation of signal handlers which must -/// be performed per-thread. This operation may incur some overhead and -/// so should be done only when needed to use wasm. 
-pub fn ensure_full_signal_handlers(cx: &mut TrapContext) { - if cx.triedToInstallSignalHandlers { - return; - } - - cx.triedToInstallSignalHandlers = true; - assert!(!cx.haveSignalHandlers); - - { - let locked = EAGER_INSTALL_STATE.read().unwrap(); - let state = locked.borrow(); - assert!(state.tried); - if !state.success { - return; - } - } - - #[cfg(any(target_os = "macos", target_os = "ios"))] - ensure_darwin_mach_ports(); - - cx.haveSignalHandlers = true; -} diff --git a/lib/execute/src/table.rs b/lib/runtime/src/table.rs similarity index 81% rename from lib/execute/src/table.rs rename to lib/runtime/src/table.rs index 1338b3b9c6..e25aa035f2 100644 --- a/lib/execute/src/table.rs +++ b/lib/runtime/src/table.rs @@ -3,7 +3,7 @@ //! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories. use cranelift_wasm::TableElementType; -use vmcontext::{VMCallerCheckedAnyfunc, VMTable}; +use vmcontext::{VMCallerCheckedAnyfunc, VMTableDefinition}; use wasmtime_environ::{TablePlan, TableStyle}; /// A table instance. @@ -39,9 +39,12 @@ impl Table { } } - /// Return a `VMTable` for exposing the table to JIT code. - pub fn vmtable(&mut self) -> VMTable { - VMTable::definition(self.vec.as_mut_ptr() as *mut u8, self.vec.len()) + /// Return a `VMTableDefinition` for exposing the table to JIT code. + pub fn vmtable(&mut self) -> VMTableDefinition { + VMTableDefinition { + base: self.vec.as_mut_ptr() as *mut u8, + current_elements: self.vec.len(), + } } } diff --git a/lib/execute/src/traphandlers.rs b/lib/runtime/src/traphandlers.rs similarity index 82% rename from lib/execute/src/traphandlers.rs rename to lib/runtime/src/traphandlers.rs index 405ea6b780..d357fd09c6 100644 --- a/lib/execute/src/traphandlers.rs +++ b/lib/runtime/src/traphandlers.rs @@ -7,6 +7,7 @@ use std::cell::{Cell, RefCell}; use std::mem; use std::ptr; use std::string::String; +use vmcontext::{VMContext, VMFunctionBody}; // Currently we uset setjmp/longjmp to unwind out of a signal handler // and back to the point where WebAssembly was called (via `call_wasm`). @@ -81,22 +82,28 @@ impl Drop for ScopeGuard { } } -/// Call the wasm function poined to by `f`. -pub fn call_wasm(f: F) -> Result<(), String> -where - F: FnOnce(), -{ +/// Call the wasm function pointed to by `callee`. `values_vec` points to +/// a buffer which holds the incoming arguments, and to which the outgoing +/// return values will be written. +#[no_mangle] +pub unsafe extern "C" fn wasmtime_call_trampoline( + callee: *const VMFunctionBody, + values_vec: *mut u8, + vmctx: *mut VMContext, +) -> Result<(), String> { // In case wasm code calls Rust that panics and unwinds past this point, // ensure that JMP_BUFS is unwound to its incoming state. let _guard = ScopeGuard::new(); + let func: fn(*mut u8, *mut VMContext) = mem::transmute(callee); + JMP_BUFS.with(|bufs| { - let mut buf = unsafe { mem::uninitialized() }; - if unsafe { setjmp(&mut buf) } != 0 { + let mut buf = mem::uninitialized(); + if setjmp(&mut buf) != 0 { return TRAP_DATA.with(|data| Err(format!("wasm trap at {:?}", data.get().pc))); } bufs.borrow_mut().push(buf); - f(); + func(values_vec, vmctx); Ok(()) }) } diff --git a/lib/execute/src/vmcontext.rs b/lib/runtime/src/vmcontext.rs similarity index 55% rename from lib/execute/src/vmcontext.rs rename to lib/runtime/src/vmcontext.rs index b8c08b5dd9..62fb3cf990 100644 --- a/lib/execute/src/vmcontext.rs +++ b/lib/runtime/src/vmcontext.rs @@ -2,10 +2,12 @@ //! fields that JIT code accesses directly. 
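The structs in this file are `#[repr(C)]` because JIT-generated code reaches into them with raw byte offsets (the offsets described by `wasmtime_environ::VMOffsets`), which is exactly what the `offset_of!` tests below pin down. A rough sketch of that access pattern, assuming only the `VMMemoryDefinition` re-export shown in lib.rs above and its public `base`/`current_length` fields:

extern crate wasmtime_runtime;

use wasmtime_runtime::VMMemoryDefinition;

fn main() {
    let mut heap = vec![0u8; 65536];
    let def = VMMemoryDefinition {
        base: heap.as_mut_ptr(),
        current_length: heap.len(),
    };

    // Generated code never sees the Rust type; it adds a known byte offset to
    // a raw pointer and loads, much like the loads it performs against vmctx
    // fields. Compute the same offset here and read through it.
    let struct_addr = &def as *const VMMemoryDefinition as usize;
    let field_addr = &def.current_length as *const usize as usize;
    let byte_offset = field_addr - struct_addr;

    let loaded = unsafe { *((struct_addr + byte_offset) as *const usize) };
    assert_eq!(loaded, heap.len());
}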
use cranelift_entity::EntityRef; -use cranelift_wasm::{Global, GlobalIndex, GlobalInit, MemoryIndex, TableIndex}; +use cranelift_wasm::{ + DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, Global, GlobalIndex, + GlobalInit, MemoryIndex, TableIndex, +}; use instance::Instance; -use std::fmt; -use std::ptr; +use std::{mem, ptr, u32}; /// A placeholder byte-sized type which is just used to provide some amount of type /// safety when dealing with pointers to JIT-compiled function bodies. Note that it's @@ -25,6 +27,107 @@ mod test_vmfunction_body { } } +/// The fields a JIT needs to access to utilize a WebAssembly table +/// imported from another instance. +#[derive(Debug, Copy, Clone)] +#[repr(C)] +pub struct VMTableImport { + /// A pointer to the imported table description. + pub from: *mut VMTableDefinition, + + /// A pointer to the VMContext that owns the table description. + pub vmctx: *mut VMContext, +} + +#[cfg(test)] +mod test_vmtable_import { + use super::VMTableImport; + use std::mem::size_of; + use wasmtime_environ::VMOffsets; + + #[test] + fn check_vmtable_import_offsets() { + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + assert_eq!( + size_of::(), + usize::from(offsets.size_of_vmtable_import()) + ); + assert_eq!( + offset_of!(VMTableImport, from), + usize::from(offsets.vmtable_import_from()) + ); + assert_eq!( + offset_of!(VMTableImport, vmctx), + usize::from(offsets.vmtable_import_vmctx()) + ); + } +} + +/// The fields a JIT needs to access to utilize a WebAssembly linear +/// memory imported from another instance. +#[derive(Debug, Copy, Clone)] +#[repr(C)] +pub struct VMMemoryImport { + /// A pointer to the imported memory description. + pub from: *mut VMMemoryDefinition, + + /// A pointer to the VMContext that owns the memory description. + pub vmctx: *mut VMContext, +} + +#[cfg(test)] +mod test_vmmemory_import { + use super::VMMemoryImport; + use std::mem::size_of; + use wasmtime_environ::VMOffsets; + + #[test] + fn check_vmmemory_import_offsets() { + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + assert_eq!( + size_of::(), + usize::from(offsets.size_of_vmmemory_import()) + ); + assert_eq!( + offset_of!(VMMemoryImport, from), + usize::from(offsets.vmmemory_import_from()) + ); + assert_eq!( + offset_of!(VMMemoryImport, vmctx), + usize::from(offsets.vmmemory_import_vmctx()) + ); + } +} + +/// The fields a JIT needs to access to utilize a WebAssembly global +/// variable imported from another instance. +#[derive(Debug, Copy, Clone)] +#[repr(C)] +pub struct VMGlobalImport { + /// A pointer to the imported global variable description. + pub from: *mut VMGlobalDefinition, +} + +#[cfg(test)] +mod test_vmglobal_import { + use super::VMGlobalImport; + use std::mem::size_of; + use wasmtime_environ::VMOffsets; + + #[test] + fn check_vmglobal_import_offsets() { + let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); + assert_eq!( + size_of::(), + usize::from(offsets.size_of_vmglobal_import()) + ); + assert_eq!( + offset_of!(VMGlobalImport, from), + usize::from(offsets.vmglobal_import_from()) + ); + } +} + /// The fields a JIT needs to access to utilize a WebAssembly linear /// memory defined within the instance, namely the start address and the /// size in bytes. @@ -32,9 +135,10 @@ mod test_vmfunction_body { #[repr(C)] pub struct VMMemoryDefinition { /// The start address. - base: *mut u8, - /// The current size of linear memory in bytes. 
- current_length: usize, + pub base: *mut u8, + + /// The current logical size of this linear memory in bytes. + pub current_length: usize, } #[cfg(test)] @@ -58,104 +162,51 @@ mod test_vmmemory_definition { offset_of!(VMMemoryDefinition, current_length), usize::from(offsets.vmmemory_definition_current_length()) ); + /* TODO: Assert that the size of `current_length` matches. + assert_eq!( + size_of::(), + usize::from(offsets.size_of_vmmemory_definition_current_length()) + ); + */ } } -/// The fields a JIT needs to access to utilize a WebAssembly linear -/// memory imported from another instance. +/// The fields a JIT needs to access to utilize a WebAssembly table +/// defined within the instance. #[derive(Debug, Copy, Clone)] #[repr(C)] -pub struct VMMemoryImport { - /// A pointer to the imported memory description. - from: *mut VMMemoryDefinition, +pub struct VMTableDefinition { + /// Pointer to the table data. + pub base: *mut u8, + + /// The current number of elements in the table. + pub current_elements: usize, } #[cfg(test)] -mod test_vmmemory_import { - use super::VMMemoryImport; +mod test_vmtable_definition { + use super::VMTableDefinition; use std::mem::size_of; use wasmtime_environ::VMOffsets; #[test] - fn check_vmmemory_import_offsets() { + fn check_vmtable_definition_offsets() { let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); assert_eq!( - size_of::(), - usize::from(offsets.size_of_vmmemory_import()) + size_of::(), + usize::from(offsets.size_of_vmtable_definition()) ); assert_eq!( - offset_of!(VMMemoryImport, from), - usize::from(offsets.vmmemory_import_from()) + offset_of!(VMTableDefinition, base), + usize::from(offsets.vmtable_definition_base()) ); - } -} - -/// The main fields a JIT needs to access to utilize a WebAssembly linear -/// memory. It must know whether the memory is defined within the instance -/// or imported. -#[repr(C)] -pub union VMMemory { - /// A linear memory defined within the instance. - definition: VMMemoryDefinition, - - /// An imported linear memory. - import: VMMemoryImport, -} - -#[cfg(test)] -mod test_vmmemory { - use super::VMMemory; - use std::mem::size_of; - use wasmtime_environ::VMOffsets; - - #[test] - fn check_vmmemory_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); assert_eq!( - size_of::(), - usize::from(offsets.size_of_vmmemory()) + offset_of!(VMTableDefinition, current_elements), + usize::from(offsets.vmtable_definition_current_elements()) ); } } -impl VMMemory { - /// Construct a `VMMemoryDefinition` variant of `VMMemory`. - pub fn definition(base: *mut u8, current_length: usize) -> Self { - Self { - definition: VMMemoryDefinition { - base, - current_length, - }, - } - } - - /// Construct a `VMMemoryImmport` variant of `VMMemory`. - pub fn import(from: *mut VMMemoryDefinition) -> Self { - Self { - import: VMMemoryImport { from }, - } - } - - /// Get the underlying `VMMemoryDefinition`. - pub unsafe fn get_definition(&mut self, is_import: bool) -> &mut VMMemoryDefinition { - if is_import { - &mut *self.import.from - } else { - &mut self.definition - } - } -} - -impl fmt::Debug for VMMemory { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "VMMemory {{")?; - write!(f, " definition: {:?},", unsafe { self.definition })?; - write!(f, " import: {:?},", unsafe { self.import })?; - write!(f, "}}")?; - Ok(()) - } -} - /// The storage for a WebAssembly global defined within the instance. 
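The offset tests exist because JIT-compiled code reaches these fields through raw byte offsets computed by `VMOffsets`, not through Rust field access. A sketch of that addressing for `current_length`, mirrored in Rust; the helper itself is hypothetical and only restates what the `offset_of!` assertions above already guarantee.

    use std::mem::size_of;
    use wasmtime_environ::VMOffsets;
    use wasmtime_runtime::VMMemoryDefinition;

    /// Illustrative only: mirror the constant-offset load that generated code
    /// emits when it reads a memory's current length through a raw pointer.
    unsafe fn current_length_via_offsets(def: *const VMMemoryDefinition) -> usize {
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8);
        let field = (def as *const u8)
            .add(usize::from(offsets.vmmemory_definition_current_length()));
        // The offset tests guarantee this matches `(*def).current_length`.
        *(field as *const usize)
    }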
/// /// TODO: Pack the globals more densely, rather than using the same size @@ -192,277 +243,118 @@ mod test_vmglobal_definition { } impl VMGlobalDefinition { + /// Construct a `VMGlobalDefinition`. + pub fn new(global: &Global) -> Self { + let mut result = Self { storage: [0; 8] }; + match global.initializer { + GlobalInit::I32Const(x) => *unsafe { result.as_i32_mut() } = x, + GlobalInit::I64Const(x) => *unsafe { result.as_i64_mut() } = x, + GlobalInit::F32Const(x) => *unsafe { result.as_f32_bits_mut() } = x, + GlobalInit::F64Const(x) => *unsafe { result.as_f64_bits_mut() } = x, + GlobalInit::GetGlobal(_x) => unimplemented!("globals init with get_global"), + GlobalInit::Import => panic!("attempting to initialize imported global"), + } + result + } + + /// Return a reference to the value as an i32. #[allow(clippy::cast_ptr_alignment)] - pub unsafe fn as_i32(&mut self) -> &mut i32 { + pub unsafe fn as_i32(&self) -> &i32 { + &*(self.storage.as_ref().as_ptr() as *const u8 as *const i32) + } + + /// Return a mutable reference to the value as an i32. + #[allow(clippy::cast_ptr_alignment)] + pub unsafe fn as_i32_mut(&mut self) -> &mut i32 { &mut *(self.storage.as_mut().as_mut_ptr() as *mut u8 as *mut i32) } + /// Return a reference to the value as an i64. #[allow(clippy::cast_ptr_alignment)] - pub unsafe fn as_i64(&mut self) -> &mut i64 { + pub unsafe fn as_i64(&self) -> &i64 { + &*(self.storage.as_ref().as_ptr() as *const u8 as *const i64) + } + + /// Return a mutable reference to the value as an i64. + #[allow(clippy::cast_ptr_alignment)] + pub unsafe fn as_i64_mut(&mut self) -> &mut i64 { &mut *(self.storage.as_mut().as_mut_ptr() as *mut u8 as *mut i64) } + /// Return a reference to the value as an f32. #[allow(clippy::cast_ptr_alignment)] - pub unsafe fn as_f32(&mut self) -> &mut f32 { + pub unsafe fn as_f32(&self) -> &f32 { + &*(self.storage.as_ref().as_ptr() as *const u8 as *const f32) + } + + /// Return a mutable reference to the value as an f32. + #[allow(clippy::cast_ptr_alignment)] + pub unsafe fn as_f32_mut(&mut self) -> &mut f32 { &mut *(self.storage.as_mut().as_mut_ptr() as *mut u8 as *mut f32) } + /// Return a reference to the value as f32 bits. #[allow(clippy::cast_ptr_alignment)] - pub unsafe fn as_f32_bits(&mut self) -> &mut u32 { + pub unsafe fn as_f32_bits(&self) -> &u32 { + &*(self.storage.as_ref().as_ptr() as *const u8 as *const u32) + } + + /// Return a mutable reference to the value as f32 bits. + #[allow(clippy::cast_ptr_alignment)] + pub unsafe fn as_f32_bits_mut(&mut self) -> &mut u32 { &mut *(self.storage.as_mut().as_mut_ptr() as *mut u8 as *mut u32) } + /// Return a reference to the value as an f64. #[allow(clippy::cast_ptr_alignment)] - pub unsafe fn as_f64(&mut self) -> &mut f64 { + pub unsafe fn as_f64(&self) -> &f64 { + &*(self.storage.as_ref().as_ptr() as *const u8 as *const f64) + } + + /// Return a mutable reference to the value as an f64. + #[allow(clippy::cast_ptr_alignment)] + pub unsafe fn as_f64_mut(&mut self) -> &mut f64 { &mut *(self.storage.as_mut().as_mut_ptr() as *mut u8 as *mut f64) } + /// Return a reference to the value as f64 bits. #[allow(clippy::cast_ptr_alignment)] - pub unsafe fn as_f64_bits(&mut self) -> &mut u64 { + pub unsafe fn as_f64_bits(&self) -> &u64 { + &*(self.storage.as_ref().as_ptr() as *const u8 as *const u64) + } + + /// Return a mutable reference to the value as f64 bits. 
+ #[allow(clippy::cast_ptr_alignment)] + pub unsafe fn as_f64_bits_mut(&mut self) -> &mut u64 { &mut *(self.storage.as_mut().as_mut_ptr() as *mut u8 as *mut u64) } } -/// The fields a JIT needs to access to utilize a WebAssembly global -/// variable imported from another instance. -#[derive(Debug, Copy, Clone)] +/// An index into the shared signature registry, usable for checking signatures +/// at indirect calls. #[repr(C)] -pub struct VMGlobalImport { - /// A pointer to the imported global variable description. - from: *mut VMGlobalDefinition, -} +#[derive(Debug, Eq, PartialEq, Clone, Copy)] +pub struct VMSharedSignatureIndex(u32); #[cfg(test)] -mod test_vmglobal_import { - use super::VMGlobalImport; +mod test_vmshared_signature_index { + use super::VMSharedSignatureIndex; use std::mem::size_of; use wasmtime_environ::VMOffsets; #[test] - fn check_vmglobal_import_offsets() { + fn check_vmshared_signature_index() { let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); assert_eq!( - size_of::(), - usize::from(offsets.size_of_vmglobal_import()) - ); - assert_eq!( - offset_of!(VMGlobalImport, from), - usize::from(offsets.vmglobal_import_from()) + size_of::(), + usize::from(offsets.size_of_vmshared_signature_index()) ); } } -/// The main fields a JIT needs to access to utilize a WebAssembly global -/// variable. It must know whether the global variable is defined within the -/// instance or imported. -#[repr(C)] -pub union VMGlobal { - /// A global variable defined within the instance. - definition: VMGlobalDefinition, - - /// An imported global variable. - import: VMGlobalImport, -} - -#[cfg(test)] -mod test_vmglobal { - use super::VMGlobal; - use std::mem::size_of; - use wasmtime_environ::VMOffsets; - - #[test] - fn check_vmglobal_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); - assert_eq!( - size_of::(), - usize::from(offsets.size_of_vmglobal()) - ); - } -} - -impl VMGlobal { - /// Construct a `VMGlobalDefinition` variant of `VMGlobal`. - pub fn definition(global: &Global) -> Self { - let mut result = VMGlobalDefinition { storage: [0; 8] }; - match global.initializer { - GlobalInit::I32Const(x) => *unsafe { result.as_i32() } = x, - GlobalInit::I64Const(x) => *unsafe { result.as_i64() } = x, - GlobalInit::F32Const(x) => *unsafe { result.as_f32_bits() } = x, - GlobalInit::F64Const(x) => *unsafe { result.as_f64_bits() } = x, - GlobalInit::GetGlobal(_x) => unimplemented!("globals init with get_global"), - GlobalInit::Import => panic!("attempting to initialize imported global"), - } - Self { definition: result } - } - - /// Construct a `VMGlobalImmport` variant of `VMGlobal`. - pub fn import(from: *mut VMGlobalDefinition) -> Self { - Self { - import: VMGlobalImport { from }, - } - } - - /// Get the underlying `VMGlobalDefinition`. - pub unsafe fn get_definition(&mut self, is_import: bool) -> &mut VMGlobalDefinition { - if is_import { - &mut *self.import.from - } else { - &mut self.definition - } - } -} - -impl fmt::Debug for VMGlobal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "VMGlobal {{")?; - write!(f, " definition: {:?},", unsafe { self.definition })?; - write!(f, " import: {:?},", unsafe { self.import })?; - write!(f, "}}")?; - Ok(()) - } -} - -/// The fields a JIT needs to access to utilize a WebAssembly table -/// defined within the instance. 
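With the unions gone, a locally-defined global is a plain `VMGlobalDefinition`, constructed from its `Global` declaration and read through the new split shared/mutable accessors. A small illustrative round trip, assuming the type is re-exported from `wasmtime_runtime` as in the spectest changes further down:

    use cranelift_codegen::ir::types;
    use cranelift_wasm::{Global, GlobalInit};
    use wasmtime_runtime::VMGlobalDefinition;

    /// Illustrative round trip through the new constructor and accessors.
    fn global_roundtrip() {
        let global = Global {
            ty: types::I32,
            mutability: true,
            initializer: GlobalInit::I32Const(7),
        };
        let mut def = VMGlobalDefinition::new(&global);
        unsafe {
            assert_eq!(*def.as_i32(), 7);
            *def.as_i32_mut() += 1;
            assert_eq!(*def.as_i32(), 8);
        }
    }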
-#[derive(Debug, Copy, Clone)] -#[repr(C)] -pub struct VMTableDefinition { - base: *mut u8, - current_elements: usize, -} - -#[cfg(test)] -mod test_vmtable_definition { - use super::VMTableDefinition; - use std::mem::size_of; - use wasmtime_environ::VMOffsets; - - #[test] - fn check_vmtable_definition_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); - assert_eq!( - size_of::(), - usize::from(offsets.size_of_vmtable_definition()) - ); - assert_eq!( - offset_of!(VMTableDefinition, base), - usize::from(offsets.vmtable_definition_base()) - ); - assert_eq!( - offset_of!(VMTableDefinition, current_elements), - usize::from(offsets.vmtable_definition_current_elements()) - ); - } -} - -/// The fields a JIT needs to access to utilize a WebAssembly table -/// imported from another instance. -#[derive(Debug, Copy, Clone)] -#[repr(C)] -pub struct VMTableImport { - /// A pointer to the imported table description. - from: *mut VMTableDefinition, -} - -#[cfg(test)] -mod test_vmtable_import { - use super::VMTableImport; - use std::mem::size_of; - use wasmtime_environ::VMOffsets; - - #[test] - fn check_vmtable_import_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); - assert_eq!( - size_of::(), - usize::from(offsets.size_of_vmtable_import()) - ); - assert_eq!( - offset_of!(VMTableImport, from), - usize::from(offsets.vmtable_import_from()) - ); - } -} - -/// The main fields a JIT needs to access to utilize a WebAssembly table. -/// It must know whether the table is defined within the instance -/// or imported. -#[repr(C)] -pub union VMTable { - /// A table defined within the instance. - definition: VMTableDefinition, - - /// An imported table. - import: VMTableImport, -} - -#[cfg(test)] -mod test_vmtable { - use super::VMTable; - use std::mem::size_of; - use wasmtime_environ::VMOffsets; - - #[test] - fn check_vmtable_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); - assert_eq!(size_of::(), usize::from(offsets.size_of_vmtable())); - } -} - -impl VMTable { - /// Construct a `VMTableDefinition` variant of `VMTable`. - pub fn definition(base: *mut u8, current_elements: usize) -> Self { - Self { - definition: VMTableDefinition { - base, - current_elements, - }, - } - } - - /// Construct a `VMTableImmport` variant of `VMTable`. - pub fn import(from: *mut VMTableDefinition) -> Self { - Self { - import: VMTableImport { from }, - } - } - - /// Get the underlying `VMTableDefinition`. - pub unsafe fn get_definition(&mut self, is_import: bool) -> &mut VMTableDefinition { - if is_import { - &mut *self.import.from - } else { - &mut self.definition - } - } -} - -impl fmt::Debug for VMTable { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "VMTable {{")?; - write!(f, " definition: {:?},", unsafe { self.definition })?; - write!(f, " import: {:?},", unsafe { self.import })?; - write!(f, "}}")?; - Ok(()) - } -} - -/// The type of the `type_id` field in `VMCallerCheckedAnyfunc`. 
-pub type VMSignatureId = u32; - -#[cfg(test)] -mod test_vmsignature_id { - use super::VMSignatureId; - use std::mem::size_of; - use wasmtime_environ::VMOffsets; - - #[test] - fn check_vmcaller_checked_anyfunc_offsets() { - let offsets = VMOffsets::new(size_of::<*mut u8>() as u8); - assert_eq!( - size_of::(), - usize::from(offsets.size_of_vmsignature_id()) - ); +impl VMSharedSignatureIndex { + pub fn new(value: u32) -> Self { + VMSharedSignatureIndex(value) } } @@ -473,7 +365,7 @@ mod test_vmsignature_id { #[repr(C)] pub struct VMCallerCheckedAnyfunc { pub func_ptr: *const VMFunctionBody, - pub type_id: VMSignatureId, + pub type_index: VMSharedSignatureIndex, // If more elements are added here, remember to add offset_of tests below! } @@ -495,8 +387,8 @@ mod test_vmcaller_checked_anyfunc { usize::from(offsets.vmcaller_checked_anyfunc_func_ptr()) ); assert_eq!( - offset_of!(VMCallerCheckedAnyfunc, type_id), - usize::from(offsets.vmcaller_checked_anyfunc_type_id()) + offset_of!(VMCallerCheckedAnyfunc, type_index), + usize::from(offsets.vmcaller_checked_anyfunc_type_index()) ); } } @@ -505,7 +397,7 @@ impl Default for VMCallerCheckedAnyfunc { fn default() -> Self { Self { func_ptr: ptr::null_mut(), - type_id: 0, + type_index: VMSharedSignatureIndex::new(u32::MAX), } } } @@ -520,16 +412,32 @@ impl Default for VMCallerCheckedAnyfunc { #[derive(Debug)] #[repr(C)] pub struct VMContext { - /// A pointer to an array of `VMMemory` instances, indexed by - /// WebAssembly memory index. - memories: *mut VMMemory, - /// A pointer to an array of globals. - globals: *mut VMGlobal, - /// A pointer to an array of `VMTable` instances, indexed by - /// WebAssembly table index. - tables: *mut VMTable, + /// A pointer to an array of `*const VMFunctionBody` instances, indexed by `FuncIndex`. + imported_functions: *const *const VMFunctionBody, + + /// A pointer to an array of `VMTableImport` instances, indexed by `TableIndex`. + imported_tables: *mut VMTableImport, + + /// A pointer to an array of `VMMemoryImport` instances, indexed by `MemoryIndex`. + imported_memories: *mut VMMemoryImport, + + /// A pointer to an array of `VMGlobalImport` instances, indexed by `GlobalIndex`. + imported_globals: *mut VMGlobalImport, + + /// A pointer to an array of locally-defined `VMTableDefinition` instances, + /// indexed by `DefinedTableIndex`. + tables: *mut VMTableDefinition, + + /// A pointer to an array of locally-defined `VMMemoryDefinition` instances, + /// indexed by `DefinedMemoryIndex`. + memories: *mut VMMemoryDefinition, + + /// A pointer to an array of locally-defined `VMGlobalDefinition` instances, + /// indexed by `DefinedGlobalIndex`. + globals: *mut VMGlobalDefinition, + /// Signature identifiers for signature-checking indirect calls. - signature_ids: *mut u32, + signature_ids: *mut VMSharedSignatureIndex, // If more elements are added here, remember to add offset_of tests below! } @@ -565,32 +473,90 @@ mod test { impl VMContext { /// Create a new `VMContext` instance. 
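`VMSharedSignatureIndex` and the renamed `type_index` field carry the data for the signature check at `call_indirect` sites. The generated code performs this comparison inline; the function below is only a sketch of its shape, with the import paths assumed rather than taken from this patch:

    use wasmtime_runtime::{VMCallerCheckedAnyfunc, VMFunctionBody, VMSharedSignatureIndex};

    /// Roughly the shape of the check performed against a table entry: reject
    /// uninitialized entries, then require the registered signature index to
    /// equal the caller's expected index before taking the function pointer.
    fn check_indirect_callee(
        anyfunc: &VMCallerCheckedAnyfunc,
        expected: VMSharedSignatureIndex,
    ) -> Result<*const VMFunctionBody, &'static str> {
        if anyfunc.func_ptr.is_null() {
            return Err("uninitialized table entry");
        }
        if anyfunc.type_index != expected {
            return Err("indirect call type mismatch");
        }
        Ok(anyfunc.func_ptr)
    }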
pub fn new( - memories: *mut VMMemory, - globals: *mut VMGlobal, - tables: *mut VMTable, - signature_ids: *mut u32, + imported_functions: *const *const VMFunctionBody, + imported_tables: *mut VMTableImport, + imported_memories: *mut VMMemoryImport, + imported_globals: *mut VMGlobalImport, + tables: *mut VMTableDefinition, + memories: *mut VMMemoryDefinition, + globals: *mut VMGlobalDefinition, + signature_ids: *mut VMSharedSignatureIndex, ) -> Self { Self { + imported_functions, + imported_tables, + imported_memories, + imported_globals, + tables, memories, globals, - tables, signature_ids, } } - /// Return the base pointer of the globals array. - pub unsafe fn global(&mut self, index: GlobalIndex) -> &mut VMGlobal { - &mut *self.globals.add(index.index()) + /// Return a reference to imported function `index`. + pub unsafe fn imported_function(&self, index: FuncIndex) -> *const VMFunctionBody { + *self.imported_functions.add(index.index()) } - /// Return a mutable reference to linear memory `index`. - pub unsafe fn memory(&mut self, index: MemoryIndex) -> &mut VMMemory { + /// Return a reference to imported table `index`. + pub unsafe fn imported_table(&self, index: TableIndex) -> &VMTableImport { + &*self.imported_tables.add(index.index()) + } + + /// Return a mutable reference to imported table `index`. + pub unsafe fn imported_table_mut(&mut self, index: TableIndex) -> &mut VMTableImport { + &mut *self.imported_tables.add(index.index()) + } + + /// Return a reference to imported memory `index`. + pub unsafe fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport { + &*self.imported_memories.add(index.index()) + } + + /// Return a mutable reference to imported memory `index`. + pub unsafe fn imported_memory_mut(&mut self, index: MemoryIndex) -> &mut VMMemoryImport { + &mut *self.imported_memories.add(index.index()) + } + + /// Return a reference to imported global `index`. + pub unsafe fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport { + &*self.imported_globals.add(index.index()) + } + + /// Return a mutable reference to imported global `index`. + pub unsafe fn imported_global_mut(&mut self, index: GlobalIndex) -> &mut VMGlobalImport { + &mut *self.imported_globals.add(index.index()) + } + + /// Return a reference to locally-defined table `index`. + pub unsafe fn table(&self, index: DefinedTableIndex) -> &VMTableDefinition { + &*self.tables.add(index.index()) + } + + /// Return a mutable reference to locally-defined table `index`. + pub unsafe fn table_mut(&mut self, index: DefinedTableIndex) -> &mut VMTableDefinition { + &mut *self.tables.add(index.index()) + } + + /// Return a reference to locally-defined linear memory `index`. + pub unsafe fn memory(&self, index: DefinedMemoryIndex) -> &VMMemoryDefinition { + &*self.memories.add(index.index()) + } + + /// Return a mutable reference to locally-defined linear memory `index`. + pub unsafe fn memory_mut(&mut self, index: DefinedMemoryIndex) -> &mut VMMemoryDefinition { &mut *self.memories.add(index.index()) } - /// Return a mutable reference to table `index`. - pub unsafe fn table(&mut self, index: TableIndex) -> &mut VMTable { - &mut *self.tables.add(index.index()) + /// Return a reference to locally-defined global variable `index`. + pub unsafe fn global(&self, index: DefinedGlobalIndex) -> &VMGlobalDefinition { + &*self.globals.add(index.index()) + } + + /// Return a mutable reference to locally-defined global variable `index`. 
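The accessor split mirrors the new `VMContext` layout: locally-defined tables, memories, and globals are indexed by `Defined*Index` into the instance's own arrays, while imports are reached through `VM*Import` records that point into the owning instance. A sketch of the difference for reading a memory's current length (the helper names and the `wasmtime_runtime` re-export path are assumptions):

    use cranelift_wasm::{DefinedMemoryIndex, MemoryIndex};
    use wasmtime_runtime::VMContext;

    /// A memory defined in this instance lives in the instance's own array.
    unsafe fn defined_memory_len(vmctx: &VMContext, index: DefinedMemoryIndex) -> usize {
        vmctx.memory(index).current_length
    }

    /// An imported memory is reached through the owner's definition via the
    /// import record, so growth in the exporter is visible here immediately.
    unsafe fn imported_memory_len(vmctx: &VMContext, index: MemoryIndex) -> usize {
        (*vmctx.imported_memory(index).from).current_length
    }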
+ pub unsafe fn global_mut(&mut self, index: DefinedGlobalIndex) -> &mut VMGlobalDefinition { + &mut *self.globals.add(index.index()) } /// Return a mutable reference to the associated `Instance`. @@ -598,4 +564,14 @@ impl VMContext { pub unsafe fn instance(&mut self) -> &mut Instance { &mut *((self as *mut Self as *mut u8).offset(-Instance::vmctx_offset()) as *mut Instance) } + + /// Return the memory index for the given `VMMemoryDefinition`. + pub fn memory_index(&self, memory: &mut VMMemoryDefinition) -> DefinedMemoryIndex { + // TODO: Use `offset_from` once it stablizes. + let begin = self.memories; + let end: *mut VMMemoryDefinition = memory; + DefinedMemoryIndex::new( + (end as usize - begin as usize) / mem::size_of::(), + ) + } } diff --git a/lib/wast/Cargo.toml b/lib/wast/Cargo.toml index 7447375c3f..376331af6b 100644 --- a/lib/wast/Cargo.toml +++ b/lib/wast/Cargo.toml @@ -15,11 +15,12 @@ cranelift-native = { git = "https://github.com/sunfishcode/cranelift.git", branc cranelift-wasm = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } cranelift-entity = { git = "https://github.com/sunfishcode/cranelift.git", branch = "guard-offset" } wasmtime-execute = { path = "../execute" } +wasmtime-runtime = { path = "../runtime" } wasmtime-environ = { path = "../environ" } wabt = "0.7" target-lexicon = "0.2.0" -failure = "0.1.3" -failure_derive = "0.1.3" +failure = { version = "0.1.3", default-features = false } +failure_derive = { version = "0.1.3", default-features = false } [badges] maintenance = { status = "experimental" } diff --git a/lib/wast/build.rs b/lib/wast/build.rs index 09577e0509..6c4c442c04 100644 --- a/lib/wast/build.rs +++ b/lib/wast/build.rs @@ -66,7 +66,7 @@ fn test_directory(out: &mut File, testsuite: &str) -> io::Result<()> { " fn {}() {{", avoid_keywords(&stemstr.replace("-", "_")) )?; - writeln!(out, " let mut wast_context = WastContext::new();")?; + writeln!(out, " let mut wast_context = WastContext::new().expect(\"error constructing WastContext\");")?; writeln!( out, " wast_context.run_file(&*native_isa(), Path::new(\"{}\")).expect(\"error running wast file: {}\");", diff --git a/lib/wast/src/lib.rs b/lib/wast/src/lib.rs index 6e6126ae1f..fc56cee98b 100644 --- a/lib/wast/src/lib.rs +++ b/lib/wast/src/lib.rs @@ -33,6 +33,7 @@ extern crate target_lexicon; extern crate wabt; extern crate wasmtime_environ; extern crate wasmtime_execute; +extern crate wasmtime_runtime; mod spectest; mod wast; diff --git a/lib/wast/src/spectest.rs b/lib/wast/src/spectest.rs index 6ac30a6031..d9d98dca15 100644 --- a/lib/wast/src/spectest.rs +++ b/lib/wast/src/spectest.rs @@ -1,10 +1,16 @@ use cranelift_codegen::ir::types; use cranelift_codegen::{ir, isa}; +use cranelift_entity::PrimaryMap; use cranelift_wasm::{Global, GlobalInit, Memory, Table, TableElementType}; use std::ptr; use target_lexicon::HOST; -use wasmtime_environ::{translate_signature, MemoryPlan, MemoryStyle, TablePlan, TableStyle}; -use wasmtime_execute::{ExportValue, Resolver, VMFunctionBody, VMGlobal, VMMemory, VMTable}; +use wasmtime_environ::{ + translate_signature, MemoryPlan, MemoryStyle, Module, TablePlan, TableStyle, +}; +use wasmtime_execute::{Export, Resolver}; +use wasmtime_runtime::{ + Imports, Instance, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition, VMTableDefinition, +}; extern "C" fn spectest_print() {} @@ -41,44 +47,60 @@ extern "C" fn spectest_print_f64_f64(x: f64, y: f64) { } pub struct SpecTest { - spectest_global_i32: VMGlobal, - spectest_global_f32: VMGlobal, - 
spectest_global_f64: VMGlobal, - spectest_table: VMTable, - spectest_memory: VMMemory, + instance: Instance, + spectest_global_i32: VMGlobalDefinition, + spectest_global_f32: VMGlobalDefinition, + spectest_global_f64: VMGlobalDefinition, + spectest_table: VMTableDefinition, + spectest_memory: VMMemoryDefinition, } impl SpecTest { - pub fn new() -> Self { - Self { - spectest_global_i32: VMGlobal::definition(&Global { + pub fn new() -> Result { + let finished_functions = PrimaryMap::new(); + let imports = Imports::none(); + let data_initializers = Vec::new(); + Ok(Self { + instance: Instance::new( + &Module::new(), + &finished_functions.into_boxed_slice(), + imports, + &data_initializers, + )?, + spectest_global_i32: VMGlobalDefinition::new(&Global { ty: types::I32, mutability: true, initializer: GlobalInit::I32Const(0), }), - spectest_global_f32: VMGlobal::definition(&Global { + spectest_global_f32: VMGlobalDefinition::new(&Global { ty: types::I32, mutability: true, initializer: GlobalInit::F32Const(0), }), - spectest_global_f64: VMGlobal::definition(&Global { + spectest_global_f64: VMGlobalDefinition::new(&Global { ty: types::I32, mutability: true, initializer: GlobalInit::F64Const(0), }), - spectest_table: VMTable::definition(ptr::null_mut(), 0), - spectest_memory: VMMemory::definition(ptr::null_mut(), 0), - } + spectest_table: VMTableDefinition { + base: ptr::null_mut(), + current_elements: 0, + }, + spectest_memory: VMMemoryDefinition { + base: ptr::null_mut(), + current_length: 0, + }, + }) } } impl Resolver for SpecTest { - fn resolve(&mut self, module: &str, field: &str) -> Option { + fn resolve(&mut self, module: &str, field: &str) -> Option { let call_conv = isa::CallConv::triple_default(&HOST); let pointer_type = types::Type::triple_pointer_type(&HOST); match module { "spectest" => match field { - "print" => Some(ExportValue::function( + "print" => Some(Export::function( spectest_print as *const VMFunctionBody, translate_signature( ir::Signature { @@ -89,7 +111,7 @@ impl Resolver for SpecTest { pointer_type, ), )), - "print_i32" => Some(ExportValue::function( + "print_i32" => Some(Export::function( spectest_print_i32 as *const VMFunctionBody, translate_signature( ir::Signature { @@ -100,7 +122,7 @@ impl Resolver for SpecTest { pointer_type, ), )), - "print_i64" => Some(ExportValue::function( + "print_i64" => Some(Export::function( spectest_print_i64 as *const VMFunctionBody, translate_signature( ir::Signature { @@ -111,7 +133,7 @@ impl Resolver for SpecTest { pointer_type, ), )), - "print_f32" => Some(ExportValue::function( + "print_f32" => Some(Export::function( spectest_print_f32 as *const VMFunctionBody, translate_signature( ir::Signature { @@ -122,7 +144,7 @@ impl Resolver for SpecTest { pointer_type, ), )), - "print_f64" => Some(ExportValue::function( + "print_f64" => Some(Export::function( spectest_print_f64 as *const VMFunctionBody, translate_signature( ir::Signature { @@ -133,7 +155,7 @@ impl Resolver for SpecTest { pointer_type, ), )), - "print_i32_f32" => Some(ExportValue::function( + "print_i32_f32" => Some(Export::function( spectest_print_i32_f32 as *const VMFunctionBody, translate_signature( ir::Signature { @@ -147,7 +169,7 @@ impl Resolver for SpecTest { pointer_type, ), )), - "print_f64_f64" => Some(ExportValue::function( + "print_f64_f64" => Some(Export::function( spectest_print_f64_f64 as *const VMFunctionBody, translate_signature( ir::Signature { @@ -161,7 +183,7 @@ impl Resolver for SpecTest { pointer_type, ), )), - "global_i32" => Some(ExportValue::global( + 
"global_i32" => Some(Export::global( &mut self.spectest_global_i32, Global { ty: ir::types::I32, @@ -169,7 +191,7 @@ impl Resolver for SpecTest { initializer: GlobalInit::I32Const(0), }, )), - "global_f32" => Some(ExportValue::global( + "global_f32" => Some(Export::global( &mut self.spectest_global_f32, Global { ty: ir::types::F32, @@ -177,7 +199,7 @@ impl Resolver for SpecTest { initializer: GlobalInit::F32Const(0), }, )), - "global_f64" => Some(ExportValue::global( + "global_f64" => Some(Export::global( &mut self.spectest_global_f64, Global { ty: ir::types::F64, @@ -185,8 +207,9 @@ impl Resolver for SpecTest { initializer: GlobalInit::F64Const(0), }, )), - "table" => Some(ExportValue::table( + "table" => Some(Export::table( &mut self.spectest_table, + self.instance.vmctx_mut(), TablePlan { table: Table { ty: TableElementType::Func, @@ -196,8 +219,9 @@ impl Resolver for SpecTest { style: TableStyle::CallerChecksSignature, }, )), - "memory" => Some(ExportValue::memory( + "memory" => Some(Export::memory( &mut self.spectest_memory, + self.instance.vmctx_mut(), MemoryPlan { memory: Memory { minimum: 0, diff --git a/lib/wast/src/wast.rs b/lib/wast/src/wast.rs index ff1145539d..32d46f9d8b 100644 --- a/lib/wast/src/wast.rs +++ b/lib/wast/src/wast.rs @@ -90,14 +90,14 @@ pub struct WastContext { impl WastContext { /// Construct a new instance of `WastContext`. - pub fn new() -> Self { - Self { + pub fn new() -> Result { + Ok(Self { worlds: PrimaryMap::new(), current: None, namespace: HashMap::new(), code: Code::new(), - spectest: SpecTest::new(), - } + spectest: SpecTest::new()?, + }) } fn instantiate( diff --git a/src/run_wast.rs b/src/run_wast.rs index 3e2bbaed57..6a84e34d4d 100644 --- a/src/run_wast.rs +++ b/src/run_wast.rs @@ -94,7 +94,7 @@ fn main() { } let isa = isa_builder.finish(settings::Flags::new(flag_builder)); - let mut wast_context = WastContext::new(); + let mut wast_context = WastContext::new().expect("Error creating WastContext"); for filename in &args.arg_file { wast_context .run_file(&*isa, Path::new(&filename)) diff --git a/src/wasmtime.rs b/src/wasmtime.rs index 47391ba2ed..fbde516b52 100644 --- a/src/wasmtime.rs +++ b/src/wasmtime.rs @@ -184,11 +184,13 @@ fn handle_module(args: &Args, path: &Path, isa: &TargetIsa) -> Result<(), String if split.len() != 3 { break; } - let memory = world.inspect_memory( - MemoryIndex::new(str::parse(split[0]).unwrap()), - str::parse(split[1]).unwrap(), - str::parse(split[2]).unwrap(), - ); + let memory = world + .inspect_memory( + MemoryIndex::new(str::parse(split[0]).unwrap()), + str::parse(split[1]).unwrap(), + str::parse(split[2]).unwrap(), + ) + .map_err(|e| e.to_string())?; let mut s = memory.iter().fold(String::from("#"), |mut acc, byte| { acc.push_str(format!("{:02x}_", byte).as_str()); acc