Refactor the compilation and instantiation pipeline.

wasmtime-execute is now wasmtime-jit. `JITCode` and the `TargetIsa` move into
a new `Compiler` type. `InstancePlus` is no more; its trampoline functionality
is now handled by `Compiler`.
Dan Gohman
2019-01-03 06:59:46 -08:00
parent 450a279e18
commit 7592c99f3b
46 changed files with 1225 additions and 1073 deletions

lib/jit/src/action.rs Normal file

@@ -0,0 +1,285 @@
//! Support for performing actions with a wasm module from the outside.
use compiler::Compiler;
use cranelift_codegen::ir;
use instantiate::SetupError;
use std::cmp::max;
use std::string::String;
use std::vec::Vec;
use std::{fmt, mem, ptr, slice};
use wasmtime_runtime::{wasmtime_call_trampoline, Export, Instance};
/// A runtime value.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum RuntimeValue {
/// A runtime value with type i32.
I32(i32),
/// A runtime value with type i64.
I64(i64),
/// A runtime value with type f32.
F32(u32),
/// A runtime value with type f64.
F64(u64),
}
impl RuntimeValue {
/// Return the type of this `RuntimeValue`.
pub fn value_type(self) -> ir::Type {
match self {
RuntimeValue::I32(_) => ir::types::I32,
RuntimeValue::I64(_) => ir::types::I64,
RuntimeValue::F32(_) => ir::types::F32,
RuntimeValue::F64(_) => ir::types::F64,
}
}
/// Assuming this `RuntimeValue` holds an `i32`, return that value.
pub fn unwrap_i32(self) -> i32 {
match self {
RuntimeValue::I32(x) => x,
_ => panic!("unwrapping value of type {} as i32", self.value_type()),
}
}
/// Assuming this `RuntimeValue` holds an `i64`, return that value.
pub fn unwrap_i64(self) -> i64 {
match self {
RuntimeValue::I64(x) => x,
_ => panic!("unwrapping value of type {} as i64", self.value_type()),
}
}
/// Assuming this `RuntimeValue` holds an `f32`, return that value.
pub fn unwrap_f32(self) -> f32 {
f32::from_bits(self.unwrap_f32_bits())
}
/// Assuming this `RuntimeValue` holds an `f32`, return the bits of that value as a `u32`.
pub fn unwrap_f32_bits(self) -> u32 {
match self {
RuntimeValue::F32(x) => x,
_ => panic!("unwrapping value of type {} as f32", self.value_type()),
}
}
/// Assuming this `RuntimeValue` holds an `f64`, return that value.
pub fn unwrap_f64(self) -> f64 {
f64::from_bits(self.unwrap_f64_bits())
}
/// Assuming this `RuntimeValue` holds an `f64`, return the bits of that value as a `u64`.
pub fn unwrap_f64_bits(self) -> u64 {
match self {
RuntimeValue::F64(x) => x,
_ => panic!("unwrapping value of type {} as f64", self.value_type()),
}
}
}
impl fmt::Display for RuntimeValue {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
RuntimeValue::I32(x) => write!(f, "{}: i32", x),
RuntimeValue::I64(x) => write!(f, "{}: i64", x),
RuntimeValue::F32(x) => write!(f, "{}: f32", x),
RuntimeValue::F64(x) => write!(f, "{}: f64", x),
}
}
}
/// The result of invoking a wasm function or reading a wasm global.
#[derive(Debug)]
pub enum ActionOutcome {
/// The action returned normally. Its return values are provided.
Returned {
/// The return values.
values: Vec<RuntimeValue>,
},
/// A trap occurred while the action was executing.
Trapped {
/// The trap message.
message: String,
},
}
/// An error detected while invoking a wasm function or reading a wasm global.
/// Note that at this level, traps are not reported as errors; they are
/// instead returned through `ActionOutcome`.
#[derive(Fail, Debug)]
pub enum ActionError {
/// An internal implementation error occurred.
#[fail(display = "{}", _0)]
Setup(SetupError),
/// No field with the specified name was present.
#[fail(display = "Unknown field: {}", _0)]
Field(String),
/// The field was present but was the wrong kind (e.g. function, table, global, or memory).
#[fail(display = "Kind error: {}", _0)]
Kind(String),
/// The field was present but was the wrong type (e.g. i32, i64, f32, or f64).
#[fail(display = "Type error: {}", _0)]
Type(String),
}
/// Invoke a function in an `Instance` identified by an export name.
pub fn invoke(
compiler: &mut Compiler,
instance: &mut Instance,
function_name: &str,
args: &[RuntimeValue],
) -> Result<ActionOutcome, ActionError> {
let (address, signature, callee_vmctx) = match instance.lookup(function_name) {
Some(Export::Function {
address,
signature,
vmctx,
}) => (address, signature, vmctx),
Some(_) => {
return Err(ActionError::Kind(format!(
"exported item \"{}\" is not a function",
function_name
)))
}
None => {
return Err(ActionError::Field(format!(
"no export named \"{}\"",
function_name
)))
}
};
for (index, value) in args.iter().enumerate() {
assert_eq!(value.value_type(), signature.params[index].value_type);
}
// TODO: Support values larger than u64. And pack the values into memory
// instead of just using fixed-sized slots.
let mut values_vec: Vec<u64> = Vec::new();
let value_size = mem::size_of::<u64>();
values_vec.resize(max(signature.params.len(), signature.returns.len()), 0u64);
// Store the argument values into `values_vec`.
for (index, arg) in args.iter().enumerate() {
unsafe {
let ptr = values_vec.as_mut_ptr().add(index);
match arg {
RuntimeValue::I32(x) => ptr::write(ptr as *mut i32, *x),
RuntimeValue::I64(x) => ptr::write(ptr as *mut i64, *x),
RuntimeValue::F32(x) => ptr::write(ptr as *mut u32, *x),
RuntimeValue::F64(x) => ptr::write(ptr as *mut u64, *x),
}
}
}
// Get the trampoline to call for this function.
let exec_code_buf = compiler
.get_trampoline(address, &signature, value_size)
.map_err(ActionError::Setup)?;
// Make all JIT code produced thus far executable.
compiler.publish_compiled_code();
// Call the trampoline.
if let Err(message) = unsafe {
wasmtime_call_trampoline(
exec_code_buf,
values_vec.as_mut_ptr() as *mut u8,
callee_vmctx,
)
} {
return Ok(ActionOutcome::Trapped { message });
}
// Load the return values out of `values_vec`.
let values = signature
.returns
.iter()
.enumerate()
.map(|(index, abi_param)| unsafe {
let ptr = values_vec.as_ptr().add(index);
match abi_param.value_type {
ir::types::I32 => RuntimeValue::I32(ptr::read(ptr as *const i32)),
ir::types::I64 => RuntimeValue::I64(ptr::read(ptr as *const i64)),
ir::types::F32 => RuntimeValue::F32(ptr::read(ptr as *const u32)),
ir::types::F64 => RuntimeValue::F64(ptr::read(ptr as *const u64)),
other => panic!("unsupported value type {:?}", other),
}
})
.collect();
Ok(ActionOutcome::Returned { values })
}
/// Returns a slice of the contents of allocated linear memory.
pub fn inspect_memory<'instance>(
instance: &'instance Instance,
memory_name: &str,
start: usize,
len: usize,
) -> Result<&'instance [u8], ActionError> {
let definition = match unsafe { instance.lookup_immutable(memory_name) } {
Some(Export::Memory {
definition,
memory: _memory,
vmctx: _vmctx,
}) => definition,
Some(_) => {
return Err(ActionError::Kind(format!(
"exported item \"{}\" is not a linear memory",
memory_name
)))
}
None => {
return Err(ActionError::Field(format!(
"no export named \"{}\"",
memory_name
)))
}
};
Ok(unsafe {
let memory_def = &*definition;
&slice::from_raw_parts(memory_def.base, memory_def.current_length)[start..start + len]
})
}
/// Read a global in this `Instance` identified by an export name.
pub fn get(instance: &Instance, global_name: &str) -> Result<RuntimeValue, ActionError> {
let (definition, global) = match unsafe { instance.lookup_immutable(global_name) } {
Some(Export::Global { definition, global }) => (definition, global),
Some(_) => {
return Err(ActionError::Kind(format!(
"exported item \"{}\" is not a global variable",
global_name
)))
}
None => {
return Err(ActionError::Field(format!(
"no export named \"{}\"",
global_name
)))
}
};
unsafe {
let global_def = &*definition;
Ok(match global.ty {
ir::types::I32 => RuntimeValue::I32(*global_def.as_i32()),
ir::types::I64 => RuntimeValue::I64(*global_def.as_i64()),
ir::types::F32 => RuntimeValue::F32(*global_def.as_f32_bits()),
ir::types::F64 => RuntimeValue::F64(*global_def.as_f64_bits()),
other => {
return Err(ActionError::Type(format!(
"global with type {} not supported",
other
)))
}
})
}
}
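
For illustration, a minimal sketch of the bit-pattern convention used by `RuntimeValue`: the `F32`/`F64` variants carry raw IEEE-754 bits, so an embedder converts with `to_bits()`/`from_bits()`. The function below is an assumed usage example, not part of this file.

use cranelift_codegen::ir;
use wasmtime_jit::RuntimeValue;

fn runtime_value_bits_example() {
    // 1.5f32 has the IEEE-754 bit pattern 0x3FC0_0000.
    let v = RuntimeValue::F32(1.5f32.to_bits());
    assert_eq!(v.value_type(), ir::types::F32);
    assert_eq!(v.unwrap_f32_bits(), 0x3FC0_0000);
    assert_eq!(v.unwrap_f32(), 1.5f32);
}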

lib/jit/src/code_memory.rs Normal file

@@ -0,0 +1,82 @@
//! Memory management for executable code.
use region;
use std::string::String;
use std::vec::Vec;
use std::{cmp, mem};
use wasmtime_runtime::{Mmap, VMFunctionBody};
/// Memory manager for executable code.
pub(crate) struct CodeMemory {
current: Mmap,
mmaps: Vec<Mmap>,
position: usize,
published: usize,
}
impl CodeMemory {
/// Create a new `CodeMemory` instance.
pub fn new() -> Self {
Self {
current: Mmap::new(),
mmaps: Vec::new(),
position: 0,
published: 0,
}
}
/// Allocate `size` bytes of memory which can be made executable later by
/// calling `publish()`. Note that we allocate the memory as writable so
/// that it can be written to and patched, though we make it read-only and
/// executable before actually executing from it.
///
/// TODO: Add an alignment flag.
fn allocate(&mut self, size: usize) -> Result<&mut [u8], String> {
if self.current.len() - self.position < size {
self.mmaps.push(mem::replace(
&mut self.current,
Mmap::with_size(cmp::max(0x10000, size.next_power_of_two()))?,
));
self.position = 0;
}
let old_position = self.position;
self.position += size;
Ok(&mut self.current.as_mut_slice()[old_position..self.position])
}
/// Convert a mutable slice from `u8` to `VMFunctionBody`.
fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
let byte_ptr: *mut [u8] = slice;
let body_ptr = byte_ptr as *mut [VMFunctionBody];
unsafe { &mut *body_ptr }
}
/// Allocate enough memory to hold a copy of `slice` and copy the data into it.
/// TODO: Reorganize the code that calls this to emit code directly into the
/// mmap region rather than into a Vec that we need to copy in.
pub fn allocate_copy_of_byte_slice(
&mut self,
slice: &[u8],
) -> Result<&mut [VMFunctionBody], String> {
let new = self.allocate(slice.len())?;
new.copy_from_slice(slice);
Ok(Self::view_as_mut_vmfunc_slice(new))
}
/// Make all allocated memory executable.
pub fn publish(&mut self) {
self.mmaps
.push(mem::replace(&mut self.current, Mmap::new()));
self.position = 0;
for m in &mut self.mmaps[self.published..] {
if !m.as_ptr().is_null() {
unsafe {
region::protect(m.as_mut_ptr(), m.len(), region::Protection::ReadExecute)
}
.expect("unable to make memory readonly and executable");
}
}
self.published = self.mmaps.len();
}
}
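
For illustration, the crate-internal life cycle this type is built around: copy machine code in while the pages are still writable, then flip everything to read+execute in a single `publish()`. A sketch only, assuming `code_buf` already holds finished machine code for the host; `CodeMemory` is `pub(crate)`, so this mirrors how `compiler.rs` uses it rather than a public API.

use wasmtime_runtime::VMFunctionBody;

fn install_code(
    code: &mut CodeMemory,
    code_buf: &[u8],
) -> Result<*const VMFunctionBody, String> {
    // Copy the bytes into writable JIT memory; relocations could still be
    // patched into `body` at this point.
    let body: &mut [VMFunctionBody] = code.allocate_copy_of_byte_slice(code_buf)?;
    let ptr = body.as_ptr();
    // Make everything allocated so far read+execute before calling into it.
    code.publish();
    Ok(ptr)
}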

lib/jit/src/compiler.rs Normal file

@@ -0,0 +1,252 @@
//! JIT compilation.
use code_memory::CodeMemory;
use cranelift_codegen::ir::InstBuilder;
use cranelift_codegen::isa::{TargetFrontendConfig, TargetIsa};
use cranelift_codegen::Context;
use cranelift_codegen::{binemit, ir};
use cranelift_entity::PrimaryMap;
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use cranelift_wasm::DefinedFuncIndex;
use instantiate::SetupError;
use std::boxed::Box;
use std::collections::HashMap;
use std::string::String;
use std::vec::Vec;
use target_tunables::target_tunables;
use wasmtime_environ::cranelift;
use wasmtime_environ::{Compilation, CompileError, Module, Relocations, Tunables};
use wasmtime_runtime::{InstantiationError, VMFunctionBody};
/// A WebAssembly code JIT compiler.
///
/// A `Compiler` instance owns the executable memory that it allocates.
///
/// TODO: Evolve this to support streaming rather than requiring a `&[u8]`
/// containing a whole wasm module at once.
///
/// TODO: Consider using cranelift-module.
pub struct Compiler {
isa: Box<TargetIsa>,
code_memory: CodeMemory,
trampoline_park: HashMap<*const VMFunctionBody, *const VMFunctionBody>,
/// The `FunctionBuilderContext`, shared between trampoline function compilations.
fn_builder_ctx: FunctionBuilderContext,
}
impl Compiler {
/// Construct a new `Compiler`.
pub fn new(isa: Box<TargetIsa>) -> Self {
Self {
isa,
code_memory: CodeMemory::new(),
trampoline_park: HashMap::new(),
fn_builder_ctx: FunctionBuilderContext::new(),
}
}
}
impl Compiler {
/// Return the target's frontend configuration settings.
pub fn frontend_config(&self) -> TargetFrontendConfig {
self.isa.frontend_config()
}
/// Return the tunables in use by this engine.
pub fn tunables(&self) -> Tunables {
target_tunables(self.isa.triple())
}
/// Compile the given function bodies.
pub(crate) fn compile<'data>(
&mut self,
module: &Module,
function_body_inputs: PrimaryMap<DefinedFuncIndex, &'data [u8]>,
) -> Result<
(
PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
Relocations,
),
SetupError,
> {
let (compilation, relocations) =
cranelift::compile_module(&module, function_body_inputs, &*self.isa)
.map_err(SetupError::Compile)?;
let allocated_functions =
allocate_functions(&mut self.code_memory, compilation).map_err(|message| {
SetupError::Instantiate(InstantiationError::Resource(format!(
"failed to allocate memory for functions: {}",
message
)))
})?;
Ok((allocated_functions, relocations))
}
/// Create a trampoline for invoking a function.
pub(crate) fn get_trampoline(
&mut self,
callee_address: *const VMFunctionBody,
signature: &ir::Signature,
value_size: usize,
) -> Result<*const VMFunctionBody, SetupError> {
use std::collections::hash_map::Entry::{Occupied, Vacant};
Ok(match self.trampoline_park.entry(callee_address) {
Occupied(entry) => *entry.get(),
Vacant(entry) => {
let body = make_trampoline(
&*self.isa,
&mut self.code_memory,
&mut self.fn_builder_ctx,
callee_address,
signature,
value_size,
)?;
entry.insert(body);
body
}
})
}
/// Make memory containing compiled code executable.
pub(crate) fn publish_compiled_code(&mut self) {
self.code_memory.publish();
}
}
/// Create a trampoline for invoking a function.
fn make_trampoline(
isa: &TargetIsa,
code_memory: &mut CodeMemory,
fn_builder_ctx: &mut FunctionBuilderContext,
callee_address: *const VMFunctionBody,
signature: &ir::Signature,
value_size: usize,
) -> Result<*const VMFunctionBody, SetupError> {
let pointer_type = isa.pointer_type();
let mut wrapper_sig = ir::Signature::new(isa.frontend_config().default_call_conv);
// Add the `values_vec` parameter.
wrapper_sig.params.push(ir::AbiParam::new(pointer_type));
// Add the `vmctx` parameter.
wrapper_sig.params.push(ir::AbiParam::special(
pointer_type,
ir::ArgumentPurpose::VMContext,
));
let mut context = Context::new();
context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig);
{
let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx);
let block0 = builder.create_ebb();
builder.append_ebb_params_for_function_params(block0);
builder.switch_to_block(block0);
builder.seal_block(block0);
let mut callee_args = Vec::new();
let (values_vec_ptr_val, vmctx_ptr_val) = {
let params = builder.func.dfg.ebb_params(block0);
(params[0], params[1])
};
// Load the argument values out of `values_vec`.
let mflags = ir::MemFlags::trusted();
for (i, r) in signature.params.iter().enumerate() {
let value = match r.purpose {
ir::ArgumentPurpose::Normal => builder.ins().load(
r.value_type,
mflags,
values_vec_ptr_val,
(i * value_size) as i32,
),
ir::ArgumentPurpose::VMContext => vmctx_ptr_val,
other => panic!("unsupported argument purpose {}", other),
};
callee_args.push(value);
}
let new_sig = builder.import_signature(signature.clone());
// TODO: It's possible to make this a direct call. We just need Cranelift
// to support functions declared with an immediate integer address.
// ExternalName::Absolute(u64). Let's do it.
let callee_value = builder.ins().iconst(pointer_type, callee_address as i64);
let call = builder
.ins()
.call_indirect(new_sig, callee_value, &callee_args);
let results = builder.func.dfg.inst_results(call).to_vec();
// Store the return values into `values_vec`.
let mflags = ir::MemFlags::trusted();
for (i, r) in results.iter().enumerate() {
builder
.ins()
.store(mflags, *r, values_vec_ptr_val, (i * value_size) as i32);
}
builder.ins().return_(&[]);
builder.finalize()
}
let mut code_buf: Vec<u8> = Vec::new();
let mut reloc_sink = RelocSink {};
let mut trap_sink = binemit::NullTrapSink {};
context
.compile_and_emit(isa, &mut code_buf, &mut reloc_sink, &mut trap_sink)
.map_err(|error| SetupError::Compile(CompileError::Codegen(error)))?;
Ok(code_memory
.allocate_copy_of_byte_slice(&code_buf)
.map_err(|message| SetupError::Instantiate(InstantiationError::Resource(message)))?
.as_ptr())
}
fn allocate_functions(
code_memory: &mut CodeMemory,
compilation: Compilation,
) -> Result<PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>, String> {
let mut result = PrimaryMap::with_capacity(compilation.functions.len());
for (_, body) in compilation.functions.into_iter() {
let fatptr: *mut [VMFunctionBody] = code_memory.allocate_copy_of_byte_slice(body)?;
result.push(fatptr);
}
Ok(result)
}
/// We don't expect trampoline compilation to produce any relocations, so
/// this `RelocSink` just asserts that it doesn't receive any.
struct RelocSink {}
impl binemit::RelocSink for RelocSink {
fn reloc_ebb(
&mut self,
_offset: binemit::CodeOffset,
_reloc: binemit::Reloc,
_ebb_offset: binemit::CodeOffset,
) {
panic!("trampoline compilation should not produce ebb relocs");
}
fn reloc_external(
&mut self,
_offset: binemit::CodeOffset,
_reloc: binemit::Reloc,
_name: &ir::ExternalName,
_addend: binemit::Addend,
) {
panic!("trampoline compilation should not produce external symbol relocs");
}
fn reloc_jt(
&mut self,
_offset: binemit::CodeOffset,
_reloc: binemit::Reloc,
_jt: ir::JumpTable,
) {
panic!("trampoline compilation should not produce jump table relocs");
}
}
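
For illustration, a sketch of how an embedder might construct a `Compiler` for the host machine. This assumes the cranelift-native crate (not part of this diff) for ISA detection, and that `Builder::finish` returns a `Box<TargetIsa>`, as it did in contemporary Cranelift.

extern crate cranelift_codegen;
extern crate cranelift_native;
extern crate wasmtime_jit;

use cranelift_codegen::settings;
use wasmtime_jit::Compiler;

fn native_compiler() -> Compiler {
    // Detect the host ISA and build it with default shared flags.
    let isa_builder = cranelift_native::builder().expect("host machine is not supported");
    let isa = isa_builder.finish(settings::Flags::new(settings::builder()));
    Compiler::new(isa)
}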

lib/jit/src/instantiate.rs Normal file

@@ -0,0 +1,199 @@
//! Define the `instantiate` function, which takes a byte array containing an
//! encoded wasm module and returns a live wasm instance. Also, define
//! `CompiledModule` to allow compiling and instantiating to be done as separate
//! steps.
use compiler::Compiler;
use cranelift_entity::{BoxedSlice, PrimaryMap};
use cranelift_wasm::DefinedFuncIndex;
use link::link_module;
use resolver::Resolver;
use std::boxed::Box;
use std::rc::Rc;
use std::string::String;
use std::vec::Vec;
use wasmtime_environ::{
CompileError, DataInitializer, DataInitializerLocation, Module, ModuleEnvironment,
};
use wasmtime_runtime::{Imports, Instance, InstantiationError, VMFunctionBody};
/// An error condition while setting up a wasm instance, be it validation,
/// compilation, or instantiation.
#[derive(Fail, Debug)]
pub enum SetupError {
/// The module did not pass validation.
#[fail(display = "Validation error: {}", _0)]
Validate(String),
/// A wasm translation error occurred.
#[fail(display = "WebAssembly compilation error: {}", _0)]
Compile(CompileError),
/// Some runtime resource was unavailable or insufficient, or the start function
/// trapped.
#[fail(display = "Instantiation error: {}", _0)]
Instantiate(InstantiationError),
}
/// This is similar to `CompiledModule`, but references the data initializers
/// from the wasm buffer rather than holding its own copy.
struct RawCompiledModule<'data> {
module: Module,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imports: Imports,
data_initializers: Box<[DataInitializer<'data>]>,
}
impl<'data> RawCompiledModule<'data> {
/// Create a new `RawCompiledModule` by compiling the wasm module in `data` and instantiating it.
fn new(
compiler: &mut Compiler,
data: &'data [u8],
resolver: &mut Resolver,
) -> Result<Self, SetupError> {
let environ = ModuleEnvironment::new(compiler.frontend_config(), compiler.tunables());
let translation = environ
.translate(data)
.map_err(|error| SetupError::Compile(CompileError::Wasm(error)))?;
let (allocated_functions, relocations) =
compiler.compile(&translation.module, translation.function_body_inputs)?;
let imports = link_module(
&translation.module,
&allocated_functions,
relocations,
resolver,
)
.map_err(|err| SetupError::Instantiate(InstantiationError::Link(err)))?;
// Gather up the pointers to the compiled functions.
let finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody> =
allocated_functions
.into_iter()
.map(|(_index, allocated)| {
let fatptr: *const [VMFunctionBody] = *allocated;
fatptr as *const VMFunctionBody
})
.collect::<PrimaryMap<_, _>>()
.into_boxed_slice();
// Make all code compiled thus far executable.
compiler.publish_compiled_code();
Ok(Self {
module: translation.module,
finished_functions,
imports,
data_initializers: translation.data_initializers.into_boxed_slice(),
})
}
}
/// A compiled wasm module, ready to be instantiated.
pub struct CompiledModule {
module: Rc<Module>,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imports: Imports,
data_initializers: Box<[OwnedDataInitializer]>,
}
impl CompiledModule {
/// Compile a data buffer into a `CompiledModule`, which may then be instantiated.
pub fn new<'data>(
compiler: &mut Compiler,
data: &'data [u8],
resolver: &mut Resolver,
) -> Result<Self, SetupError> {
let raw = RawCompiledModule::<'data>::new(compiler, data, resolver)?;
Ok(Self {
module: Rc::new(raw.module),
finished_functions: raw.finished_functions,
imports: raw.imports,
data_initializers: raw
.data_initializers
.iter()
.map(OwnedDataInitializer::new)
.collect::<Vec<_>>()
.into_boxed_slice(),
})
}
/// Construct a `CompiledModule` from component parts.
pub fn from_parts(
module: Module,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imports: Imports,
data_initializers: Box<[OwnedDataInitializer]>,
) -> Self {
Self {
module: Rc::new(module),
finished_functions,
imports,
data_initializers,
}
}
/// Create an `Instance` from this `CompiledModule`.
///
/// Note that if only one instance of this module is needed, it may be more
/// efficient to call the top-level `instantiate`, since that avoids copying
/// the data initializers.
pub fn instantiate(&mut self) -> Result<Box<Instance>, InstantiationError> {
let data_initializers = self
.data_initializers
.iter()
.map(|init| DataInitializer {
location: init.location.clone(),
data: &*init.data,
})
.collect::<Vec<_>>();
Instance::new(
Rc::clone(&self.module),
self.finished_functions.clone(),
self.imports.clone(),
&data_initializers,
)
}
}
/// Similar to `DataInitializer`, but owns its own copy of the data rather
/// than holding a slice of the original module.
pub struct OwnedDataInitializer {
/// The location where the initialization is to be performed.
location: DataInitializerLocation,
/// The initialization data.
data: Box<[u8]>,
}
impl OwnedDataInitializer {
fn new(borrowed: &DataInitializer) -> Self {
Self {
location: borrowed.location.clone(),
data: borrowed.data.to_vec().into_boxed_slice(),
}
}
}
/// Create a new `Instance` by compiling the wasm module in `data` and instantiating it.
///
/// This is equivalent to creating a `CompiledModule` and calling `instantiate()` on it,
/// but avoids creating an intermediate copy of the data initializers.
pub fn instantiate(
compiler: &mut Compiler,
data: &[u8],
resolver: &mut Resolver,
) -> Result<Box<Instance>, SetupError> {
let raw = RawCompiledModule::new(compiler, data, resolver)?;
Instance::new(
Rc::new(raw.module),
raw.finished_functions,
raw.imports,
&*raw.data_initializers,
)
.map_err(SetupError::Instantiate)
}
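
For illustration, a sketch of the one-shot path defined above: compile and instantiate a wasm binary that has no imports, so the `NullResolver` suffices. `wasm_bytes` is assumed to hold a complete binary module.

use wasmtime_jit::{instantiate, Compiler, NullResolver, SetupError};
use wasmtime_runtime::Instance;

fn instantiate_no_imports(
    compiler: &mut Compiler,
    wasm_bytes: &[u8],
) -> Result<Box<Instance>, SetupError> {
    // NullResolver resolves every import to None, so this only succeeds for
    // modules that import nothing.
    let mut resolver = NullResolver {};
    instantiate(compiler, wasm_bytes, &mut resolver)
}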

lib/jit/src/lib.rs Normal file

@@ -0,0 +1,69 @@
//! JIT-style runtime for WebAssembly using Cranelift.
#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
#![warn(unused_import_braces)]
#![cfg_attr(feature = "std", deny(unstable_features))]
#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
#![cfg_attr(
feature = "cargo-clippy",
allow(clippy::new_without_default, clippy::new_without_default_derive)
)]
#![cfg_attr(
feature = "cargo-clippy",
warn(
clippy::float_arithmetic,
clippy::mut_mut,
clippy::nonminimal_bool,
clippy::option_map_unwrap_or,
clippy::option_map_unwrap_or_else,
clippy::print_stdout,
clippy::unicode_not_nfc,
clippy::use_self
)
)]
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(not(feature = "std"), feature(alloc))]
extern crate cranelift_codegen;
#[macro_use]
extern crate cranelift_entity;
extern crate cranelift_frontend;
extern crate cranelift_wasm;
extern crate region;
extern crate wasmtime_environ;
extern crate wasmtime_runtime;
#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;
extern crate failure;
extern crate target_lexicon;
#[macro_use]
extern crate failure_derive;
mod action;
mod code_memory;
mod compiler;
mod instantiate;
mod link;
mod namespace;
mod resolver;
mod target_tunables;
pub use action::{ActionError, ActionOutcome, RuntimeValue};
pub use compiler::Compiler;
pub use instantiate::{instantiate, CompiledModule, SetupError};
pub use link::link_module;
pub use namespace::{InstanceIndex, Namespace};
pub use resolver::{NullResolver, Resolver};
pub use target_tunables::target_tunables;
// Re-export `Instance` so that users won't need to separately depend on
// wasmtime-runtime in common cases.
pub use wasmtime_runtime::{Instance, InstantiationError};
#[cfg(not(feature = "std"))]
mod std {
pub use alloc::{boxed, rc, string, vec};
pub use core::*;
pub use core::{i32, str, u32};
}

lib/jit/src/link.rs Normal file

@@ -0,0 +1,363 @@
//! Linking for JIT-compiled code.
use cranelift_codegen::binemit::Reloc;
use cranelift_entity::PrimaryMap;
use cranelift_wasm::{DefinedFuncIndex, Global, GlobalInit, Memory, Table, TableElementType};
use resolver::Resolver;
use std::ptr::write_unaligned;
use std::vec::Vec;
use wasmtime_environ::{
MemoryPlan, MemoryStyle, Module, Relocation, RelocationTarget, Relocations, TablePlan,
};
use wasmtime_runtime::libcalls;
use wasmtime_runtime::{
Export, Imports, LinkError, VMFunctionBody, VMFunctionImport, VMGlobalImport, VMMemoryImport,
VMTableImport,
};
/// Links a module that has been compiled with `compile_module` in `wasmtime-environ`.
pub fn link_module(
module: &Module,
allocated_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
relocations: Relocations,
resolver: &mut Resolver,
) -> Result<Imports, LinkError> {
let mut function_imports = PrimaryMap::with_capacity(module.imported_funcs.len());
for (index, (ref module_name, ref field)) in module.imported_funcs.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Function {
address,
signature,
vmctx,
} => {
let import_signature = &module.signatures[module.functions[index]];
if signature != *import_signature {
// TODO: If the difference is in the calling convention,
// we could emit a wrapper function to fix it up.
return Err(LinkError(
format!("{}/{}: exported function with signature {} incompatible with function import with signature {}",
module_name, field,
signature, import_signature)
));
}
function_imports.push(VMFunctionImport {
body: address,
vmctx,
});
}
Export::Table { .. } | Export::Memory { .. } | Export::Global { .. } => {
return Err(LinkError(format!(
"{}/{}: export not compatible with function import",
module_name, field
)));
}
},
None => {
return Err(LinkError(format!(
"{}/{}: no provided import function",
module_name, field
)))
}
}
}
let mut table_imports = PrimaryMap::with_capacity(module.imported_tables.len());
for (index, (ref module_name, ref field)) in module.imported_tables.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Table {
definition,
vmctx,
table,
} => {
let import_table = &module.table_plans[index];
if !is_table_compatible(&table, import_table) {
return Err(LinkError(format!(
"{}/{}: exported table incompatible with table import",
module_name, field,
)));
}
table_imports.push(VMTableImport {
from: definition,
vmctx,
});
}
Export::Global { .. } | Export::Memory { .. } | Export::Function { .. } => {
return Err(LinkError(format!(
"{}/{}: export not compatible with table import",
module_name, field
)));
}
},
None => {
return Err(LinkError(format!(
"no provided import table for {}/{}",
module_name, field
)))
}
}
}
let mut memory_imports = PrimaryMap::with_capacity(module.imported_memories.len());
for (index, (ref module_name, ref field)) in module.imported_memories.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Memory {
definition,
vmctx,
memory,
} => {
let import_memory = &module.memory_plans[index];
if !is_memory_compatible(&memory, import_memory) {
return Err(LinkError(format!(
"{}/{}: exported memory incompatible with memory import",
module_name, field
)));
}
                    // Sanity-check: Ensure that the imported memory has at least the
                    // guard-page protections that the importing module expects it to have.
match (memory.style, &import_memory.style) {
(
MemoryStyle::Static { bound },
MemoryStyle::Static {
bound: import_bound,
},
) => {
assert!(bound >= *import_bound);
}
_ => (),
}
assert!(memory.offset_guard_size >= import_memory.offset_guard_size);
memory_imports.push(VMMemoryImport {
from: definition,
vmctx,
});
}
Export::Table { .. } | Export::Global { .. } | Export::Function { .. } => {
return Err(LinkError(format!(
"{}/{}: export not compatible with memory import",
module_name, field
)));
}
},
None => {
return Err(LinkError(format!(
"no provided import memory for {}/{}",
module_name, field
)))
}
}
}
let mut global_imports = PrimaryMap::with_capacity(module.imported_globals.len());
for (index, (ref module_name, ref field)) in module.imported_globals.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Global { definition, global } => {
let imported_global = module.globals[index];
if !is_global_compatible(&global, &imported_global) {
return Err(LinkError(format!(
"{}/{}: exported global incompatible with global import",
module_name, field
)));
}
global_imports.push(VMGlobalImport { from: definition });
}
Export::Table { .. } | Export::Memory { .. } | Export::Function { .. } => {
return Err(LinkError(format!(
"{}/{}: exported global incompatible with global import",
module_name, field
)));
}
},
None => {
return Err(LinkError(format!(
"no provided import global for {}/{}",
module_name, field
)))
}
}
}
// Apply relocations, now that we have virtual addresses for everything.
relocate(allocated_functions, relocations, &module);
Ok(Imports::new(
function_imports,
table_imports,
memory_imports,
global_imports,
))
}
fn is_global_compatible(exported: &Global, imported: &Global) -> bool {
match imported.initializer {
GlobalInit::Import => (),
        _ => panic!("imported Global should have an Import initializer"),
}
let Global {
ty: exported_ty,
mutability: exported_mutability,
initializer: _exported_initializer,
} = exported;
let Global {
ty: imported_ty,
mutability: imported_mutability,
initializer: _imported_initializer,
} = imported;
exported_ty == imported_ty && imported_mutability == exported_mutability
}
fn is_table_element_type_compatible(
exported_type: TableElementType,
imported_type: TableElementType,
) -> bool {
match exported_type {
TableElementType::Func => match imported_type {
TableElementType::Func => true,
_ => false,
},
TableElementType::Val(exported_val_ty) => match imported_type {
TableElementType::Val(imported_val_ty) => exported_val_ty == imported_val_ty,
_ => false,
},
}
}
fn is_table_compatible(exported: &TablePlan, imported: &TablePlan) -> bool {
let TablePlan {
table:
Table {
ty: exported_ty,
minimum: exported_minimum,
maximum: exported_maximum,
},
style: _exported_style,
} = exported;
let TablePlan {
table:
Table {
ty: imported_ty,
minimum: imported_minimum,
maximum: imported_maximum,
},
style: _imported_style,
} = imported;
is_table_element_type_compatible(*exported_ty, *imported_ty)
&& imported_minimum <= exported_minimum
&& (imported_maximum.is_none()
|| (!exported_maximum.is_none()
&& imported_maximum.unwrap() >= exported_maximum.unwrap()))
}
fn is_memory_compatible(exported: &MemoryPlan, imported: &MemoryPlan) -> bool {
let MemoryPlan {
memory:
Memory {
minimum: exported_minimum,
maximum: exported_maximum,
shared: exported_shared,
},
style: _exported_style,
offset_guard_size: _exported_offset_guard_size,
} = exported;
let MemoryPlan {
memory:
Memory {
minimum: imported_minimum,
maximum: imported_maximum,
shared: imported_shared,
},
style: _imported_style,
offset_guard_size: _imported_offset_guard_size,
} = imported;
imported_minimum <= exported_minimum
&& (imported_maximum.is_none()
|| (!exported_maximum.is_none()
&& imported_maximum.unwrap() >= exported_maximum.unwrap()))
&& exported_shared == imported_shared
}
/// Performs the relocations inside the function bytecode, provided the necessary metadata.
fn relocate(
allocated_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
relocations: PrimaryMap<DefinedFuncIndex, Vec<Relocation>>,
module: &Module,
) {
for (i, function_relocs) in relocations.into_iter() {
for r in function_relocs {
use self::libcalls::*;
let target_func_address: usize = match r.reloc_target {
RelocationTarget::UserFunc(index) => match module.defined_func_index(index) {
Some(f) => {
let fatptr: *const [VMFunctionBody] = allocated_functions[f];
fatptr as *const VMFunctionBody as usize
}
None => panic!("direct call to import"),
},
RelocationTarget::Memory32Grow => wasmtime_memory32_grow as usize,
RelocationTarget::Memory32Size => wasmtime_memory32_size as usize,
RelocationTarget::ImportedMemory32Grow => wasmtime_imported_memory32_grow as usize,
RelocationTarget::ImportedMemory32Size => wasmtime_imported_memory32_size as usize,
RelocationTarget::LibCall(libcall) => {
use cranelift_codegen::ir::LibCall::*;
match libcall {
CeilF32 => wasmtime_f32_ceil as usize,
FloorF32 => wasmtime_f32_floor as usize,
TruncF32 => wasmtime_f32_trunc as usize,
NearestF32 => wasmtime_f32_nearest as usize,
CeilF64 => wasmtime_f64_ceil as usize,
FloorF64 => wasmtime_f64_floor as usize,
TruncF64 => wasmtime_f64_trunc as usize,
NearestF64 => wasmtime_f64_nearest as usize,
#[cfg(not(target_os = "windows"))]
Probestack => __rust_probestack as usize,
#[cfg(all(target_os = "windows", target_pointer_width = "64"))]
Probestack => __chkstk as usize,
other => panic!("unexpected libcall: {}", other),
}
}
};
let fatptr: *const [VMFunctionBody] = allocated_functions[i];
let body = fatptr as *const VMFunctionBody;
match r.reloc {
#[cfg(target_pointer_width = "64")]
Reloc::Abs8 => unsafe {
let reloc_address = body.add(r.offset as usize) as usize;
let reloc_addend = r.addend as isize;
let reloc_abs = (target_func_address as u64)
.checked_add(reloc_addend as u64)
.unwrap();
write_unaligned(reloc_address as *mut u64, reloc_abs);
},
#[cfg(target_pointer_width = "32")]
Reloc::X86PCRel4 => unsafe {
let reloc_address = body.add(r.offset as usize) as usize;
let reloc_addend = r.addend as isize;
let reloc_delta_u32 = (target_func_address as u32)
.wrapping_sub(reloc_address as u32)
.checked_add(reloc_addend as u32)
.unwrap();
write_unaligned(reloc_address as *mut u32, reloc_delta_u32);
},
_ => panic!("unsupported reloc kind"),
}
}
}
}
/// A declaration for the stack probe function in Rust's standard library, for
/// catching callstack overflow.
extern "C" {
#[cfg(not(target_os = "windows"))]
pub fn __rust_probestack();
#[cfg(all(target_os = "windows", target_pointer_width = "64"))]
pub fn __chkstk();
}
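
For illustration, the arithmetic performed by the two relocation arms in `relocate` above, with made-up addresses: an `Abs8` relocation writes `target + addend` as a 64-bit absolute value, while `X86PCRel4` writes the 32-bit displacement `(target - reloc_site) + addend`.

fn reloc_arithmetic_example() {
    let target_func_address: u64 = 0x3000; // where the callee was placed
    let reloc_address: u64 = 0x1000;       // where the bytes being patched live
    let addend: i64 = 4;

    // Reloc::Abs8
    let reloc_abs = target_func_address.checked_add(addend as u64).unwrap();
    assert_eq!(reloc_abs, 0x3004);

    // Reloc::X86PCRel4
    let reloc_delta_u32 = (target_func_address as u32)
        .wrapping_sub(reloc_address as u32)
        .checked_add(addend as u32)
        .unwrap();
    assert_eq!(reloc_delta_u32, 0x2004);
}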

lib/jit/src/namespace.rs Normal file

@@ -0,0 +1,101 @@
//! The core WebAssembly spec does not specify how imports are to be resolved
//! to exports. This file provides one possible way to manage multiple instances
//! and resolve imports to exports among them.
use action::{get, inspect_memory, invoke};
use action::{ActionError, ActionOutcome, RuntimeValue};
use compiler::Compiler;
use cranelift_entity::PrimaryMap;
use resolver::Resolver;
use std::boxed::Box;
use std::collections::HashMap;
use std::string::String;
use wasmtime_runtime::{Export, Instance};
/// An opaque reference to an `Instance`.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct InstanceIndex(u32);
entity_impl!(InstanceIndex, "instance");
/// A namespace containing instances keyed by name.
///
/// Note that `Namespace` implements the `Resolver` trait, so it can resolve
/// imports using defined exports.
pub struct Namespace {
/// Mapping from identifiers to indices in `self.instances`.
names: HashMap<String, InstanceIndex>,
/// The instances, available by index.
instances: PrimaryMap<InstanceIndex, Box<Instance>>,
}
impl Namespace {
/// Construct a new `Namespace`.
pub fn new() -> Self {
Self {
names: HashMap::new(),
instances: PrimaryMap::new(),
}
}
/// Install a new `Instance` in this `Namespace`, optionally with the
/// given name, and return its index.
pub fn instance(
&mut self,
instance_name: Option<&str>,
instance: Box<Instance>,
) -> InstanceIndex {
let index = self.instances.push(instance);
if let Some(instance_name) = instance_name {
self.names.insert(instance_name.into(), index);
}
index
}
/// Get the instance index registered with the given `instance_name`.
pub fn get_instance_index(&mut self, instance_name: &str) -> Option<&mut InstanceIndex> {
self.names.get_mut(instance_name)
}
/// Register an instance with a given name.
pub fn register(&mut self, name: String, index: InstanceIndex) {
self.names.insert(name, index);
}
/// Invoke an exported function from an instance.
pub fn invoke(
&mut self,
compiler: &mut Compiler,
index: InstanceIndex,
field_name: &str,
args: &[RuntimeValue],
) -> Result<ActionOutcome, ActionError> {
invoke(compiler, &mut self.instances[index], &field_name, &args)
}
/// Get a slice of memory from an instance.
pub fn inspect_memory(
&self,
index: InstanceIndex,
field_name: &str,
start: usize,
len: usize,
) -> Result<&[u8], ActionError> {
inspect_memory(&self.instances[index], &field_name, start, len)
}
/// Get the value of an exported global from an instance.
pub fn get(&self, index: InstanceIndex, field_name: &str) -> Result<RuntimeValue, ActionError> {
get(&self.instances[index], &field_name)
}
}
impl Resolver for Namespace {
fn resolve(&mut self, instance: &str, field: &str) -> Option<Export> {
if let Some(index) = self.names.get(instance) {
self.instances[*index].lookup(field)
} else {
None
}
}
}
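
For illustration, a sketch of driving an instance through `Namespace`: register it under a name, then invoke an export by field name. The instance name "main", the export "run", and its i32 argument are hypothetical.

use wasmtime_jit::{ActionError, ActionOutcome, Compiler, Namespace, RuntimeValue};
use wasmtime_runtime::Instance;

fn run_main(
    compiler: &mut Compiler,
    instance: Box<Instance>,
) -> Result<ActionOutcome, ActionError> {
    let mut namespace = Namespace::new();
    // Registering under a name lets later modules resolve imports against it,
    // since Namespace implements Resolver.
    let index = namespace.instance(Some("main"), instance);
    namespace.invoke(compiler, index, "run", &[RuntimeValue::I32(7)])
}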

lib/jit/src/resolver.rs Normal file

@@ -0,0 +1,19 @@
//! Define the `Resolver` trait, allowing custom resolution for external
//! references.
use wasmtime_runtime::Export;
/// Import resolver connects imports with available exported values.
pub trait Resolver {
/// Resolve the given module/field combo.
fn resolve(&mut self, module: &str, field: &str) -> Option<Export>;
}
/// `Resolver` implementation that always resolves to `None`.
pub struct NullResolver {}
impl Resolver for NullResolver {
fn resolve(&mut self, _module: &str, _field: &str) -> Option<Export> {
None
}
}
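
For illustration, a sketch of a custom `Resolver` that satisfies every import from a single pre-built instance, ignoring the module name; `Namespace` in namespace.rs is the fuller version of this idea. The type name is hypothetical.

use wasmtime_jit::Resolver;
use wasmtime_runtime::{Export, Instance};

/// Resolve every import, regardless of module name, from one instance.
struct SingleInstanceResolver {
    instance: Box<Instance>,
}

impl Resolver for SingleInstanceResolver {
    fn resolve(&mut self, _module: &str, field: &str) -> Option<Export> {
        self.instance.lookup(field)
    }
}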

lib/jit/src/target_tunables.rs Normal file

@@ -0,0 +1,22 @@
use std::cmp::min;
use target_lexicon::{OperatingSystem, Triple};
use wasmtime_environ::Tunables;
/// Return a `Tunables` instance tuned for the given target platform.
pub fn target_tunables(triple: &Triple) -> Tunables {
let mut result = Tunables::default();
match triple.operating_system {
OperatingSystem::Windows => {
            // For now, use a smaller footprint on Windows so that we don't
            // outstrip the paging file.
// TODO: Make this configurable.
result.static_memory_bound = min(result.static_memory_bound, 0x100);
result.static_memory_offset_guard_size =
min(result.static_memory_offset_guard_size, 0x10000);
}
_ => {}
}
result
}
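
For illustration, a sketch of obtaining the tunables for the host target, which is effectively what `Compiler::tunables()` does via `isa.triple()`; this assumes target-lexicon's `Triple::host()` is available.

use target_lexicon::Triple;
use wasmtime_environ::Tunables;
use wasmtime_jit::target_tunables;

fn host_tunables() -> Tunables {
    // On Windows this returns the reduced static-memory bounds chosen above.
    target_tunables(&Triple::host())
}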