Dumped code from the wasm2cretonne repo

Denis Merigoux
2017-08-10 16:05:04 -07:00
parent 378e7cfe6b
commit 234e72a5b3
400 changed files with 3552 additions and 1 deletion

lib/wasmstandalone/.gitignore

@@ -0,0 +1,3 @@
target/
**/*.rs.bk
Cargo.lock

lib/wasmstandalone/Cargo.toml

@@ -0,0 +1,11 @@
[package]
name = "wasmstandalone"
version = "0.0.0"
authors = ["The Cretonne Project Developers"]
publish = false

[dependencies]
cretonne = { path = "../cretonne" }
cton-frontend = { path = "../frontend" }
wasm2cretonne = { path = "../wasm2cretonne" }
region = "0.0.8"

lib/wasmstandalone/src/execution.rs

@@ -0,0 +1,256 @@
use cretonne::Context;
use cretonne::settings;
use cretonne::isa::{self, TargetIsa};
use cretonne::verify_function;
use cretonne::verifier;
use cretonne::settings::Configurable;
use cretonne::result::CtonError;
use cretonne::ir::entities::AnyEntity;
use cretonne::ir::{self, Ebb, FuncRef, JumpTable, Function};
use cretonne::binemit::{RelocSink, Reloc, CodeOffset};
use wasm2cretonne::{TranslationResult, FunctionTranslation, ImportMappings, FunctionIndex};
use std::mem::transmute;
use region::Protection;
use region::protect;
use std::collections::HashMap;
use std::ptr::write_unaligned;
use std::fmt::Write;
type RelocRef = u16;
// Implementation of a relocation sink that just saves all the information for later
struct StandaloneRelocSink {
ebbs: HashMap<RelocRef, (Ebb, CodeOffset)>,
funcs: HashMap<RelocRef, (FuncRef, CodeOffset)>,
jts: HashMap<RelocRef, (JumpTable, CodeOffset)>,
}
// Contains all the metadata necessary to perform relocations
enum FunctionMetaData {
Import(),
Local {
relocs: StandaloneRelocSink,
imports: ImportMappings,
il_func: Function,
},
}
impl RelocSink for StandaloneRelocSink {
fn reloc_ebb(&mut self, offset: CodeOffset, reloc: Reloc, ebb: Ebb) {
self.ebbs.insert(reloc.0, (ebb, offset));
}
fn reloc_func(&mut self, offset: CodeOffset, reloc: Reloc, func: FuncRef) {
self.funcs.insert(reloc.0, (func, offset));
}
fn reloc_jt(&mut self, offset: CodeOffset, reloc: Reloc, jt: JumpTable) {
self.jts.insert(reloc.0, (jt, offset));
}
}
impl StandaloneRelocSink {
fn new() -> StandaloneRelocSink {
StandaloneRelocSink {
ebbs: HashMap::new(),
funcs: HashMap::new(),
jts: HashMap::new(),
}
}
}
/// Structure containing the compiled code of the functions, ready to be executed.
pub struct ExecutableCode {
functions_code: Vec<Vec<u8>>,
start_index: FunctionIndex,
}
/// Compiles a module that has been translated with the `StandaloneRuntime` runtime implementation.
pub fn compile_module(trans_result: &TranslationResult) -> Result<ExecutableCode, String> {
let mut shared_builder = settings::builder();
shared_builder
.enable("enable_verifier")
.expect("Missing enable_verifier setting");
shared_builder
.set("is_64bit", "1")
.expect("Missing 64bits setting");
let isa = match isa::lookup("intel") {
Err(_) => {
panic!("the target ISA is not available")
}
Ok(mut isa_builder) => {
isa_builder
.enable("haswell")
.expect("Missing haswell setting");
isa_builder.finish(settings::Flags::new(&shared_builder))
}
};
let mut functions_metadata = Vec::new();
let mut functions_code = Vec::new();
for (function_index, function) in trans_result.functions.iter().enumerate() {
let mut context = Context::new();
let (il, imports) = match function {
&FunctionTranslation::Import() => {
if trans_result.start_index.is_some() &&
trans_result.start_index.unwrap() == function_index {
return Err(String::from("start function should not be an import"));
} else {
functions_code.push(Vec::new());
functions_metadata.push(FunctionMetaData::Import());
continue;
}
}
&FunctionTranslation::Code {
ref il,
ref imports,
..
} => (il.clone(), imports.clone()),
};
verify_function(&il, None).unwrap();
context.func = il;
let code_size = context
.compile(&*isa)
.map_err(|e| pretty_error(&context.func, Some(&*isa), e))? as
usize;
if code_size == 0 {
return Err(String::from("no code generated by Cretonne"));
}
let mut code_buf: Vec<u8> = Vec::with_capacity(code_size);
code_buf.resize(code_size, 0);
let mut relocsink = StandaloneRelocSink::new();
context.emit_to_memory(code_buf.as_mut_ptr(), &mut relocsink, &*isa);
functions_metadata.push(FunctionMetaData::Local {
relocs: relocsink,
imports: imports,
il_func: context.func,
});
functions_code.push(code_buf);
}
// After having emitted the code to memory, we deal with relocations
relocate(&functions_metadata, &mut functions_code);
match trans_result.start_index {
None => Err(String::from("No start function defined, aborting execution")),
Some(index) => {
Ok(ExecutableCode {
functions_code,
start_index: index,
})
}
}
}
/// Jumps to the code region of memory and executes the start function of the module.
pub fn execute(exec: ExecutableCode) -> Result<(), String> {
let code_buf = &exec.functions_code[exec.start_index];
unsafe {
match protect(code_buf.as_ptr(),
code_buf.len(),
Protection::ReadWriteExecute) {
Ok(()) => (),
Err(err) => {
return Err(format!("failed to give executable permission to code: {}",
err.description()))
}
};
// Rather than writing inline assembly to jump to the code region, we use the fact that
// the Rust ABI for calling a function with no arguments and no return value matches the
// one of the generated code. Thanks to this, we can transmute the code region into a
// first-class Rust function and call it.
// TODO: the Rust callee-saved registers will be overwritten by the executed code; inline
// assembly that spills these registers to the stack and restores them after the call is
// needed.
let start_func = transmute::<_, fn()>(code_buf.as_ptr());
// The code below saves the Intel callee-saved registers. It is not activated because
// inline ASM is not supported by the stable Rust compiler.
/*asm!("push rax
push rcx
push rdx
push rsi
push rdi
push r8
push r9
push r10
push r11
" :::: "intel", "volatile");*/
start_func();
/*asm!("pop r11
pop r10
pop r9
pop r8
pop rdi
pop rsi
pop rdx
pop rcx
pop rax
" :::: "intel", "volatile");*/
Ok(())
}
}
/// Performs the relocations inside the function bytecode, provided the necessary metadata.
fn relocate(functions_metadata: &[FunctionMetaData], functions_code: &mut Vec<Vec<u8>>) {
// The relocations are relative to the relocation's address plus four bytes
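// Worked example (hypothetical numbers): for a call recorded by the relocation sink at code
// offset 0x10 inside this function, the value written below is
// target_func_address - (this_function_code_base + 0x10 + 4),
// a 32-bit displacement measured from the relocation's address plus four bytes.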
for (func_index, function_in_memory) in functions_metadata.iter().enumerate() {
match function_in_memory {
&FunctionMetaData::Import() => continue,
&FunctionMetaData::Local {
ref relocs,
ref imports,
ref il_func,
} => {
for (_, &(func_ref, offset)) in relocs.funcs.iter() {
let target_func_index = imports.functions[&func_ref];
let target_func_address: isize = functions_code[target_func_index].as_ptr() as
isize;
unsafe {
let reloc_address: isize = functions_code[func_index]
.as_mut_ptr()
.offset(offset as isize + 4) as
isize;
let reloc_delta_i32: i32 = (target_func_address - reloc_address) as i32;
write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
}
}
for (_, &(ebb, offset)) in relocs.ebbs.iter() {
unsafe {
let reloc_address: isize = functions_code[func_index]
.as_mut_ptr()
.offset(offset as isize + 4) as
isize;
let target_ebb_address: isize =
functions_code[func_index]
.as_ptr()
.offset(il_func.offsets[ebb] as isize) as
isize;
let reloc_delta_i32: i32 = (target_ebb_address - reloc_address) as i32;
write_unaligned(reloc_address as *mut i32, reloc_delta_i32);
}
}
// TODO: deal with jumptable relocations
}
}
}
}
/// Pretty-print a verifier error.
pub fn pretty_verifier_error(func: &Function,
isa: Option<&TargetIsa>,
err: verifier::Error)
-> String {
let mut msg = err.to_string();
match err.location {
AnyEntity::Inst(inst) => {
write!(msg, "\n{}: {}\n\n", inst, func.dfg.display_inst(inst, isa)).unwrap()
}
_ => msg.push('\n'),
}
write!(msg, "{}", func.display(isa)).unwrap();
msg
}
/// Pretty-print a Cretonne error.
pub fn pretty_error(func: &ir::Function, isa: Option<&TargetIsa>, err: CtonError) -> String {
if let CtonError::Verifier(e) = err {
pretty_verifier_error(func, isa, e)
} else {
err.to_string()
}
}
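
As a minimal usage sketch (hypothetical driver code, not part of this diff), the two public entry points above compose as follows, assuming a `TranslationResult` obtained from wasm2cretonne:

// Hypothetical driver: compile every translated function, then jump to the start function.
fn compile_and_run(trans_result: &TranslationResult) -> Result<(), String> {
    let exec: ExecutableCode = compile_module(trans_result)?;
    execute(exec)
}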

lib/wasmstandalone/src/lib.rs

@@ -0,0 +1,15 @@
//! Standalone JIT-style runtime for WebAssembly using Cretonne. Provides functions to translate
//! `get_global`, `set_global`, `current_memory`, `grow_memory` and `call_indirect` that hardcode
//! into the translation the base addresses of the memory regions that will hold the globals,
//! tables and linear memories.
extern crate cretonne;
extern crate wasm2cretonne;
extern crate cton_frontend;
extern crate region;
mod execution;
mod standalone;
pub use execution::{compile_module, execute, ExecutableCode};
pub use standalone::StandaloneRuntime;
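
To show the pipeline described by the crate doc comment above, a rough end-to-end sketch; only `StandaloneRuntime`, `compile_module` and `execute` come from this commit, and the signature used here for `wasm2cretonne::translate_module` is an assumption made for illustration:

extern crate wasm2cretonne;
extern crate wasmstandalone;

use wasmstandalone::{compile_module, execute, StandaloneRuntime};

fn run_module(wasm_bytes: &[u8]) -> Result<(), String> {
    // The runtime records globals, tables and memories during translation and owns the
    // memory regions whose base addresses get hardcoded into the generated IL.
    let mut runtime = StandaloneRuntime::new();
    // Assumed (hypothetical) signature: translate_module(&[u8], &mut WasmRuntime)
    // -> Result<TranslationResult, String>.
    let trans_result = wasm2cretonne::translate_module(wasm_bytes, &mut runtime)?;
    let exec = compile_module(&trans_result)?;
    execute(exec)
}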

lib/wasmstandalone/src/standalone.rs

@@ -0,0 +1,332 @@
use wasm2cretonne::{Local, FunctionIndex, GlobalIndex, TableIndex, MemoryIndex, RawByte,
MemoryAddress, Global, GlobalInit, Table, Memory, WasmRuntime};
use cton_frontend::FunctionBuilder;
use cretonne::ir::{MemFlags, Value, InstBuilder, SigRef, FuncRef, ExtFuncData, FunctionName,
Signature, ArgumentType, CallConv};
use cretonne::ir::types::*;
use cretonne::ir::condcodes::IntCC;
use cretonne::ir::immediates::Offset32;
use std::mem::transmute;
use std::ptr::copy_nonoverlapping;
use std::ptr::write;
#[derive(Clone, Debug)]
enum TableElement {
Trap(),
Function(FunctionIndex),
}
struct GlobalInfo {
global: Global,
offset: usize,
}
struct GlobalsData {
data: Vec<RawByte>,
info: Vec<GlobalInfo>,
}
struct TableData {
data: Vec<MemoryAddress>,
elements: Vec<TableElement>,
info: Table,
}
struct MemoryData {
data: Vec<RawByte>,
info: Memory,
}
const PAGE_SIZE: usize = 65536;
/// Object containing the standalone runtime information. To be passed after creation as argument
/// to [`wasm2cretonne::translate_module`](../wasm2cretonne/fn.translate_module.html).
pub struct StandaloneRuntime {
globals: GlobalsData,
tables: Vec<TableData>,
memories: Vec<MemoryData>,
instantiated: bool,
has_current_memory: Option<FuncRef>,
has_grow_memory: Option<FuncRef>,
}
impl StandaloneRuntime {
/// Allocates the runtime data structures.
pub fn new() -> StandaloneRuntime {
StandaloneRuntime {
globals: GlobalsData {
data: Vec::new(),
info: Vec::new(),
},
tables: Vec::new(),
memories: Vec::new(),
instantiated: false,
has_current_memory: None,
has_grow_memory: None,
}
}
}
/// This `WasmRuntime` implementation is used by
/// [`wasm2cretonne::translate_module`](../wasm2cretonne/fn.translate_module.html) because it
/// knows how to translate runtime-dependent wasm instructions. These functions should not be
/// called directly by the user.
impl WasmRuntime for StandaloneRuntime {
fn translate_get_global(&self,
builder: &mut FunctionBuilder<Local>,
global_index: GlobalIndex)
-> Value {
debug_assert!(self.instantiated);
let ty = self.globals.info[global_index as usize].global.ty;
let offset = self.globals.info[global_index as usize].offset;
let memflags = MemFlags::new();
let memoffset = Offset32::new(offset as i32);
let addr: i64 = unsafe { transmute(self.globals.data.as_ptr()) };
let addr_val = builder.ins().iconst(I64, addr);
builder.ins().load(ty, memflags, addr_val, memoffset)
}
fn translate_set_global(&self,
builder: &mut FunctionBuilder<Local>,
global_index: GlobalIndex,
val: Value) {
let offset = self.globals.info[global_index as usize].offset;
let memflags = MemFlags::new();
let memoffset = Offset32::new(offset as i32);
let addr: i64 = unsafe { transmute(self.globals.data.as_ptr()) };
let addr_val = builder.ins().iconst(I64, addr);
builder.ins().store(memflags, val, addr_val, memoffset);
}
fn translate_memory_base_address(&self,
builder: &mut FunctionBuilder<Local>,
memory_index: MemoryIndex)
-> Value {
let addr: i64 = unsafe { transmute(self.memories[memory_index].data.as_ptr()) };
builder.ins().iconst(I64, addr)
}
fn translate_grow_memory(&mut self,
builder: &mut FunctionBuilder<Local>,
pages: Value)
-> Value {
debug_assert!(self.instantiated);
let grow_mem_func = match self.has_grow_memory {
Some(grow_mem_func) => grow_mem_func,
None => {
let sig_ref =
builder.import_signature(Signature {
call_conv: CallConv::Native,
argument_bytes: None,
argument_types: vec![ArgumentType::new(I32)],
return_types: vec![ArgumentType::new(I32)],
});
builder.import_function(ExtFuncData {
name: FunctionName::new("grow_memory"),
signature: sig_ref,
})
}
};
self.has_grow_memory = Some(grow_mem_func);
let call_inst = builder.ins().call(grow_mem_func, &[pages]);
*builder.inst_results(call_inst).first().unwrap()
}
fn translate_current_memory(&mut self, builder: &mut FunctionBuilder<Local>) -> Value {
debug_assert!(self.instantiated);
let cur_mem_func = match self.has_current_memory {
Some(cur_mem_func) => cur_mem_func,
None => {
let sig_ref = builder.import_signature(Signature {
call_conv: CallConv::Native,
argument_bytes: None,
argument_types: Vec::new(),
return_types:
vec![ArgumentType::new(I32)],
});
builder.import_function(ExtFuncData {
name: FunctionName::new("current_memory"),
signature: sig_ref,
})
}
};
self.has_current_memory = Some(cur_mem_func);
let call_inst = builder.ins().call(cur_mem_func, &[]);
*builder.inst_results(call_inst).first().unwrap()
}
fn translate_call_indirect<'a>(&self,
builder: &'a mut FunctionBuilder<Local>,
sig_ref: SigRef,
index_val: Value,
call_args: &[Value])
-> &'a [Value] {
let trap_ebb = builder.create_ebb();
let continue_ebb = builder.create_ebb();
let size_val = builder.ins().iconst(I32, self.tables[0].info.size as i64);
let zero_val = builder.ins().iconst(I32, 0);
builder
.ins()
.br_icmp(IntCC::UnsignedLessThan, index_val, zero_val, trap_ebb, &[]);
builder
.ins()
.br_icmp(IntCC::UnsignedGreaterThanOrEqual,
index_val,
size_val,
trap_ebb,
&[]);
builder.seal_block(trap_ebb);
let offset_val = builder.ins().imul_imm(index_val, 4);
let base_table_addr: i64 = unsafe { transmute(self.tables[0].data.as_ptr()) };
let table_addr_val = builder.ins().iconst(I32, base_table_addr);
let table_entry_addr_val = builder.ins().iadd(table_addr_val, offset_val);
let memflags = MemFlags::new();
let memoffset = Offset32::new(0);
let table_entry_val = builder
.ins()
.load(I32, memflags, table_entry_addr_val, memoffset);
let call_inst = builder
.ins()
.call_indirect(sig_ref, table_entry_val, call_args);
builder.ins().jump(continue_ebb, &[]);
builder.seal_block(continue_ebb);
builder.switch_to_block(trap_ebb, &[]);
builder.ins().trap();
builder.switch_to_block(continue_ebb, &[]);
builder.inst_results(call_inst)
}
fn begin_translation(&mut self) {
debug_assert!(!self.instantiated);
self.instantiated = true;
// At instantiation, we allocate memory for the globals, the memories and the tables
// First the globals
let mut globals_data_size = 0;
for globalinfo in self.globals.info.iter_mut() {
globalinfo.offset = globals_data_size;
globals_data_size += globalinfo.global.ty.bytes() as usize;
}
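// Illustrative example (hypothetical globals): for declared globals of types [I32, I64, F32],
// the loop above assigns offsets 0, 4 and 12, and globals_data_size ends up as 16 bytes.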
self.globals.data.resize(globals_data_size as usize, 0);
for globalinfo in self.globals.info.iter() {
match globalinfo.global.initializer {
GlobalInit::I32Const(val) => unsafe {
write(self.globals
.data
.as_mut_ptr()
.offset(globalinfo.offset as isize) as
*mut i32,
val)
},
GlobalInit::I64Const(val) => unsafe {
write(self.globals
.data
.as_mut_ptr()
.offset(globalinfo.offset as isize) as
*mut i64,
val)
},
GlobalInit::F32Const(val) => unsafe {
write(self.globals
.data
.as_mut_ptr()
.offset(globalinfo.offset as isize) as
*mut f32,
transmute(val))
},
GlobalInit::F64Const(val) => unsafe {
write(self.globals
.data
.as_mut_ptr()
.offset(globalinfo.offset as isize) as
*mut f64,
transmute(val))
},
GlobalInit::Import() => {
// We don't initialize it here; this is inter-module linking
// TODO: support inter-module imports
}
GlobalInit::GlobalRef(index) => {
let ref_offset = self.globals.info[index].offset;
let size = globalinfo.global.ty.bytes();
unsafe {
let dst = self.globals
.data
.as_mut_ptr()
.offset(globalinfo.offset as isize);
let src = self.globals.data.as_ptr().offset(ref_offset as isize);
copy_nonoverlapping(src, dst, size as usize)
}
}
}
}
}
fn next_function(&mut self) {
self.has_current_memory = None;
self.has_grow_memory = None;
}
fn declare_global(&mut self, global: Global) {
debug_assert!(!self.instantiated);
self.globals
.info
.push(GlobalInfo {
global: global,
offset: 0,
});
}
fn declare_table(&mut self, table: Table) {
debug_assert!(!self.instantiated);
let mut elements_vec = Vec::with_capacity(table.size as usize);
elements_vec.resize(table.size as usize, TableElement::Trap());
let mut addresses_vec = Vec::with_capacity(table.size as usize);
addresses_vec.resize(table.size as usize, 0);
self.tables
.push(TableData {
info: table,
data: addresses_vec,
elements: elements_vec,
});
}
fn declare_table_elements(&mut self,
table_index: TableIndex,
offset: usize,
elements: &[FunctionIndex]) {
debug_assert!(!self.instantiated);
for (i, elt) in elements.iter().enumerate() {
self.tables[table_index].elements[offset as usize + i] = TableElement::Function(*elt);
}
}
fn declare_memory(&mut self, memory: Memory) {
debug_assert!(!self.instantiated);
let mut memory_vec = Vec::with_capacity(memory.pages_count as usize * PAGE_SIZE);
memory_vec.resize(memory.pages_count as usize * PAGE_SIZE, 0);
self.memories
.push(MemoryData {
info: memory,
data: memory_vec,
});
}
fn declare_data_initialization(&mut self,
memory_index: MemoryIndex,
offset: usize,
data: &[u8])
-> Result<(), String> {
if offset + data.len() > self.memories[memory_index].info.pages_count * PAGE_SIZE {
return Err(String::from("initialization data out of bounds"));
}
self.memories[memory_index].data[offset..offset + data.len()].copy_from_slice(data);
Ok(())
}
}
/// Convenience functions for the user, to be called after execution for debugging purposes.
impl StandaloneRuntime {
/// Returns a slice of the contents of allocated linear memory.
pub fn inspect_memory(&self, memory_index: usize, address: usize, len: usize) -> &[u8] {
&self.memories
.get(memory_index)
.expect(format!("no memory for index {}", memory_index).as_str())
.data
[address..address + len]
}
/// Shows the value of a global variable.
pub fn inspect_global(&self, global_index: usize) -> &[u8] {
let (offset, len) = (self.globals.info[global_index].offset,
self.globals.info[global_index].global.ty.bytes() as usize);
&self.globals.data[offset..offset + len]
}
}
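
For debugging, a hedged sketch of how these inspection helpers might be called after `execute` returns (the indices and lengths are hypothetical):

// Hypothetical post-execution check: dump the first global and the first 16 bytes of
// linear memory 0 from the StandaloneRuntime that was used for the translation.
fn dump_state(runtime: &StandaloneRuntime) {
    println!("global 0 bytes: {:?}", runtime.inspect_global(0));
    println!("memory 0 [0..16]: {:?}", runtime.inspect_memory(0, 0, 16));
}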