Merge pull request #462 from peterhuene/windows-unwind

Implement registering JIT unwind information on Windows.
This commit is contained in:
Peter Huene
2019-11-07 10:35:25 -08:00
committed by GitHub
16 changed files with 382 additions and 131 deletions

View File

@@ -206,38 +206,6 @@ fn ignore(testsuite: &str, testname: &str, strategy: &str) -> bool {
// ABI only has a single return register, so we need to wait on full // ABI only has a single return register, so we need to wait on full
// multi-value support in Cranelift. // multi-value support in Cranelift.
(_, _) if is_multi_value => true, (_, _) if is_multi_value => true,
// Until Windows unwind information is added we must disable SIMD spec tests that trap.
(_, _) if testname.starts_with("simd") => return true,
("spec_testsuite", "address") => true,
("spec_testsuite", "align") => true,
("spec_testsuite", "call") => true,
("spec_testsuite", "call_indirect") => true,
("spec_testsuite", "conversions") => true,
("spec_testsuite", "elem") => true,
("spec_testsuite", "fac") => true,
("spec_testsuite", "func_ptrs") => true,
("spec_testsuite", "globals") => true,
("spec_testsuite", "i32") => true,
("spec_testsuite", "i64") => true,
("spec_testsuite", "f32") => true,
("spec_testsuite", "f64") => true,
("spec_testsuite", "if") => true,
("spec_testsuite", "imports") => true,
("spec_testsuite", "int_exprs") => true,
("spec_testsuite", "linking") => true,
("spec_testsuite", "memory_grow") => true,
("spec_testsuite", "memory_trap") => true,
("spec_testsuite", "resizing") => true,
("spec_testsuite", "select") => true,
("spec_testsuite", "skip_stack_guard_page") => true,
("spec_testsuite", "start") => true,
("spec_testsuite", "traps") => true,
("spec_testsuite", "unreachable") => true,
("spec_testsuite", "unwind") => true,
("misc_testsuite", "misc_traps") => true,
("misc_testsuite", "stack_overflow") => true,
(_, _) => false, (_, _) => false,
}; };
} }

View File

@@ -16,7 +16,7 @@ use cranelift_entity::{EntityRef, PrimaryMap};
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use cranelift_wasm::{DefinedFuncIndex, FuncIndex}; use cranelift_wasm::{DefinedFuncIndex, FuncIndex};
use target_lexicon::HOST; use target_lexicon::HOST;
use wasmtime_environ::{Export, Module}; use wasmtime_environ::{CompiledFunction, Export, Module};
use wasmtime_jit::CodeMemory; use wasmtime_jit::CodeMemory;
use wasmtime_runtime::{Imports, InstanceHandle, VMContext, VMFunctionBody}; use wasmtime_runtime::{Imports, InstanceHandle, VMContext, VMFunctionBody};
@@ -184,9 +184,16 @@ fn make_trampoline(
) )
.expect("compile_and_emit"); .expect("compile_and_emit");
let mut unwind_info = Vec::new();
context.emit_unwind_info(isa, &mut unwind_info);
code_memory code_memory
.allocate_copy_of_byte_slice(&code_buf) .allocate_for_function(&CompiledFunction {
.expect("allocate_copy_of_byte_slice") body: code_buf,
jt_offsets: context.func.jt_offsets,
unwind_info,
})
.expect("allocate_for_function")
.as_ptr() .as_ptr()
} }

View File

@@ -9,8 +9,7 @@ use cranelift_codegen::{binemit, ir, isa};
use cranelift_entity::{EntityRef, PrimaryMap}; use cranelift_entity::{EntityRef, PrimaryMap};
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext}; use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use cranelift_wasm::{DefinedFuncIndex, FuncIndex}; use cranelift_wasm::{DefinedFuncIndex, FuncIndex};
//use target_lexicon::HOST; use wasmtime_environ::{CompiledFunction, Export, Module};
use wasmtime_environ::{Export, Module};
use wasmtime_jit::CodeMemory; use wasmtime_jit::CodeMemory;
use wasmtime_runtime::{InstanceHandle, VMContext, VMFunctionBody}; use wasmtime_runtime::{InstanceHandle, VMContext, VMFunctionBody};
@@ -185,9 +184,16 @@ fn make_trampoline(
) )
.expect("compile_and_emit"); .expect("compile_and_emit");
let mut unwind_info = Vec::new();
context.emit_unwind_info(isa, &mut unwind_info);
code_memory code_memory
.allocate_copy_of_byte_slice(&code_buf) .allocate_for_function(&CompiledFunction {
.expect("allocate_copy_of_byte_slice") body: code_buf,
jt_offsets: context.func.jt_offsets,
unwind_info,
})
.expect("allocate_for_function")
.as_ptr() .as_ptr()
} }

View File

@@ -1,7 +1,7 @@
use super::config::tests::test_prolog; use super::config::tests::test_prolog;
use super::*; use super::*;
use crate::address_map::{FunctionAddressMap, InstructionAddressMap}; use crate::address_map::{FunctionAddressMap, InstructionAddressMap};
use crate::compilation::{CodeAndJTOffsets, Relocation, RelocationTarget, TrapInformation}; use crate::compilation::{CompiledFunction, Relocation, RelocationTarget, TrapInformation};
use crate::module::{MemoryPlan, MemoryStyle, Module}; use crate::module::{MemoryPlan, MemoryStyle, Module};
use alloc::boxed::Box; use alloc::boxed::Box;
use alloc::vec::Vec; use alloc::vec::Vec;
@@ -258,9 +258,10 @@ fn new_module_cache_data(rng: &mut impl Rng) -> ModuleCacheData {
*v = (j as u32) * 3 / 4 *v = (j as u32) * 3 / 4
} }
}); });
CodeAndJTOffsets { CompiledFunction {
body: (0..(i * 3 / 2)).collect(), body: (0..(i * 3 / 2)).collect(),
jt_offsets: sm, jt_offsets: sm,
unwind_info: (0..(i * 3 / 2)).collect(),
} }
}) })
.collect(); .collect();

View File

@@ -12,17 +12,20 @@ use serde::{Deserialize, Serialize};
use std::ops::Range; use std::ops::Range;
use thiserror::Error; use thiserror::Error;
/// Compiled machine code: body and jump table offsets. /// Compiled function: machine code body, jump table offsets, and unwind information.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct CodeAndJTOffsets { pub struct CompiledFunction {
/// The function body. /// The function body.
pub body: Vec<u8>, pub body: Vec<u8>,
/// The jump tables offsets (in the body). /// The jump tables offsets (in the body).
pub jt_offsets: ir::JumpTableOffsets, pub jt_offsets: ir::JumpTableOffsets,
/// The unwind information.
pub unwind_info: Vec<u8>,
} }
type Functions = PrimaryMap<DefinedFuncIndex, CodeAndJTOffsets>; type Functions = PrimaryMap<DefinedFuncIndex, CompiledFunction>;
/// The result of compiling a WebAssembly module's functions. /// The result of compiling a WebAssembly module's functions.
#[derive(Deserialize, Serialize, Debug, PartialEq, Eq)] #[derive(Deserialize, Serialize, Debug, PartialEq, Eq)]
@@ -40,21 +43,22 @@ impl Compilation {
/// Allocates the compilation result with the given function bodies. /// Allocates the compilation result with the given function bodies.
pub fn from_buffer( pub fn from_buffer(
buffer: Vec<u8>, buffer: Vec<u8>,
functions: impl IntoIterator<Item = (Range<usize>, ir::JumpTableOffsets)>, functions: impl IntoIterator<Item = (Range<usize>, ir::JumpTableOffsets, Range<usize>)>,
) -> Self { ) -> Self {
Self::new( Self::new(
functions functions
.into_iter() .into_iter()
.map(|(range, jt_offsets)| CodeAndJTOffsets { .map(|(body_range, jt_offsets, unwind_range)| CompiledFunction {
body: buffer[range].to_vec(), body: buffer[body_range].to_vec(),
jt_offsets, jt_offsets,
unwind_info: buffer[unwind_range].to_vec(),
}) })
.collect(), .collect(),
) )
} }
/// Gets the bytes of a single function /// Gets the bytes of a single function
pub fn get(&self, func: DefinedFuncIndex) -> &CodeAndJTOffsets { pub fn get(&self, func: DefinedFuncIndex) -> &CompiledFunction {
&self.functions[func] &self.functions[func]
} }
@@ -67,7 +71,7 @@ impl Compilation {
pub fn get_jt_offsets(&self) -> PrimaryMap<DefinedFuncIndex, ir::JumpTableOffsets> { pub fn get_jt_offsets(&self) -> PrimaryMap<DefinedFuncIndex, ir::JumpTableOffsets> {
self.functions self.functions
.iter() .iter()
.map(|(_, code_and_jt)| code_and_jt.jt_offsets.clone()) .map(|(_, func)| func.jt_offsets.clone())
.collect::<PrimaryMap<DefinedFuncIndex, _>>() .collect::<PrimaryMap<DefinedFuncIndex, _>>()
} }
} }
@@ -88,7 +92,7 @@ pub struct Iter<'a> {
} }
impl<'a> Iterator for Iter<'a> { impl<'a> Iterator for Iter<'a> {
type Item = &'a CodeAndJTOffsets; type Item = &'a CompiledFunction;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
self.iterator.next().map(|(_, b)| b) self.iterator.next().map(|(_, b)| b)

View File

@@ -5,7 +5,7 @@ use crate::address_map::{
}; };
use crate::cache::{ModuleCacheData, ModuleCacheEntry}; use crate::cache::{ModuleCacheData, ModuleCacheEntry};
use crate::compilation::{ use crate::compilation::{
CodeAndJTOffsets, Compilation, CompileError, Relocation, RelocationTarget, Relocations, Compilation, CompileError, CompiledFunction, Relocation, RelocationTarget, Relocations,
TrapInformation, Traps, TrapInformation, Traps,
}; };
use crate::func_environ::{ use crate::func_environ::{
@@ -235,6 +235,7 @@ impl crate::compilation::Compiler for Cranelift {
)?; )?;
let mut code_buf: Vec<u8> = Vec::new(); let mut code_buf: Vec<u8> = Vec::new();
let mut unwind_info = Vec::new();
let mut reloc_sink = RelocSink::new(func_index); let mut reloc_sink = RelocSink::new(func_index);
let mut trap_sink = TrapSink::new(); let mut trap_sink = TrapSink::new();
let mut stackmap_sink = binemit::NullStackmapSink {}; let mut stackmap_sink = binemit::NullStackmapSink {};
@@ -246,7 +247,7 @@ impl crate::compilation::Compiler for Cranelift {
&mut stackmap_sink, &mut stackmap_sink,
)?; )?;
let jt_offsets = context.func.jt_offsets.clone(); context.emit_unwind_info(isa, &mut unwind_info);
let address_transform = if generate_debug_info { let address_transform = if generate_debug_info {
let body_len = code_buf.len(); let body_len = code_buf.len();
@@ -261,16 +262,15 @@ impl crate::compilation::Compiler for Cranelift {
None None
}; };
let stack_slots = context.func.stack_slots.clone();
Ok(( Ok((
code_buf, code_buf,
jt_offsets, context.func.jt_offsets,
reloc_sink.func_relocs, reloc_sink.func_relocs,
address_transform, address_transform,
ranges, ranges,
stack_slots, context.func.stack_slots,
trap_sink.traps, trap_sink.traps,
unwind_info,
)) ))
}, },
) )
@@ -285,10 +285,12 @@ impl crate::compilation::Compiler for Cranelift {
ranges, ranges,
sss, sss,
function_traps, function_traps,
unwind_info,
)| { )| {
functions.push(CodeAndJTOffsets { functions.push(CompiledFunction {
body: function, body: function,
jt_offsets: func_jt_offsets, jt_offsets: func_jt_offsets,
unwind_info,
}); });
relocations.push(relocs); relocations.push(relocs);
if let Some(address_transform) = address_transform { if let Some(address_transform) = address_transform {

View File

@@ -46,8 +46,8 @@ pub use crate::address_map::{
}; };
pub use crate::cache::{create_new_config as cache_create_new_config, init as cache_init}; pub use crate::cache::{create_new_config as cache_create_new_config, init as cache_init};
pub use crate::compilation::{ pub use crate::compilation::{
Compilation, CompileError, Compiler, Relocation, RelocationTarget, Relocations, Compilation, CompileError, CompiledFunction, Compiler, Relocation, RelocationTarget,
TrapInformation, Traps, Relocations, TrapInformation, Traps,
}; };
pub use crate::cranelift::Cranelift; pub use crate::cranelift::Cranelift;
pub use crate::func_environ::BuiltinFunctionIndex; pub use crate::func_environ::BuiltinFunctionIndex;

View File

@@ -65,10 +65,11 @@ impl crate::compilation::Compiler for Lightbeam {
// TODO pass jump table offsets to Compilation::from_buffer() when they // TODO pass jump table offsets to Compilation::from_buffer() when they
// are implemented in lightbeam -- using empty set of offsets for now. // are implemented in lightbeam -- using empty set of offsets for now.
// TODO: pass an empty range for the unwind information until lightbeam emits it
let code_section_ranges_and_jt = code_section let code_section_ranges_and_jt = code_section
.funcs() .funcs()
.into_iter() .into_iter()
.map(|r| (r, SecondaryMap::new())); .map(|r| (r, SecondaryMap::new(), 0..0));
Ok(( Ok((
Compilation::from_buffer(code_section.buffer().to_vec(), code_section_ranges_and_jt), Compilation::from_buffer(code_section.buffer().to_vec(), code_section_ranges_and_jt),

View File

@@ -25,6 +25,9 @@ target-lexicon = { version = "0.9.0", default-features = false }
hashbrown = { version = "0.6.0", optional = true } hashbrown = { version = "0.6.0", optional = true }
wasmparser = { version = "0.39.2", default-features = false } wasmparser = { version = "0.39.2", default-features = false }
[target.'cfg(target_os = "windows")'.dependencies]
winapi = { version = "0.3.7", features = ["winnt", "impl-default"] }
[features] [features]
default = ["std"] default = ["std"]
std = ["cranelift-codegen/std", "cranelift-wasm/std", "wasmtime-environ/std", "wasmtime-debug/std", "wasmtime-runtime/std", "wasmparser/std"] std = ["cranelift-codegen/std", "cranelift-wasm/std", "wasmtime-environ/std", "wasmtime-debug/std", "wasmtime-runtime/std", "wasmparser/std"]

View File

@@ -1,16 +1,18 @@
//! Memory management for executable code. //! Memory management for executable code.
use crate::function_table::FunctionTable;
use alloc::boxed::Box; use alloc::boxed::Box;
use alloc::string::String; use alloc::string::String;
use alloc::vec::Vec; use alloc::vec::Vec;
use core::{cmp, mem}; use core::{cmp, mem};
use region; use region;
use wasmtime_environ::{Compilation, CompiledFunction};
use wasmtime_runtime::{Mmap, VMFunctionBody}; use wasmtime_runtime::{Mmap, VMFunctionBody};
/// Memory manager for executable code. /// Memory manager for executable code.
pub struct CodeMemory { pub struct CodeMemory {
current: Mmap, current: (Mmap, FunctionTable),
mmaps: Vec<Mmap>, mmaps: Vec<(Mmap, FunctionTable)>,
position: usize, position: usize,
published: usize, published: usize,
} }
@@ -19,30 +21,144 @@ impl CodeMemory {
/// Create a new `CodeMemory` instance. /// Create a new `CodeMemory` instance.
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
current: Mmap::new(), current: (Mmap::new(), FunctionTable::new()),
mmaps: Vec::new(), mmaps: Vec::new(),
position: 0, position: 0,
published: 0, published: 0,
} }
} }
/// Allocate a continuous memory block for a single compiled function.
/// TODO: Reorganize the code that calls this to emit code directly into the
/// mmap region rather than into a Vec that we need to copy in.
pub fn allocate_for_function(
&mut self,
func: &CompiledFunction,
) -> Result<&mut [VMFunctionBody], String> {
let size = Self::function_allocation_size(func);
let start = self.position as u32;
let (buf, table) = self.allocate(size)?;
let (_, _, _, vmfunc) = Self::copy_function(func, start, buf, table);
Ok(vmfunc)
}
/// Allocate a continuous memory block for a compilation.
///
/// Allocates memory for both the function bodies as well as function unwind data.
pub fn allocate_for_compilation(
&mut self,
compilation: &Compilation,
) -> Result<Box<[&mut [VMFunctionBody]]>, String> {
let total_len = compilation
.into_iter()
.fold(0, |acc, func| acc + Self::function_allocation_size(func));
let mut start = self.position as u32;
let (mut buf, mut table) = self.allocate(total_len)?;
let mut result = Vec::with_capacity(compilation.len());
for func in compilation.into_iter() {
let (next_start, next_buf, next_table, vmfunc) =
Self::copy_function(func, start, buf, table);
result.push(vmfunc);
start = next_start;
buf = next_buf;
table = next_table;
}
Ok(result.into_boxed_slice())
}
/// Make all allocated memory executable.
pub fn publish(&mut self) {
self.push_current(0)
.expect("failed to push current memory map");
for (m, t) in &mut self.mmaps[self.published..] {
if m.len() != 0 {
unsafe {
region::protect(m.as_mut_ptr(), m.len(), region::Protection::ReadExecute)
}
.expect("unable to make memory readonly and executable");
}
t.publish(m.as_ptr() as u64)
.expect("failed to publish function table");
}
self.published = self.mmaps.len();
}
/// Allocate `size` bytes of memory which can be made executable later by /// Allocate `size` bytes of memory which can be made executable later by
/// calling `publish()`. Note that we allocate the memory as writeable so /// calling `publish()`. Note that we allocate the memory as writeable so
/// that it can be written to and patched, though we make it readonly before /// that it can be written to and patched, though we make it readonly before
/// actually executing from it. /// actually executing from it.
/// ///
/// TODO: Add an alignment flag. /// TODO: Add an alignment flag.
fn allocate(&mut self, size: usize) -> Result<&mut [u8], String> { fn allocate(&mut self, size: usize) -> Result<(&mut [u8], &mut FunctionTable), String> {
if self.current.len() - self.position < size { if self.current.0.len() - self.position < size {
self.mmaps.push(mem::replace( self.push_current(cmp::max(0x10000, size))?;
&mut self.current,
Mmap::with_at_least(cmp::max(0x10000, size))?,
));
self.position = 0;
} }
let old_position = self.position; let old_position = self.position;
self.position += size; self.position += size;
Ok(&mut self.current.as_mut_slice()[old_position..self.position])
Ok((
&mut self.current.0.as_mut_slice()[old_position..self.position],
&mut self.current.1,
))
}
/// Calculates the allocation size of the given compiled function.
fn function_allocation_size(func: &CompiledFunction) -> usize {
if func.unwind_info.is_empty() {
func.body.len()
} else {
// Account for necessary unwind information alignment padding (32-bit)
((func.body.len() + 3) & !3) + func.unwind_info.len()
}
}
/// Copies the data of the compiled function to the given buffer.
///
/// This will also add the function to the current function table.
fn copy_function<'a>(
func: &CompiledFunction,
func_start: u32,
buf: &'a mut [u8],
table: &'a mut FunctionTable,
) -> (
u32,
&'a mut [u8],
&'a mut FunctionTable,
&'a mut [VMFunctionBody],
) {
let func_end = func_start + (func.body.len() as u32);
let (body, remainder) = buf.split_at_mut(func.body.len());
body.copy_from_slice(&func.body);
let vmfunc = Self::view_as_mut_vmfunc_slice(body);
if func.unwind_info.is_empty() {
return (func_end, remainder, table, vmfunc);
}
// Keep unwind information 32-bit aligned (round up to the nearest 4 byte boundary)
let padding = ((func.body.len() + 3) & !3) - func.body.len();
let (unwind, remainder) = remainder.split_at_mut(padding + func.unwind_info.len());
unwind[padding..].copy_from_slice(&func.unwind_info);
let unwind_start = func_end + (padding as u32);
let unwind_end = unwind_start + (func.unwind_info.len() as u32);
table.add_function(func_start, func_end, unwind_start);
(unwind_end, remainder, table, vmfunc)
} }
/// Convert a mutable slice from u8 to VMFunctionBody. /// Convert a mutable slice from u8 to VMFunctionBody.
@@ -52,51 +168,28 @@ impl CodeMemory {
unsafe { &mut *body_ptr } unsafe { &mut *body_ptr }
} }
/// Allocate enough memory to hold a copy of `slice` and copy the data into it. /// Pushes the current Mmap (and function table) and allocates a new Mmap of the given size.
/// TODO: Reorganize the code that calls this to emit code directly into the fn push_current(&mut self, new_size: usize) -> Result<(), String> {
/// mmap region rather than into a Vec that we need to copy in. let previous = mem::replace(
pub fn allocate_copy_of_byte_slice( &mut self.current,
&mut self, (
slice: &[u8], if new_size == 0 {
) -> Result<&mut [VMFunctionBody], String> { Mmap::new()
let new = self.allocate(slice.len())?; } else {
new.copy_from_slice(slice); Mmap::with_at_least(cmp::max(0x10000, new_size))?
Ok(Self::view_as_mut_vmfunc_slice(new)) },
} FunctionTable::new(),
),
);
/// Allocate enough continuous memory block for multiple code blocks. See also if previous.0.len() > 0 {
/// allocate_copy_of_byte_slice. self.mmaps.push(previous);
pub fn allocate_copy_of_byte_slices( } else {
&mut self, assert!(previous.1.len() == 0);
slices: &[&[u8]],
) -> Result<Box<[&mut [VMFunctionBody]]>, String> {
let total_len = slices.into_iter().fold(0, |acc, slice| acc + slice.len());
let new = self.allocate(total_len)?;
let mut tail = new;
let mut result = Vec::with_capacity(slices.len());
for slice in slices {
let (block, next_tail) = tail.split_at_mut(slice.len());
block.copy_from_slice(slice);
tail = next_tail;
result.push(Self::view_as_mut_vmfunc_slice(block));
} }
Ok(result.into_boxed_slice())
}
/// Make all allocated memory executable.
pub fn publish(&mut self) {
self.mmaps
.push(mem::replace(&mut self.current, Mmap::new()));
self.position = 0; self.position = 0;
for m in &mut self.mmaps[self.published..] { Ok(())
if m.len() != 0 {
unsafe {
region::protect(m.as_mut_ptr(), m.len(), region::Protection::ReadExecute)
}
.expect("unable to make memory readonly and executable");
}
}
self.published = self.mmaps.len();
} }
} }

View File

@@ -17,8 +17,8 @@ use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use cranelift_wasm::{DefinedFuncIndex, DefinedMemoryIndex, ModuleTranslationState}; use cranelift_wasm::{DefinedFuncIndex, DefinedMemoryIndex, ModuleTranslationState};
use wasmtime_debug::{emit_debugsections_image, DebugInfoData}; use wasmtime_debug::{emit_debugsections_image, DebugInfoData};
use wasmtime_environ::{ use wasmtime_environ::{
Compilation, CompileError, Compiler as _C, FunctionBodyData, Module, ModuleVmctxInfo, Compilation, CompileError, CompiledFunction, Compiler as _C, FunctionBodyData, Module,
Relocations, Traps, Tunables, VMOffsets, ModuleVmctxInfo, Relocations, Traps, Tunables, VMOffsets,
}; };
use wasmtime_runtime::{ use wasmtime_runtime::{
get_mut_trap_registry, InstantiationError, SignatureRegistry, TrapRegistrationGuard, get_mut_trap_registry, InstantiationError, SignatureRegistry, TrapRegistrationGuard,
@@ -323,7 +323,8 @@ fn make_trampoline(
builder.finalize() builder.finalize()
} }
let mut code_buf: Vec<u8> = Vec::new(); let mut code_buf = Vec::new();
let mut unwind_info = Vec::new();
let mut reloc_sink = RelocSink {}; let mut reloc_sink = RelocSink {};
let mut trap_sink = binemit::NullTrapSink {}; let mut trap_sink = binemit::NullTrapSink {};
let mut stackmap_sink = binemit::NullStackmapSink {}; let mut stackmap_sink = binemit::NullStackmapSink {};
@@ -337,8 +338,14 @@ fn make_trampoline(
) )
.map_err(|error| SetupError::Compile(CompileError::Codegen(error)))?; .map_err(|error| SetupError::Compile(CompileError::Codegen(error)))?;
context.emit_unwind_info(isa, &mut unwind_info);
Ok(code_memory Ok(code_memory
.allocate_copy_of_byte_slice(&code_buf) .allocate_for_function(&CompiledFunction {
body: code_buf,
jt_offsets: context.func.jt_offsets,
unwind_info,
})
.map_err(|message| SetupError::Instantiate(InstantiationError::Resource(message)))? .map_err(|message| SetupError::Instantiate(InstantiationError::Resource(message)))?
.as_ptr()) .as_ptr())
} }
@@ -347,14 +354,8 @@ fn allocate_functions(
code_memory: &mut CodeMemory, code_memory: &mut CodeMemory,
compilation: &Compilation, compilation: &Compilation,
) -> Result<PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>, String> { ) -> Result<PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>, String> {
// Allocate code for all function in one continuous memory block. let fat_ptrs = code_memory.allocate_for_compilation(compilation)?;
// First, collect all function bodies into vector to pass to the
// allocate_copy_of_byte_slices.
let bodies = compilation
.into_iter()
.map(|code_and_jt| &code_and_jt.body[..])
.collect::<Vec<&[u8]>>();
let fat_ptrs = code_memory.allocate_copy_of_byte_slices(&bodies)?;
// Second, create a PrimaryMap from result vector of pointers. // Second, create a PrimaryMap from result vector of pointers.
let mut result = PrimaryMap::with_capacity(compilation.len()); let mut result = PrimaryMap::with_capacity(compilation.len());
for i in 0..fat_ptrs.len() { for i in 0..fat_ptrs.len() {

View File

@@ -0,0 +1,134 @@
//! Runtime function table.
//!
//! This module is primarily used to track JIT functions on Windows for stack walking and unwind.
/// Represents a runtime function table.
///
/// On platforms other than Windows there is nothing to register with the
/// operating system for stack walking, so this is a zero-sized no-op stand-in
/// that keeps the calling code platform-independent.
#[cfg(not(target_os = "windows"))]
pub(crate) struct FunctionTable;

#[cfg(not(target_os = "windows"))]
impl FunctionTable {
    /// Creates a new, empty function table.
    pub fn new() -> Self {
        FunctionTable
    }

    /// Returns the number of functions in the table, also referred to as its 'length'.
    ///
    /// The non-Windows table never records anything, so this is always zero.
    pub fn len(&self) -> usize {
        0
    }

    /// Adds a function to the table based off of the start offset, end offset, and unwind offset.
    ///
    /// The offsets are relative to the "module base" provided when the table is published.
    ///
    /// For non-Windows platforms, this is a no-op.
    pub fn add_function(&mut self, _start: u32, _end: u32, _unwind: u32) {}

    /// Publishes the function table using the given base address.
    ///
    /// A published function table will automatically be deleted when it is dropped
    /// (on platforms where registration exists); here it trivially succeeds.
    ///
    /// For non-Windows platforms, this is a no-op.
    pub fn publish(&mut self, _base_address: u64) -> Result<(), String> {
        Ok(())
    }
}
/// Represents a runtime function table.
///
/// This is used to register JIT code with the operating system to enable stack walking and unwinding.
#[cfg(all(target_os = "windows", target_arch = "x86_64"))]
pub(crate) struct FunctionTable {
    // RUNTIME_FUNCTION entries to hand to the OS; the Vec's heap buffer is the
    // allocation registered with Windows at publish time.
    functions: Vec<winapi::um::winnt::RUNTIME_FUNCTION>,
    // Set once `publish` has run; guards against further additions or a
    // second registration of the same table.
    published: bool,
}

#[cfg(all(target_os = "windows", target_arch = "x86_64"))]
impl FunctionTable {
    /// Creates a new function table.
    pub fn new() -> Self {
        Self {
            functions: Vec::new(),
            published: false,
        }
    }

    /// Returns the number of functions in the table, also referred to as its 'length'.
    pub fn len(&self) -> usize {
        self.functions.len()
    }

    /// Adds a function to the table based off of the start offset, end offset, and unwind offset.
    ///
    /// The offsets are from the "module base", which is provided when the table is published.
    ///
    /// # Panics
    ///
    /// Panics if the table has already been published; entries must not be
    /// added after registration because the OS holds a pointer into the Vec.
    pub fn add_function(&mut self, start: u32, end: u32, unwind: u32) {
        use winapi::um::winnt;

        assert!(!self.published, "table has already been published");

        let mut entry = winnt::RUNTIME_FUNCTION::default();
        entry.BeginAddress = start;
        entry.EndAddress = end;

        // SAFETY: `u` is a union; writing the UnwindInfoAddress view is the
        // intended way to populate this RUNTIME_FUNCTION variant.
        unsafe {
            *entry.u.UnwindInfoAddress_mut() = unwind;
        }

        self.functions.push(entry);
    }

    /// Publishes the function table using the given base address.
    ///
    /// A published function table will automatically be deleted when it is dropped.
    ///
    /// # Errors
    ///
    /// Returns an error if the table was already published or if the OS
    /// rejects the registration.
    pub fn publish(&mut self, base_address: u64) -> Result<(), String> {
        use winapi::um::winnt;

        if self.published {
            return Err("function table was already published".into());
        }

        // Mark published before the early-out so an empty table also refuses
        // re-publication and further additions.
        self.published = true;

        if self.functions.is_empty() {
            // Nothing to register with the OS.
            return Ok(());
        }

        unsafe {
            // Windows heap allocations are 32-bit aligned, but assert just in case
            assert!(
                (self.functions.as_mut_ptr() as u64) % 4 == 0,
                "function table allocation was not aligned"
            );
            // Registers the entries with the OS unwinder; entry offsets are
            // interpreted relative to `base_address`. Returns 0 on failure.
            if winnt::RtlAddFunctionTable(
                self.functions.as_mut_ptr(),
                self.functions.len() as u32,
                base_address,
            ) == 0
            {
                return Err("failed to add function table".into());
            }
        }

        Ok(())
    }
}
/// Unregisters a published function table from the operating system.
///
/// The cfg gate must match the Windows `FunctionTable` definition exactly
/// (`target_os = "windows"` AND `target_arch = "x86_64"`): with a plain
/// `target_os = "windows"` gate, Windows targets on other architectures would
/// try to compile this impl against a `FunctionTable` type that is not
/// defined there, breaking the build.
#[cfg(all(target_os = "windows", target_arch = "x86_64"))]
impl Drop for FunctionTable {
    fn drop(&mut self) {
        use winapi::um::winnt;

        if self.published {
            // SAFETY: the pointer passed here identifies the same table that
            // was registered via `RtlAddFunctionTable` in `publish`; the Vec
            // buffer has not been reallocated because additions are rejected
            // after publishing.
            unsafe {
                winnt::RtlDeleteFunctionTable(self.functions.as_mut_ptr());
            }
        }
    }
}

View File

@@ -34,6 +34,7 @@ mod action;
mod code_memory; mod code_memory;
mod compiler; mod compiler;
mod context; mod context;
mod function_table;
mod instantiate; mod instantiate;
mod link; mod link;
mod namespace; mod namespace;

View File

@@ -404,7 +404,7 @@ static
__attribute__ ((warn_unused_result)) __attribute__ ((warn_unused_result))
#endif #endif
bool bool
HandleTrap(CONTEXT* context) HandleTrap(CONTEXT* context, bool reset_guard_page)
{ {
assert(sAlreadyHandlingTrap); assert(sAlreadyHandlingTrap);
@@ -412,7 +412,7 @@ HandleTrap(CONTEXT* context)
return false; return false;
} }
RecordTrap(ContextToPC(context)); RecordTrap(ContextToPC(context), reset_guard_page);
// Unwind calls longjmp, so it doesn't run the automatic // Unwind calls longjmp, so it doesn't run the automatic
// sAlreadyHandlingTrap cleanups, so reset it manually before doing // sAlreadyHandlingTrap cleanups, so reset it manually before doing
@@ -467,7 +467,8 @@ WasmTrapHandler(LPEXCEPTION_POINTERS exception)
return EXCEPTION_CONTINUE_SEARCH; return EXCEPTION_CONTINUE_SEARCH;
} }
if (!HandleTrap(exception->ContextRecord)) { if (!HandleTrap(exception->ContextRecord,
record->ExceptionCode == EXCEPTION_STACK_OVERFLOW)) {
return EXCEPTION_CONTINUE_SEARCH; return EXCEPTION_CONTINUE_SEARCH;
} }
@@ -549,7 +550,7 @@ HandleMachException(const ExceptionRequest& request)
{ {
AutoHandlingTrap aht; AutoHandlingTrap aht;
if (!HandleTrap(&context)) { if (!HandleTrap(&context, false)) {
return false; return false;
} }
} }
@@ -632,7 +633,7 @@ WasmTrapHandler(int signum, siginfo_t* info, void* context)
if (!sAlreadyHandlingTrap) { if (!sAlreadyHandlingTrap) {
AutoHandlingTrap aht; AutoHandlingTrap aht;
assert(signum == SIGSEGV || signum == SIGBUS || signum == SIGFPE || signum == SIGILL); assert(signum == SIGSEGV || signum == SIGBUS || signum == SIGFPE || signum == SIGILL);
if (HandleTrap(static_cast<CONTEXT*>(context))) { if (HandleTrap(static_cast<CONTEXT*>(context), false)) {
return; return;
} }
} }

View File

@@ -13,7 +13,7 @@ extern "C" {
int8_t CheckIfTrapAtAddress(const uint8_t* pc); int8_t CheckIfTrapAtAddress(const uint8_t* pc);
// Record the Trap code and wasm bytecode offset in TLS somewhere // Record the Trap code and wasm bytecode offset in TLS somewhere
void RecordTrap(const uint8_t* pc); void RecordTrap(const uint8_t* pc, bool reset_guard_page);
void* EnterScope(void*); void* EnterScope(void*);
void LeaveScope(void*); void LeaveScope(void*);

View File

@@ -21,6 +21,7 @@ extern "C" {
thread_local! { thread_local! {
static RECORDED_TRAP: Cell<Option<TrapDescription>> = Cell::new(None); static RECORDED_TRAP: Cell<Option<TrapDescription>> = Cell::new(None);
static JMP_BUF: Cell<*const u8> = Cell::new(ptr::null()); static JMP_BUF: Cell<*const u8> = Cell::new(ptr::null());
static RESET_GUARD_PAGE: Cell<bool> = Cell::new(false);
} }
/// Check if there is a trap at given PC /// Check if there is a trap at given PC
@@ -40,7 +41,7 @@ pub extern "C" fn CheckIfTrapAtAddress(_pc: *const u8) -> i8 {
#[doc(hidden)] #[doc(hidden)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
#[no_mangle] #[no_mangle]
pub extern "C" fn RecordTrap(pc: *const u8) { pub extern "C" fn RecordTrap(pc: *const u8, reset_guard_page: bool) {
// TODO: please see explanation in CheckIfTrapAtAddress. // TODO: please see explanation in CheckIfTrapAtAddress.
let registry = get_trap_registry(); let registry = get_trap_registry();
let trap_desc = registry let trap_desc = registry
@@ -49,6 +50,11 @@ pub extern "C" fn RecordTrap(pc: *const u8) {
source_loc: ir::SourceLoc::default(), source_loc: ir::SourceLoc::default(),
trap_code: ir::TrapCode::StackOverflow, trap_code: ir::TrapCode::StackOverflow,
}); });
if reset_guard_page {
RESET_GUARD_PAGE.with(|v| v.set(true));
}
RECORDED_TRAP.with(|data| { RECORDED_TRAP.with(|data| {
assert_eq!( assert_eq!(
data.get(), data.get(),
@@ -77,9 +83,32 @@ pub extern "C" fn GetScope() -> *const u8 {
#[allow(non_snake_case)] #[allow(non_snake_case)]
#[no_mangle] #[no_mangle]
pub extern "C" fn LeaveScope(ptr: *const u8) { pub extern "C" fn LeaveScope(ptr: *const u8) {
RESET_GUARD_PAGE.with(|v| {
if v.get() {
reset_guard_page();
v.set(false);
}
});
JMP_BUF.with(|buf| buf.set(ptr)) JMP_BUF.with(|buf| buf.set(ptr))
} }
#[cfg(target_os = "windows")]
fn reset_guard_page() {
extern "C" {
fn _resetstkoflw() -> winapi::ctypes::c_int;
}
// We need to restore guard page under stack to handle future stack overflows properly.
// https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/resetstkoflw?view=vs-2019
if unsafe { _resetstkoflw() } == 0 {
panic!("failed to restore stack guard page");
}
}
#[cfg(not(target_os = "windows"))]
fn reset_guard_page() {}
fn trap_message() -> String { fn trap_message() -> String {
let trap_desc = RECORDED_TRAP let trap_desc = RECORDED_TRAP
.with(|data| data.replace(None)) .with(|data| data.replace(None))