wasmtime/crates/jit/src/link.rs
Alex Crichton ba0dc40b2b Handle select relocations while generating trampolines (#1347)
* Handle select relocations while generating trampolines

Generating trampolines for all function signatures exposed a preexisting
bug in wasmtime: trampoline generation occasionally does produce
relocations, but the code asserted that trampolines never generate
relocations, causing a panic. The relocation in question is primarily a
call to the probestack function, which is emitted when a function has a
huge number of parameters, though not so many as to exceed wasmparser's
limit on how many parameters are allowed. (A reproduction sketch follows
this commit message.)

This commit fixes the issue by handling relocations for trampolines in
the same manner as the rest of the code. Note that dynamically generated
trampolines created via the `Func` API still panic if they have too many
arguments and generate a relocation, but that can be fixed later if the
need truly arises.

Closes #1322

* Log trampoline relocations
2020-03-17 16:30:21 -05:00
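
The following is a minimal, hypothetical reproduction sketch; it is not taken from the wasmtime test suite or from issue #1322, and the parameter count and export name are made up for illustration. It builds the WAT text for a module whose exported function takes a few hundred parameters, which is the scenario described in the commit message: generating a host-to-wasm trampoline for such a signature makes Cranelift insert a probestack call, and the resulting relocation used to trip the "trampolines don't have relocations" assertion.

// Sketch only: construct the WAT for a function with very many parameters.
fn main() {
    // Large enough that the trampoline's stack frame needs a stack probe,
    // but well below wasmparser's limit on parameter counts.
    let n_params = 500;
    let params = "i64 ".repeat(n_params);
    let wat = format!(
        "(module (func (export \"big\") (param {})))",
        params.trim_end()
    );
    // Instantiating this module in wasmtime and looking up the export forces
    // a trampoline to be generated for the 500-parameter signature; before
    // the fix in this commit, linking that trampoline panicked on its
    // probestack relocation.
    println!("{}", wat);
}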

138 lines · 5.1 KiB · Rust

//! Linking for JIT-compiled code.

use crate::Compilation;
use cranelift_codegen::binemit::Reloc;
use std::ptr::write_unaligned;
use wasmtime_environ::{Module, Relocation, RelocationTarget};
use wasmtime_runtime::libcalls;
use wasmtime_runtime::VMFunctionBody;

/// Links a module that has been compiled with `compiled_module` in `wasmtime-environ`.
///
/// Performs all required relocations inside the function code, provided the necessary metadata.
pub fn link_module(module: &Module, compilation: &Compilation) {
    // Apply the relocations recorded for each compiled wasm function body.
    for (i, function_relocs) in compilation.relocations.iter() {
        for r in function_relocs.iter() {
            let fatptr: *const [VMFunctionBody] = compilation.finished_functions[i];
            let body = fatptr as *const VMFunctionBody;
            apply_reloc(module, compilation, body, r);
        }
    }

    // Trampolines can carry relocations too (e.g. a probestack call for
    // signatures with very many parameters), so handle them the same way.
    for (i, function_relocs) in compilation.trampoline_relocations.iter() {
        for r in function_relocs.iter() {
            println!("trampoline relocation");
            let body = compilation.trampolines[&i] as *const VMFunctionBody;
            apply_reloc(module, compilation, body, r);
        }
    }
}

/// Applies a single relocation `r` to the code at `body`, resolving the
/// target address and patching the instruction in place.
fn apply_reloc(
    module: &Module,
    compilation: &Compilation,
    body: *const VMFunctionBody,
    r: &Relocation,
) {
    use self::libcalls::*;

    // Resolve the absolute address of the relocation target: a locally
    // defined wasm function, a libcall, or a jump table within a function.
    let target_func_address: usize = match r.reloc_target {
        RelocationTarget::UserFunc(index) => match module.local.defined_func_index(index) {
            Some(f) => {
                let fatptr: *const [VMFunctionBody] = compilation.finished_functions[f];
                fatptr as *const VMFunctionBody as usize
            }
            None => panic!("direct call to import"),
        },
        RelocationTarget::LibCall(libcall) => {
            use cranelift_codegen::ir::LibCall::*;
            match libcall {
                CeilF32 => wasmtime_f32_ceil as usize,
                FloorF32 => wasmtime_f32_floor as usize,
                TruncF32 => wasmtime_f32_trunc as usize,
                NearestF32 => wasmtime_f32_nearest as usize,
                CeilF64 => wasmtime_f64_ceil as usize,
                FloorF64 => wasmtime_f64_floor as usize,
                TruncF64 => wasmtime_f64_trunc as usize,
                NearestF64 => wasmtime_f64_nearest as usize,
                Probestack => PROBESTACK as usize,
                other => panic!("unexpected libcall: {}", other),
            }
        }
        RelocationTarget::JumpTable(func_index, jt) => {
            match module.local.defined_func_index(func_index) {
                Some(f) => {
                    let offset = *compilation
                        .jt_offsets
                        .get(f)
                        .and_then(|ofs| ofs.get(jt))
                        .expect("func jump table");
                    let fatptr: *const [VMFunctionBody] = compilation.finished_functions[f];
                    fatptr as *const VMFunctionBody as usize + offset as usize
                }
                None => panic!("func index of jump table"),
            }
        }
    };

    match r.reloc {
        #[cfg(target_pointer_width = "64")]
        Reloc::Abs8 => unsafe {
            let reloc_address = body.add(r.offset as usize) as usize;
            let reloc_addend = r.addend as isize;
            let reloc_abs = (target_func_address as u64)
                .checked_add(reloc_addend as u64)
                .unwrap();
            write_unaligned(reloc_address as *mut u64, reloc_abs);
        },
        #[cfg(target_pointer_width = "32")]
        Reloc::X86PCRel4 => unsafe {
            let reloc_address = body.add(r.offset as usize) as usize;
            let reloc_addend = r.addend as isize;
            let reloc_delta_u32 = (target_func_address as u32)
                .wrapping_sub(reloc_address as u32)
                .checked_add(reloc_addend as u32)
                .unwrap();
            write_unaligned(reloc_address as *mut u32, reloc_delta_u32);
        },
        #[cfg(target_pointer_width = "32")]
        Reloc::X86CallPCRel4 => {
            // ignore
        }
        Reloc::X86PCRelRodata4 => {
            // ignore
        }
        _ => panic!("unsupported reloc kind"),
    }
}

// A declaration for the stack probe function, provided by the Rust toolchain's
// compiler-builtins (or the platform C runtime), for catching callstack overflow.
cfg_if::cfg_if! {
    if #[cfg(any(
        target_arch = "aarch64",
        all(
            target_os = "windows",
            target_env = "msvc",
            target_pointer_width = "64"
        )
    ))] {
        extern "C" {
            pub fn __chkstk();
        }
        const PROBESTACK: unsafe extern "C" fn() = __chkstk;
    } else if #[cfg(all(target_os = "windows", target_env = "gnu"))] {
        extern "C" {
            // ___chkstk (note the triple underscore) is implemented in compiler-builtins/src/x86_64.rs
            // by the Rust compiler for the MinGW target
            #[cfg(all(target_os = "windows", target_env = "gnu"))]
            pub fn ___chkstk();
        }
        const PROBESTACK: unsafe extern "C" fn() = ___chkstk;
    } else {
        extern "C" {
            pub fn __rust_probestack();
        }
        static PROBESTACK: unsafe extern "C" fn() = __rust_probestack;
    }
}
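
For reference, here is a standalone sketch of the relocation arithmetic that apply_reloc performs above; it is not part of link.rs, and the addresses and addend are invented for illustration. An Abs8 relocation writes the absolute 64-bit target address (plus the addend) at the patch site, while an X86PCRel4 relocation writes the 32-bit displacement from the patch site to the target (plus the addend).

// Standalone illustration with invented values; the real code reads the
// patch location from `body + r.offset` and the addend from `r.addend`.
fn main() {
    let target_func_address: u64 = 0x7f00_dead_1000;
    let reloc_address: u64 = 0x7f00_beef_2004; // where the patch is written
    let addend: i64 = 0;

    // Abs8: the full 64-bit target address (plus addend) is stored.
    let abs8: u64 = target_func_address.checked_add(addend as u64).unwrap();

    // X86PCRel4: the 32-bit PC-relative displacement (plus addend) is stored.
    // (link.rs uses checked_add here; wrapping_add keeps this sketch
    // panic-free for arbitrary inputs.)
    let pcrel4: u32 = (target_func_address as u32)
        .wrapping_sub(reloc_address as u32)
        .wrapping_add(addend as u32);

    println!("Abs8 writes {:#018x}, X86PCRel4 writes {:#010x}", abs8, pcrel4);
}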