Handle select relocations while generating trampolines (#1347)
* Handle select relocations while generating trampolines

  Generating trampolines for all function signatures exposed a preexisting bug in wasmtime: trampoline compilation can occasionally produce relocations, but the code asserted that trampolines never generate relocations, so hitting one caused a panic. Today the relocation in question is primarily a call to the probestack function, which is emitted when a function has a huge number of parameters, though not so many as to exceed wasmparser's limit on how many parameters are allowed. This commit fixes the issue by handling relocations for trampolines in the same manner as the rest of the compiled code. Note that dynamically-generated trampolines via the `Func` API still panic if they have too many arguments and generate a relocation, but that can be fixed later if the need truly arises.

  Closes #1322

* Log trampoline relocations
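As a concrete illustration of the trigger: a signature with a very large number of parameters gives its trampoline a stack frame big enough that Cranelift emits a call to the probestack libcall, and that call site then needs a relocation at link time. A minimal, hypothetical reproduction sketch (the `wat_with_many_params` helper below is illustrative only, not part of this change):

    // Hypothetical helper: build a module whose sole export takes `n` i64
    // parameters. For large `n` (still under wasmparser's parameter limit),
    // compiling the trampoline for this signature previously panicked, since
    // trampolines were asserted to produce no relocations.
    fn wat_with_many_params(n: usize) -> String {
        let params = vec!["i64"; n].join(" ");
        format!("(module (func (export \"f\") (param {})))", params)
    }

Instantiating such a module through the wasmtime API exercises trampoline generation for the exported function's signature.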
@@ -1,96 +1,107 @@
 //! Linking for JIT-compiled code.
 
+use crate::Compilation;
 use cranelift_codegen::binemit::Reloc;
-use cranelift_codegen::ir::JumpTableOffsets;
 use std::ptr::write_unaligned;
-use wasmtime_environ::entity::PrimaryMap;
-use wasmtime_environ::wasm::DefinedFuncIndex;
-use wasmtime_environ::{Module, RelocationTarget, Relocations};
+use wasmtime_environ::{Module, Relocation, RelocationTarget};
 use wasmtime_runtime::libcalls;
 use wasmtime_runtime::VMFunctionBody;
 
 /// Links a module that has been compiled with `compiled_module` in `wasmtime-environ`.
 ///
 /// Performs all required relocations inside the function code, provided the necessary metadata.
-pub fn link_module(
-    module: &Module,
-    allocated_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
-    jt_offsets: &PrimaryMap<DefinedFuncIndex, JumpTableOffsets>,
-    relocations: Relocations,
-) {
-    for (i, function_relocs) in relocations.into_iter() {
-        for r in function_relocs {
-            use self::libcalls::*;
-            let target_func_address: usize = match r.reloc_target {
-                RelocationTarget::UserFunc(index) => match module.local.defined_func_index(index) {
-                    Some(f) => {
-                        let fatptr: *const [VMFunctionBody] = allocated_functions[f];
-                        fatptr as *const VMFunctionBody as usize
-                    }
-                    None => panic!("direct call to import"),
-                },
-                RelocationTarget::LibCall(libcall) => {
-                    use cranelift_codegen::ir::LibCall::*;
-                    match libcall {
-                        CeilF32 => wasmtime_f32_ceil as usize,
-                        FloorF32 => wasmtime_f32_floor as usize,
-                        TruncF32 => wasmtime_f32_trunc as usize,
-                        NearestF32 => wasmtime_f32_nearest as usize,
-                        CeilF64 => wasmtime_f64_ceil as usize,
-                        FloorF64 => wasmtime_f64_floor as usize,
-                        TruncF64 => wasmtime_f64_trunc as usize,
-                        NearestF64 => wasmtime_f64_nearest as usize,
-                        Probestack => PROBESTACK as usize,
-                        other => panic!("unexpected libcall: {}", other),
-                    }
-                }
-                RelocationTarget::JumpTable(func_index, jt) => {
-                    match module.local.defined_func_index(func_index) {
-                        Some(f) => {
-                            let offset = *jt_offsets
-                                .get(f)
-                                .and_then(|ofs| ofs.get(jt))
-                                .expect("func jump table");
-                            let fatptr: *const [VMFunctionBody] = allocated_functions[f];
-                            fatptr as *const VMFunctionBody as usize + offset as usize
-                        }
-                        None => panic!("func index of jump table"),
-                    }
-                }
-            };
-
-            let fatptr: *const [VMFunctionBody] = allocated_functions[i];
+pub fn link_module(module: &Module, compilation: &Compilation) {
+    for (i, function_relocs) in compilation.relocations.iter() {
+        for r in function_relocs.iter() {
+            let fatptr: *const [VMFunctionBody] = compilation.finished_functions[i];
             let body = fatptr as *const VMFunctionBody;
-            match r.reloc {
-                #[cfg(target_pointer_width = "64")]
-                Reloc::Abs8 => unsafe {
-                    let reloc_address = body.add(r.offset as usize) as usize;
-                    let reloc_addend = r.addend as isize;
-                    let reloc_abs = (target_func_address as u64)
-                        .checked_add(reloc_addend as u64)
-                        .unwrap();
-                    write_unaligned(reloc_address as *mut u64, reloc_abs);
-                },
-                #[cfg(target_pointer_width = "32")]
-                Reloc::X86PCRel4 => unsafe {
-                    let reloc_address = body.add(r.offset as usize) as usize;
-                    let reloc_addend = r.addend as isize;
-                    let reloc_delta_u32 = (target_func_address as u32)
-                        .wrapping_sub(reloc_address as u32)
-                        .checked_add(reloc_addend as u32)
-                        .unwrap();
-                    write_unaligned(reloc_address as *mut u32, reloc_delta_u32);
-                },
-                #[cfg(target_pointer_width = "32")]
-                Reloc::X86CallPCRel4 => {
-                    // ignore
-                }
-                Reloc::X86PCRelRodata4 => {
-                    // ignore
-                }
-                _ => panic!("unsupported reloc kind"),
-            }
+            apply_reloc(module, compilation, body, r);
         }
     }
+
+    for (i, function_relocs) in compilation.trampoline_relocations.iter() {
+        for r in function_relocs.iter() {
+            println!("trampoline relocation");
+            let body = compilation.trampolines[&i] as *const VMFunctionBody;
+            apply_reloc(module, compilation, body, r);
+        }
+    }
 }
+
+fn apply_reloc(
+    module: &Module,
+    compilation: &Compilation,
+    body: *const VMFunctionBody,
+    r: &Relocation,
+) {
+    use self::libcalls::*;
+    let target_func_address: usize = match r.reloc_target {
+        RelocationTarget::UserFunc(index) => match module.local.defined_func_index(index) {
+            Some(f) => {
+                let fatptr: *const [VMFunctionBody] = compilation.finished_functions[f];
+                fatptr as *const VMFunctionBody as usize
+            }
+            None => panic!("direct call to import"),
+        },
+        RelocationTarget::LibCall(libcall) => {
+            use cranelift_codegen::ir::LibCall::*;
+            match libcall {
+                CeilF32 => wasmtime_f32_ceil as usize,
+                FloorF32 => wasmtime_f32_floor as usize,
+                TruncF32 => wasmtime_f32_trunc as usize,
+                NearestF32 => wasmtime_f32_nearest as usize,
+                CeilF64 => wasmtime_f64_ceil as usize,
+                FloorF64 => wasmtime_f64_floor as usize,
+                TruncF64 => wasmtime_f64_trunc as usize,
+                NearestF64 => wasmtime_f64_nearest as usize,
+                Probestack => PROBESTACK as usize,
+                other => panic!("unexpected libcall: {}", other),
+            }
+        }
+        RelocationTarget::JumpTable(func_index, jt) => {
+            match module.local.defined_func_index(func_index) {
+                Some(f) => {
+                    let offset = *compilation
+                        .jt_offsets
+                        .get(f)
+                        .and_then(|ofs| ofs.get(jt))
+                        .expect("func jump table");
+                    let fatptr: *const [VMFunctionBody] = compilation.finished_functions[f];
+                    fatptr as *const VMFunctionBody as usize + offset as usize
+                }
+                None => panic!("func index of jump table"),
+            }
+        }
+    };
+
+    match r.reloc {
+        #[cfg(target_pointer_width = "64")]
+        Reloc::Abs8 => unsafe {
+            let reloc_address = body.add(r.offset as usize) as usize;
+            let reloc_addend = r.addend as isize;
+            let reloc_abs = (target_func_address as u64)
+                .checked_add(reloc_addend as u64)
+                .unwrap();
+            write_unaligned(reloc_address as *mut u64, reloc_abs);
+        },
+        #[cfg(target_pointer_width = "32")]
+        Reloc::X86PCRel4 => unsafe {
+            let reloc_address = body.add(r.offset as usize) as usize;
+            let reloc_addend = r.addend as isize;
+            let reloc_delta_u32 = (target_func_address as u32)
+                .wrapping_sub(reloc_address as u32)
+                .checked_add(reloc_addend as u32)
+                .unwrap();
+            write_unaligned(reloc_address as *mut u32, reloc_delta_u32);
+        },
+        #[cfg(target_pointer_width = "32")]
+        Reloc::X86CallPCRel4 => {
+            // ignore
+        }
+        Reloc::X86PCRelRodata4 => {
+            // ignore
+        }
+        _ => panic!("unsupported reloc kind"),
+    }
+}
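For reference, the `Relocation` value consumed by `apply_reloc` pairs a Cranelift relocation kind with its target and position. Its approximate shape, inferred from the fields used above (`r.reloc`, `r.reloc_target`, `r.offset`, `r.addend`) rather than copied from `wasmtime-environ`:

    // Approximate shape of wasmtime_environ::Relocation, inferred from usage;
    // see the wasmtime-environ sources for the authoritative definition.
    pub struct Relocation {
        /// Relocation kind (Abs8, X86PCRel4, ...).
        pub reloc: cranelift_codegen::binemit::Reloc,
        /// Target: a user function, a libcall, or a jump table.
        pub reloc_target: RelocationTarget,
        /// Byte offset of the relocation within the function body.
        pub offset: cranelift_codegen::binemit::CodeOffset,
        /// Addend folded into the relocated value.
        pub addend: cranelift_codegen::binemit::Addend,
    }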