Write ELF image and instantiate code_memory from it (#1931)
- Create the ELF image from Compilation
- Create CodeMemory from the ELF image
- Link using ELF image
- Remove creation of GDB JIT images from crates/debug
- Move make_trampoline from compiler.rs
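In outline, the new pipeline is: serialize the Compilation into an ELF image, copy that image's ".text" section into executable CodeMemory, and link in place by walking the section's relocation entries. A minimal sketch of the read side, assuming the `object` crate API that the diff below uses; the driver function `sketch_link` and its names are illustrative, not part of the commit:

use object::read::{File, Object, ObjectSection};

// Hypothetical driver: parse the ELF image produced from a Compilation and
// enumerate the relocations that link_module (below) must apply.
fn sketch_link(elf_bytes: &[u8]) {
    let obj = File::parse(elf_bytes).expect("valid ELF image");
    // Compiled function bodies sit back to back in ".text"; CodeMemory
    // copies this section into executable memory before linking.
    let text_section = obj.section_by_name(".text").expect("no .text section");
    for (offset, reloc) in text_section.relocations() {
        // Each entry gives an offset into ".text" to patch plus a target
        // symbol to resolve; apply_reloc does the actual patching.
        println!("reloc at +{:#x}: {:?}", offset, reloc.kind());
    }
}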
@@ -1,96 +1,87 @@
 //! Linking for JIT-compiled code.
 
-use crate::CodeMemory;
-use cranelift_codegen::binemit::Reloc;
-use cranelift_codegen::ir::JumpTableOffsets;
+use crate::object::utils::try_parse_func_name;
+use object::read::{Object, ObjectSection, Relocation, RelocationTarget};
+use object::{elf, File, RelocationEncoding, RelocationKind};
 use std::ptr::{read_unaligned, write_unaligned};
 use wasmtime_environ::entity::PrimaryMap;
 use wasmtime_environ::wasm::DefinedFuncIndex;
-use wasmtime_environ::{Module, Relocation, RelocationTarget};
+use wasmtime_environ::Module;
 use wasmtime_runtime::libcalls;
 use wasmtime_runtime::VMFunctionBody;
 
 /// Links a module that has been compiled with `compiled_module` in `wasmtime-environ`.
 ///
 /// Performs all required relocations inside the function code, provided the necessary metadata.
+/// The relocation data is provided in the object file; see object.rs for details.
+///
+/// Currently, the produced ELF image can be trusted.
+/// TODO: refactor logic to remove panics and add defensive code when the image data
+/// becomes untrusted.
 pub fn link_module(
-    code_memory: &mut CodeMemory,
+    obj: &File,
     module: &Module,
+    code_range: &mut [u8],
     finished_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
-    jt_offsets: &PrimaryMap<DefinedFuncIndex, JumpTableOffsets>,
 ) {
-    for (fatptr, r) in code_memory.unpublished_relocations() {
-        let body = fatptr as *const VMFunctionBody;
-        apply_reloc(module, finished_functions, jt_offsets, body, r);
+    // Read the ".text" section and process its relocations.
+    let text_section = obj.section_by_name(".text").unwrap();
+    let body = code_range.as_ptr() as *const VMFunctionBody;
+
+    for (offset, r) in text_section.relocations() {
+        apply_reloc(module, obj, finished_functions, body, offset, r);
     }
 }
 
 fn apply_reloc(
     module: &Module,
+    obj: &File,
     finished_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
-    jt_offsets: &PrimaryMap<DefinedFuncIndex, JumpTableOffsets>,
     body: *const VMFunctionBody,
-    r: &Relocation,
+    offset: u64,
+    r: Relocation,
 ) {
-    use self::libcalls::*;
-    let target_func_address: usize = match r.reloc_target {
-        RelocationTarget::UserFunc(index) => match module.local.defined_func_index(index) {
-            Some(f) => {
-                let fatptr: *const [VMFunctionBody] = finished_functions[f];
-                fatptr as *const VMFunctionBody as usize
-            }
-            None => panic!("direct call to import"),
-        },
-        RelocationTarget::LibCall(libcall) => {
-            use cranelift_codegen::ir::LibCall::*;
-            match libcall {
-                UdivI64 => wasmtime_i64_udiv as usize,
-                SdivI64 => wasmtime_i64_sdiv as usize,
-                UremI64 => wasmtime_i64_urem as usize,
-                SremI64 => wasmtime_i64_srem as usize,
-                IshlI64 => wasmtime_i64_ishl as usize,
-                UshrI64 => wasmtime_i64_ushr as usize,
-                SshrI64 => wasmtime_i64_sshr as usize,
-                CeilF32 => wasmtime_f32_ceil as usize,
-                FloorF32 => wasmtime_f32_floor as usize,
-                TruncF32 => wasmtime_f32_trunc as usize,
-                NearestF32 => wasmtime_f32_nearest as usize,
-                CeilF64 => wasmtime_f64_ceil as usize,
-                FloorF64 => wasmtime_f64_floor as usize,
-                TruncF64 => wasmtime_f64_trunc as usize,
-                NearestF64 => wasmtime_f64_nearest as usize,
-                other => panic!("unexpected libcall: {}", other),
-            }
-        }
-        RelocationTarget::JumpTable(func_index, jt) => {
-            match module.local.defined_func_index(func_index) {
-                Some(f) => {
-                    let offset = *jt_offsets
-                        .get(f)
-                        .and_then(|ofs| ofs.get(jt))
-                        .expect("func jump table");
-                    let fatptr: *const [VMFunctionBody] = finished_functions[f];
-                    fatptr as *const VMFunctionBody as usize + offset as usize
-                }
-                None => panic!("func index of jump table"),
-            }
-        }
+    let target_func_address: usize = match r.target() {
+        RelocationTarget::Symbol(i) => {
+            // The relocation target is a named symbol: either a compiled
+            // wasm function or a runtime libcall.
+            let sym = obj.symbol_by_index(i).unwrap();
+            match sym.name() {
+                Some(name) => {
+                    if let Some(index) = try_parse_func_name(name) {
+                        match module.local.defined_func_index(index) {
+                            Some(f) => {
+                                let fatptr: *const [VMFunctionBody] = finished_functions[f];
+                                fatptr as *const VMFunctionBody as usize
+                            }
+                            None => panic!("direct call to import"),
+                        }
+                    } else if let Some(addr) = to_libcall_address(name) {
+                        addr
+                    } else {
+                        panic!("unknown function to link: {}", name);
+                    }
+                }
+                None => panic!("unexpected relocation target: not a symbol"),
+            }
+        }
         _ => panic!("unexpected relocation target"),
     };
 
-    match r.reloc {
+    match (r.kind(), r.encoding(), r.size()) {
         #[cfg(target_pointer_width = "64")]
-        Reloc::Abs8 => unsafe {
-            let reloc_address = body.add(r.offset as usize) as usize;
-            let reloc_addend = r.addend as isize;
+        (RelocationKind::Absolute, RelocationEncoding::Generic, 64) => unsafe {
+            let reloc_address = body.add(offset as usize) as usize;
+            let reloc_addend = r.addend() as isize;
             let reloc_abs = (target_func_address as u64)
                 .checked_add(reloc_addend as u64)
                 .unwrap();
             write_unaligned(reloc_address as *mut u64, reloc_abs);
         },
         #[cfg(target_pointer_width = "32")]
-        Reloc::X86PCRel4 => unsafe {
-            let reloc_address = body.add(r.offset as usize) as usize;
-            let reloc_addend = r.addend as isize;
+        (RelocationKind::Relative, RelocationEncoding::Generic, 32) => unsafe {
+            let reloc_address = body.add(offset as usize) as usize;
+            let reloc_addend = r.addend() as isize;
             let reloc_delta_u32 = (target_func_address as u32)
                 .wrapping_sub(reloc_address as u32)
                 .checked_add(reloc_addend as u32)
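The symbol-based lookup in the new apply_reloc works because the ELF writer names every compiled function after its function index, and try_parse_func_name (from crate::object::utils) recovers that index on the read side. A sketch of the round trip, assuming a "_wasm_function_<index>" naming scheme; the exact prefix lives in the object-writing code and is an assumption here:

use wasmtime_environ::wasm::FuncIndex;

// Writer side: derive the ELF symbol name from the function index
// (hypothetical reconstruction of the naming scheme).
fn func_symbol_name(index: FuncIndex) -> String {
    format!("_wasm_function_{}", index.as_u32())
}

// Reader side: recover the index, returning None for unrelated symbols
// such as runtime libcalls so the caller can try those next.
fn try_parse_func_name(name: &str) -> Option<FuncIndex> {
    let index = name.strip_prefix("_wasm_function_")?.parse().ok()?;
    Some(FuncIndex::from_u32(index))
}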
@@ -98,18 +89,18 @@ fn apply_reloc(
             write_unaligned(reloc_address as *mut u32, reloc_delta_u32);
         },
         #[cfg(target_pointer_width = "32")]
-        Reloc::X86CallPCRel4 => unsafe {
-            let reloc_address = body.add(r.offset as usize) as usize;
-            let reloc_addend = r.addend as isize;
+        (RelocationKind::Relative, RelocationEncoding::X86Branch, 32) => unsafe {
+            let reloc_address = body.add(offset as usize) as usize;
+            let reloc_addend = r.addend() as isize;
             let reloc_delta_u32 = (target_func_address as u32)
                 .wrapping_sub(reloc_address as u32)
                 .wrapping_add(reloc_addend as u32);
             write_unaligned(reloc_address as *mut u32, reloc_delta_u32);
         },
         #[cfg(target_pointer_width = "64")]
-        Reloc::X86CallPCRel4 => unsafe {
-            let reloc_address = body.add(r.offset as usize) as usize;
-            let reloc_addend = r.addend as isize;
+        (RelocationKind::Relative, RelocationEncoding::X86Branch, 32) => unsafe {
+            let reloc_address = body.add(offset as usize) as usize;
+            let reloc_addend = r.addend() as isize;
             let reloc_delta_u64 = (target_func_address as u64)
                 .wrapping_sub(reloc_address as u64)
                 .wrapping_add(reloc_addend as u64);
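Both X86Branch arms above compute the standard ELF formula for a PC-relative relocation, S + A - P: the resolved symbol address S, plus the addend A stored in the relocation, minus the address P of the patch site. A self-contained worked example with made-up addresses:

fn main() {
    let target_func_address: u64 = 0x1000_0000; // S: resolved target
    let reloc_address: u64 = 0x1000_2000; // P: patch site inside .text
    let reloc_addend: i64 = -4; // A: x86 calls are relative to the next insn
    // Wrapping arithmetic lets a backwards call wrap to the two's-complement
    // bit pattern the CPU expects.
    let delta = target_func_address
        .wrapping_sub(reloc_address)
        .wrapping_add(reloc_addend as u64) as u32;
    assert_eq!(delta as i32, -0x2004); // call lands 0x2004 bytes back
}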
@@ -119,12 +110,9 @@ fn apply_reloc(
             );
             write_unaligned(reloc_address as *mut u32, reloc_delta_u64 as u32);
         },
-        Reloc::X86PCRelRodata4 => {
-            // ignore
-        }
-        Reloc::Arm64Call => unsafe {
-            let reloc_address = body.add(r.offset as usize) as usize;
-            let reloc_addend = r.addend as isize;
+        (RelocationKind::Elf(elf::R_AARCH64_CALL26), RelocationEncoding::Generic, 32) => unsafe {
+            let reloc_address = body.add(offset as usize) as usize;
+            let reloc_addend = r.addend() as isize;
             let reloc_delta = (target_func_address as u64).wrapping_sub(reloc_address as u64);
             // TODO: come up with a PLT-like solution for longer calls. We can't extend the
             // code segment at this point, but we could conservatively allocate space at the
@@ -139,6 +127,24 @@ fn apply_reloc(
             let new_insn = (insn & 0xfc00_0000) | (delta_bits & 0x03ff_ffff);
             write_unaligned(reloc_address as *mut u32, new_insn);
         },
-        _ => panic!("unsupported reloc kind"),
+        other => panic!("unsupported reloc kind: {:?}", other),
     }
 }
+
+fn to_libcall_address(name: &str) -> Option<usize> {
+    use self::libcalls::*;
+    use crate::for_each_libcall;
+    macro_rules! add_libcall_symbol {
+        [$(($libcall:ident, $export:ident)),*] => {
+            Some(match name {
+                $(
+                    stringify!($export) => $export as usize,
+                )+
+                _ => {
+                    return None;
+                }
+            })
+        };
+    }
+    for_each_libcall!(add_libcall_symbol)
+}
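The R_AARCH64_CALL26 arm patches a bl instruction in place: the byte displacement is divided by four (AArch64 instructions are word-aligned), truncated to 26 bits, and merged under the six opcode bits that the mask 0xfc00_0000 preserves. A self-contained example of that bit surgery, with a made-up displacement:

fn main() {
    let insn: u32 = 0x9400_0000; // "bl 0": opcode 100101 in the top six bits
    let reloc_delta: u64 = 0x1_0000; // byte distance to the call target
    let delta_bits = (reloc_delta as u32) >> 2; // displacement in words
    let new_insn = (insn & 0xfc00_0000) | (delta_bits & 0x03ff_ffff);
    assert_eq!(new_insn, 0x9400_4000); // "bl +0x10000"
}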
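The new to_libcall_address leans on the crate's for_each_libcall! macro, which invokes add_libcall_symbol! with the full list of (libcall, export) pairs, so the generated match maps each exported symbol name to the address of the corresponding runtime function. A hand-expanded sketch for two entries taken from the libcall arms removed in the first hunk; the stand-in function bodies are illustrative, not the real libcalls:

// Stand-ins for wasmtime_runtime::libcalls, so the sketch compiles alone.
extern "C" fn wasmtime_i64_udiv(a: u64, b: u64) -> u64 {
    a / b
}
extern "C" fn wasmtime_f32_ceil(x: f32) -> f32 {
    x.ceil()
}

// What the macro expansion boils down to: stringify!($export) becomes the
// literal symbol name, and `$export as usize` takes the function's address.
fn to_libcall_address(name: &str) -> Option<usize> {
    Some(match name {
        "wasmtime_i64_udiv" => wasmtime_i64_udiv as usize,
        "wasmtime_f32_ceil" => wasmtime_f32_ceil as usize,
        _ => return None,
    })
}

fn main() {
    assert!(to_libcall_address("wasmtime_f32_ceil").is_some());
    assert!(to_libcall_address("not_a_libcall").is_none());
}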