Don't copy executable code into a CodeMemory (#3265)

* Don't copy executable code into a `CodeMemory`

This commit removes the copy from compiled artifacts into a `CodeMemory`. In
general this commit drastically changes the meaning of a `CodeMemory`.
Previously it was an iteratively-pushed-on structure that would
accumulate executable code over time. Afterwards, however, it's a
manager for an `MmapVec` which updates the permissions on the text section
to ensure that the pages are executable.

By taking ownership of an `MmapVec` within a `CodeMemory` there's no
need to copy any data around, which means that the `.text` section in
the ELF image produced by Wasmtime is usable as-is after placement in
memory and relocations have been resolved. This moves Wasmtime one step
closer to being able to use a module directly after it's `mmap`'d into
memory, speeding up module loading (see the usage sketch below).

* Fix windows section alignment

* Review comments

Author: Alex Crichton
Date:   2021-08-30 13:38:35 -05:00
Committed by: GitHub
Parent: eb251deca9
Commit: ef3ec594ce

8 changed files with 215 additions and 236 deletions
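
For orientation, the reworked API is used roughly as sketched below. This is an
illustrative sketch based on the diff in this commit; the `publish_artifact`
wrapper is hypothetical, and error handling and setup are elided.

    use anyhow::Result;
    use wasmtime_jit::{CodeMemory, MmapVec};

    // Hypothetical helper: `mmap` holds the serialized ELF compilation
    // artifact produced by Wasmtime.
    fn publish_artifact(mmap: MmapVec) -> Result<()> {
        // `CodeMemory` now takes ownership of the `MmapVec`; no code is copied.
        let mut code_memory = CodeMemory::new(mmap);

        // `publish` parses the ELF image in place, applies relocations to the
        // `.text` section, switches its pages from read/write to read/execute,
        // and registers unwinding information with the OS.
        let code = code_memory.publish()?;

        // The returned `Publish` borrows the original mapping rather than
        // owning copies of it.
        let _obj = &code.obj;   // parsed `object::read::File`
        let _image = code.mmap; // the entire mapped artifact
        let _text = code.text;  // the now-executable `.text` subslice
        Ok(())
    }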


@@ -396,7 +396,7 @@ impl<'a> ObjectBuilder<'a> {
             unwind_info.extend_from_slice(&info.end.to_le_bytes());
             unwind_info.extend_from_slice(&info.unwind_address.to_le_bytes());
         }
-        self.obj.append_section_data(section_id, &unwind_info, 1);
+        self.obj.append_section_data(section_id, &unwind_info, 4);
     }

     /// This function appends a nonstandard section to the object which is only


@@ -1,147 +1,187 @@
 //! Memory management for executable code.

 use crate::unwind::UnwindRegistration;
-use anyhow::{Context, Result};
-use object::read::{File as ObjectFile, Object, ObjectSection};
+use crate::MmapVec;
+use anyhow::{bail, Context, Result};
+use object::read::{File, Object, ObjectSection};
 use std::mem::ManuallyDrop;
-use wasmtime_runtime::Mmap;

-struct CodeMemoryEntry {
-    mmap: ManuallyDrop<Mmap>,
+/// Management of executable memory within a `MmapVec`
+///
+/// This type consumes ownership of a region of memory and will manage the
+/// executable permissions of the contained JIT code as necessary.
+pub struct CodeMemory {
+    // NB: these are `ManuallyDrop` because `unwind_registration` must be
+    // dropped first since it refers to memory owned by `mmap`.
+    mmap: ManuallyDrop<MmapVec>,
     unwind_registration: ManuallyDrop<Option<UnwindRegistration>>,
-    text_len: usize,
-    unwind_info_len: usize,
+    published: bool,
 }

-impl CodeMemoryEntry {
-    fn new(text_len: usize, unwind_info_len: usize) -> Result<Self> {
-        let mmap = ManuallyDrop::new(Mmap::with_at_least(text_len + unwind_info_len)?);
-        Ok(Self {
-            mmap,
-            unwind_registration: ManuallyDrop::new(None),
-            text_len,
-            unwind_info_len,
-        })
-    }
-}
-
-impl Drop for CodeMemoryEntry {
+impl Drop for CodeMemory {
     fn drop(&mut self) {
+        // Drop `unwind_registration` before `self.mmap`
         unsafe {
-            // The registry needs to be dropped before the mmap
             ManuallyDrop::drop(&mut self.unwind_registration);
             ManuallyDrop::drop(&mut self.mmap);
         }
     }
 }

-/// Memory manager for executable code.
-pub struct CodeMemory {
-    entries: Vec<CodeMemoryEntry>,
-    published: usize,
-}
-
 fn _assert() {
     fn _assert_send_sync<T: Send + Sync>() {}
     _assert_send_sync::<CodeMemory>();
 }

+/// Result of publishing a `CodeMemory`, containing references to the parsed
+/// internals.
+pub struct Publish<'a> {
+    /// The parsed ELF image that resides within the original `MmapVec`.
+    pub obj: File<'a>,
+
+    /// Reference to the entire `MmapVec` and its contents.
+    pub mmap: &'a [u8],
+
+    /// Reference to just the text section of the object file, a subslice of
+    /// `mmap`.
+    pub text: &'a [u8],
+}
+
 impl CodeMemory {
-    /// Create a new `CodeMemory` instance.
-    pub fn new() -> Self {
+    /// Creates a new `CodeMemory` by taking ownership of the provided
+    /// `MmapVec`.
+    ///
+    /// The returned `CodeMemory` manages the internal `MmapVec` and the
+    /// `publish` method is used to actually make the memory executable.
+    pub fn new(mmap: MmapVec) -> Self {
         Self {
-            entries: Vec::new(),
-            published: 0,
+            mmap: ManuallyDrop::new(mmap),
+            unwind_registration: ManuallyDrop::new(None),
+            published: false,
         }
     }

-    /// Make all allocated memory executable.
-    pub fn publish(&mut self) {
-        for entry in &mut self.entries[self.published..] {
-            assert!(!entry.mmap.is_empty());
-            unsafe {
-                // Switch the executable portion from read/write to
-                // read/execute, notably not using read/write/execute to prevent
-                // modifications.
-                region::protect(
-                    entry.mmap.as_mut_ptr(),
-                    entry.text_len,
-                    region::Protection::READ_EXECUTE,
-                )
-                .expect("unable to make memory readonly and executable");
-
-                if entry.unwind_info_len == 0 {
-                    continue;
-                }
-
-                // With all our memory setup use the platform-specific
-                // `UnwindRegistration` implementation to inform the general
-                // runtime that there's unwinding information available for all
-                // our just-published JIT functions.
-                *entry.unwind_registration = Some(
-                    UnwindRegistration::new(
-                        entry.mmap.as_mut_ptr(),
-                        entry.mmap.as_mut_ptr().add(entry.text_len),
-                        entry.unwind_info_len,
-                    )
-                    .expect("failed to create unwind info registration"),
-                );
-            }
-        }
-        self.published = self.entries.len();
-    }
-
-    /// Alternative to `allocate_for_object`, but when the object file isn't
-    /// already parsed.
-    pub fn allocate_for_object_unparsed<'a, 'b>(
-        &'a mut self,
-        obj: &'b [u8],
-    ) -> Result<(&'a mut [u8], ObjectFile<'b>)> {
-        let obj = ObjectFile::parse(obj)?;
-        Ok((self.allocate_for_object(&obj)?, obj))
-    }
-
-    /// Allocates and copies the ELF image code section into CodeMemory.
-    /// Returns references to functions and trampolines defined there.
-    pub fn allocate_for_object(&mut self, obj: &ObjectFile) -> Result<&mut [u8]> {
-        let text_section = obj.section_by_name(".text").unwrap();
-        let text_section_size = text_section.size() as usize;
-
-        if text_section_size == 0 {
-            // No code in the image.
-            return Ok(&mut []);
-        }
-
-        // Find the platform-specific unwind section, if present, which contains
-        // unwinding tables that will be used to load unwinding information
-        // dynamically at runtime.
-        let unwind_section = obj.section_by_name(UnwindRegistration::section_name());
-        let unwind_section_size = unwind_section
-            .as_ref()
-            .map(|s| s.size() as usize)
-            .unwrap_or(0);
-
-        // Allocate memory for the text section and unwinding information if it
-        // is present. Then we can copy in all of the code and unwinding memory
-        // over.
-        let entry = CodeMemoryEntry::new(text_section_size, unwind_section_size)?;
-        self.entries.push(entry);
-        let entry = self.entries.last_mut().unwrap();
-        entry.mmap.as_mut_slice()[..text_section_size].copy_from_slice(
-            text_section
-                .data()
-                .with_context(|| "cannot read text section data")?,
-        );
-        if let Some(section) = unwind_section {
-            entry.mmap.as_mut_slice()[text_section_size..][..unwind_section_size].copy_from_slice(
-                section
-                    .data()
-                    .with_context(|| "cannot read unwind section data")?,
-            );
-        }
-
-        Ok(&mut entry.mmap.as_mut_slice()[..text_section_size])
+    /// Returns a reference to the underlying `MmapVec` this memory owns.
+    pub fn mmap(&self) -> &MmapVec {
+        &self.mmap
+    }
+
+    /// Publishes the internal ELF image to be ready for execution.
+    ///
+    /// This method can only be called once and will panic if called twice. This
+    /// will parse the ELF image from the original `MmapVec` and do everything
+    /// necessary to get it ready for execution, including:
+    ///
+    /// * Change page protections from read/write to read/execute.
+    /// * Register unwinding information with the OS
+    ///
+    /// After this function executes all JIT code should be ready to execute.
+    /// The various parsed results of the internals of the `MmapVec` are
+    /// returned through the `Publish` structure.
+    pub fn publish(&mut self) -> Result<Publish<'_>> {
+        assert!(!self.published);
+        self.published = true;
+
+        let mut ret = Publish {
+            obj: File::parse(&self.mmap[..])
+                .with_context(|| "failed to parse internal compilation artifact")?,
+            mmap: &self.mmap,
+            text: &[],
+        };
+
+        // Sanity-check that all sections are aligned correctly.
+        for section in ret.obj.sections() {
+            let data = match section.data() {
+                Ok(data) => data,
+                Err(_) => continue,
+            };
+            if section.align() == 0 || data.len() == 0 {
+                continue;
+            }
+            if data.as_ptr() as u64 % section.align() != 0 {
+                bail!(
+                    "section `{}` isn't aligned to {:#x}",
+                    section.name().unwrap_or("ERROR"),
+                    section.align()
+                );
+            }
+        }
+
+        // Find the `.text` section with executable code in it.
+        let text = match ret.obj.section_by_name(".text") {
+            Some(section) => section,
+            None => return Ok(ret),
+        };
+        ret.text = match text.data() {
+            Ok(data) if !data.is_empty() => data,
+            _ => return Ok(ret),
+        };
+
+        // The unsafety here comes from a few things:
+        //
+        // * First in `apply_reloc` we're walking around the `File` that the
+        //   `object` crate has to get a mutable view into the text section.
+        //   Currently the `object` crate doesn't support easily parsing a file
+        //   and updating small bits and pieces of it, so we work around it for
+        //   now. ELF's file format should guarantee that `text_mut` doesn't
+        //   collide with any memory accessed by `text.relocations()`.
+        //
+        // * Second we're actually updating some page protections to executable
+        //   memory.
+        //
+        // * Finally we're registering unwinding information which relies on the
+        //   correctness of the information in the first place. This applies to
+        //   both the actual unwinding tables as well as the validity of the
+        //   pointers we pass in itself.
+        unsafe {
+            let text_mut =
+                std::slice::from_raw_parts_mut(ret.text.as_ptr() as *mut u8, ret.text.len());
+            for (offset, r) in text.relocations() {
+                crate::link::apply_reloc(&ret.obj, text_mut, offset, r);
+            }
+
+            // Switch the executable portion from read/write to
+            // read/execute, notably not using read/write/execute to prevent
+            // modifications.
+            assert!(
+                ret.text.as_ptr() as usize % region::page::size() == 0,
+                "text section is not page-aligned"
+            );
+            region::protect(
+                ret.text.as_ptr() as *mut _,
+                ret.text.len(),
+                region::Protection::READ_EXECUTE,
+            )
+            .expect("unable to make memory readonly and executable");
+
+            // With all our memory set up use the platform-specific
+            // `UnwindRegistration` implementation to inform the general
+            // runtime that there's unwinding information available for all
+            // our just-published JIT functions.
+            *self.unwind_registration = register_unwind_info(&ret.obj, ret.text)?;
+        }
+
+        Ok(ret)
     }
 }
+
+unsafe fn register_unwind_info(obj: &File, text: &[u8]) -> Result<Option<UnwindRegistration>> {
+    let unwind_info = match obj
+        .section_by_name(UnwindRegistration::section_name())
+        .and_then(|s| s.data().ok())
+    {
+        Some(info) => info,
+        None => return Ok(None),
+    };
+    if unwind_info.len() == 0 {
+        return Ok(None);
+    }
+    Ok(Some(
+        UnwindRegistration::new(
+            text.as_ptr() as *mut _,
+            unwind_info.as_ptr() as *mut _,
+            unwind_info.len(),
+        )
+        .context("failed to create unwind info registration")?,
+    ))
+}


@@ -5,7 +5,6 @@
 use crate::code_memory::CodeMemory;
 use crate::debug::create_gdbjit_image;
-use crate::link::link_module;
 use crate::{MmapVec, ProfilingAgent};
 use anyhow::{anyhow, Context, Result};
 use object::write::{Object, StandardSegment};
@@ -237,33 +236,18 @@ pub struct TypeTables {
     pub instance_signatures: PrimaryMap<InstanceTypeIndex, InstanceSignature>,
 }

-/// Container for data needed for an Instance function to exist.
-pub struct ModuleCode {
-    range: (usize, usize),
-    #[allow(dead_code)]
-    code_memory: CodeMemory,
-    #[allow(dead_code)]
-    dbg_jit_registration: Option<GdbJitImageRegistration>,
-}
-
-impl ModuleCode {
-    /// Gets the [begin, end) range of the module's code.
-    pub fn range(&self) -> (usize, usize) {
-        self.range
-    }
-}
-
 /// A compiled wasm module, ready to be instantiated.
 pub struct CompiledModule {
     wasm_data: Range<usize>,
     address_map_data: Range<usize>,
     trap_data: Range<usize>,
-    mmap: MmapVec,
     module: Arc<Module>,
     funcs: PrimaryMap<DefinedFuncIndex, FunctionInfo>,
     trampolines: Vec<Trampoline>,
     meta: Metadata,
-    code: Arc<ModuleCode>,
+    code: Range<usize>,
+    code_memory: CodeMemory,
+    dbg_jit_registration: Option<GdbJitImageRegistration>,
 }

 impl CompiledModule {
@@ -288,11 +272,18 @@ impl CompiledModule {
         info: Option<CompiledModuleInfo>,
         profiler: &dyn ProfilingAgent,
     ) -> Result<Arc<Self>> {
-        let obj = File::parse(&mmap[..])
-            .with_context(|| "failed to parse internal ELF compilation artifact")?;
+        // Transfer ownership of `obj` to a `CodeMemory` object which will
+        // manage permissions, such as the executable bit. Once it's located
+        // there we also publish it for being able to execute. Note that this
+        // step will also resolve pending relocations in the compiled image.
+        let mut code_memory = CodeMemory::new(mmap);
+        let code = code_memory
+            .publish()
+            .context("failed to publish code memory")?;

         let section = |name: &str| {
-            obj.section_by_name(name)
+            code.obj
+                .section_by_name(name)
                 .and_then(|s| s.data().ok())
                 .ok_or_else(|| anyhow!("missing section `{}` in compilation artifacts", name))
         };
@@ -304,39 +295,18 @@ impl CompiledModule {
             None => bincode::deserialize(section(ELF_WASMTIME_INFO)?)
                 .context("failed to deserialize wasmtime module info")?,
         };
-        let module = Arc::new(info.module);
-        let funcs = info.funcs;
-        let trampolines = info.trampolines;
-        let wasm_data = subslice_range(section(ELF_WASM_DATA)?, &mmap);
-        let address_map_data = subslice_range(section(ELF_WASMTIME_ADDRMAP)?, &mmap);
-        let trap_data = subslice_range(section(ELF_WASMTIME_TRAPS)?, &mmap);
-
-        // Allocate all of the compiled functions into executable memory,
-        // copying over their contents.
-        let (code_memory, code_range) = build_code_memory(&obj).map_err(|message| {
-            SetupError::Instantiate(InstantiationError::Resource(anyhow::anyhow!(
-                "failed to build code memory for functions: {}",
-                message
-            )))
-        })?;
-
-        let start = code_range.0 as usize;
-        let end = start + code_range.1;

         let mut ret = Self {
             meta: info.meta,
-            funcs,
-            trampolines,
-            module,
-            mmap,
-            wasm_data,
-            address_map_data,
-            trap_data,
-            code: Arc::new(ModuleCode {
-                range: (start, end),
-                code_memory,
-                dbg_jit_registration: None,
-            }),
+            module: Arc::new(info.module),
+            funcs: info.funcs,
+            trampolines: info.trampolines,
+            wasm_data: subslice_range(section(ELF_WASM_DATA)?, code.mmap),
+            address_map_data: subslice_range(section(ELF_WASMTIME_ADDRMAP)?, code.mmap),
+            trap_data: subslice_range(section(ELF_WASMTIME_TRAPS)?, code.mmap),
+            code: subslice_range(code.text, code.mmap),
+            dbg_jit_registration: None,
+            code_memory,
         };
         ret.register_debug_and_profiling(profiler)?;
@@ -345,31 +315,23 @@ impl CompiledModule {
     fn register_debug_and_profiling(&mut self, profiler: &dyn ProfilingAgent) -> Result<()> {
         // Register GDB JIT images; initialize profiler and load the wasm module.
-        let dbg_jit_registration = if self.meta.native_debug_info_present {
-            let bytes = create_gdbjit_image(
-                self.mmap.to_vec(),
-                (
-                    self.code.range.0 as *const u8,
-                    self.code.range.1 - self.code.range.0,
-                ),
-            )
-            .map_err(SetupError::DebugInfo)?;
+        if self.meta.native_debug_info_present {
+            let code = self.code();
+            let bytes = create_gdbjit_image(self.mmap().to_vec(), (code.as_ptr(), code.len()))
+                .map_err(SetupError::DebugInfo)?;
             profiler.module_load(self, Some(&bytes));
             let reg = GdbJitImageRegistration::register(bytes);
-            Some(reg)
+            self.dbg_jit_registration = Some(reg);
         } else {
             profiler.module_load(self, None);
-            None
-        };
-        Arc::get_mut(&mut self.code).unwrap().dbg_jit_registration = dbg_jit_registration;
+        }
         Ok(())
     }

     /// Returns the underlying memory which contains the compiled module's
     /// image.
     pub fn mmap(&self) -> &MmapVec {
-        &self.mmap
+        self.code_memory.mmap()
     }

     /// Returns the concatenated list of all data associated with this wasm
@@ -378,20 +340,27 @@ impl CompiledModule {
     /// This is used for initialization of memories and all data ranges stored
     /// in a `Module` are relative to the slice returned here.
     pub fn wasm_data(&self) -> &[u8] {
-        &self.mmap[self.wasm_data.clone()]
+        &self.mmap()[self.wasm_data.clone()]
     }

     /// Returns the encoded address map section used to pass to
     /// `wasmtime_environ::lookup_file_pos`.
     pub fn address_map_data(&self) -> &[u8] {
-        &self.mmap[self.address_map_data.clone()]
+        &self.mmap()[self.address_map_data.clone()]
     }

     /// Returns the encoded trap information for this compiled image.
     ///
     /// For more information see `wasmtime_environ::trap_encoding`.
     pub fn trap_data(&self) -> &[u8] {
-        &self.mmap[self.trap_data.clone()]
+        &self.mmap()[self.trap_data.clone()]
+    }
+
+    /// Returns the text section of the ELF image for this compiled module.
+    ///
+    /// This memory should have the read/execute permissions.
+    pub fn code(&self) -> &[u8] {
+        &self.mmap()[self.code.clone()]
     }

     /// Return a reference-counting pointer to a module.
@@ -414,12 +383,14 @@ impl CompiledModule {
     pub fn finished_functions(
         &self,
     ) -> impl ExactSizeIterator<Item = (DefinedFuncIndex, *mut [VMFunctionBody])> + '_ {
+        let code = self.code();
         self.funcs.iter().map(move |(i, info)| {
+            let func = &code[info.start as usize..][..info.length as usize];
             (
                 i,
                 std::ptr::slice_from_raw_parts_mut(
-                    (self.code.range.0 + info.start as usize) as *mut VMFunctionBody,
-                    info.length as usize,
+                    func.as_ptr() as *mut VMFunctionBody,
+                    func.len(),
                 ),
             )
         })
@@ -427,10 +398,11 @@ impl CompiledModule {
     /// Returns the per-signature trampolines for this module.
     pub fn trampolines(&self) -> impl Iterator<Item = (SignatureIndex, VMTrampoline)> + '_ {
+        let code = self.code();
         self.trampolines.iter().map(move |info| {
             (info.signature, unsafe {
-                let ptr = self.code.range.0 + info.start as usize;
-                std::mem::transmute::<usize, VMTrampoline>(ptr)
+                let ptr = &code[info.start as usize];
+                std::mem::transmute::<*const u8, VMTrampoline>(ptr)
             })
         })
     }
@@ -492,11 +464,6 @@ impl CompiledModule {
             .expect("defined function should be present")
     }

-    /// Returns module's JIT code.
-    pub fn code(&self) -> &Arc<ModuleCode> {
-        &self.code
-    }
-
     /// Creates a new symbolication context which can be used to further
     /// symbolicate stack traces.
     ///
@@ -507,7 +474,7 @@ impl CompiledModule {
         if !self.meta.has_wasm_debuginfo {
             return Ok(None);
         }
-        let obj = File::parse(&self.mmap[..])
+        let obj = File::parse(&self.mmap()[..])
             .context("failed to parse internal ELF file representation")?;
         let dwarf = gimli::Dwarf::load(|id| -> Result<_> {
             let data = obj
@@ -554,21 +521,6 @@ impl<'a> SymbolizeContext<'a> {
     }
 }

-fn build_code_memory(obj: &File) -> Result<(CodeMemory, (*const u8, usize))> {
-    let mut code_memory = CodeMemory::new();
-
-    let allocation = code_memory.allocate_for_object(obj)?;
-
-    link_module(obj, allocation);
-
-    let code_range = (allocation.as_ptr(), allocation.len());
-
-    // Make all code compiled thus far executable.
-    code_memory.publish();
-
-    Ok((code_memory, code_range))
-}
-
 /// Returns the range of `inner` within `outer`, such that `outer[range]` is the
 /// same as `inner`.
 ///


@@ -30,10 +30,9 @@ mod unwind;
 pub use crate::code_memory::CodeMemory;
 pub use crate::instantiate::{
-    finish_compile, subslice_range, CompiledModule, CompiledModuleInfo, ModuleCode, SetupError,
+    finish_compile, subslice_range, CompiledModule, CompiledModuleInfo, SetupError,
     SymbolizeContext, TypeTables,
 };
-pub use crate::link::link_module;
 pub use crate::mmap_vec::MmapVec;
 pub use profiling::*;


@@ -1,6 +1,6 @@
 //! Linking for JIT-compiled code.

-use object::read::{Object, ObjectSection, Relocation, RelocationTarget};
+use object::read::{Object, Relocation, RelocationTarget};
 use object::{elf, File, NativeEndian as NE, ObjectSymbol, RelocationEncoding, RelocationKind};
 use std::convert::TryFrom;
 use wasmtime_runtime::libcalls;
@@ -9,24 +9,13 @@ type U32 = object::U32Bytes<NE>;
 type I32 = object::I32Bytes<NE>;
 type U64 = object::U64Bytes<NE>;

-/// Links a module that has been compiled with `compiled_module` in `wasmtime-environ`.
+/// Applies the relocation `r` at `offset` within `code`, according to the
+/// symbols found in `obj`.
 ///
-/// Performs all required relocations inside the function code, provided the necessary metadata.
-/// The relocations data provided in the object file, see object.rs for details.
-///
-/// Currently, the produced ELF image can be trusted.
-/// TODO refactor logic to remove panics and add defensive code the image data
-/// becomes untrusted.
-pub fn link_module(obj: &File, code_range: &mut [u8]) {
-    // Read the ".text" section and process its relocations.
-    let text_section = obj.section_by_name(".text").unwrap();
-    for (offset, r) in text_section.relocations() {
-        apply_reloc(obj, code_range, offset, r);
-    }
-}
-
-fn apply_reloc(obj: &File, code: &mut [u8], offset: u64, r: Relocation) {
+/// This method is used at runtime to resolve relocations in ELF images,
+/// typically with respect to where the memory was placed in the final address
+/// in memory.
+pub fn apply_reloc(obj: &File, code: &mut [u8], offset: u64, r: Relocation) {
     let target_func_address: usize = match r.target() {
         RelocationTarget::Symbol(i) => {
             // Processing relocation target is a named symbols that is compiled


@@ -732,7 +732,7 @@ impl<'a> Instantiator<'a> {
             .allocator()
             .allocate(InstanceAllocationRequest {
                 module: compiled_module.module().clone(),
-                image_base: compiled_module.code().range().0,
+                image_base: compiled_module.code().as_ptr() as usize,
                 functions: compiled_module.functions(),
                 imports: self.cur.build(),
                 shared_signatures: self.cur.module.signatures().as_module_map().into(),


@@ -44,7 +44,6 @@ impl ModuleRegistry {
     /// Registers a new module with the registry.
     pub fn register(&mut self, module: &Module) {
         let compiled_module = module.compiled_module();
-        let (start, end) = compiled_module.code().range();

         // If there's not actually any functions in this module then we may
         // still need to preserve it for its data segments. Instances of this
@@ -59,8 +58,10 @@ impl ModuleRegistry {

         // The module code range is exclusive for end, so make it inclusive as it
         // may be a valid PC value
-        assert!(start < end);
-        let end = end - 1;
+        let code = compiled_module.code();
+        assert!(!code.is_empty());
+        let start = code.as_ptr() as usize;
+        let end = start + code.len() - 1;

         // Ensure the module isn't already present in the registry
         // This is expected when a module is instantiated multiple times in the


@@ -6,7 +6,7 @@ use std::any::Any;
 use std::panic::{self, AssertUnwindSafe};
 use std::sync::Arc;
 use wasmtime_environ::{EntityIndex, Module, ModuleType, PrimaryMap, SignatureIndex};
-use wasmtime_jit::CodeMemory;
+use wasmtime_jit::{CodeMemory, MmapVec};
 use wasmtime_runtime::{
     Imports, InstanceAllocationRequest, InstanceAllocator, InstanceHandle,
     OnDemandInstanceAllocator, VMContext, VMFunctionBody, VMSharedSignatureIndex, VMTrampoline,
@@ -82,20 +82,18 @@ pub fn create_function(
         stub_fn as usize,
         &mut obj,
     )?;
-    let obj = obj.write()?;
+    let obj = MmapVec::from_obj(obj)?;

     // Copy the results of JIT compilation into executable memory, and this will
     // also take care of unwind table registration.
-    let mut code_memory = CodeMemory::new();
-    let (alloc, _obj) = code_memory.allocate_for_object_unparsed(&obj)?;
+    let mut code_memory = CodeMemory::new(obj);
+    let code = code_memory.publish()?;

     // Extract the host/wasm trampolines from the results of compilation since
     // we know their start/length.
-    let host_trampoline = alloc[t1.start as usize..][..t1.length as usize].as_ptr();
-    let wasm_trampoline = &mut alloc[t2.start as usize..][..t2.length as usize];
-    let wasm_trampoline = wasm_trampoline as *mut [u8] as *mut [VMFunctionBody];
-
-    code_memory.publish();
+    let host_trampoline = code.text[t1.start as usize..][..t1.length as usize].as_ptr();
+    let wasm_trampoline = &code.text[t2.start as usize..][..t2.length as usize];
+    let wasm_trampoline = wasm_trampoline as *const [u8] as *mut [VMFunctionBody];

     let sig = engine.signatures().register(ft.as_wasm_func_type());