Remove dependency on TargetIsa from Wasmtime crates (#3178)
This commit started off by deleting the `cranelift_codegen::settings` reexport in the `wasmtime-environ` crate and then basically played whack-a-mole until everything compiled again. The main result is that the `wasmtime-*` family of crates now generally has less of a dependency on the `TargetIsa` trait and type from Cranelift. While the dependency isn't entirely severed yet, this is at least a significant start. This commit is intended to be largely refactorings; no functional changes are intended here. The refactorings are:

* A `CompilerBuilder` trait has been added to `wasmtime_environ` which serves as an abstraction used to create compilers and configure them in a uniform fashion (a rough sketch of the shape follows this list). The `wasmtime::Config` type now uses this instead of cranelift-specific settings. The `wasmtime-jit` crate exports the ability to create a compiler builder from a `CompilationStrategy`, which only works for Cranelift right now. In a cranelift-less build of Wasmtime this is expected to return a trait object that fails all requests to compile.
* The `Compiler` trait in the `wasmtime_environ` crate has been souped up with a number of methods that Wasmtime and other crates needed.
* The `wasmtime-debug` crate is now moved entirely behind the `wasmtime-cranelift` crate.
* The `wasmtime-cranelift` crate is now only depended on by the `wasmtime-jit` crate.
* Wasm types in `cranelift-wasm` no longer contain their IR type; instead they only contain the `WasmType`. This is required to get everything to align correctly and will also be needed in a future refactoring where the types used by `cranelift-wasm` are extracted to a separate crate.
* I moved around a fair bit of code in `wasmtime-cranelift`.
* Some gdb-specific, jit-specific code has moved from `wasmtime-debug` to `wasmtime-jit`.
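For orientation before the diff below, here is a minimal sketch of the builder/compiler split this commit introduces. The exact API surface here is an assumption for illustration — the setter method name, the `Result` plumbing, and the stub `Compiler` trait are not copied from the commit; only the general shape (a builder configured without naming Cranelift types that then produces a boxed `wasmtime_environ::Compiler`) comes from the description above and the diff.

```rust
// Hypothetical sketch only: method names and signatures below are assumptions
// for illustration, not the exact traits added by this commit.
use anyhow::Result;
use target_lexicon::Triple;

/// Builder that configures and creates a compiler without exposing Cranelift types.
pub trait CompilerBuilder: Send + Sync {
    /// Target triple the eventual compiler will emit code for.
    fn triple(&self) -> &Triple;

    /// Sets a backend-specific flag by name (assumed setter, for illustration).
    fn set(&mut self, name: &str, value: &str) -> Result<()>;

    /// Consumes the accumulated configuration and produces a compiler.
    fn build(&self) -> Box<dyn Compiler>;
}

/// Stand-in for the `wasmtime_environ::Compiler` trait used throughout the diff.
pub trait Compiler: Send + Sync {
    fn triple(&self) -> &Triple;
    // ...plus compilation hooks such as `compile_function`, `emit_dwarf`,
    // `host_to_wasm_trampoline`, `create_systemv_cie`, `flags`, and `isa_flags`,
    // which the JIT calls through its boxed compiler in the diff below.
}

/// With cranelift enabled the builder would be `wasmtime_cranelift::builder()`;
/// a cranelift-less build could hand back a builder whose compiler rejects
/// every compilation request, as described above.
fn _configure_and_build(mut builder: Box<dyn CompilerBuilder>) -> Result<Box<dyn Compiler>> {
    builder.set("opt_level", "speed")?; // hypothetical setting name
    Ok(builder.build())
}
```

With this shape, `wasmtime::Config` can hold a `Box<dyn CompilerBuilder>` and forward configuration to it generically, which is what lets the `wasmtime-*` crates above the JIT stop naming `TargetIsa` directly.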
@@ -15,7 +15,6 @@ wasmtime-environ = { path = "../environ", version = "0.29.0" }
 wasmtime-runtime = { path = "../runtime", version = "0.29.0" }
 wasmtime-cranelift = { path = "../cranelift", version = "0.29.0" }
 wasmtime-lightbeam = { path = "../lightbeam/wasmtime", version = "0.29.0", optional = true }
-wasmtime-debug = { path = "../debug", version = "0.29.0" }
 wasmtime-profiling = { path = "../profiling", version = "0.29.0" }
 wasmtime-obj = { path = "../obj", version = "0.29.0" }
 rayon = { version = "1.0", optional = true }

@@ -5,6 +5,7 @@ use crate::object::{
     ObjectUnwindInfo,
 };
 use crate::unwind::UnwindRegistry;
+use crate::Compiler;
 use anyhow::{Context, Result};
 use object::read::{File as ObjectFile, Object, ObjectSection, ObjectSymbol};
 use region;
@@ -12,7 +13,7 @@ use std::collections::BTreeMap;
 use std::mem::ManuallyDrop;
 use std::{cmp, mem};
 use wasmtime_environ::{
-    isa::{unwind::UnwindInfo, TargetIsa},
+    isa::unwind::UnwindInfo,
     wasm::{FuncIndex, SignatureIndex},
     CompiledFunction,
 };
@@ -131,7 +132,7 @@ impl CodeMemory {
     }

     /// Make all allocated memory executable.
-    pub fn publish(&mut self, isa: &dyn TargetIsa) {
+    pub fn publish(&mut self, compiler: &Compiler) {
         self.push_current(0)
             .expect("failed to push current memory map");

@@ -142,7 +143,7 @@ impl CodeMemory {
         } in &mut self.entries[self.published..]
         {
             // Remove write access to the pages due to the relocation fixups.
-            r.publish(isa)
+            r.publish(compiler)
                 .expect("failed to publish function unwind registry");

             if !m.is_empty() {

@@ -6,15 +6,14 @@ use object::write::Object;
 #[cfg(feature = "parallel-compilation")]
 use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
+use std::collections::BTreeMap;
 use std::hash::{Hash, Hasher};
 use std::mem;
 use wasmparser::WasmFeatures;
-use wasmtime_debug::{emit_dwarf, DwarfSection};
 use wasmtime_environ::entity::EntityRef;
-use wasmtime_environ::isa::{TargetFrontendConfig, TargetIsa};
 use wasmtime_environ::wasm::{DefinedMemoryIndex, MemoryIndex};
 use wasmtime_environ::{
-    CompiledFunctions, Compiler as EnvCompiler, DebugInfoData, Module, ModuleMemoryOffset,
+    CompiledFunctions, Compiler as EnvCompiler, CompilerBuilder, ModuleMemoryOffset,
     ModuleTranslation, Tunables, TypeTables, VMOffsets,
 };

@@ -33,41 +32,35 @@ pub enum CompilationStrategy {
 }

 /// A WebAssembly code JIT compiler.
-///
-/// A `Compiler` instance owns the executable memory that it allocates.
-///
-/// TODO: Evolve this to support streaming rather than requiring a `&[u8]`
-/// containing a whole wasm module at once.
-///
-/// TODO: Consider using cranelift-module.
 pub struct Compiler {
-    isa: Box<dyn TargetIsa>,
     compiler: Box<dyn EnvCompiler>,
-    strategy: CompilationStrategy,
     tunables: Tunables,
     features: WasmFeatures,
     parallel_compilation: bool,
 }

 impl Compiler {
-    /// Construct a new `Compiler`.
+    /// Creates a new compiler builder for the provided compilation strategy.
+    pub fn builder(strategy: CompilationStrategy) -> Box<dyn CompilerBuilder> {
+        match strategy {
+            CompilationStrategy::Auto | CompilationStrategy::Cranelift => {
+                wasmtime_cranelift::builder()
+            }
+            #[cfg(feature = "lightbeam")]
+            CompilationStrategy::Lightbeam => unimplemented!(),
+        }
+    }
+
+    /// Creates a new instance of a `Compiler` from the provided compiler
+    /// builder.
     pub fn new(
-        isa: Box<dyn TargetIsa>,
-        strategy: CompilationStrategy,
+        builder: &dyn CompilerBuilder,
         tunables: Tunables,
         features: WasmFeatures,
         parallel_compilation: bool,
-    ) -> Self {
-        Self {
-            isa,
-            strategy,
-            compiler: match strategy {
-                CompilationStrategy::Auto | CompilationStrategy::Cranelift => {
-                    Box::new(wasmtime_cranelift::Cranelift::default())
-                }
-                #[cfg(feature = "lightbeam")]
-                CompilationStrategy::Lightbeam => Box::new(wasmtime_lightbeam::Lightbeam),
-            },
+    ) -> Compiler {
+        Compiler {
+            compiler: builder.build(),
             tunables,
             features,
             parallel_compilation,
@@ -80,25 +73,6 @@ fn _assert_compiler_send_sync() {
     _assert::<Compiler>();
 }

-fn transform_dwarf_data(
-    isa: &dyn TargetIsa,
-    module: &Module,
-    debug_data: &DebugInfoData,
-    funcs: &CompiledFunctions,
-) -> Result<Vec<DwarfSection>, SetupError> {
-    let target_config = isa.frontend_config();
-    let ofs = VMOffsets::new(target_config.pointer_bytes(), &module);
-
-    let memory_offset = if ofs.num_imported_memories > 0 {
-        ModuleMemoryOffset::Imported(ofs.vmctx_vmmemory_import(MemoryIndex::new(0)))
-    } else if ofs.num_defined_memories > 0 {
-        ModuleMemoryOffset::Defined(ofs.vmctx_vmmemory_definition_base(DefinedMemoryIndex::new(0)))
-    } else {
-        ModuleMemoryOffset::None
-    };
-    emit_dwarf(isa, debug_data, funcs, &memory_offset).map_err(SetupError::DebugInfo)
-}
-
 #[allow(missing_docs)]
 pub struct Compilation {
     pub obj: Object,
@@ -107,21 +81,6 @@ pub struct Compilation {
 }

 impl Compiler {
-    /// Return the isa.
-    pub fn isa(&self) -> &dyn TargetIsa {
-        self.isa.as_ref()
-    }
-
-    /// Return the compiler's strategy.
-    pub fn strategy(&self) -> CompilationStrategy {
-        self.strategy
-    }
-
-    /// Return the target's frontend configuration settings.
-    pub fn frontend_config(&self) -> TargetFrontendConfig {
-        self.isa.frontend_config()
-    }
-
     /// Return the tunables in use by this engine.
     pub fn tunables(&self) -> &Tunables {
         &self.tunables
@@ -137,6 +96,11 @@ impl Compiler {
         &*self.compiler
     }

+    /// Returns the target this compiler is compiling for.
+    pub fn triple(&self) -> &target_lexicon::Triple {
+        self.compiler.triple()
+    }
+
     /// Compile the given function bodies.
     pub fn compile<'data>(
         &self,
@@ -148,25 +112,35 @@ impl Compiler {

         let funcs = self
             .run_maybe_parallel(functions, |(index, func)| {
-                self.compiler.compile_function(
-                    translation,
-                    index,
-                    func,
-                    &*self.isa,
-                    &self.tunables,
-                    types,
-                )
+                self.compiler
+                    .compile_function(translation, index, func, &self.tunables, types)
             })?
             .into_iter()
             .collect::<CompiledFunctions>();

         let dwarf_sections = if self.tunables.generate_native_debuginfo && !funcs.is_empty() {
-            transform_dwarf_data(
-                &*self.isa,
+            let ofs = VMOffsets::new(
+                self.compiler
+                    .triple()
+                    .architecture
+                    .pointer_width()
+                    .unwrap()
+                    .bytes(),
                 &translation.module,
-                &translation.debuginfo,
-                &funcs,
-            )?
+            );
+
+            let memory_offset = if ofs.num_imported_memories > 0 {
+                ModuleMemoryOffset::Imported(ofs.vmctx_vmmemory_import(MemoryIndex::new(0)))
+            } else if ofs.num_defined_memories > 0 {
+                ModuleMemoryOffset::Defined(
+                    ofs.vmctx_vmmemory_definition_base(DefinedMemoryIndex::new(0)),
+                )
+            } else {
+                ModuleMemoryOffset::None
+            };
+            self.compiler
+                .emit_dwarf(&translation.debuginfo, &funcs, &memory_offset)
+                .map_err(SetupError::DebugInfo)?
         } else {
             vec![]
         };
@@ -211,29 +185,27 @@ impl Compiler {
 impl Hash for Compiler {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         let Compiler {
-            strategy,
-            compiler: _,
-            isa,
+            compiler,
             tunables,
             features,
             parallel_compilation: _,
         } = self;

-        // Hash compiler's flags: compilation strategy, isa, frontend config,
-        // misc tunables.
-        strategy.hash(hasher);
-        isa.triple().hash(hasher);
-        isa.hash_all_flags(hasher);
-        isa.frontend_config().hash(hasher);
+        compiler.triple().hash(hasher);
+        compiler
+            .flags()
+            .into_iter()
+            .collect::<BTreeMap<_, _>>()
+            .hash(hasher);
+        compiler
+            .isa_flags()
+            .into_iter()
+            .collect::<BTreeMap<_, _>>()
+            .hash(hasher);
         tunables.hash(hasher);
         features.hash(hasher);

         // Catch accidental bugs of reusing across crate versions.
         env!("CARGO_PKG_VERSION").hash(hasher);
-
-        // TODO: ... and should we hash anything else? There's a lot of stuff in
-        // `TargetIsa`, like registers/encodings/etc. Should we be hashing that
-        // too? It seems like wasmtime doesn't configure it too too much, but
-        // this may become an issue at some point.
     }
 }

crates/jit/src/debug.rs (new file, 212 lines)
@@ -0,0 +1,212 @@
+use anyhow::{bail, ensure, Error};
+use object::endian::{BigEndian, Endian, Endianness, LittleEndian};
+use object::{RelocationEncoding, RelocationKind};
+use std::collections::HashMap;
+
+pub fn create_gdbjit_image(
+    mut bytes: Vec<u8>,
+    code_region: (*const u8, usize),
+    defined_funcs_offset: usize,
+    funcs: &[*const u8],
+) -> Result<Vec<u8>, Error> {
+    let e = ensure_supported_elf_format(&bytes)?;
+
+    // patch relocs
+    relocate_dwarf_sections(&bytes, defined_funcs_offset, funcs)?;
+
+    // elf is still missing details...
+    match e {
+        Endianness::Little => {
+            convert_object_elf_to_loadable_file::<LittleEndian>(&mut bytes, code_region)
+        }
+        Endianness::Big => {
+            convert_object_elf_to_loadable_file::<BigEndian>(&mut bytes, code_region)
+        }
+    }
+
+    // let mut file = ::std::fs::File::create(::std::path::Path::new("test.o")).expect("file");
+    // ::std::io::Write::write_all(&mut file, &bytes).expect("write");
+
+    Ok(bytes)
+}
+
+fn relocate_dwarf_sections(
+    bytes: &[u8],
+    defined_funcs_offset: usize,
+    funcs: &[*const u8],
+) -> Result<(), Error> {
+    use object::read::{File, Object, ObjectSection, ObjectSymbol, RelocationTarget};
+
+    let obj = File::parse(bytes)?;
+    let mut func_symbols = HashMap::new();
+    for sym in obj.symbols() {
+        match (sym.name(), sym.section_index()) {
+            (Ok(name), Some(_section_index)) if name.starts_with("_wasm_function_") => {
+                let index = name["_wasm_function_".len()..].parse::<usize>()?;
+                let data = funcs[index - defined_funcs_offset];
+                func_symbols.insert(sym.index(), data);
+            }
+            _ => (),
+        }
+    }
+
+    for section in obj.sections() {
+        for (off, r) in section.relocations() {
+            if r.kind() != RelocationKind::Absolute
+                || r.encoding() != RelocationEncoding::Generic
+                || r.size() != 64
+            {
+                continue;
+            }
+
+            let data = match r.target() {
+                RelocationTarget::Symbol(ref index) => func_symbols.get(index),
+                _ => None,
+            };
+            let data: *const u8 = match data {
+                Some(data) => *data,
+                None => {
+                    continue;
+                }
+            };
+
+            let target = (data as u64).wrapping_add(r.addend() as u64);
+
+            let entry_ptr = section.data_range(off, 8).unwrap().unwrap().as_ptr();
+            unsafe {
+                std::ptr::write(entry_ptr as *mut u64, target);
+            }
+        }
+    }
+    Ok(())
+}
+
+fn ensure_supported_elf_format(bytes: &[u8]) -> Result<Endianness, Error> {
+    use object::elf::*;
+    use object::read::elf::*;
+    use std::mem::size_of;
+
+    let kind = match object::FileKind::parse(bytes) {
+        Ok(file) => file,
+        Err(err) => {
+            bail!("Failed to parse file: {}", err);
+        }
+    };
+    let header = match kind {
+        object::FileKind::Elf64 => match object::elf::FileHeader64::<Endianness>::parse(bytes) {
+            Ok(header) => header,
+            Err(err) => {
+                bail!("Unsupported ELF file: {}", err);
+            }
+        },
+        _ => {
+            bail!("only 64-bit ELF files currently supported")
+        }
+    };
+    let e = header.endian().unwrap();
+
+    match header.e_machine.get(e) {
+        EM_AARCH64 => (),
+        EM_X86_64 => (),
+        EM_S390 => (),
+        machine => {
+            bail!("Unsupported ELF target machine: {:x}", machine);
+        }
+    }
+    ensure!(
+        header.e_phoff.get(e) == 0 && header.e_phnum.get(e) == 0,
+        "program header table is empty"
+    );
+    let e_shentsize = header.e_shentsize.get(e);
+    let req_shentsize = match e {
+        Endianness::Little => size_of::<SectionHeader64<LittleEndian>>(),
+        Endianness::Big => size_of::<SectionHeader64<BigEndian>>(),
+    };
+    ensure!(e_shentsize as usize == req_shentsize, "size of sh");
+    Ok(e)
+}
+
+fn convert_object_elf_to_loadable_file<E: Endian>(
+    bytes: &mut Vec<u8>,
+    code_region: (*const u8, usize),
+) {
+    use object::elf::*;
+    use std::ffi::CStr;
+    use std::mem::size_of;
+    use std::os::raw::c_char;
+
+    let e = E::default();
+    let header: &FileHeader64<E> = unsafe { &*(bytes.as_mut_ptr() as *const FileHeader64<_>) };
+
+    let e_shentsize = header.e_shentsize.get(e);
+    let e_shoff = header.e_shoff.get(e);
+    let e_shnum = header.e_shnum.get(e);
+    let mut shstrtab_off = 0;
+    for i in 0..e_shnum {
+        let off = e_shoff as isize + i as isize * e_shentsize as isize;
+        let section: &SectionHeader64<E> =
+            unsafe { &*(bytes.as_ptr().offset(off) as *const SectionHeader64<_>) };
+        if section.sh_type.get(e) != SHT_STRTAB {
+            continue;
+        }
+        shstrtab_off = section.sh_offset.get(e);
+    }
+    let mut segment: Option<_> = None;
+    for i in 0..e_shnum {
+        let off = e_shoff as isize + i as isize * e_shentsize as isize;
+        let section: &mut SectionHeader64<E> =
+            unsafe { &mut *(bytes.as_mut_ptr().offset(off) as *mut SectionHeader64<_>) };
+        if section.sh_type.get(e) != SHT_PROGBITS {
+            continue;
+        }
+        // It is a SHT_PROGBITS, but we need to check sh_name to ensure it is our function
+        let sh_name_off = section.sh_name.get(e);
+        let sh_name = unsafe {
+            CStr::from_ptr(
+                bytes
+                    .as_ptr()
+                    .offset((shstrtab_off + sh_name_off as u64) as isize)
+                    as *const c_char,
+            )
+            .to_str()
+            .expect("name")
+        };
+        if sh_name != ".text" {
+            continue;
+        }
+
+        assert!(segment.is_none());
+        // Patch vaddr, and save file location and its size.
+        section.sh_addr.set(e, code_region.0 as u64);
+        let sh_offset = section.sh_offset.get(e);
+        let sh_size = section.sh_size.get(e);
+        segment = Some((sh_offset, sh_size));
+    }
+
+    // LLDB wants segment with virtual address set, placing them at the end of ELF.
+    let ph_off = bytes.len();
+    let e_phentsize = size_of::<ProgramHeader64<E>>();
+    let e_phnum = 1;
+    bytes.resize(ph_off + e_phentsize * e_phnum, 0);
+    if let Some((sh_offset, sh_size)) = segment {
+        let (v_offset, size) = code_region;
+        let program: &mut ProgramHeader64<E> =
+            unsafe { &mut *(bytes.as_ptr().add(ph_off) as *mut ProgramHeader64<_>) };
+        program.p_type.set(e, PT_LOAD);
+        program.p_offset.set(e, sh_offset);
+        program.p_vaddr.set(e, v_offset as u64);
+        program.p_paddr.set(e, v_offset as u64);
+        program.p_filesz.set(e, sh_size);
+        program.p_memsz.set(e, size as u64);
+    } else {
+        unreachable!();
+    }
+
+    // It is somewhat loadable ELF file at this moment.
+    let header: &mut FileHeader64<E> =
+        unsafe { &mut *(bytes.as_mut_ptr() as *mut FileHeader64<_>) };
+    header.e_type.set(e, ET_DYN);
+    header.e_phoff.set(e, ph_off as u64);
+    header.e_phentsize.set(e, e_phentsize as u16);
+    header.e_phnum.set(e, e_phnum as u16);
+}

@@ -5,6 +5,7 @@

 use crate::code_memory::CodeMemory;
 use crate::compiler::{Compilation, Compiler};
+use crate::debug::create_gdbjit_image;
 use crate::link::link_module;
 use crate::object::ObjectUnwindInfo;
 use anyhow::{Context, Result};
@@ -13,9 +14,7 @@ use serde::{Deserialize, Serialize};
 use std::ops::Range;
 use std::sync::Arc;
 use thiserror::Error;
-use wasmtime_debug::create_gdbjit_image;
 use wasmtime_environ::entity::PrimaryMap;
-use wasmtime_environ::isa::TargetIsa;
 use wasmtime_environ::wasm::{
     DefinedFuncIndex, InstanceTypeIndex, ModuleTypeIndex, SignatureIndex, WasmFuncType,
 };
@@ -103,13 +102,10 @@ impl CompilationArtifacts {
         data: &[u8],
         use_paged_mem_init: bool,
     ) -> Result<(usize, Vec<CompilationArtifacts>, TypeTables), SetupError> {
-        let (main_module, translations, types) = ModuleEnvironment::new(
-            compiler.frontend_config(),
-            compiler.tunables(),
-            compiler.features(),
-        )
-        .translate(data)
-        .map_err(|error| SetupError::Compile(CompileError::Wasm(error)))?;
+        let (main_module, translations, types) =
+            ModuleEnvironment::new(compiler.tunables(), compiler.features())
+                .translate(data)
+                .map_err(|error| SetupError::Compile(CompileError::Wasm(error)))?;

         let list = compiler.run_maybe_parallel::<_, _, SetupError, _>(
             translations,
@@ -225,25 +221,24 @@ impl CompiledModule {
     /// artifacts.
     pub fn from_artifacts_list(
         artifacts: Vec<CompilationArtifacts>,
-        isa: &dyn TargetIsa,
-        profiler: &dyn ProfilingAgent,
+        compiler: &Compiler,
+        profiler: &dyn ProfilingAgent,
     ) -> Result<Vec<Arc<Self>>, SetupError> {
         compiler.run_maybe_parallel(artifacts, |a| {
-            CompiledModule::from_artifacts(a, isa, profiler)
+            CompiledModule::from_artifacts(a, compiler, profiler)
         })
     }

     /// Creates `CompiledModule` directly from `CompilationArtifacts`.
     pub fn from_artifacts(
         artifacts: CompilationArtifacts,
-        isa: &dyn TargetIsa,
+        compiler: &Compiler,
         profiler: &dyn ProfilingAgent,
     ) -> Result<Arc<Self>, SetupError> {
         // Allocate all of the compiled functions into executable memory,
         // copying over their contents.
         let (code_memory, code_range, finished_functions, trampolines) = build_code_memory(
-            isa,
+            compiler,
             &artifacts.obj,
             &artifacts.module,
             &artifacts.unwind_info,
@@ -480,7 +475,7 @@ fn create_dbg_image(
 }

 fn build_code_memory(
-    isa: &dyn TargetIsa,
+    compiler: &Compiler,
     obj: &[u8],
     module: &Module,
     unwind_info: &[ObjectUnwindInfo],
@@ -531,7 +526,7 @@ fn build_code_memory(
     let code_range = (code_range.as_ptr(), code_range.len());

     // Make all code compiled thus far executable.
-    code_memory.publish(isa);
+    code_memory.publish(compiler);

     Ok((code_memory, code_range, finished_functions, trampolines))
 }

@@ -22,6 +22,7 @@

 mod code_memory;
 mod compiler;
+mod debug;
 mod instantiate;
 mod link;
 mod object;

@@ -4,10 +4,9 @@ use crate::Compiler;
 use object::write::Object;
 use serde::{Deserialize, Serialize};
 use std::collections::BTreeSet;
-use wasmtime_debug::DwarfSection;
 use wasmtime_environ::isa::unwind::UnwindInfo;
 use wasmtime_environ::wasm::{FuncIndex, SignatureIndex};
-use wasmtime_environ::{CompiledFunctions, ModuleTranslation, TypeTables};
+use wasmtime_environ::{CompiledFunctions, DwarfSection, ModuleTranslation, TypeTables};
 use wasmtime_obj::{ObjectBuilder, ObjectBuilderTarget};

 pub use wasmtime_obj::utils;
@@ -52,7 +51,7 @@ pub(crate) fn build_object(
     for i in signatures {
         let func = compiler
             .compiler()
-            .host_to_wasm_trampoline(compiler.isa(), &types.wasm_signatures[i])?;
+            .host_to_wasm_trampoline(&types.wasm_signatures[i])?;
         // Preserve trampoline function unwind info.
         if let Some(info) = &func.unwind_info {
             unwind_info.push(ObjectUnwindInfo::Trampoline(i, info.clone()))
@@ -60,7 +59,7 @@ pub(crate) fn build_object(
         trampolines.push((i, func));
     }

-    let target = ObjectBuilderTarget::new(compiler.isa().triple().architecture)?;
+    let target = ObjectBuilderTarget::new(compiler.compiler().triple().architecture)?;
     let mut builder = ObjectBuilder::new(target, &translation.module, funcs);
     builder
         .set_code_alignment(CODE_SECTION_ALIGNMENT)

@@ -1,11 +1,12 @@
 //! Module for System V ABI unwind registry.

+use crate::Compiler;
 use anyhow::{bail, Result};
 use gimli::{
     write::{Address, EhFrame, EndianVec, FrameTable, Writer},
     RunTimeEndian,
 };
-use wasmtime_environ::isa::{unwind::UnwindInfo, TargetIsa};
+use wasmtime_environ::isa::unwind::UnwindInfo;

 /// Represents a registry of function unwind information for System V ABI.
 pub struct UnwindRegistry {
@@ -53,7 +54,7 @@ impl UnwindRegistry {
     }

     /// Publishes all registered functions.
-    pub fn publish(&mut self, isa: &dyn TargetIsa) -> Result<()> {
+    pub fn publish(&mut self, compiler: &Compiler) -> Result<()> {
         if self.published {
             bail!("unwind registry has already been published");
         }
@@ -63,7 +64,7 @@ impl UnwindRegistry {
             return Ok(());
         }

-        self.set_frame_table(isa)?;
+        self.set_frame_table(compiler)?;

         unsafe {
             self.register_frames();
@@ -74,9 +75,9 @@ impl UnwindRegistry {
         Ok(())
     }

-    fn set_frame_table(&mut self, isa: &dyn TargetIsa) -> Result<()> {
+    fn set_frame_table(&mut self, compiler: &Compiler) -> Result<()> {
         let mut table = FrameTable::default();
-        let cie_id = table.add_cie(match isa.create_systemv_cie() {
+        let cie_id = table.add_cie(match compiler.compiler().create_systemv_cie() {
             Some(cie) => cie,
             None => bail!("ISA does not support System V unwind information"),
         });

@@ -1,7 +1,8 @@
 //! Module for Windows x64 ABI unwind registry.

+use crate::Compiler;
 use anyhow::{bail, Result};
-use wasmtime_environ::isa::{unwind::UnwindInfo, TargetIsa};
+use wasmtime_environ::isa::unwind::UnwindInfo;
 use winapi::um::winnt;

 /// Represents a registry of function unwind information for Windows x64 ABI.
@@ -49,7 +50,7 @@ impl UnwindRegistry {
     }

     /// Publishes all registered functions.
-    pub fn publish(&mut self, _isa: &dyn TargetIsa) -> Result<()> {
+    pub fn publish(&mut self, _compiler: &Compiler) -> Result<()> {
         if self.published {
             bail!("unwind registry has already been published");
         }