moved crates in lib/ to src/, renamed crates, modified some files' text (#660)

lazypassion
2019-01-28 18:56:54 -05:00
committed by Dan Gohman
parent 54959cf5bb
commit 747ad3c4c5
508 changed files with 94 additions and 92 deletions


@@ -0,0 +1,506 @@
//! Defines `SimpleJITBackend`.
use crate::memory::Memory;
use cranelift_codegen::binemit::{Addend, CodeOffset, NullTrapSink, Reloc, RelocSink};
use cranelift_codegen::isa::TargetIsa;
use cranelift_codegen::{self, ir, settings};
use cranelift_module::{
    Backend, DataContext, DataDescription, Init, Linkage, ModuleNamespace, ModuleResult,
};
use cranelift_native;
use libc;
use std::collections::HashMap;
use std::ffi::CString;
use std::io::Write;
use std::ptr;
use target_lexicon::PointerWidth;
#[cfg(windows)]
use winapi;

/// A builder for `SimpleJITBackend`.
pub struct SimpleJITBuilder {
    isa: Box<TargetIsa>,
    symbols: HashMap<String, *const u8>,
}

impl SimpleJITBuilder {
    /// Create a new `SimpleJITBuilder`.
    pub fn new() -> Self {
        let flag_builder = settings::builder();
        let isa_builder = cranelift_native::builder().unwrap_or_else(|msg| {
            panic!("host machine is not supported: {}", msg);
        });
        let isa = isa_builder.finish(settings::Flags::new(flag_builder));
        Self::with_isa(isa)
    }

    /// Create a new `SimpleJITBuilder` with an arbitrary target. This is mainly
    /// useful for testing.
    ///
    /// SimpleJIT requires a `TargetIsa` configured for non-PIC.
    ///
    /// To create a `SimpleJITBuilder` for native use, use the `new` constructor
    /// instead.
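    ///
    /// # Example
    ///
    /// A minimal sketch of building a non-PIC ISA to pass in here; it assumes
    /// `cranelift_codegen::settings::Configurable` is in scope for the `set` call:
    ///
    /// ```ignore
    /// let mut flag_builder = settings::builder();
    /// flag_builder.set("is_pic", "false").unwrap();
    /// let isa_builder = cranelift_native::builder().expect("host machine not supported");
    /// let isa = isa_builder.finish(settings::Flags::new(flag_builder));
    /// let builder = SimpleJITBuilder::with_isa(isa);
    /// ```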
    pub fn with_isa(isa: Box<TargetIsa>) -> Self {
        debug_assert!(!isa.flags().is_pic(), "SimpleJIT requires non-PIC code");
        let symbols = HashMap::new();
        Self { isa, symbols }
    }

    /// Define a symbol in the internal symbol table.
    ///
    /// The JIT will use the symbol table to resolve names that are declared,
    /// but not defined, in the module being compiled. A common example is
    /// external functions. With this method, functions and data defined by
    /// the host can be exposed to the code being compiled.
    ///
    /// If a symbol is defined more than once, the most recent definition will
    /// be retained.
    ///
    /// If the JIT fails to find a symbol in its internal table, it will fall
    /// back to a platform-specific search (this typically involves searching
    /// the current process for public symbols, followed by searching the
    /// platform's C runtime).
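    ///
    /// # Example
    ///
    /// A minimal sketch of exposing a host-defined function to JIT'd code;
    /// `host_add` is a hypothetical helper, not part of this crate:
    ///
    /// ```ignore
    /// extern "C" fn host_add(a: i64, b: i64) -> i64 {
    ///     a + b
    /// }
    ///
    /// let mut builder = SimpleJITBuilder::new();
    /// builder.symbol("host_add", host_add as *const u8);
    /// // A function declared as "host_add" but left undefined in the module
    /// // will now resolve to this pointer when it is finalized.
    /// ```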
    pub fn symbol<K>(&mut self, name: K, ptr: *const u8) -> &Self
    where
        K: Into<String>,
    {
        self.symbols.insert(name.into(), ptr);
        self
    }

    /// Define multiple symbols in the internal symbol table.
    ///
    /// Using this is equivalent to calling `symbol` on each element.
    pub fn symbols<It, K>(&mut self, symbols: It) -> &Self
    where
        It: IntoIterator<Item = (K, *const u8)>,
        K: Into<String>,
    {
        for (name, ptr) in symbols {
            self.symbols.insert(name.into(), ptr);
        }
        self
    }
}

/// A `SimpleJITBackend` implements `Backend` and emits code and data into memory where it can be
/// directly called and accessed.
///
/// See the `SimpleJITBuilder` for a convenient way to construct `SimpleJITBackend` instances.
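///
/// # Example
///
/// A minimal sketch of calling a finalized function; it assumes a
/// `cranelift_module::Module<SimpleJITBackend>` named `module` in which
/// `func_id` has already been defined and finalized:
///
/// ```ignore
/// let code: *const u8 = module.get_finalized_function(func_id);
/// let entry = unsafe { std::mem::transmute::<_, extern "C" fn(i32) -> i32>(code) };
/// let result = entry(40);
/// ```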
pub struct SimpleJITBackend {
    isa: Box<TargetIsa>,
    symbols: HashMap<String, *const u8>,
    code_memory: Memory,
    readonly_memory: Memory,
    writable_memory: Memory,
}

/// A record of a relocation to perform.
struct RelocRecord {
    offset: CodeOffset,
    reloc: Reloc,
    name: ir::ExternalName,
    addend: Addend,
}

pub struct SimpleJITCompiledFunction {
    code: *mut u8,
    size: usize,
    relocs: Vec<RelocRecord>,
}

pub struct SimpleJITCompiledData {
    storage: *mut u8,
    size: usize,
    relocs: Vec<RelocRecord>,
}

impl SimpleJITBackend {
    fn lookup_symbol(&self, name: &str) -> *const u8 {
        match self.symbols.get(name) {
            Some(&ptr) => ptr,
            None => lookup_with_dlsym(name),
        }
    }
}

impl<'simple_jit_backend> Backend for SimpleJITBackend {
    type Builder = SimpleJITBuilder;

    /// SimpleJIT compiled function and data objects may have outstanding
    /// relocations that need to be performed before the memory can be used.
    /// These relocations are performed within `finalize_function` and
    /// `finalize_data`.
    type CompiledFunction = SimpleJITCompiledFunction;
    type CompiledData = SimpleJITCompiledData;

    /// SimpleJIT emits code and data into memory, and provides raw pointers
    /// to them.
    type FinalizedFunction = *const u8;
    type FinalizedData = (*mut u8, usize);

    /// SimpleJIT emits code and data into memory as it processes them, so it
    /// doesn't need to provide anything after the `Module` is complete.
    type Product = ();

    /// Create a new `SimpleJITBackend`.
    fn new(builder: SimpleJITBuilder) -> Self {
        Self {
            isa: builder.isa,
            symbols: builder.symbols,
            code_memory: Memory::new(),
            readonly_memory: Memory::new(),
            writable_memory: Memory::new(),
        }
    }

    fn isa(&self) -> &TargetIsa {
        &*self.isa
    }

    fn declare_function(&mut self, _name: &str, _linkage: Linkage) {
        // Nothing to do.
    }

    fn declare_data(&mut self, _name: &str, _linkage: Linkage, _writable: bool) {
        // Nothing to do.
    }

    fn define_function(
        &mut self,
        name: &str,
        ctx: &cranelift_codegen::Context,
        _namespace: &ModuleNamespace<Self>,
        code_size: u32,
    ) -> ModuleResult<Self::CompiledFunction> {
        let size = code_size as usize;
        let ptr = self
            .code_memory
            .allocate(size)
            .expect("TODO: handle OOM etc.");
        if cfg!(target_os = "linux") && ::std::env::var_os("PERF_BUILDID_DIR").is_some() {
            let mut map_file = ::std::fs::OpenOptions::new()
                .create(true)
                .append(true)
                .open(format!("/tmp/perf-{}.map", ::std::process::id()))
                .unwrap();

            let _ = writeln!(map_file, "{:x} {:x} {}", ptr as usize, code_size, name);
        }

        let mut reloc_sink = SimpleJITRelocSink::new();
        // Ignore traps for now; frontends should just avoid generating code that traps.
        let mut trap_sink = NullTrapSink {};
        unsafe { ctx.emit_to_memory(&*self.isa, ptr, &mut reloc_sink, &mut trap_sink) };

        Ok(Self::CompiledFunction {
            code: ptr,
            size,
            relocs: reloc_sink.relocs,
        })
    }

    fn define_data(
        &mut self,
        _name: &str,
        writable: bool,
        data: &DataContext,
        _namespace: &ModuleNamespace<Self>,
    ) -> ModuleResult<Self::CompiledData> {
        let &DataDescription {
            ref init,
            ref function_decls,
            ref data_decls,
            ref function_relocs,
            ref data_relocs,
        } = data.description();

        let size = init.size();
        let storage = if writable {
            self.writable_memory
                .allocate(size)
                .expect("TODO: handle OOM etc.")
        } else {
            self.readonly_memory
                .allocate(size)
                .expect("TODO: handle OOM etc.")
        };

        match *init {
            Init::Uninitialized => {
                panic!("data is not initialized yet");
            }
            Init::Zeros { .. } => {
                unsafe { ptr::write_bytes(storage, 0, size) };
            }
            Init::Bytes { ref contents } => {
                let src = contents.as_ptr();
                unsafe { ptr::copy_nonoverlapping(src, storage, size) };
            }
        }

        let reloc = match self.isa.triple().pointer_width().unwrap() {
            PointerWidth::U16 => panic!(),
            PointerWidth::U32 => Reloc::Abs4,
            PointerWidth::U64 => Reloc::Abs8,
        };
        let mut relocs = Vec::new();
        for &(offset, id) in function_relocs {
            relocs.push(RelocRecord {
                reloc,
                offset,
                name: function_decls[id].clone(),
                addend: 0,
            });
        }
        for &(offset, id, addend) in data_relocs {
            relocs.push(RelocRecord {
                reloc,
                offset,
                name: data_decls[id].clone(),
                addend,
            });
        }

        Ok(Self::CompiledData {
            storage,
            size,
            relocs,
        })
    }

    fn write_data_funcaddr(
        &mut self,
        _data: &mut Self::CompiledData,
        _offset: usize,
        _what: ir::FuncRef,
    ) {
        unimplemented!();
    }

    fn write_data_dataaddr(
        &mut self,
        _data: &mut Self::CompiledData,
        _offset: usize,
        _what: ir::GlobalValue,
        _usize: Addend,
    ) {
        unimplemented!();
    }

    fn finalize_function(
        &mut self,
        func: &Self::CompiledFunction,
        namespace: &ModuleNamespace<Self>,
    ) -> Self::FinalizedFunction {
        use std::ptr::write_unaligned;

        for &RelocRecord {
            reloc,
            offset,
            ref name,
            addend,
        } in &func.relocs
        {
            let ptr = func.code;
            debug_assert!((offset as usize) < func.size);
            let at = unsafe { ptr.offset(offset as isize) };
            let base = if namespace.is_function(name) {
                let (def, name_str, _signature) = namespace.get_function_definition(&name);
                match def {
                    Some(compiled) => compiled.code,
                    None => self.lookup_symbol(name_str),
                }
            } else {
                let (def, name_str, _writable) = namespace.get_data_definition(&name);
                match def {
                    Some(compiled) => compiled.storage,
                    None => self.lookup_symbol(name_str),
                }
            };
            // TODO: Handle overflow.
            let what = unsafe { base.offset(addend as isize) };
            match reloc {
                Reloc::Abs4 => {
                    // TODO: Handle overflow.
                    #[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_ptr_alignment))]
                    unsafe {
                        write_unaligned(at as *mut u32, what as u32)
                    };
                }
                Reloc::Abs8 => {
                    #[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_ptr_alignment))]
                    unsafe {
                        write_unaligned(at as *mut u64, what as u64)
                    };
                }
                Reloc::X86PCRel4 | Reloc::X86CallPCRel4 => {
                    // TODO: Handle overflow.
                    let pcrel = ((what as isize) - (at as isize)) as i32;
                    #[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_ptr_alignment))]
                    unsafe {
                        write_unaligned(at as *mut i32, pcrel)
                    };
                }
                Reloc::X86GOTPCRel4 | Reloc::X86CallPLTRel4 => panic!("unexpected PIC relocation"),
                _ => unimplemented!(),
            }
        }
        func.code
    }

    fn get_finalized_function(&self, func: &Self::CompiledFunction) -> Self::FinalizedFunction {
        func.code
    }

    fn finalize_data(
        &mut self,
        data: &Self::CompiledData,
        namespace: &ModuleNamespace<Self>,
    ) -> Self::FinalizedData {
        use std::ptr::write_unaligned;

        for &RelocRecord {
            reloc,
            offset,
            ref name,
            addend,
        } in &data.relocs
        {
            let ptr = data.storage;
            debug_assert!((offset as usize) < data.size);
            let at = unsafe { ptr.offset(offset as isize) };
            let base = if namespace.is_function(name) {
                let (def, name_str, _signature) = namespace.get_function_definition(&name);
                match def {
                    Some(compiled) => compiled.code,
                    None => self.lookup_symbol(name_str),
                }
            } else {
                let (def, name_str, _writable) = namespace.get_data_definition(&name);
                match def {
                    Some(compiled) => compiled.storage,
                    None => self.lookup_symbol(name_str),
                }
            };
            // TODO: Handle overflow.
            let what = unsafe { base.offset(addend as isize) };
            match reloc {
                Reloc::Abs4 => {
                    // TODO: Handle overflow.
                    #[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_ptr_alignment))]
                    unsafe {
                        write_unaligned(at as *mut u32, what as u32)
                    };
                }
                Reloc::Abs8 => {
                    #[cfg_attr(feature = "cargo-clippy", allow(clippy::cast_ptr_alignment))]
                    unsafe {
                        write_unaligned(at as *mut u64, what as u64)
                    };
                }
                Reloc::X86PCRel4
                | Reloc::X86CallPCRel4
                | Reloc::X86GOTPCRel4
                | Reloc::X86CallPLTRel4 => panic!("unexpected text relocation in data"),
                _ => unimplemented!(),
            }
        }
        (data.storage, data.size)
    }

    fn get_finalized_data(&self, data: &Self::CompiledData) -> Self::FinalizedData {
        (data.storage, data.size)
    }

    fn publish(&mut self) {
        // Now that we're done patching, prepare the memory for execution!
        self.readonly_memory.set_readonly();
        self.code_memory.set_readable_and_executable();
    }

    /// SimpleJIT emits code and data into memory as it processes them, so it
    /// doesn't need to provide anything after the `Module` is complete.
    fn finish(self) {}
}

#[cfg(not(windows))]
fn lookup_with_dlsym(name: &str) -> *const u8 {
    let c_str = CString::new(name).unwrap();
    let c_str_ptr = c_str.as_ptr();
    let sym = unsafe { libc::dlsym(libc::RTLD_DEFAULT, c_str_ptr) };
    if sym.is_null() {
        panic!("can't resolve symbol {}", name);
    }
    sym as *const u8
}

#[cfg(windows)]
fn lookup_with_dlsym(name: &str) -> *const u8 {
    const MSVCRT_DLL: &[u8] = b"msvcrt.dll\0";

    let c_str = CString::new(name).unwrap();
    let c_str_ptr = c_str.as_ptr();

    unsafe {
        let handles = [
            // try to find the searched symbol in the currently running executable
            ptr::null_mut(),
            // try to find the searched symbol in local c runtime
            winapi::um::libloaderapi::GetModuleHandleA(MSVCRT_DLL.as_ptr() as *const i8),
        ];

        for handle in &handles {
            let addr = winapi::um::libloaderapi::GetProcAddress(*handle, c_str_ptr);
            if addr.is_null() {
                continue;
            }
            return addr as *const u8;
        }

        let msg = if handles[1].is_null() {
            "(msvcrt not loaded)"
        } else {
            ""
        };
        panic!("cannot resolve address of symbol {} {}", name, msg);
    }
}

struct SimpleJITRelocSink {
    pub relocs: Vec<RelocRecord>,
}

impl SimpleJITRelocSink {
    pub fn new() -> Self {
        Self { relocs: Vec::new() }
    }
}

impl RelocSink for SimpleJITRelocSink {
    fn reloc_ebb(&mut self, _offset: CodeOffset, _reloc: Reloc, _ebb_offset: CodeOffset) {
        unimplemented!();
    }

    fn reloc_external(
        &mut self,
        offset: CodeOffset,
        reloc: Reloc,
        name: &ir::ExternalName,
        addend: Addend,
    ) {
        self.relocs.push(RelocRecord {
            offset,
            reloc,
            name: name.clone(),
            addend,
        });
    }

    fn reloc_jt(&mut self, _offset: CodeOffset, _reloc: Reloc, _jt: ir::JumpTable) {
        unimplemented!();
    }
}


@@ -0,0 +1,32 @@
//! Top-level lib.rs for `cranelift_simplejit`.

#![deny(
    missing_docs,
    trivial_numeric_casts,
    unused_extern_crates,
    unstable_features
)]
#![warn(unused_import_braces)]
#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
#![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))]
#![cfg_attr(
    feature = "cargo-clippy",
    warn(
        clippy::float_arithmetic,
        clippy::mut_mut,
        clippy::nonminimal_bool,
        clippy::option_map_unwrap_or,
        clippy::option_map_unwrap_or_else,
        clippy::print_stdout,
        clippy::unicode_not_nfc,
        clippy::use_self
    )
)]

mod backend;
mod memory;

pub use crate::backend::{SimpleJITBackend, SimpleJITBuilder};

/// Version number of this crate.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");


@@ -0,0 +1,158 @@
use errno;
use libc;
use region;
use std::mem;
use std::ptr;
/// Round `size` up to the nearest multiple of `page_size`.
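///
/// `page_size` is assumed to be a power of two, which allows the rounding to be
/// done with a mask instead of a division; e.g. `round_up_to_page_size(10, 4096)`
/// returns `4096`.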
fn round_up_to_page_size(size: usize, page_size: usize) -> usize {
    (size + (page_size - 1)) & !(page_size - 1)
}

/// A simple struct consisting of a pointer and length.
struct PtrLen {
    ptr: *mut u8,
    len: usize,
}

impl PtrLen {
    /// Create a new empty `PtrLen`.
    fn new() -> Self {
        Self {
            ptr: ptr::null_mut(),
            len: 0,
        }
    }

    /// Create a new `PtrLen` pointing to at least `size` bytes of memory,
    /// suitably sized and aligned for memory protection.
    #[cfg(not(target_os = "windows"))]
    fn with_size(size: usize) -> Result<Self, String> {
        let page_size = region::page::size();
        let alloc_size = round_up_to_page_size(size, page_size);
        unsafe {
            let mut ptr: *mut libc::c_void = mem::uninitialized();
            let err = libc::posix_memalign(&mut ptr, page_size, alloc_size);
            if err == 0 {
                Ok(Self {
                    ptr: ptr as *mut u8,
                    len: alloc_size,
                })
            } else {
                Err(errno::Errno(err).to_string())
            }
        }
    }

    #[cfg(target_os = "windows")]
    fn with_size(size: usize) -> Result<Self, String> {
        use winapi::um::memoryapi::VirtualAlloc;
        use winapi::um::winnt::{MEM_COMMIT, MEM_RESERVE, PAGE_READWRITE};

        let page_size = region::page::size();

        // VirtualAlloc always rounds up to the next multiple of the page size
        let ptr = unsafe {
            VirtualAlloc(
                ptr::null_mut(),
                size,
                MEM_COMMIT | MEM_RESERVE,
                PAGE_READWRITE,
            )
        };
        if !ptr.is_null() {
            Ok(Self {
                ptr: ptr as *mut u8,
                len: round_up_to_page_size(size, page_size),
            })
        } else {
            Err(errno::errno().to_string())
        }
    }
}

/// JIT memory manager. This manages pages of suitably aligned and
/// accessible memory.
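///
/// # Example
///
/// A minimal sketch of the intended call sequence, assuming `code: Vec<u8>`
/// holds finished machine code:
///
/// ```ignore
/// let mut memory = Memory::new();
/// let ptr = memory.allocate(code.len()).expect("allocation failed");
/// unsafe { ptr::copy_nonoverlapping(code.as_ptr(), ptr, code.len()) };
/// // ... apply any relocations through `ptr` ...
/// memory.set_readable_and_executable();
/// ```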
pub struct Memory {
    allocations: Vec<PtrLen>,
    executable: usize,
    current: PtrLen,
    position: usize,
}

impl Memory {
    pub fn new() -> Self {
        Self {
            allocations: Vec::new(),
            executable: 0,
            current: PtrLen::new(),
            position: 0,
        }
    }

    fn finish_current(&mut self) {
        self.allocations
            .push(mem::replace(&mut self.current, PtrLen::new()));
        self.position = 0;
    }

    /// TODO: Use a proper error type.
    pub fn allocate(&mut self, size: usize) -> Result<*mut u8, String> {
        if size <= self.current.len - self.position {
            // TODO: Ensure overflow is not possible.
            let ptr = unsafe { self.current.ptr.add(self.position) };
            self.position += size;
            return Ok(ptr);
        }

        self.finish_current();

        // TODO: Allocate more at a time.
        self.current = PtrLen::with_size(size)?;
        self.position = size;

        Ok(self.current.ptr)
    }

    /// Set all memory allocated in this `Memory` up to now as readable and executable.
    pub fn set_readable_and_executable(&mut self) {
        self.finish_current();

        for &PtrLen { ptr, len } in &self.allocations[self.executable..] {
            if len != 0 {
                unsafe {
                    region::protect(ptr, len, region::Protection::ReadExecute)
                        .expect("unable to make memory readable+executable");
                }
            }
        }
    }

    /// Set all memory allocated in this `Memory` up to now as readonly.
    pub fn set_readonly(&mut self) {
        self.finish_current();

        for &PtrLen { ptr, len } in &self.allocations[self.executable..] {
            if len != 0 {
                unsafe {
                    region::protect(ptr, len, region::Protection::Read)
                        .expect("unable to make memory readonly");
                }
            }
        }
    }
}

// TODO: Implement Drop to unprotect and deallocate the memory?

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_round_up_to_page_size() {
        assert_eq!(round_up_to_page_size(0, 4096), 0);
        assert_eq!(round_up_to_page_size(1, 4096), 4096);
        assert_eq!(round_up_to_page_size(4096, 4096), 4096);
        assert_eq!(round_up_to_page_size(4097, 4096), 8192);
    }
}