Merge branch 'master' into bforest
@@ -34,6 +34,8 @@ term = "0.5.1"
 capstone = { version = "0.4", optional = true }
 wabt = { version = "0.4", optional = true }
 target-lexicon = "0.0.3"
+pretty_env_logger = "0.2.4"
+file-per-thread-logger = "0.1.1"
 
 [features]
 default = ["disas", "wasm"]
@@ -8,7 +8,7 @@ into executable machine code.
 
 [](https://cranelift.readthedocs.io/en/latest/?badge=latest)
 [](https://travis-ci.org/CraneStation/cranelift)
-[](https://gitter.im/CraneStation/Lobby/~chat)
+[](https://gitter.im/CraneStation/Lobby)
 
 For more information, see [the
 documentation](https://cranelift.readthedocs.io/en/latest/?badge=latest).
@@ -127,7 +127,7 @@ Building the documentation
 --------------------------
 
 To build the Cranelift documentation, you need the [Sphinx documentation
-generator](https://www.sphinx-doc.org/):
+generator](http://www.sphinx-doc.org/) as well as Python 3::
 
 $ pip install sphinx sphinx-autobuild sphinx_rtd_theme
 $ cd cranelift/docs
@@ -1,5 +1,5 @@
-#![deny(trivial_numeric_casts)]
-#![warn(unused_import_braces, unstable_features, unused_extern_crates)]
+#![deny(trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
 #![cfg_attr(
 feature = "cargo-clippy",
 warn(
@@ -14,11 +14,13 @@ extern crate cranelift_codegen;
 extern crate cranelift_filetests;
 extern crate cranelift_reader;
 extern crate docopt;
+extern crate file_per_thread_logger;
 extern crate filecheck;
 #[macro_use]
 extern crate serde_derive;
 #[cfg(feature = "disas")]
 extern crate capstone;
+extern crate pretty_env_logger;
 extern crate term;
 
 cfg_if! {
@@ -31,6 +33,7 @@ cfg_if! {
 }
 extern crate target_lexicon;
 
+use cranelift_codegen::dbg::LOG_FILENAME_PREFIX;
 use cranelift_codegen::{timing, VERSION};
 use docopt::Docopt;
 use std::io::{self, Write};
@@ -46,16 +49,17 @@ const USAGE: &str = "
 Cranelift code generator utility
 
 Usage:
-clif-util test [-vT] <file>...
-clif-util cat <file>...
-clif-util filecheck [-v] <file>
-clif-util print-cfg <file>...
-clif-util compile [-vpT] [--set <set>]... [--target <triple>] <file>...
-clif-util wasm [-ctvpTs] [--set <set>]... [--target <triple>] <file>...
+clif-util test [-vTd] <file>...
+clif-util cat [-d] <file>...
+clif-util filecheck [-vd] <file>
+clif-util print-cfg [-d] <file>...
+clif-util compile [-vpTd] [--set <set>]... [--target <triple>] <file>...
+clif-util wasm [-ctvpTsd] [--set <set>]... [--target <triple>] <file>...
 clif-util --help | --version
 
 Options:
 -v, --verbose be more verbose
+-d, --debug enable debug output on stderr/stdout
 -T, --time-passes
 print pass timing report
 -t, --just-decode
@@ -90,6 +94,7 @@ struct Args {
 flag_target: String,
 flag_time_passes: bool,
 flag_print_size: bool,
+flag_debug: bool,
 }
 
 /// A command either succeeds or fails with an error message.
@@ -106,6 +111,12 @@ fn clif_util() -> CommandResult {
 })
 .unwrap_or_else(|e| e.exit());
 
+if args.flag_debug {
+pretty_env_logger::init();
+} else {
+file_per_thread_logger::initialize(LOG_FILENAME_PREFIX);
+}
+
 // Find the sub-command to execute.
 let result = if args.cmd_test {
 cranelift_filetests::run(args.flag_verbose, &args.arg_file).map(|_time| ())
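Note on the hunk above: the new `-d`/`--debug` flag picks one of two `log` backends at startup, and the rest of this merge routes output through the `log` macros instead of the old `dbg!` macro. A minimal sketch of the same wiring, assuming the `pretty_env_logger`, `file-per-thread-logger`, and `log` crates from the dependency hunks (the `init_logging` helper and `main` below are illustrative, not code from this commit):

    #[macro_use]
    extern crate log;
    extern crate file_per_thread_logger;
    extern crate pretty_env_logger;

    // Stand-in for `cranelift_codegen::dbg::LOG_FILENAME_PREFIX`.
    const LOG_FILENAME_PREFIX: &str = "cranelift.dbg.";

    fn init_logging(debug: bool) {
        if debug {
            // -d / --debug: human-readable output on stderr, filtered by RUST_LOG.
            pretty_env_logger::init();
        } else {
            // Default: one log file per thread, named `cranelift.dbg.<thread>`.
            file_per_thread_logger::initialize(LOG_FILENAME_PREFIX);
        }
    }

    fn main() {
        let debug = std::env::args().any(|a| a == "-d" || a == "--debug");
        init_logging(debug);
        debug!("logging initialized"); // routed to whichever backend was chosen
    }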
@@ -17,6 +17,7 @@ failure = { version = "0.1.1", default-features = false, features = ["derive"] }
 failure_derive = { version = "0.1.1", default-features = false }
 hashmap_core = { version = "0.1.9", optional = true }
 target-lexicon = { version = "0.0.3", default-features = false }
+log = { version = "0.4.3", default-features = false, features = ["release_max_level_warn"] }
 # It is a goal of the cranelift-codegen crate to have minimal external dependencies.
 # Please don't add any unless they are essential to the task of creating binary
 # machine code. Integration tests that need external dependencies can be
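One note on the `release_max_level_warn` feature enabled above (general `log`-crate behaviour, not something stated in this diff): it caps the statically enabled level at `warn` in release builds, so the `debug!` calls introduced throughout this merge are disabled at compile time there and their arguments are never evaluated. A small illustrative sketch:

    #[macro_use]
    extern crate log;

    fn relax_example(offset: u32, dest_offset: u32) {
        // In a release build with `release_max_level_warn`, this statement is
        // compiled out; `offset` and `dest_offset` are never formatted.
        debug!("Relaxing branch for {:#x}-{:#x} range", offset, dest_offset);

        // `warn!` and `error!` remain enabled in release builds.
        if offset > dest_offset {
            warn!("branch target {:#x} precedes {:#x}", dest_offset, offset);
        }
    }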
@@ -140,7 +140,7 @@ fn relax_branch(
 isa: &TargetIsa,
 ) -> CodeOffset {
 let inst = cur.current_inst().unwrap();
-dbg!(
+debug!(
 "Relaxing [{}] {} for {:#x}-{:#x} range",
 encinfo.display(cur.func.encodings[inst]),
 cur.func.dfg.display_inst(inst, isa),
@@ -156,7 +156,7 @@ fn relax_branch(
 .find(|&enc| {
 let range = encinfo.branch_range(enc).expect("Branch with no range");
 if !range.contains(offset, dest_offset) {
-dbg!(" trying [{}]: out of range", encinfo.display(enc));
+debug!(" trying [{}]: out of range", encinfo.display(enc));
 false
 } else if encinfo.operand_constraints(enc)
 != encinfo.operand_constraints(cur.func.encodings[inst])
@@ -166,10 +166,10 @@ fn relax_branch(
 // which the existing operands don't satisfy. We can't check for
 // validity directly because we don't have a RegDiversions active so
 // we don't know which registers are actually in use.
-dbg!(" trying [{}]: constraints differ", encinfo.display(enc));
+debug!(" trying [{}]: constraints differ", encinfo.display(enc));
 false
 } else {
-dbg!(" trying [{}]: OK", encinfo.display(enc));
+debug!(" trying [{}]: OK", encinfo.display(enc));
 true
 }
 }) {
@@ -54,7 +54,7 @@ pub fn shrink_instructions(func: &mut Function, isa: &TargetIsa) {
 if best_enc != enc {
 func.encodings[inst] = best_enc;
 
-dbg!(
+debug!(
 "Shrunk [{}] to [{}] in {}, reducing the size from {} to {}",
 encinfo.display(enc),
 encinfo.display(best_enc),
@@ -1,133 +1,8 @@
-//! Debug tracing macros.
-//!
-//! This module defines the `dbg!` macro which works like `println!` except it writes to the
-//! Cranelift tracing output file if enabled.
-//!
-//! Tracing can be enabled by setting the `CRANELIFT_DBG` environment variable to something
-/// other than `0`.
-///
-/// The output will appear in files named `cranelift.dbg.*`, where the suffix is named after the
-/// thread doing the logging.
-#[cfg(feature = "std")]
-use std::cell::RefCell;
-#[cfg(feature = "std")]
-use std::env;
-#[cfg(feature = "std")]
-use std::ffi::OsStr;
+//! Debug tracing helpers.
 use std::fmt;
-#[cfg(feature = "std")]
-use std::fs::File;
-#[cfg(feature = "std")]
-use std::io::{self, Write};
-#[cfg(feature = "std")]
-use std::sync::atomic;
-#[cfg(feature = "std")]
-use std::thread;
 
-#[cfg(feature = "std")]
-static STATE: atomic::AtomicIsize = atomic::ATOMIC_ISIZE_INIT;
-
-/// Is debug tracing enabled?
-///
-/// Debug tracing can be enabled by setting the `CRANELIFT_DBG` environment variable to something
-/// other than `0`.
-///
-/// This inline function turns into a constant `false` when debug assertions are disabled.
-#[cfg(feature = "std")]
-#[inline]
-pub fn enabled() -> bool {
-if cfg!(debug_assertions) {
-match STATE.load(atomic::Ordering::Relaxed) {
-0 => initialize(),
-s => s > 0,
-}
-} else {
-false
-}
-}
-
-/// Does nothing
-#[cfg(not(feature = "std"))]
-#[inline]
-pub fn enabled() -> bool {
-false
-}
-
-/// Initialize `STATE` from the environment variable.
-#[cfg(feature = "std")]
-fn initialize() -> bool {
-let enable = match env::var_os("CRANELIFT_DBG") {
-Some(s) => s != OsStr::new("0"),
-None => false,
-};
-
-if enable {
-STATE.store(1, atomic::Ordering::Relaxed);
-} else {
-STATE.store(-1, atomic::Ordering::Relaxed);
-}
-
-enable
-}
-
-#[cfg(feature = "std")]
-thread_local! {
-static WRITER : RefCell<io::BufWriter<File>> = RefCell::new(open_file());
-}
-
-/// Write a line with the given format arguments.
-///
-/// This is for use by the `dbg!` macro.
-#[cfg(feature = "std")]
-pub fn writeln_with_format_args(args: fmt::Arguments) -> io::Result<()> {
-WRITER.with(|rc| {
-let mut w = rc.borrow_mut();
-writeln!(*w, "{}", args)?;
-w.flush()
-})
-}
-
-/// Open the tracing file for the current thread.
-#[cfg(feature = "std")]
-fn open_file() -> io::BufWriter<File> {
-let curthread = thread::current();
-let tmpstr;
-let mut path = "cranelift.dbg.".to_owned();
-path.extend(
-match curthread.name() {
-Some(name) => name.chars(),
-// The thread is unnamed, so use the thread ID instead.
-None => {
-tmpstr = format!("{:?}", curthread.id());
-tmpstr.chars()
-}
-}.filter(|ch| ch.is_alphanumeric() || *ch == '-' || *ch == '_'),
-);
-let file = File::create(path).expect("Can't open tracing file");
-io::BufWriter::new(file)
-}
-
-/// Write a line to the debug trace file if tracing is enabled.
-///
-/// Arguments are the same as for `printf!`.
-#[cfg(feature = "std")]
-#[macro_export]
-macro_rules! dbg {
-($($arg:tt)+) => {
-if $crate::dbg::enabled() {
-// Drop the error result so we don't get compiler errors for ignoring it.
-// What are you going to do, log the error?
-$crate::dbg::writeln_with_format_args(format_args!($($arg)+)).ok();
-}
-}
-}
-
-/// `dbg!` isn't supported in `no_std` mode, so expand it into nothing.
-#[cfg(not(feature = "std"))]
-#[macro_export]
-macro_rules! dbg {
-($($arg:tt)+) => {};
-}
+/// Prefix added to the log file names, just before the thread name or id.
+pub static LOG_FILENAME_PREFIX: &str = "cranelift.dbg.";
 
 /// Helper for printing lists.
 pub struct DisplayList<'a, T>(pub &'a [T])
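The dbg.rs module above used to provide its own `dbg!` macro, gated by the `CRANELIFT_DBG` environment variable and writing to per-thread `cranelift.dbg.*` files; after this change only `LOG_FILENAME_PREFIX` and `DisplayList` remain, and the call sites in the following hunks switch to the `log` crate's `debug!`/`info!`/`error!` macros with unchanged format arguments. A sketch of what a migrated call site looks like (the surrounding function and `main` are illustrative, not taken from the commit):

    #[macro_use]
    extern crate log;
    extern crate file_per_thread_logger;

    pub static LOG_FILENAME_PREFIX: &str = "cranelift.dbg.";

    fn renumber(seq: u32, major_stride: u32) {
        // Formerly: dbg!("Renumbered {} program points", seq / MAJOR_STRIDE);
        // Same arguments, but now routed through the `log` facade; the output
        // destination is whatever logger the embedding binary installed.
        debug!("Renumbered {} program points", seq / major_stride);
    }

    fn main() {
        // E.g. the per-thread file logger used by clif-util and the filetests.
        file_per_thread_logger::initialize(LOG_FILENAME_PREFIX);
        renumber(40, 10);
    }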
@@ -316,7 +316,7 @@ impl Layout {
 next_inst = self.insts[inst].next.expand();
 }
 }
-dbg!("Renumbered {} program points", seq / MAJOR_STRIDE);
+debug!("Renumbered {} program points", seq / MAJOR_STRIDE);
 }
 }
 
@@ -217,6 +217,7 @@ fn callee_saved_gprs(isa: &TargetIsa) -> &'static [RU] {
 }
 }
 
+/// Get the set of callee-saved registers that are used.
 fn callee_saved_gprs_used(isa: &TargetIsa, func: &ir::Function) -> RegisterSet {
 let mut all_callee_saved = RegisterSet::empty();
 for reg in callee_saved_gprs(isa) {
@@ -270,7 +270,7 @@ where
 // Reconstruct how `ty` was legalized into the `arg_type` argument.
 let conversion = legalize_abi_value(ty, &arg_type);
 
-dbg!("convert_from_abi({}): {:?}", ty, conversion);
+debug!("convert_from_abi({}): {:?}", ty, conversion);
 
 // The conversion describes value to ABI argument. We implement the reverse conversion here.
 match conversion {
@@ -279,7 +279,7 @@ where
 let abi_ty = ty.half_width().expect("Invalid type for conversion");
 let lo = convert_from_abi(pos, abi_ty, None, get_arg);
 let hi = convert_from_abi(pos, abi_ty, None, get_arg);
-dbg!(
+debug!(
 "intsplit {}: {}, {}: {}",
 lo,
 pos.func.dfg.value_type(lo),
@@ -586,7 +586,7 @@ pub fn handle_return_abi(inst: Inst, func: &mut Function, cfg: &ControlFlowGraph
 // the legalized signature. These values should simply be propagated from the entry block
 // arguments.
 if special_args > 0 {
-dbg!(
+debug!(
 "Adding {} special-purpose arguments to {}",
 special_args,
 pos.func.dfg.display_inst(inst, None)
@@ -8,7 +8,8 @@ use flowgraph::ControlFlowGraph;
 use ir::{self, InstBuilder};
 use isa::TargetIsa;
 
-/// Expand a `call` instruction.
+/// Expand a `call` instruction. This lowers it to a `call_indirect`, which
+/// is only done if the ABI doesn't support direct calls.
 pub fn expand_call(
 inst: ir::Inst,
 func: &mut ir::Function,
@@ -2,7 +2,7 @@
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
 #![warn(unused_import_braces)]
-#![cfg_attr(feature = "std", warn(unstable_features))]
+#![cfg_attr(feature = "std", deny(unstable_features))]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature="cargo-clippy", allow(
 // This requires Rust 1.27 or later.
@@ -49,6 +49,9 @@ extern crate failure_derive;
 #[cfg_attr(test, macro_use)]
 extern crate target_lexicon;
 
+#[macro_use]
+extern crate log;
+
 pub use context::Context;
 pub use legalizer::legalize_function;
 pub use verifier::verify_function;
@@ -59,15 +62,12 @@ pub const VERSION: &str = env!("CARGO_PKG_VERSION");
 
 #[macro_use]
 pub extern crate cranelift_entity as entity;
 
 pub extern crate cranelift_bforest as bforest;
 
-#[macro_use]
-pub mod dbg;
-
 pub mod binemit;
 pub mod cfg_printer;
 pub mod cursor;
+pub mod dbg;
 pub mod dominator_tree;
 pub mod flowgraph;
 pub mod ir;
@@ -6,7 +6,6 @@
 //! parameter will belong to the same virtual register as the EBB parameter value itself.
 
 use cursor::{Cursor, EncCursor};
-#[cfg(feature = "std")]
 use dbg::DisplayList;
 use dominator_tree::{DominatorTree, DominatorTreePreorder};
 use flowgraph::{BasicBlock, ControlFlowGraph};
@@ -116,7 +115,7 @@ impl Coalescing {
 virtregs: &mut VirtRegs,
 ) {
 let _tt = timing::ra_cssa();
-dbg!("Coalescing for:\n{}", func.display(isa));
+debug!("Coalescing for:\n{}", func.display(isa));
 self.preorder.compute(domtree, &func.layout);
 let mut context = Context {
 isa,
@@ -185,7 +184,7 @@ impl<'a> Context<'a> {
 continue;
 }
 
-dbg!(
+debug!(
 " - checking {} params at back-edge {}: {}",
 num_params,
 pred_ebb,
@@ -229,7 +228,7 @@ impl<'a> Context<'a> {
 if Some(def_ebb) == self.func.layout.entry_block()
 && self.func.signature.params[def_num].location.is_stack()
 {
-dbg!("-> isolating function stack parameter {}", arg);
+debug!("-> isolating function stack parameter {}", arg);
 let new_arg = self.isolate_arg(pred_ebb, pred_inst, argnum, arg);
 self.virtregs.union(param, new_arg);
 continue;
@@ -296,7 +295,7 @@ impl<'a> Context<'a> {
 let inst = pos.built_inst();
 self.liveness.move_def_locally(param, inst);
 
-dbg!(
+debug!(
 "-> inserted {}, following {}({}: {})",
 pos.display_inst(inst),
 ebb,
@@ -365,7 +364,7 @@ impl<'a> Context<'a> {
 
 pos.func.dfg.inst_variable_args_mut(pred_inst)[argnum] = copy;
 
-dbg!(
+debug!(
 "-> inserted {}, before {}: {}",
 pos.display_inst(inst),
 pred_ebb,
@@ -381,7 +380,7 @@ impl<'a> Context<'a> {
 /// closure of the relation formed by EBB parameter-argument pairs found by `union_find_ebb()`.
 fn finish_union_find(&mut self) {
 self.virtregs.finish_union_find(None);
-dbg!("After union-find phase:{}", self.virtregs);
+debug!("After union-find phase:{}", self.virtregs);
 }
 }
 
@@ -412,7 +411,7 @@ impl<'a> Context<'a> {
 fn check_vreg(&mut self, vreg: VirtReg) -> bool {
 // Order the values according to the dominator pre-order of their definition.
 let values = self.virtregs.sort_values(vreg, self.func, self.preorder);
-dbg!("Checking {} = {}", vreg, DisplayList(values));
+debug!("Checking {} = {}", vreg, DisplayList(values));
 
 // Now push the values in order to the dominator forest.
 // This gives us the closest dominating value def for each of the values.
@@ -434,7 +433,7 @@ impl<'a> Context<'a> {
 let ctx = self.liveness.context(&self.func.layout);
 if self.liveness[parent.value].overlaps_def(node.def, node.ebb, ctx) {
 // The two values are interfering, so they can't be in the same virtual register.
-dbg!("-> interference: {} overlaps def of {}", parent, value);
+debug!("-> interference: {} overlaps def of {}", parent, value);
 return false;
 }
 }
@@ -458,7 +457,7 @@ impl<'a> Context<'a> {
 self.cfg,
 self.preorder,
 );
-dbg!(
+debug!(
 "Synthesizing {} from {} branches and params {}",
 vreg,
 self.vcopies.branches.len(),
@@ -546,7 +545,7 @@ impl<'a> Context<'a> {
 }
 
 let _vreg = self.virtregs.unify(self.values);
-dbg!("-> merged into {} = {}", _vreg, DisplayList(self.values));
+debug!("-> merged into {} = {}", _vreg, DisplayList(self.values));
 true
 }
 
@@ -568,7 +567,7 @@ impl<'a> Context<'a> {
 // registers and the filtered virtual copies.
 let v0 = self.virtregs.congruence_class(&param);
 let v1 = self.virtregs.congruence_class(&arg);
-dbg!(
+debug!(
 " - set 0: {}\n - set 1: {}",
 DisplayList(v0),
 DisplayList(v1)
@@ -621,7 +620,7 @@ impl<'a> Context<'a> {
 if node.set_id != parent.set_id
 && self.liveness[parent.value].reaches_use(inst, node.ebb, ctx)
 {
-dbg!(
+debug!(
 " - interference: {} overlaps vcopy at {}:{}",
 parent,
 node.ebb,
@@ -646,7 +645,7 @@ impl<'a> Context<'a> {
 && self.liveness[parent.value].overlaps_def(node.def, node.ebb, ctx)
 {
 // The two values are interfering.
-dbg!(" - interference: {} overlaps def of {}", parent, node.value);
+debug!(" - interference: {} overlaps def of {}", parent, node.value);
 return false;
 }
 }
@@ -124,7 +124,7 @@ impl Coloring {
 tracker: &mut LiveValueTracker,
 ) {
 let _tt = timing::ra_coloring();
-dbg!("Coloring for:\n{}", func.display(isa));
+debug!("Coloring for:\n{}", func.display(isa));
 let mut ctx = Context {
 usable_regs: isa.allocatable_registers(func),
 cur: EncCursor::new(func, isa),
@@ -156,7 +156,7 @@ impl<'a> Context<'a> {
 
 /// Visit `ebb`, assuming that the immediate dominator has already been visited.
 fn visit_ebb(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
-dbg!("Coloring {}:", ebb);
+debug!("Coloring {}:", ebb);
 let mut regs = self.visit_ebb_header(ebb, tracker);
 tracker.drop_dead_params();
 self.divert.clear();
@@ -216,7 +216,7 @@ impl<'a> Context<'a> {
 let mut regs = AvailableRegs::new(&self.usable_regs);
 
 for lv in live.iter().filter(|lv| !lv.is_dead) {
-dbg!(
+debug!(
 "Live-in: {}:{} in {}",
 lv.value,
 lv.affinity.display(&self.reginfo),
@@ -294,7 +294,7 @@ impl<'a> Context<'a> {
 tracker: &mut LiveValueTracker,
 regs: &mut AvailableRegs,
 ) -> bool {
-dbg!(
+debug!(
 "Coloring {}\n from {}",
 self.cur.display_inst(inst),
 regs.input.display(&self.reginfo),
@@ -362,7 +362,7 @@ impl<'a> Context<'a> {
 if let Affinity::Reg(rci) = lv.affinity {
 let rc = self.reginfo.rc(rci);
 let reg = self.divert.reg(lv.value, &self.cur.func.locations);
-dbg!(
+debug!(
 " kill {} in {} ({} {})",
 lv.value,
 self.reginfo.display_regunit(reg),
@@ -380,7 +380,7 @@ impl<'a> Context<'a> {
 }
 
 // This aligns with the " from" line at the top of the function.
-dbg!(" glob {}", regs.global.display(&self.reginfo));
+debug!(" glob {}", regs.global.display(&self.reginfo));
 
 // This flag is set when the solver failed to find a solution for the global defines that
 // doesn't interfere with `regs.global`. We need to rewrite all of `inst`s global defines
@@ -419,7 +419,7 @@ impl<'a> Context<'a> {
 // Finally, we've fully programmed the constraint solver.
 // We expect a quick solution in most cases.
 let output_regs = self.solver.quick_solve(&regs.global).unwrap_or_else(|_| {
-dbg!("quick_solve failed for {}", self.solver);
+debug!("quick_solve failed for {}", self.solver);
 self.iterate_solution(throughs, &regs.global, &mut replace_global_defines)
 });
 
@@ -454,7 +454,7 @@ impl<'a> Context<'a> {
 regs.input = output_regs;
 for lv in defs {
 let loc = self.cur.func.locations[lv.value];
-dbg!(
+debug!(
 " color {} -> {}{}",
 lv.value,
 loc.display(&self.reginfo),
@@ -700,7 +700,7 @@ impl<'a> Context<'a> {
 ConstraintKind::FixedReg(reg) | ConstraintKind::FixedTied(reg) => {
 self.add_fixed_output(lv.value, op.regclass, reg, throughs);
 if !lv.is_local && !global_regs.is_avail(op.regclass, reg) {
-dbg!(
+debug!(
 "Fixed output {} in {}:{} is not available in global regs",
 lv.value,
 op.regclass,
@@ -736,7 +736,7 @@ impl<'a> Context<'a> {
 let rc = self.reginfo.rc(rci);
 self.add_fixed_output(lv.value, rc, reg, throughs);
 if !lv.is_local && !global_regs.is_avail(rc, reg) {
-dbg!(
+debug!(
 "ABI output {} in {}:{} is not available in global regs",
 lv.value,
 rc,
@@ -812,7 +812,7 @@ impl<'a> Context<'a> {
 // We need to make sure that fixed output register is compatible with the
 // global register set.
 if !lv.is_local && !global_regs.is_avail(op.regclass, reg) {
-dbg!(
+debug!(
 "Tied output {} in {}:{} is not available in global regs",
 lv.value,
 op.regclass,
@@ -848,7 +848,7 @@ impl<'a> Context<'a> {
 debug_assert!(added, "Ran out of registers in {}", rc);
 }
 Err(SolverError::Global(_value)) => {
-dbg!(
+debug!(
 "Not enough global registers for {}, trying as local",
 _value
 );
@@ -863,7 +863,7 @@ impl<'a> Context<'a> {
 
 /// Try to add an `rc` variable to the solver from the `throughs` set.
 fn try_add_var(&mut self, rc: RegClass, throughs: &[LiveValue]) -> bool {
-dbg!("Trying to add a {} reg from {} values", rc, throughs.len());
+debug!("Trying to add a {} reg from {} values", rc, throughs.len());
 
 for lv in throughs {
 if let Affinity::Reg(rci) = lv.affinity {
@@ -995,7 +995,7 @@ impl<'a> Context<'a> {
 /// the constraints on the instruction operands.
 ///
 fn replace_global_defines(&mut self, inst: Inst, tracker: &mut LiveValueTracker) {
-dbg!("Replacing global defs on {}", self.cur.display_inst(inst));
+debug!("Replacing global defs on {}", self.cur.display_inst(inst));
 
 // We'll insert copies *after `inst`. Our caller will move the cursor back.
 self.cur.next_inst();
@@ -1042,14 +1042,14 @@ impl<'a> Context<'a> {
 lv.endpoint = copy;
 lv.is_local = true;
 
-dbg!(
+debug!(
 " + {} with {} in {}",
 self.cur.display_inst(copy),
 local,
 loc.display(&self.reginfo)
 );
 }
-dbg!("Done: {}", self.cur.display_inst(inst));
+debug!("Done: {}", self.cur.display_inst(inst));
 }
 
 /// Process kills on a ghost instruction.
@@ -72,7 +72,7 @@ impl Reload {
 tracker: &mut LiveValueTracker,
 ) {
 let _tt = timing::ra_reload();
-dbg!("Reload for:\n{}", func.display(isa));
+debug!("Reload for:\n{}", func.display(isa));
 let mut ctx = Context {
 cur: EncCursor::new(func, isa),
 encinfo: isa.encoding_info(),
@@ -119,7 +119,7 @@ impl<'a> Context<'a> {
 }
 
 fn visit_ebb(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
-dbg!("Reloading {}:", ebb);
+debug!("Reloading {}:", ebb);
 self.visit_ebb_header(ebb, tracker);
 tracker.drop_dead_params();
 
@@ -532,7 +532,7 @@ impl Solver {
 /// In either case, `to` will not be available for variables on the input side of the
 /// instruction.
 pub fn reassign_in(&mut self, value: Value, rc: RegClass, from: RegUnit, to: RegUnit) {
-dbg!(
+debug!(
 "reassign_in({}:{}, {} -> {})",
 value,
 rc,
@@ -545,7 +545,7 @@ impl Solver {
 // added as a variable previously. A fixed constraint beats a variable, so convert it.
 if let Some(idx) = self.vars.iter().position(|v| v.value == value) {
 let v = self.vars.remove(idx);
-dbg!("-> converting variable {} to a fixed constraint", v);
+debug!("-> converting variable {} to a fixed constraint", v);
 // The spiller is responsible for ensuring that all constraints on the uses of a
 // value are compatible.
 debug_assert!(
@@ -578,7 +578,7 @@ impl Solver {
 /// This function can only be used before calling `inputs_done()`. Afterwards, more input-side
 /// variables can be added by calling `add_killed_var()` and `add_through_var()`
 pub fn add_var(&mut self, value: Value, constraint: RegClass, from: RegUnit) {
-dbg!(
+debug!(
 "add_var({}:{}, from={})",
 value,
 constraint,
@@ -593,7 +593,7 @@ impl Solver {
 ///
 /// This function should be called after `inputs_done()` only. Use `add_var()` before.
 pub fn add_killed_var(&mut self, value: Value, constraint: RegClass, from: RegUnit) {
-dbg!(
+debug!(
 "add_killed_var({}:{}, from={})",
 value,
 constraint,
@@ -608,7 +608,7 @@ impl Solver {
 ///
 /// This function should be called after `inputs_done()` only. Use `add_var()` before.
 pub fn add_through_var(&mut self, value: Value, constraint: RegClass, from: RegUnit) {
-dbg!(
+debug!(
 "add_through_var({}:{}, from={})",
 value,
 constraint,
@@ -635,7 +635,7 @@ impl Solver {
 if let Some(v) = self.vars.iter_mut().find(|v| v.value == value) {
 // We have an existing variable entry for `value`. Combine the constraints.
 if let Some(rc) = v.constraint.intersect(constraint) {
-dbg!("-> combining constraint with {} yields {}", v, rc);
+debug!("-> combining constraint with {} yields {}", v, rc);
 v.constraint = rc;
 return;
 } else {
@@ -647,7 +647,7 @@ impl Solver {
 
 // No variable, then it must be a fixed reassignment.
 if let Some(a) = self.assignments.get(value) {
-dbg!("-> already fixed assignment {}", a);
+debug!("-> already fixed assignment {}", a);
 debug_assert!(
 constraint.contains(a.to),
 "Incompatible constraints for {}",
@@ -656,12 +656,12 @@ impl Solver {
 return;
 }
 
-dbg!("{}", self);
+debug!("{}", self);
 panic!("Wrong from register for {}", value);
 }
 
 let new_var = Variable::new_live(value, constraint, from, live_through);
-dbg!("-> new var: {}", new_var);
+debug!("-> new var: {}", new_var);
 
 self.regs_in.free(constraint, from);
 if self.inputs_done && live_through {
@@ -777,7 +777,7 @@ impl Solver {
 if is_global {
 let mut new_var = Variable::new_live(value, rc, reg, true);
 new_var.is_global = true;
-dbg!("add_tied_input: new tied-global value: {}", new_var);
+debug!("add_tied_input: new tied-global value: {}", new_var);
 self.vars.push(new_var);
 self.regs_in.free(rc, reg);
 } else {
@@ -899,7 +899,7 @@ impl Solver {
 )
 });
 
-dbg!("real_solve for {}", self);
+debug!("real_solve for {}", self);
 self.find_solution(global_regs)
 }
 
@@ -982,7 +982,7 @@ impl Solver {
 .extend(self.assignments.values().filter_map(Move::with_assignment));
 
 if !(self.moves.is_empty()) {
-dbg!("collect_moves: {}", DisplayList(&self.moves));
+debug!("collect_moves: {}", DisplayList(&self.moves));
 }
 }
 
@@ -1024,7 +1024,7 @@ impl Solver {
 if let Some((rc, reg)) = m.from_reg() {
 avail.free(rc, reg);
 }
-dbg!("move #{}: {}", i, m);
+debug!("move #{}: {}", i, m);
 i += 1;
 continue;
 }
@@ -1058,7 +1058,7 @@ impl Solver {
 let m = self.moves[i].clone();
 let toprc = m.rc().toprc();
 if let Some(reg) = avail.iter(toprc).next() {
-dbg!(
+debug!(
 "breaking cycle at {} with available {} register {}",
 m,
 toprc,
@@ -1089,7 +1089,7 @@ impl Solver {
 // a last resort.
 let slot = num_spill_slots;
 num_spill_slots += 1;
-dbg!("breaking cycle at {} with slot {}", m, slot);
+debug!("breaking cycle at {} with slot {}", m, slot);
 let old_to_reg = self.moves[i].change_to_spill(slot);
 self.fills.push(Move::Fill {
 value: m.value(),
@@ -90,7 +90,7 @@ impl Spilling {
 tracker: &mut LiveValueTracker,
 ) {
 let _tt = timing::ra_spilling();
-dbg!("Spilling for:\n{}", func.display(isa));
+debug!("Spilling for:\n{}", func.display(isa));
 let reginfo = isa.register_info();
 let usable_regs = isa.allocatable_registers(func);
 let mut ctx = Context {
@@ -118,7 +118,7 @@ impl<'a> Context<'a> {
 }
 
 fn visit_ebb(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
-dbg!("Spilling {}:", ebb);
+debug!("Spilling {}:", ebb);
 self.cur.goto_top(ebb);
 self.visit_ebb_header(ebb, tracker);
 tracker.drop_dead_params();
@@ -198,14 +198,12 @@ impl<'a> Context<'a> {
 if let Affinity::Reg(rci) = lv.affinity {
 let rc = self.reginfo.rc(rci);
 'try_take: while let Err(mask) = self.pressure.take_transient(rc) {
-dbg!("Need {} reg for EBB param {}", rc, lv.value);
+debug!("Need {} reg for EBB param {}", rc, lv.value);
 match self.spill_candidate(mask, liveins) {
 Some(cand) => {
-dbg!(
+debug!(
 "Spilling live-in {} to make room for {} EBB param {}",
-cand,
-rc,
-lv.value
+cand, rc, lv.value
 );
 self.spill_reg(cand);
 }
@@ -213,7 +211,7 @@ impl<'a> Context<'a> {
 // We can't spill any of the live-in registers, so we have to spill an
 // EBB argument. Since the current spill metric would consider all the
 // EBB arguments equal, just spill the present register.
-dbg!("Spilling {} EBB argument {}", rc, lv.value);
+debug!("Spilling {} EBB argument {}", rc, lv.value);
 
 // Since `spill_reg` will free a register, add the current one here.
 self.pressure.take(rc);
@@ -237,7 +235,7 @@ impl<'a> Context<'a> {
 constraints: &RecipeConstraints,
 tracker: &mut LiveValueTracker,
 ) {
-dbg!("Inst {}, {}", self.cur.display_inst(inst), self.pressure);
+debug!("Inst {}, {}", self.cur.display_inst(inst), self.pressure);
 debug_assert_eq!(self.cur.current_inst(), Some(inst));
 debug_assert_eq!(self.cur.current_ebb(), Some(ebb));
 
@@ -279,7 +277,7 @@ impl<'a> Context<'a> {
 if op.kind != ConstraintKind::Stack {
 // Add register def to pressure, spill if needed.
 while let Err(mask) = self.pressure.take_transient(op.regclass) {
-dbg!("Need {} reg from {} throughs", op.regclass, throughs.len());
+debug!("Need {} reg from {} throughs", op.regclass, throughs.len());
 match self.spill_candidate(mask, throughs) {
 Some(cand) => self.spill_reg(cand),
 None => panic!(
@@ -333,7 +331,7 @@ impl<'a> Context<'a> {
 
 // Only collect the interesting register uses.
 if reguse.fixed || reguse.tied || reguse.spilled {
-dbg!(" reguse: {}", reguse);
+debug!(" reguse: {}", reguse);
 self.reg_uses.push(reguse);
 }
 }
@@ -406,7 +404,7 @@ impl<'a> Context<'a> {
 if need_copy || ru.spilled {
 let rc = self.reginfo.rc(ru.rci);
 while let Err(mask) = self.pressure.take_transient(rc) {
-dbg!("Copy of {} reg causes spill", rc);
+debug!("Copy of {} reg causes spill", rc);
 // Spill a live register that is *not* used by the current instruction.
 // Spilling a use wouldn't help.
 //
@@ -489,7 +487,7 @@ impl<'a> Context<'a> {
 let rc = self.reginfo.rc(rci);
 self.pressure.free(rc);
 self.spills.push(value);
-dbg!("Spilled {}:{} -> {}", value, rc, self.pressure);
+debug!("Spilled {}:{} -> {}", value, rc, self.pressure);
 } else {
 panic!("Cannot spill {} that was already on the stack", value);
 }
@@ -179,7 +179,7 @@ mod details {
 /// This function is called by the publicly exposed pass functions.
 pub(super) fn start_pass(pass: Pass) -> TimingToken {
 let prev = CURRENT_PASS.with(|p| p.replace(pass));
-dbg!("timing: Starting {}, (during {})", pass, prev);
+debug!("timing: Starting {}, (during {})", pass, prev);
 TimingToken {
 start: Instant::now(),
 pass,
@@ -191,7 +191,7 @@ mod details {
 impl Drop for TimingToken {
 fn drop(&mut self) {
 let duration = self.start.elapsed();
-dbg!("timing: Ending {}", self.pass);
+debug!("timing: Ending {}", self.pass);
 let old_cur = CURRENT_PASS.with(|p| p.replace(self.prev));
 debug_assert_eq!(self.pass, old_cur, "Timing tokens dropped out of order");
 PASS_TIME.with(|rc| {
@@ -24,14 +24,14 @@ pub fn eliminate_unreachable_code(
 continue;
 }
 
-dbg!("Eliminating unreachable {}", ebb);
+debug!("Eliminating unreachable {}", ebb);
 // Move the cursor out of the way and make sure the next lop iteration goes to the right
 // EBB.
 pos.prev_ebb();
 
 // Remove all instructions from `ebb`.
 while let Some(inst) = pos.func.layout.first_inst(ebb) {
-dbg!(" - {}", pos.func.dfg.display_inst(inst, None));
+debug!(" - {}", pos.func.dfg.display_inst(inst, None));
 pos.func.layout.remove_inst(inst);
 }
 
@@ -31,7 +31,7 @@
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
 #![warn(unused_import_braces)]
-#![cfg_attr(feature = "std", warn(unstable_features))]
+#![cfg_attr(feature = "std", deny(unstable_features))]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -2,8 +2,8 @@
 //!
 //! Users of this module should not have to depend on faerie directly.
 
-#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
-#![warn(unused_import_braces, unstable_features)]
+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -11,5 +11,7 @@ publish = false
 [dependencies]
 cranelift-codegen = { path = "../codegen", version = "0.18.1" }
 cranelift-reader = { path = "../reader", version = "0.18.1" }
+file-per-thread-logger = "0.1.1"
 filecheck = "0.3.0"
 num_cpus = "1.8.0"
+log = "0.4.3"
@@ -3,7 +3,9 @@
 //! This module provides the `ConcurrentRunner` struct which uses a pool of threads to run tests
 //! concurrently.
 
+use cranelift_codegen::dbg::LOG_FILENAME_PREFIX;
 use cranelift_codegen::timing;
+use file_per_thread_logger;
 use num_cpus;
 use std::panic::catch_unwind;
 use std::path::{Path, PathBuf};
@@ -100,6 +102,7 @@ fn heartbeat_thread(replies: Sender<Reply>) -> thread::JoinHandle<()> {
|
|||||||
thread::Builder::new()
|
thread::Builder::new()
|
||||||
.name("heartbeat".to_string())
|
.name("heartbeat".to_string())
|
||||||
.spawn(move || {
|
.spawn(move || {
|
||||||
|
file_per_thread_logger::initialize(LOG_FILENAME_PREFIX);
|
||||||
while replies.send(Reply::Tick).is_ok() {
|
while replies.send(Reply::Tick).is_ok() {
|
||||||
thread::sleep(Duration::from_secs(1));
|
thread::sleep(Duration::from_secs(1));
|
||||||
}
|
}
|
||||||
@@ -116,6 +119,7 @@ fn worker_thread(
|
|||||||
thread::Builder::new()
|
thread::Builder::new()
|
||||||
.name(format!("worker #{}", thread_num))
|
.name(format!("worker #{}", thread_num))
|
||||||
.spawn(move || {
|
.spawn(move || {
|
||||||
|
file_per_thread_logger::initialize(LOG_FILENAME_PREFIX);
|
||||||
loop {
|
loop {
|
||||||
// Lock the mutex only long enough to extract a request.
|
// Lock the mutex only long enough to extract a request.
|
||||||
let Request(jobid, path) = match requests.lock().unwrap().recv() {
|
let Request(jobid, path) = match requests.lock().unwrap().recv() {
|
||||||
@@ -140,7 +144,7 @@ fn worker_thread(
|
|||||||
});
|
});
|
||||||
|
|
||||||
if let Err(ref msg) = result {
|
if let Err(ref msg) = result {
|
||||||
dbg!("FAIL: {}", msg);
|
error!("FAIL: {}", msg);
|
||||||
}
|
}
|
||||||
|
|
||||||
replies.send(Reply::Done { jobid, result }).unwrap();
|
replies.send(Reply::Done { jobid, result }).unwrap();
|
||||||
|
|||||||
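For context, a minimal sketch (not part of the diff) of how the per-thread logging wired up above behaves: each spawned thread installs the `file_per_thread_logger` backend itself, so `log` records from that thread land in a file of its own. The literal prefix below is illustrative; the real one is `cranelift_codegen::dbg::LOG_FILENAME_PREFIX`.

    #[macro_use]
    extern crate log;
    extern crate file_per_thread_logger;

    use std::thread;

    fn main() {
        let worker = thread::Builder::new()
            .name("worker #0".to_string())
            .spawn(|| {
                // Install the logger on this thread; its records go to a file
                // whose name starts with the given prefix.
                file_per_thread_logger::initialize("cranelift.dbg.");
                info!("worker started");
            })
            .unwrap();
        worker.join().unwrap();
    }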
@@ -3,8 +3,8 @@
 //! This crate contains the main test driver as well as implementations of the
 //! available filetest commands.
 
-#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
-#![warn(unused_import_braces, unstable_features)]
+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
 #![cfg_attr(feature = "cargo-clippy",
     allow(
         type_complexity,
@@ -18,11 +18,13 @@
     )
 )]
 
-#[macro_use(dbg)]
 extern crate cranelift_codegen;
 extern crate cranelift_reader;
+extern crate file_per_thread_logger;
 extern crate filecheck;
 extern crate num_cpus;
+#[macro_use]
+extern crate log;
 
 use cranelift_reader::TestCommand;
 use runner::TestRunner;
@@ -29,7 +29,7 @@ fn read_to_string<P: AsRef<Path>>(path: P) -> io::Result<String> {
 /// If running this test causes a panic, it will propagate as normal.
 pub fn run(path: &Path) -> TestResult {
     let _tt = timing::process_file();
-    dbg!("---\nFile: {}", path.to_string_lossy());
+    info!("---\nFile: {}", path.to_string_lossy());
     let started = time::Instant::now();
     let buffer = read_to_string(path).map_err(|e| e.to_string())?;
     let testfile = parse_test(&buffer).map_err(|e| e.to_string())?;
@@ -122,7 +122,7 @@ fn run_one_test<'a>(
 ) -> SubtestResult<()> {
     let (test, flags, isa) = tuple;
     let name = format!("{}({})", test.name(), func.name);
-    dbg!("Test: {} {}", name, isa.map_or("-", TargetIsa::name));
+    info!("Test: {} {}", name, isa.map_or("-", TargetIsa::name));
 
     context.flags = flags;
     context.isa = isa;
@@ -41,7 +41,7 @@ impl SubTest for TestCompile {
             .compile(isa)
             .map_err(|e| pretty_error(&comp_ctx.func, context.isa, e))?;
 
-        dbg!(
+        info!(
             "Generated {} bytes of code:\n{}",
             code_size,
             comp_ctx.func.display(isa)
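The change across these hunks is mechanical: the internal `dbg!` macro is replaced by the standard `log` facade (`info!`, `error!`, `debug!`), so the binary that links these libraries decides where the output goes. A rough sketch under that assumption, using `pretty_env_logger` (the backend the `clif-util` binary pulls in) and a made-up helper name:

    #[macro_use]
    extern crate log;
    extern crate pretty_env_logger;

    // Hypothetical stand-in for the filetest driver, for illustration only.
    fn run_file(path: &str) -> Result<(), String> {
        info!("---\nFile: {}", path);
        Err(format!("no such test file: {}", path))
    }

    fn main() {
        // Honors RUST_LOG, e.g. `RUST_LOG=info clif-util test foo.clif`.
        pretty_env_logger::init();
        if let Err(msg) = run_file("foo.clif") {
            error!("FAIL: {}", msg);
        }
    }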
@@ -129,7 +129,7 @@
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
 #![warn(unused_import_braces)]
-#![cfg_attr(feature = "std", warn(unstable_features))]
+#![cfg_attr(feature = "std", deny(unstable_features))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default))]
 #![cfg_attr(
     feature = "cargo-clippy",
@@ -13,6 +13,7 @@ cranelift-codegen = { path = "../codegen", version = "0.18.1", default-features
 cranelift-entity = { path = "../entity", version = "0.18.1", default-features = false }
 hashmap_core = { version = "0.1.9", optional = true }
 failure = "0.1.1"
+log = { version = "0.4.3", default-features = false, features = ["release_max_level_warn"] }
 
 [features]
 default = ["std"]
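One detail worth noting about the `log` dependency added above: the `release_max_level_warn` feature caps the statically enabled level at `warn` in release builds, so the `info!` and `debug!` calls introduced in this commit compile away outside of debug builds. A small sketch of the effect, assuming that feature is active:

    #[macro_use]
    extern crate log;

    fn main() {
        debug!("stripped at compile time in release builds");
        info!("stripped at compile time in release builds");
        warn!("still emitted (subject to the runtime logger)");
        error!("still emitted (subject to the runtime logger)");
    }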
@@ -2,7 +2,7 @@
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
 #![warn(unused_import_braces)]
-#![cfg_attr(feature = "std", warn(unstable_features))]
+#![cfg_attr(feature = "std", deny(unstable_features))]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -20,12 +20,13 @@
 #[cfg_attr(test, macro_use)]
 extern crate alloc;
 
-#[macro_use]
 extern crate cranelift_codegen;
 #[macro_use]
 extern crate cranelift_entity;
 #[macro_use]
 extern crate failure;
+#[macro_use]
+extern crate log;
 
 mod backend;
 mod data_context;
@@ -467,7 +467,7 @@ where
     pub fn define_function(&mut self, func: FuncId, ctx: &mut Context) -> ModuleResult<()> {
         let compiled = {
             let code_size = ctx.compile(self.backend.isa()).map_err(|e| {
-                dbg!(
+                info!(
                     "defining function {}: {}",
                     func,
                     ctx.func.display(self.backend.isa())
@@ -1,8 +1,8 @@
 //! Performs autodetection of the host for the purposes of running
 //! Cranelift to generate code to run on the same machine.
 
-#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
-#![warn(unused_import_braces, unstable_features)]
+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -3,8 +3,8 @@
 //! The `cranelift_reader` library supports reading .clif files. This functionality is needed for
 //! testing Cranelift, but is not essential for a JIT compiler.
 
-#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
-#![warn(unused_import_braces, unstable_features)]
+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -1,7 +1,9 @@
 //! Utility for `cranelift_serde`.
 
-#![deny(trivial_numeric_casts)]
-#![warn(unused_import_braces, unstable_features, unused_extern_crates)]
+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
+#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
+#![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
     feature = "cargo-clippy",
     warn(
@@ -10,6 +10,7 @@ use cranelift_module::{
 use cranelift_native;
 use libc;
 use memory::Memory;
+use std::collections::HashMap;
 use std::ffi::CString;
 use std::ptr;
 use target_lexicon::PointerWidth;
@@ -19,6 +20,7 @@ use winapi;
 /// A builder for `SimpleJITBackend`.
 pub struct SimpleJITBuilder {
     isa: Box<TargetIsa>,
+    symbols: HashMap<String, *const u8>,
 }
 
 impl SimpleJITBuilder {
@@ -40,7 +42,44 @@ impl SimpleJITBuilder {
     /// instead.
     pub fn with_isa(isa: Box<TargetIsa>) -> Self {
         debug_assert!(!isa.flags().is_pic(), "SimpleJIT requires non-PIC code");
-        Self { isa }
+        let symbols = HashMap::new();
+        Self { isa, symbols }
+    }
+
+    /// Define a symbol in the internal symbol table.
+    ///
+    /// The JIT will use the symbol table to resolve names that are declared,
+    /// but not defined, in the module being compiled. A common example is
+    /// external functions. With this method, functions and data can be exposed
+    /// to the code being compiled which are defined by the host.
+    ///
+    /// If a symbol is defined more than once, the most recent definition will
+    /// be retained.
+    ///
+    /// If the JIT fails to find a symbol in its internal table, it will fall
+    /// back to a platform-specific search (this typically involves searching
+    /// the current process for public symbols, followed by searching the
+    /// platform's C runtime).
+    pub fn symbol<'a, K>(&'a mut self, name: K, ptr: *const u8) -> &'a mut Self
+    where
+        K: Into<String>,
+    {
+        self.symbols.insert(name.into(), ptr);
+        self
+    }
+
+    /// Define multiple symbols in the internal symbol table.
+    ///
+    /// Using this is equivalent to calling `symbol` on each element.
+    pub fn symbols<'a, It, K>(&'a mut self, symbols: It) -> &'a mut Self
+    where
+        It: IntoIterator<Item = (K, *const u8)>,
+        K: Into<String>,
+    {
+        for (name, ptr) in symbols {
+            self.symbols.insert(name.into(), ptr);
+        }
+        self
     }
 }
 
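The builder methods added above give the host a way to hand its own functions and data to the JIT before the dlsym fallback is consulted. A rough usage sketch (the symbol name and host function are invented for illustration; ISA and module setup are elided):

    extern "C" fn host_add(a: i64, b: i64) -> i64 {
        a + b
    }

    // Assumes `use cranelift_simplejit::SimpleJITBuilder;` and a builder obtained
    // elsewhere (e.g. via `SimpleJITBuilder::with_isa`).
    fn register_host_symbols(builder: &mut SimpleJITBuilder) {
        // Names declared but not defined in the compiled module resolve against
        // this table first; only on a miss does the JIT fall back to dlsym.
        builder
            .symbol("host_add", host_add as *const u8)
            .symbols(vec![("host_add_alias", host_add as *const u8)]);
    }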
@@ -48,6 +87,7 @@ impl SimpleJITBuilder {
 /// directly called and accessed.
 pub struct SimpleJITBackend {
     isa: Box<TargetIsa>,
+    symbols: HashMap<String, *const u8>,
     code_memory: Memory,
     readonly_memory: Memory,
     writable_memory: Memory,
@@ -73,6 +113,15 @@ pub struct SimpleJITCompiledData {
     relocs: Vec<RelocRecord>,
 }
 
+impl SimpleJITBackend {
+    fn lookup_symbol(&self, name: &str) -> *const u8 {
+        match self.symbols.get(name) {
+            Some(&ptr) => ptr,
+            None => lookup_with_dlsym(name),
+        }
+    }
+}
+
 impl<'simple_jit_backend> Backend for SimpleJITBackend {
     type Builder = SimpleJITBuilder;
 
@@ -96,6 +145,7 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
     fn new(builder: SimpleJITBuilder) -> Self {
         Self {
             isa: builder.isa,
+            symbols: builder.symbols,
             code_memory: Memory::new(),
             readonly_memory: Memory::new(),
             writable_memory: Memory::new(),
@@ -249,13 +299,13 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
                 let (def, name_str, _signature) = namespace.get_function_definition(&name);
                 match def {
                     Some(compiled) => compiled.code,
-                    None => lookup_with_dlsym(name_str),
+                    None => self.lookup_symbol(name_str),
                 }
             } else {
                 let (def, name_str, _writable) = namespace.get_data_definition(&name);
                 match def {
                     Some(compiled) => compiled.storage,
-                    None => lookup_with_dlsym(name_str),
+                    None => self.lookup_symbol(name_str),
                 }
             };
             // TODO: Handle overflow.
@@ -314,13 +364,13 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
                 let (def, name_str, _signature) = namespace.get_function_definition(&name);
                 match def {
                     Some(compiled) => compiled.code,
-                    None => lookup_with_dlsym(name_str),
+                    None => self.lookup_symbol(name_str),
                 }
             } else {
                 let (def, name_str, _writable) = namespace.get_data_definition(&name);
                 match def {
                     Some(compiled) => compiled.storage,
-                    None => lookup_with_dlsym(name_str),
+                    None => self.lookup_symbol(name_str),
                 }
             };
             // TODO: Handle overflow.
@@ -1,7 +1,7 @@
 //! Top-level lib.rs for `cranelift_simplejit`.
 
-#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
-#![warn(unused_import_braces, unstable_features)]
+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -1,7 +1,7 @@
 //! Cranelift umbrella crate, providing a convenient one-line dependency.
 
-#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
-#![warn(unused_import_braces, unstable_features)]
+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)]
+#![warn(unused_import_braces)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -17,6 +17,7 @@ hashmap_core = { version = "0.1.9", optional = true }
 failure = { version = "0.1.1", default-features = false, features = ["derive"] }
 failure_derive = { version = "0.1.1", default-features = false }
 target-lexicon = { version = "0.0.3", default-features = false }
+log = { version = "0.4.3", default-features = false, features = ["release_max_level_warn"] }
 
 [dev-dependencies]
 wabt = "0.4"
@@ -1,4 +1,5 @@
-//! "Dummy" environment for testing wasm translation.
+//! "Dummy" implementations of `ModuleEnvironment` and `FuncEnvironment` for testing
+//! wasm translation.
 
 use cranelift_codegen::cursor::FuncCursor;
 use cranelift_codegen::ir::immediates::Imm64;
@@ -67,7 +67,7 @@ impl FuncTranslator {
         environ: &mut FE,
     ) -> WasmResult<()> {
         let _tt = timing::wasm_translate_function();
-        dbg!(
+        info!(
             "translate({} bytes, {}{})",
             reader.bytes_remaining(),
             func.name,
@@ -265,7 +265,7 @@ mod tests {
         trans
             .translate(&BODY, &mut ctx.func, &mut runtime.func_env())
             .unwrap();
-        dbg!("{}", ctx.func.display(None));
+        debug!("{}", ctx.func.display(None));
         ctx.verify(runtime.func_env().flags()).unwrap();
     }
 
@@ -296,7 +296,7 @@ mod tests {
         trans
            .translate(&BODY, &mut ctx.func, &mut runtime.func_env())
            .unwrap();
-        dbg!("{}", ctx.func.display(None));
+        debug!("{}", ctx.func.display(None));
         ctx.verify(runtime.func_env().flags()).unwrap();
     }
 
@@ -335,7 +335,7 @@ mod tests {
         trans
            .translate(&BODY, &mut ctx.func, &mut runtime.func_env())
            .unwrap();
-        dbg!("{}", ctx.func.display(None));
+        debug!("{}", ctx.func.display(None));
         ctx.verify(runtime.func_env().flags()).unwrap();
     }
 }
@@ -11,7 +11,7 @@
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
 #![warn(unused_import_braces)]
-#![cfg_attr(feature = "std", warn(unstable_features))]
+#![cfg_attr(feature = "std", deny(unstable_features))]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy", allow(new_without_default, new_without_default_derive))]
 #![cfg_attr(
@@ -24,7 +24,6 @@
 #![cfg_attr(not(feature = "std"), no_std)]
 #![cfg_attr(not(feature = "std"), feature(alloc))]
 
-#[macro_use(dbg)]
 extern crate cranelift_codegen;
 #[macro_use]
 extern crate cranelift_entity;
@@ -36,6 +35,9 @@ extern crate failure;
 #[macro_use]
 extern crate failure_derive;
 
+#[macro_use]
+extern crate log;
+
 mod code_translator;
 mod environ;
 mod func_translator;