Merge remote-tracking branch 'origin/master' into no_std
@@ -1,7 +1,7 @@
 [package]
 name = "cretonne-tools"
 authors = ["The Cretonne Project Developers"]
-version = "0.5.0"
+version = "0.5.1"
 description = "Binaries for testing the Cretonne libraries"
 license = "Apache-2.0"
 documentation = "https://cretonne.readthedocs.io/"
@@ -13,16 +13,16 @@ name = "cton-util"
 path = "src/cton-util.rs"
 
 [dependencies]
-cretonne-codegen = { path = "lib/codegen", version = "0.5.0" }
-cretonne-reader = { path = "lib/reader", version = "0.5.0" }
-cretonne-frontend = { path = "lib/frontend", version = "0.5.0" }
-cretonne-wasm = { path = "lib/wasm", version = "0.5.0" }
-cretonne-native = { path = "lib/native", version = "0.5.0" }
-cretonne-filetests = { path = "lib/filetests", version = "0.5.0" }
-cretonne-module = { path = "lib/module", version = "0.5.0" }
-cretonne-faerie = { path = "lib/faerie", version = "0.5.0" }
-cretonne-simplejit = { path = "lib/simplejit", version = "0.5.0" }
-cretonne = { path = "lib/umbrella", version = "0.5.0" }
+cretonne-codegen = { path = "lib/codegen", version = "0.5.1" }
+cretonne-reader = { path = "lib/reader", version = "0.5.1" }
+cretonne-frontend = { path = "lib/frontend", version = "0.5.1" }
+cretonne-wasm = { path = "lib/wasm", version = "0.5.1" }
+cretonne-native = { path = "lib/native", version = "0.5.1" }
+cretonne-filetests = { path = "lib/filetests", version = "0.5.1" }
+cretonne-module = { path = "lib/module", version = "0.5.1" }
+cretonne-faerie = { path = "lib/faerie", version = "0.5.1" }
+cretonne-simplejit = { path = "lib/simplejit", version = "0.5.1" }
+cretonne = { path = "lib/umbrella", version = "0.5.1" }
 filecheck = "0.2.1"
 docopt = "0.8.0"
 serde = "1.0.8"
@@ -34,6 +34,19 @@ Rust Crate Documentation
     This crate translates from Cretonne IR's text format into Cretonne IR
     in in-memory data structures.
 
+`cretonne-module <https://docs.rs/cretonne-module/>`_
+    This crate manages compiling multiple functions and data objects
+    together.
+
+`cretonne-faerie <https://docs.rs/cretonne-faerie/>`_
+    This crate provides a faerie-based backend for `cretonne-module`, which
+    emits native object files using the
+    `faerie <https://crates.io/crates/faerie/>`_ library.
+
+`cretonne-simplejit <https://docs.rs/cretonne-simplejit/>`_
+    This crate provides a simple JIT backend for `cretonne-module`, which
+    emits code and data into memory.
+
 Indices and tables
 ==================
 
@@ -477,7 +477,7 @@ ebb0:
     ; Colocated functions.
 
     ; asm: call bar
-    ; call fn1() ; bin: e8 PCRel4(%bar-4) 00000000
+    call fn1() ; bin: e8 PCRel4(%bar-4) 00000000
 
     ; asm: lea 0x0(%rip), %rcx
     [-,%rcx] v400 = func_addr.i64 fn1 ; bin: 48 8d 0d PCRel4(%bar-4) 00000000
cranelift/filetests/isa/x86/legalize-call.cton (new file, 15 lines)
@@ -0,0 +1,15 @@
+; Test legalization of a non-colocated call in 64-bit non-PIC mode.
+test legalizer
+set is_64bit
+set is_compressed
+isa x86 haswell
+
+function %call() {
+    fn0 = %foo()
+ebb0:
+    call fn0()
+    return
+}
+
+; check: v0 = func_addr.i64 fn0
+; nextln: call_indirect sig0, v0()
@@ -4,7 +4,7 @@ cd $(dirname "$0")
 topdir="$(pwd)"
 
 # All the cretonne-* crates have the same version number
-version="0.5.0"
+version="0.5.1"
 
 # Update all of the Cargo.toml files.
 #
@@ -96,19 +96,18 @@ fn handle_module(
     for (func, _) in test_file.functions {
         let mut context = Context::new();
         context.func = func;
-        let size = context.compile(isa).map_err(|err| {
-            pretty_error(&context.func, Some(isa), err)
-        })?;
-        if flag_print {
-            println!("{}", context.func.display(isa));
-        }
 
-        // Encode the result as machine code.
+        // Compile and encode the result to machine code.
         let mut mem = Vec::new();
         let mut relocs = PrintRelocs { flag_print };
         let mut traps = PrintTraps { flag_print };
-        mem.resize(size as usize, 0);
-        context.emit_to_memory(mem.as_mut_ptr(), &mut relocs, &mut traps, &*isa);
+        context
+            .compile_and_emit(isa, &mut mem, &mut relocs, &mut traps)
+            .map_err(|err| pretty_error(&context.func, Some(isa), err))?;
+
+        if flag_print {
+            println!("{}", context.func.display(isa));
+        }
 
         if flag_print {
             print!(".byte ");
@@ -1,3 +1,15 @@
+#![deny(trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
+#![cfg_attr(feature="cargo-clippy", warn(
+                float_arithmetic,
+                mut_mut,
+                nonminimal_bool,
+                option_map_unwrap_or,
+                option_map_unwrap_or_else,
+                unicode_not_nfc,
+                use_self,
+                ))]
+
 extern crate cretonne_codegen;
 extern crate cretonne_filetests;
 extern crate cretonne_reader;
@@ -1,7 +1,7 @@
 [package]
 authors = ["The Cretonne Project Developers"]
 name = "cretonne-codegen"
-version = "0.5.0"
+version = "0.5.1"
 description = "Low-level code generator library"
 license = "Apache-2.0"
 documentation = "https://cretonne.readthedocs.io/"
@@ -11,7 +11,7 @@ keywords = ["compile", "compiler", "jit"]
 build = "build.rs"
 
 [dependencies]
-cretonne-entity = { path = "../entity", version = "0.5.0", default-features = false }
+cretonne-entity = { path = "../entity", version = "0.5.1", default-features = false }
 # It is a goal of the cretonne-codegen crate to have minimal external dependencies.
 # Please don't add any unless they are essential to the task of creating binary
 # machine code. Integration tests that need external dependencies can be
@@ -54,7 +54,7 @@ class TargetISA(object):
         self._predicates = dict()  # type: Dict[PredKey, PredNode]
 
         assert InstructionGroup._current is None,\
-            "InstructionGroup {} is still open!"\
+            "InstructionGroup {} is still open"\
             .format(InstructionGroup._current.name)
 
     def __str__(self):
@@ -231,7 +231,7 @@ class TypeSet(object):
     def __hash__(self):
         # type: () -> int
         h = hash(self.typeset_key())
-        assert h == getattr(self, 'prev_hash', h), "TypeSet changed!"
+        assert h == getattr(self, 'prev_hash', h), "TypeSet changed"
         self.prev_hash = h
         return h
 
@@ -47,7 +47,7 @@ def gen_formats(fmt):
     with fmt.indented(
             "impl<'a> From<&'a InstructionData> for InstructionFormat {", '}'):
         with fmt.indented(
-                "fn from(inst: &'a InstructionData) -> InstructionFormat {",
+                "fn from(inst: &'a InstructionData) -> Self {",
                 '}'):
             m = srcgen.Match('*inst')
             for f in InstructionFormat.all_formats:
@@ -29,7 +29,7 @@ def gen_enum_types(sgrp, fmt):
             continue
         ty = camel_case(setting.name)
         fmt.doc_comment('Values for `{}`.'.format(setting))
-        fmt.line('#[derive(Debug, PartialEq, Eq)]')
+        fmt.line('#[derive(Debug, Copy, Clone, PartialEq, Eq)]')
         with fmt.indented('pub enum {} {{'.format(ty), '}'):
             for v in setting.values:
                 fmt.doc_comment('`{}`.'.format(v))
@@ -223,7 +223,7 @@ def gen_display(sgrp, fmt):
                 fmt.line(
                     'TEMPLATE.format_toml_value(d.detail,' +
                     'self.bytes[d.offset as usize], f)?;')
-        fmt.line('writeln!(f, "")?;')
+        fmt.line('writeln!(f)?;')
         fmt.line('Ok(())')
 
 
@@ -241,7 +241,7 @@ def gen_constructor(sgrp, parent, fmt):
     fmt.doc_comment('Create flags {} settings group.'.format(sgrp.name))
     fmt.line('#[allow(unused_variables)]')
     with fmt.indented(
-            'pub fn new({}) -> Flags {{'.format(args), '}'):
+            'pub fn new({}) -> Self {{'.format(args), '}'):
         fmt.line('let bvec = builder.state_for("{}");'.format(sgrp.name))
         fmt.line('let mut bytes = [0; {}];'.format(sgrp.byte_size()))
         fmt.line(
@@ -252,12 +252,12 @@ def gen_constructor(sgrp, parent, fmt):
 
         # Stop here without predicates.
        if len(sgrp.predicate_number) == sgrp.boolean_settings:
-            fmt.line('Flags { bytes: bytes }')
+            fmt.line('Self { bytes }')
             return
 
         # Now compute the predicates.
         fmt.line(
-            'let mut {} = Flags {{ bytes: bytes }};'
+            'let mut {} = Self {{ bytes }};'
             .format(sgrp.name))
 
         for pred, number in sgrp.predicate_number.items():
@@ -530,7 +530,7 @@ puid_bool = TailRecipe(
         // The destination register is encoded in the low bits of the opcode.
         // No ModR/M.
         PUT_OP(bits | (out_reg0 & 7), rex1(out_reg0), sink);
-        let imm: u32 = if imm.into() { 1 } else { 0 };
+        let imm: u32 = if imm { 1 } else { 0 };
         sink.put4(imm);
         ''')
 
@@ -25,13 +25,13 @@ pub enum ArgAction {
 }
 
 impl From<ArgumentLoc> for ArgAction {
-    fn from(x: ArgumentLoc) -> ArgAction {
+    fn from(x: ArgumentLoc) -> Self {
         ArgAction::Assign(x)
     }
 }
 
 impl From<ValueConversion> for ArgAction {
-    fn from(x: ValueConversion) -> ArgAction {
+    fn from(x: ValueConversion) -> Self {
         ArgAction::Convert(x)
     }
 }
@@ -45,8 +45,8 @@ where
     C: Comparator<K>,
 {
     /// Create a new empty forest.
-    pub fn new() -> MapForest<K, V, C> {
-        MapForest { nodes: NodePool::new() }
+    pub fn new() -> Self {
+        Self { nodes: NodePool::new() }
     }
 
     /// Clear all maps in the forest.
@@ -83,8 +83,8 @@ where
     C: Comparator<K>,
 {
     /// Make an empty map.
-    pub fn new() -> Map<K, V, C> {
-        Map {
+    pub fn new() -> Self {
+        Self {
             root: None.into(),
             unused: PhantomData,
         }
@@ -73,7 +73,7 @@ impl<F: Forest> NodeData<F> {
     }
 
     /// Create an inner node with a single key and two sub-trees.
-    pub fn inner(left: Node, key: F::Key, right: Node) -> NodeData<F> {
+    pub fn inner(left: Node, key: F::Key, right: Node) -> Self {
         // Splat the key and right node to the whole array.
         // Saves us from inventing a default/reserved value.
         let mut tree = [right; INNER_SIZE];
@@ -86,7 +86,7 @@ impl<F: Forest> NodeData<F> {
     }
 
     /// Create a leaf node with a single key-value pair.
-    pub fn leaf(key: F::Key, value: F::Value) -> NodeData<F> {
+    pub fn leaf(key: F::Key, value: F::Value) -> Self {
         NodeData::Leaf {
             size: 1,
             keys: F::splat_key(key),
@@ -360,7 +360,7 @@ impl<F: Forest> NodeData<F> {
     ///
     /// In the first case, `None` is returned. In the second case, the new critical key for the
     /// right sibling node is returned.
-    pub fn balance(&mut self, crit_key: F::Key, rhs: &mut NodeData<F>) -> Option<F::Key> {
+    pub fn balance(&mut self, crit_key: F::Key, rhs: &mut Self) -> Option<F::Key> {
         match (self, rhs) {
             (&mut NodeData::Inner {
                  size: ref mut l_size,
@@ -514,7 +514,7 @@ pub(super) enum Removed {
 
 impl Removed {
     /// Create a `Removed` status from a size and capacity.
-    fn new(removed: usize, new_size: usize, capacity: usize) -> Removed {
+    fn new(removed: usize, new_size: usize, capacity: usize) -> Self {
         if 2 * new_size >= capacity {
             if removed == new_size {
                 Removed::Rightmost
@@ -22,8 +22,8 @@ pub(super) struct Path<F: Forest> {
 }
 
 impl<F: Forest> Default for Path<F> {
-    fn default() -> Path<F> {
-        Path {
+    fn default() -> Self {
+        Self {
             size: 0,
             node: [Node(0); MAX_PATH],
             entry: [0; MAX_PATH],
@@ -12,8 +12,8 @@ pub(super) struct NodePool<F: Forest> {
 
 impl<F: Forest> NodePool<F> {
     /// Allocate a new empty pool of nodes.
-    pub fn new() -> NodePool<F> {
-        NodePool {
+    pub fn new() -> Self {
+        Self {
             nodes: PrimaryMap::new(),
             freelist: None,
         }
@@ -42,8 +42,8 @@ where
     C: Comparator<K>,
 {
     /// Create a new empty forest.
-    pub fn new() -> SetForest<K, C> {
-        SetForest { nodes: NodePool::new() }
+    pub fn new() -> Self {
+        Self { nodes: NodePool::new() }
     }
 
     /// Clear all sets in the forest.
@@ -78,8 +78,8 @@ where
     C: Comparator<K>,
 {
     /// Make an empty set.
-    pub fn new() -> Set<K, C> {
-        Set {
+    pub fn new() -> Self {
+        Self {
             root: None.into(),
             unused: PhantomData,
         }
@@ -38,7 +38,10 @@ pub struct MemoryCodeSink<'a> {
 
 impl<'a> MemoryCodeSink<'a> {
     /// Create a new memory code sink that writes a function to the memory pointed to by `data`.
-    pub fn new<'sink>(
+    ///
+    /// This function is unsafe since `MemoryCodeSink` does not perform bounds checking on the
+    /// memory buffer, and it can't guarantee that the `data` pointer is valid.
+    pub unsafe fn new<'sink>(
         data: *mut u8,
         relocs: &'sink mut RelocSink,
         traps: &'sink mut TrapSink,
@@ -84,6 +87,7 @@ impl<'a> CodeSink for MemoryCodeSink<'a> {
 
     fn put2(&mut self, x: u16) {
         unsafe {
+            #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
             write_unaligned(self.data.offset(self.offset) as *mut u16, x);
         }
         self.offset += 2;
@@ -91,6 +95,7 @@ impl<'a> CodeSink for MemoryCodeSink<'a> {
 
     fn put4(&mut self, x: u32) {
         unsafe {
+            #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
             write_unaligned(self.data.offset(self.offset) as *mut u32, x);
         }
         self.offset += 4;
@@ -98,6 +103,7 @@ impl<'a> CodeSink for MemoryCodeSink<'a> {
 
     fn put8(&mut self, x: u64) {
         unsafe {
+            #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
             write_unaligned(self.data.offset(self.offset) as *mut u64, x);
         }
         self.offset += 8;
@@ -48,12 +48,12 @@ impl fmt::Display for Reloc {
     /// already unambigious, e.g. cton syntax with isa specified. In other contexts, use Debug.
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
-            Reloc::Abs4 => write!(f, "{}", "Abs4"),
-            Reloc::Abs8 => write!(f, "{}", "Abs8"),
-            Reloc::X86PCRel4 => write!(f, "{}", "PCRel4"),
-            Reloc::X86GOTPCRel4 => write!(f, "{}", "GOTPCRel4"),
-            Reloc::X86PLTRel4 => write!(f, "{}", "PLTRel4"),
-            Reloc::Arm32Call | Reloc::Arm64Call | Reloc::RiscvCall => write!(f, "{}", "Call"),
+            Reloc::Abs4 => write!(f, "Abs4"),
+            Reloc::Abs8 => write!(f, "Abs8"),
+            Reloc::X86PCRel4 => write!(f, "PCRel4"),
+            Reloc::X86GOTPCRel4 => write!(f, "GOTPCRel4"),
+            Reloc::X86PLTRel4 => write!(f, "PLTRel4"),
+            Reloc::Arm32Call | Reloc::Arm64Call | Reloc::RiscvCall => write!(f, "Call"),
         }
     }
 }
@@ -52,7 +52,7 @@ impl Context {
     /// The returned instance should be reused for compiling multiple functions in order to avoid
     /// needless allocator thrashing.
     pub fn new() -> Self {
-        Context::for_function(Function::new())
+        Self::for_function(Function::new())
     }
 
     /// Allocate a new compilation context with an existing Function.
@@ -61,7 +61,7 @@ impl Context {
     /// needless allocator thrashing.
     pub fn for_function(func: Function) -> Self {
         Self {
-            func: func,
+            func,
             cfg: ControlFlowGraph::new(),
             domtree: DominatorTree::new(),
             regalloc: regalloc::Context::new(),
@@ -78,6 +78,36 @@ impl Context {
         self.loop_analysis.clear();
     }
 
+    /// Compile the function, and emit machine code into a `Vec<u8>`.
+    ///
+    /// Run the function through all the passes necessary to generate code for the target ISA
+    /// represented by `isa`, as well as the final step of emitting machine code into a
+    /// `Vec<u8>`. The machine code is not relocated. Instead, any relocations are emitted
+    /// into `relocs`.
+    ///
+    /// This function calls `compile` and `emit_to_memory`, taking care to resize `mem` as
+    /// needed, so it provides a safe interface.
+    pub fn compile_and_emit(
+        &mut self,
+        isa: &TargetIsa,
+        mem: &mut Vec<u8>,
+        relocs: &mut RelocSink,
+        traps: &mut TrapSink,
+    ) -> CtonResult {
+        let code_size = self.compile(isa)?;
+        let old_len = mem.len();
+        mem.resize(old_len + code_size as usize, 0);
+        unsafe {
+            self.emit_to_memory(
+                isa,
+                mem.as_mut_ptr().offset(old_len as isize),
+                relocs,
+                traps,
+            )
+        };
+        Ok(())
+    }
+
     /// Compile the function.
     ///
     /// Run the function through all the passes necessary to generate code for the target ISA
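For illustration, a minimal sketch of how a caller might drive the new safe `compile_and_emit` entry point, mirroring the cton-util change earlier in this diff; the `func`, `isa`, `relocs`, and `traps` values are assumed to be set up by the caller as in that tool:

    // Sketch only; `func: Function`, `isa: &TargetIsa`, and RelocSink/TrapSink
    // implementations (`relocs`, `traps`) are assumed to already exist.
    let mut context = Context::for_function(func);
    let mut mem = Vec::new();
    // compile_and_emit resizes `mem` itself, so no unsafe code is needed here.
    context.compile_and_emit(isa, &mut mem, &mut relocs, &mut traps)?;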
@@ -119,12 +149,15 @@ impl Context {
     /// code is returned by `compile` above.
     ///
     /// The machine code is not relocated. Instead, any relocations are emitted into `relocs`.
-    pub fn emit_to_memory(
+    ///
+    /// This function is unsafe since it does not perform bounds checking on the memory buffer,
+    /// and it can't guarantee that the `mem` pointer is valid.
+    pub unsafe fn emit_to_memory(
         &self,
+        isa: &TargetIsa,
         mem: *mut u8,
         relocs: &mut RelocSink,
         traps: &mut TrapSink,
-        isa: &TargetIsa,
     ) {
         let _tt = timing::binemit();
         isa.emit_function(&self.func, &mut MemoryCodeSink::new(mem, relocs, traps));
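A hedged sketch of what a caller keeping the raw-pointer interface now has to write; it mirrors the body of `compile_and_emit` above, and sizing the buffer correctly is the caller's responsibility:

    // Sketch only; `context`, `isa`, `relocs`, and `traps` are assumed as above.
    let code_size = context.compile(isa)?;
    let mut mem = vec![0u8; code_size as usize];
    unsafe {
        // Sound only because `mem` was sized from the `compile` result.
        context.emit_to_memory(isa, mem.as_mut_ptr(), &mut relocs, &mut traps);
    }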
@@ -528,8 +528,8 @@ struct ExtraNode {
 /// Creating and computing the dominator tree pre-order.
 impl DominatorTreePreorder {
     /// Create a new blank `DominatorTreePreorder`.
-    pub fn new() -> DominatorTreePreorder {
-        DominatorTreePreorder {
+    pub fn new() -> Self {
+        Self {
             nodes: EntityMap::new(),
             stack: Vec::new(),
         }
@@ -31,7 +31,7 @@ impl Ebb {
     /// Create a new EBB reference from its number. This corresponds to the `ebbNN` representation.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<Ebb> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX { Some(Ebb(n)) } else { None }
     }
 }
@@ -46,7 +46,7 @@ impl Value {
     /// This is the number in the `vNN` notation.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<Value> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX / 2 {
             Some(Value(n))
         } else {
@@ -69,7 +69,7 @@ impl StackSlot {
     /// Create a new stack slot reference from its number.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<StackSlot> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX {
             Some(StackSlot(n))
         } else {
@@ -87,7 +87,7 @@ impl GlobalVar {
     /// Create a new global variable reference from its number.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<GlobalVar> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX {
             Some(GlobalVar(n))
         } else {
@@ -105,7 +105,7 @@ impl JumpTable {
     /// Create a new jump table reference from its number.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<JumpTable> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX {
             Some(JumpTable(n))
         } else {
@@ -123,7 +123,7 @@ impl FuncRef {
     /// Create a new external function reference from its number.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<FuncRef> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX { Some(FuncRef(n)) } else { None }
     }
 }
@@ -137,7 +137,7 @@ impl SigRef {
     /// Create a new function signature reference from its number.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<SigRef> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX { Some(SigRef(n)) } else { None }
     }
 }
@@ -151,7 +151,7 @@ impl Heap {
     /// Create a new heap reference from its number.
     ///
     /// This method is for use by the parser.
-    pub fn with_number(n: u32) -> Option<Heap> {
+    pub fn with_number(n: u32) -> Option<Self> {
         if n < u32::MAX { Some(Heap(n)) } else { None }
     }
 }
@@ -205,55 +205,55 @@ impl fmt::Debug for AnyEntity {
 }
 
 impl From<Ebb> for AnyEntity {
-    fn from(r: Ebb) -> AnyEntity {
+    fn from(r: Ebb) -> Self {
         AnyEntity::Ebb(r)
     }
 }
 
 impl From<Inst> for AnyEntity {
-    fn from(r: Inst) -> AnyEntity {
+    fn from(r: Inst) -> Self {
         AnyEntity::Inst(r)
     }
 }
 
 impl From<Value> for AnyEntity {
-    fn from(r: Value) -> AnyEntity {
+    fn from(r: Value) -> Self {
         AnyEntity::Value(r)
     }
 }
 
 impl From<StackSlot> for AnyEntity {
-    fn from(r: StackSlot) -> AnyEntity {
+    fn from(r: StackSlot) -> Self {
         AnyEntity::StackSlot(r)
     }
 }
 
 impl From<GlobalVar> for AnyEntity {
-    fn from(r: GlobalVar) -> AnyEntity {
+    fn from(r: GlobalVar) -> Self {
         AnyEntity::GlobalVar(r)
     }
 }
 
 impl From<JumpTable> for AnyEntity {
-    fn from(r: JumpTable) -> AnyEntity {
+    fn from(r: JumpTable) -> Self {
         AnyEntity::JumpTable(r)
     }
 }
 
 impl From<FuncRef> for AnyEntity {
-    fn from(r: FuncRef) -> AnyEntity {
+    fn from(r: FuncRef) -> Self {
         AnyEntity::FuncRef(r)
     }
 }
 
 impl From<SigRef> for AnyEntity {
-    fn from(r: SigRef) -> AnyEntity {
+    fn from(r: SigRef) -> Self {
         AnyEntity::SigRef(r)
     }
 }
 
 impl From<Heap> for AnyEntity {
-    fn from(r: Heap) -> AnyEntity {
+    fn from(r: Heap) -> Self {
         AnyEntity::Heap(r)
     }
 }
@@ -304,7 +304,7 @@ impl fmt::Display for ArgumentPurpose {
 
 impl FromStr for ArgumentPurpose {
     type Err = ();
-    fn from_str(s: &str) -> Result<ArgumentPurpose, ()> {
+    fn from_str(s: &str) -> Result<Self, ()> {
         match s {
             "normal" => Ok(ArgumentPurpose::Normal),
             "sret" => Ok(ArgumentPurpose::StructReturn),
@@ -56,7 +56,7 @@ impl ExternalName {
     /// let name = ExternalName::testcase("hello");
     /// assert_eq!(name.to_string(), "%hello");
     /// ```
-    pub fn testcase<T: AsRef<[u8]>>(v: T) -> ExternalName {
+    pub fn testcase<T: AsRef<[u8]>>(v: T) -> Self {
         let vec = v.as_ref();
         let len = cmp::min(vec.len(), TESTCASE_NAME_LENGTH);
         let mut bytes = [0u8; TESTCASE_NAME_LENGTH];
@@ -77,17 +77,14 @@ impl ExternalName {
     /// let name = ExternalName::user(123, 456);
     /// assert_eq!(name.to_string(), "u123:456");
     /// ```
-    pub fn user(namespace: u32, index: u32) -> ExternalName {
-        ExternalName::User {
-            namespace: namespace,
-            index: index,
-        }
+    pub fn user(namespace: u32, index: u32) -> Self {
+        ExternalName::User { namespace, index }
     }
 }
 
 impl Default for ExternalName {
-    fn default() -> ExternalName {
-        ExternalName::user(0, 0)
+    fn default() -> Self {
+        Self::user(0, 0)
     }
 }
 
@@ -18,12 +18,12 @@ pub struct Imm64(i64);
 
 impl Imm64 {
     /// Create a new `Imm64` representing the signed number `x`.
-    pub fn new(x: i64) -> Imm64 {
+    pub fn new(x: i64) -> Self {
         Imm64(x)
     }
 
     /// Return self negated.
-    pub fn wrapping_neg(self) -> Imm64 {
+    pub fn wrapping_neg(self) -> Self {
         Imm64(self.0.wrapping_neg())
     }
 }
@@ -143,8 +143,8 @@ impl FromStr for Imm64 {
     type Err = &'static str;
 
     // Parse a decimal or hexadecimal `Imm64`, formatted as above.
-    fn from_str(s: &str) -> Result<Imm64, &'static str> {
-        parse_i64(s).map(Imm64::new)
+    fn from_str(s: &str) -> Result<Self, &'static str> {
+        parse_i64(s).map(Self::new)
     }
 }
 
@@ -191,7 +191,7 @@ impl FromStr for Uimm32 {
     type Err = &'static str;
 
     // Parse a decimal or hexadecimal `Uimm32`, formatted as above.
-    fn from_str(s: &str) -> Result<Uimm32, &'static str> {
+    fn from_str(s: &str) -> Result<Self, &'static str> {
         parse_i64(s).and_then(|x| if 0 <= x && x <= i64::from(u32::MAX) {
             Ok(Uimm32(x as u32))
         } else {
@@ -209,7 +209,7 @@ pub struct Offset32(i32);
 
 impl Offset32 {
     /// Create a new `Offset32` representing the signed number `x`.
-    pub fn new(x: i32) -> Offset32 {
+    pub fn new(x: i32) -> Self {
         Offset32(x)
     }
 }
@@ -255,14 +255,14 @@ impl FromStr for Offset32 {
     type Err = &'static str;
 
     // Parse a decimal or hexadecimal `Offset32`, formatted as above.
-    fn from_str(s: &str) -> Result<Offset32, &'static str> {
+    fn from_str(s: &str) -> Result<Self, &'static str> {
         if !(s.starts_with('-') || s.starts_with('+')) {
             return Err("Offset must begin with sign");
         }
         parse_i64(s).and_then(|x| if i64::from(i32::MIN) <= x &&
                                      x <= i64::from(i32::MAX)
         {
-            Ok(Offset32::new(x as i32))
+            Ok(Self::new(x as i32))
         } else {
             Err("Offset out of range")
         })
@@ -524,12 +524,12 @@ fn parse_float(s: &str, w: u8, t: u8) -> Result<u64, &'static str> {
 
 impl Ieee32 {
     /// Create a new `Ieee32` containing the bits of `x`.
-    pub fn with_bits(x: u32) -> Ieee32 {
+    pub fn with_bits(x: u32) -> Self {
         Ieee32(x)
     }
 
     /// Create an `Ieee32` number representing `2.0^n`.
-    pub fn pow2<I: Into<i32>>(n: I) -> Ieee32 {
+    pub fn pow2<I: Into<i32>>(n: I) -> Self {
         let n = n.into();
         let w = 8;
         let t = 23;
@@ -542,7 +542,7 @@ impl Ieee32 {
 
     /// Create an `Ieee32` number representing the greatest negative value
     /// not convertable from f32 to a signed integer with width n.
-    pub fn fcvt_to_sint_negative_overflow<I: Into<i32>>(n: I) -> Ieee32 {
+    pub fn fcvt_to_sint_negative_overflow<I: Into<i32>>(n: I) -> Self {
         let n = n.into();
         debug_assert!(n < 32);
         debug_assert!(23 + 1 - n < 32);
@@ -552,12 +552,12 @@ impl Ieee32 {
     }
 
     /// Return self negated.
-    pub fn neg(self) -> Ieee32 {
+    pub fn neg(self) -> Self {
         Ieee32(self.0 ^ (1 << 31))
     }
 
     /// Create a new `Ieee32` representing the number `x`.
-    pub fn with_float(x: f32) -> Ieee32 {
+    pub fn with_float(x: f32) -> Self {
         Ieee32(unsafe { mem::transmute(x) })
     }
 
@@ -577,7 +577,7 @@ impl Display for Ieee32 {
 impl FromStr for Ieee32 {
     type Err = &'static str;
 
-    fn from_str(s: &str) -> Result<Ieee32, &'static str> {
+    fn from_str(s: &str) -> Result<Self, &'static str> {
         match parse_float(s, 8, 23) {
             Ok(b) => Ok(Ieee32(b as u32)),
             Err(s) => Err(s),
@@ -587,12 +587,12 @@ impl FromStr for Ieee32 {
 
 impl Ieee64 {
     /// Create a new `Ieee64` containing the bits of `x`.
-    pub fn with_bits(x: u64) -> Ieee64 {
+    pub fn with_bits(x: u64) -> Self {
         Ieee64(x)
     }
 
     /// Create an `Ieee64` number representing `2.0^n`.
-    pub fn pow2<I: Into<i64>>(n: I) -> Ieee64 {
+    pub fn pow2<I: Into<i64>>(n: I) -> Self {
         let n = n.into();
         let w = 11;
         let t = 52;
@@ -605,7 +605,7 @@ impl Ieee64 {
 
     /// Create an `Ieee64` number representing the greatest negative value
     /// not convertable from f64 to a signed integer with width n.
-    pub fn fcvt_to_sint_negative_overflow<I: Into<i64>>(n: I) -> Ieee64 {
+    pub fn fcvt_to_sint_negative_overflow<I: Into<i64>>(n: I) -> Self {
         let n = n.into();
         debug_assert!(n < 64);
         debug_assert!(52 + 1 - n < 64);
@@ -615,12 +615,12 @@ impl Ieee64 {
     }
 
     /// Return self negated.
-    pub fn neg(self) -> Ieee64 {
+    pub fn neg(self) -> Self {
         Ieee64(self.0 ^ (1 << 63))
     }
 
     /// Create a new `Ieee64` representing the number `x`.
-    pub fn with_float(x: f64) -> Ieee64 {
+    pub fn with_float(x: f64) -> Self {
         Ieee64(unsafe { mem::transmute(x) })
     }
 
@@ -640,7 +640,7 @@ impl Display for Ieee64 {
 impl FromStr for Ieee64 {
     type Err = &'static str;
 
-    fn from_str(s: &str) -> Result<Ieee64, &'static str> {
+    fn from_str(s: &str) -> Result<Self, &'static str> {
         match parse_float(s, 11, 52) {
             Ok(b) => Ok(Ieee64(b)),
             Err(s) => Err(s),
@@ -72,7 +72,7 @@ impl FromStr for Opcode {
     type Err = &'static str;
 
     /// Parse an Opcode name from a string.
-    fn from_str(s: &str) -> Result<Opcode, &'static str> {
+    fn from_str(s: &str) -> Result<Self, &'static str> {
         use constant_hash::{probe, simple_hash, Table};
 
         impl<'a> Table<&'a str> for [Option<Opcode>] {
@@ -85,7 +85,7 @@ impl FromStr for Opcode {
             }
         }
 
-        match probe::<&str, [Option<Opcode>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
+        match probe::<&str, [Option<Self>]>(&OPCODE_HASH_TABLE, s, simple_hash(s)) {
             Err(_) => Err("Unknown opcode"),
             // We unwrap here because probe() should have ensured that the entry
             // at this index is not None.
@@ -72,7 +72,7 @@ impl LibCall {
     /// given opcode and controlling type variable.
     ///
     /// Returns `None` if no well-known library routine name exists for that instruction.
-    pub fn for_inst(opcode: Opcode, ctrl_type: Type) -> Option<LibCall> {
+    pub fn for_inst(opcode: Opcode, ctrl_type: Type) -> Option<Self> {
         Some(match ctrl_type {
             types::F32 => {
                 match opcode {
@@ -17,7 +17,7 @@ use std::u32;
 pub struct ProgramPoint(u32);
 
 impl From<Inst> for ProgramPoint {
-    fn from(inst: Inst) -> ProgramPoint {
+    fn from(inst: Inst) -> Self {
         let idx = inst.index();
         debug_assert!(idx < (u32::MAX / 2) as usize);
         ProgramPoint((idx * 2) as u32)
@@ -25,7 +25,7 @@ impl From<Inst> for ProgramPoint {
 }
 
 impl From<Ebb> for ProgramPoint {
-    fn from(ebb: Ebb) -> ProgramPoint {
+    fn from(ebb: Ebb) -> Self {
         let idx = ebb.index();
         debug_assert!(idx < (u32::MAX / 2) as usize);
         ProgramPoint((idx * 2 + 1) as u32)
@@ -33,7 +33,7 @@ impl From<Ebb> for ProgramPoint {
 }
 
 impl From<ValueDef> for ProgramPoint {
-    fn from(def: ValueDef) -> ProgramPoint {
+    fn from(def: ValueDef) -> Self {
         match def {
             ValueDef::Result(inst, _) => inst.into(),
             ValueDef::Param(ebb, _) => ebb.into(),
@@ -62,19 +62,19 @@ impl ExpandedProgramPoint {
 }
 
 impl From<Inst> for ExpandedProgramPoint {
-    fn from(inst: Inst) -> ExpandedProgramPoint {
+    fn from(inst: Inst) -> Self {
         ExpandedProgramPoint::Inst(inst)
     }
 }
 
 impl From<Ebb> for ExpandedProgramPoint {
-    fn from(ebb: Ebb) -> ExpandedProgramPoint {
+    fn from(ebb: Ebb) -> Self {
         ExpandedProgramPoint::Ebb(ebb)
     }
 }
 
 impl From<ValueDef> for ExpandedProgramPoint {
-    fn from(def: ValueDef) -> ExpandedProgramPoint {
+    fn from(def: ValueDef) -> Self {
         match def {
             ValueDef::Result(inst, _) => inst.into(),
             ValueDef::Param(ebb, _) => ebb.into(),
@@ -83,7 +83,7 @@ impl From<ValueDef> for ExpandedProgramPoint {
 }
 
 impl From<ProgramPoint> for ExpandedProgramPoint {
-    fn from(pp: ProgramPoint) -> ExpandedProgramPoint {
+    fn from(pp: ProgramPoint) -> Self {
         if pp.0 & 1 == 0 {
             ExpandedProgramPoint::Inst(Inst::new((pp.0 / 2) as usize))
         } else {
@@ -17,7 +17,7 @@ pub struct SourceLoc(u32);
 
 impl SourceLoc {
     /// Create a new source location with the given bits.
-    pub fn new(bits: u32) -> SourceLoc {
+    pub fn new(bits: u32) -> Self {
         SourceLoc(bits)
     }
 
@@ -70,7 +70,7 @@ pub enum StackSlotKind {
 impl FromStr for StackSlotKind {
     type Err = ();
 
-    fn from_str(s: &str) -> Result<StackSlotKind, ()> {
+    fn from_str(s: &str) -> Result<Self, ()> {
         use self::StackSlotKind::*;
         match s {
             "explicit_slot" => Ok(ExplicitSlot),
@@ -117,8 +117,8 @@ pub struct StackSlotData {
 
 impl StackSlotData {
     /// Create a stack slot with the specified byte size.
-    pub fn new(kind: StackSlotKind, size: StackSize) -> StackSlotData {
-        StackSlotData {
+    pub fn new(kind: StackSlotKind, size: StackSize) -> Self {
+        Self {
             kind,
             size,
             offset: None,
@@ -39,7 +39,7 @@ impl Type {
     /// Get the lane type of this SIMD vector type.
     ///
     /// A lane type is the same as a SIMD vector type with one lane, so it returns itself.
-    pub fn lane_type(self) -> Type {
+    pub fn lane_type(self) -> Self {
         if self.0 < VECTOR_BASE {
             self
         } else {
@@ -72,7 +72,7 @@ impl Type {
     }
 
     /// Get an integer type with the requested number of bits.
-    pub fn int(bits: u16) -> Option<Type> {
+    pub fn int(bits: u16) -> Option<Self> {
         match bits {
             8 => Some(I8),
             16 => Some(I16),
@@ -83,7 +83,7 @@ impl Type {
     }
 
     /// Get a type with the same number of lanes as `self`, but using `lane` as the lane type.
-    fn replace_lanes(self, lane: Type) -> Type {
+    fn replace_lanes(self, lane: Self) -> Self {
         debug_assert!(lane.is_lane() && !self.is_special());
         Type((lane.0 & 0x0f) | (self.0 & 0xf0))
     }
@@ -93,7 +93,7 @@ impl Type {
     ///
     /// Scalar types are treated as vectors with one lane, so they are converted to the multi-bit
     /// boolean types.
-    pub fn as_bool_pedantic(self) -> Type {
+    pub fn as_bool_pedantic(self) -> Self {
         // Replace the low 4 bits with the boolean version, preserve the high 4 bits.
         self.replace_lanes(match self.lane_type() {
             B8 | I8 => B8,
@@ -108,7 +108,7 @@ impl Type {
     /// booleans of the same size.
     ///
     /// Scalar types are all converted to `b1` which is usually what you want.
-    pub fn as_bool(self) -> Type {
+    pub fn as_bool(self) -> Self {
         if !self.is_vector() {
             B1
         } else {
@@ -118,7 +118,7 @@ impl Type {
 
     /// Get a type with the same number of lanes as this type, but with lanes that are half the
     /// number of bits.
-    pub fn half_width(self) -> Option<Type> {
+    pub fn half_width(self) -> Option<Self> {
         Some(self.replace_lanes(match self.lane_type() {
             I16 => I8,
             I32 => I16,
@@ -133,7 +133,7 @@ impl Type {
 
     /// Get a type with the same number of lanes as this type, but with lanes that are twice the
     /// number of bits.
-    pub fn double_width(self) -> Option<Type> {
+    pub fn double_width(self) -> Option<Self> {
         Some(self.replace_lanes(match self.lane_type() {
             I8 => I16,
             I16 => I32,
@@ -235,7 +235,7 @@ impl Type {
     ///
     /// If this is already a SIMD vector type, this produces a SIMD vector type with `n *
     /// self.lane_count()` lanes.
-    pub fn by(self, n: u16) -> Option<Type> {
+    pub fn by(self, n: u16) -> Option<Self> {
         if self.lane_bits() == 0 || !n.is_power_of_two() {
             return None;
         }
@@ -251,7 +251,7 @@ impl Type {
     /// Get a SIMD vector with half the number of lanes.
     ///
     /// There is no `double_vector()` method. Use `t.by(2)` instead.
-    pub fn half_vector(self) -> Option<Type> {
+    pub fn half_vector(self) -> Option<Self> {
         if self.is_vector() {
             Some(Type(self.0 - 0x10))
         } else {
@@ -268,7 +268,7 @@ impl Type {
     ///
     /// 1. `self.lane_count() == other.lane_count()` and
     /// 2. `self.lane_bits() >= other.lane_bits()`
-    pub fn wider_or_equal(self, other: Type) -> bool {
+    pub fn wider_or_equal(self, other: Self) -> bool {
         self.lane_count() == other.lane_count() && self.lane_bits() >= other.lane_bits()
     }
 }
@@ -18,8 +18,8 @@ pub struct Encoding {
 
 impl Encoding {
     /// Create a new `Encoding` containing `(recipe, bits)`.
-    pub fn new(recipe: u16, bits: u16) -> Encoding {
-        Encoding { recipe, bits }
+    pub fn new(recipe: u16, bits: u16) -> Self {
+        Self { recipe, bits }
     }
 
     /// Get the recipe number in this encoding.
@@ -122,10 +122,10 @@ impl EncInfo {
     ///
     /// Returns 0 for illegal encodings.
     pub fn bytes(&self, enc: Encoding) -> CodeOffset {
-        self.sizing
-            .get(enc.recipe())
-            .map(|s| CodeOffset::from(s.bytes))
-            .unwrap_or(0)
+        self.sizing.get(enc.recipe()).map_or(
+            0,
+            |s| CodeOffset::from(s.bytes),
+        )
     }
 
     /// Get the branch range that is supported by `enc`, if any.
@@ -24,8 +24,8 @@ struct Args {
 }
 
 impl Args {
-    fn new(bits: u16, enable_e: bool) -> Args {
-        Args {
+    fn new(bits: u16, enable_e: bool) -> Self {
+        Self {
             pointer_bits: bits,
             pointer_bytes: u32::from(bits) / 8,
             pointer_type: Type::int(bits).unwrap(),
@@ -23,10 +23,10 @@ pub struct StackRef {
 
 impl StackRef {
     /// Get a reference to the stack slot `ss` using one of the base pointers in `mask`.
-    pub fn masked(ss: StackSlot, mask: StackBaseMask, frame: &StackSlots) -> Option<StackRef> {
+    pub fn masked(ss: StackSlot, mask: StackBaseMask, frame: &StackSlots) -> Option<Self> {
         // Try an SP-relative reference.
         if mask.contains(StackBase::SP) {
-            return Some(StackRef::sp(ss, frame));
+            return Some(Self::sp(ss, frame));
         }
 
         // No reference possible with this mask.
@@ -34,7 +34,7 @@ impl StackRef {
     }
 
     /// Get a reference to `ss` using the stack pointer as a base.
-    pub fn sp(ss: StackSlot, frame: &StackSlots) -> StackRef {
+    pub fn sp(ss: StackSlot, frame: &StackSlots) -> Self {
         let size = frame.frame_size.expect(
             "Stack layout must be computed before referencing stack slots",
         );
@@ -48,7 +48,7 @@ impl StackRef {
             let sp_offset = -(size as StackOffset);
             slot.offset.unwrap() - sp_offset
         };
-        StackRef {
+        Self {
            base: StackBase::SP,
            offset,
        }
@@ -34,8 +34,8 @@ struct Args {
 }

 impl Args {
-fn new(bits: u16, gpr: &'static [RU], fpr_limit: usize, call_conv: CallConv) -> Args {
-Args {
+fn new(bits: u16, gpr: &'static [RU], fpr_limit: usize, call_conv: CallConv) -> Self {
+Self {
 pointer_bytes: u32::from(bits) / 8,
 pointer_bits: bits,
 pointer_type: ir::Type::int(bits).unwrap(),
@@ -44,7 +44,7 @@ impl Args {
 fpr_limit,
 fpr_used: 0,
 offset: 0,
-call_conv: call_conv,
+call_conv,
 }
 }
 }
@@ -205,7 +205,7 @@ fn callee_saved_gprs_used(flags: &shared_settings::Flags, func: &ir::Function) -
 }

 used.intersect(&all_callee_saved);
-return used;
+used
 }

 pub fn prologue_epilogue(func: &mut ir::Function, isa: &TargetIsa) -> result::CtonResult {
@@ -1,6 +1,7 @@
 //! Cretonne code generation library.

 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature="cargo-clippy", allow(
 // Rustfmt 0.9.0 is at odds with this lint:
@@ -29,6 +30,16 @@
 redundant_field_names,
 useless_let_if_seq,
 len_without_is_empty))]
+#![cfg_attr(feature="cargo-clippy", warn(
+float_arithmetic,
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+print_stdout,
+unicode_not_nfc,
+use_self,
+))]

 // Turns on no_std and alloc features if std is not available.
 #![cfg_attr(not(feature = "std"), no_std)]
@@ -150,7 +150,7 @@ fn is_loop_invariant(inst: Inst, dfg: &DataFlowGraph, loop_values: &HashSet<Valu
 return false;
 }
 }
-return true;
+true
 }

 // Traverses a loop in reverse post-order from a header EBB and identify loop-invariant
@@ -173,7 +173,7 @@ fn remove_loop_invariant_instructions(
 }
 pos.goto_top(*ebb);
 #[cfg_attr(feature = "cargo-clippy", allow(block_in_if_condition_stmt))]
-'next_inst: while let Some(inst) = pos.next_inst() {
+while let Some(inst) = pos.next_inst() {
 if is_loop_invariant(inst, &pos.func.dfg, &loop_values) {
 // If all the instruction's argument are defined outside the loop
 // then this instruction is loop-invariant
@@ -32,9 +32,9 @@ struct LoopData {

 impl LoopData {
 /// Creates a `LoopData` object with the loop header and its eventual parent in the loop tree.
-pub fn new(header: Ebb, parent: Option<Loop>) -> LoopData {
-LoopData {
-header: header,
+pub fn new(header: Ebb, parent: Option<Loop>) -> Self {
+Self {
+header,
 parent: parent.into(),
 }
 }
@@ -19,7 +19,7 @@ pub enum Affinity {
 ///
 /// This indicates a value that is not defined or used by any real instructions. It is a ghost
 /// value that won't appear in the final program.
-None,
+Unassigned,

 /// This value should be placed in a spill slot on the stack.
 Stack,
@@ -30,16 +30,16 @@ pub enum Affinity {

 impl Default for Affinity {
 fn default() -> Self {
-Affinity::None
+Affinity::Unassigned
 }
 }

 impl Affinity {
 /// Create an affinity that satisfies a single constraint.
 ///
-/// This will never create an `Affinity::None`.
+/// This will never create an `Affinity::Unassigned`.
 /// Use the `Default` implementation for that.
-pub fn new(constraint: &OperandConstraint) -> Affinity {
+pub fn new(constraint: &OperandConstraint) -> Self {
 if constraint.kind == ConstraintKind::Stack {
 Affinity::Stack
 } else {
@@ -48,18 +48,18 @@ impl Affinity {
 }

 /// Create an affinity that matches an ABI argument for `isa`.
-pub fn abi(arg: &AbiParam, isa: &TargetIsa) -> Affinity {
+pub fn abi(arg: &AbiParam, isa: &TargetIsa) -> Self {
 match arg.location {
-ArgumentLoc::Unassigned => Affinity::None,
+ArgumentLoc::Unassigned => Affinity::Unassigned,
 ArgumentLoc::Reg(_) => Affinity::Reg(isa.regclass_for_abi_type(arg.value_type).into()),
 ArgumentLoc::Stack(_) => Affinity::Stack,
 }
 }

-/// Is this the `None` affinity?
-pub fn is_none(self) -> bool {
+/// Is this the `Unassigned` affinity?
+pub fn is_unassigned(self) -> bool {
 match self {
-Affinity::None => true,
+Affinity::Unassigned => true,
 _ => false,
 }
 }
@@ -84,18 +84,18 @@ impl Affinity {
 ///
 /// Note that this does not guarantee that the register allocator will pick a register that
 /// satisfies the constraint.
-pub fn merge(&mut self, constraint: &OperandConstraint, reg_info: &RegInfo) {
+pub fn merge(&mut self, constraint: &OperandConstraint, reginfo: &RegInfo) {
 match *self {
-Affinity::None => *self = Affinity::new(constraint),
+Affinity::Unassigned => *self = Self::new(constraint),
 Affinity::Reg(rc) => {
 // If the preferred register class is a subclass of the constraint, there's no need
 // to change anything.
 if constraint.kind != ConstraintKind::Stack &&
 !constraint.regclass.has_subclass(rc)
 {
-// If the register classes don't overlap, `intersect` returns `None`, and we
-// just keep our previous affinity.
-if let Some(subclass) = constraint.regclass.intersect_index(reg_info.rc(rc)) {
+// If the register classes don't overlap, `intersect` returns `Unassigned`, and
+// we just keep our previous affinity.
+if let Some(subclass) = constraint.regclass.intersect_index(reginfo.rc(rc)) {
 // This constraint shrinks our preferred register class.
 *self = Affinity::Reg(subclass);
 }
@@ -118,7 +118,7 @@ pub struct DisplayAffinity<'a>(Affinity, Option<&'a RegInfo>);
 impl<'a> fmt::Display for DisplayAffinity<'a> {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 match self.0 {
-Affinity::None => write!(f, "none"),
+Affinity::Unassigned => write!(f, "unassigned"),
 Affinity::Stack => write!(f, "stack"),
 Affinity::Reg(rci) => {
 match self.1 {
@@ -704,10 +704,10 @@ struct Node {

 impl Node {
 /// Create a node representing `value`.
-pub fn value(value: Value, set_id: u8, func: &Function) -> Node {
+pub fn value(value: Value, set_id: u8, func: &Function) -> Self {
 let def = func.dfg.value_def(value).pp();
 let ebb = func.layout.pp_ebb(def);
-Node {
+Self {
 def,
 ebb,
 is_vcopy: false,
@@ -717,10 +717,10 @@ impl Node {
 }

 /// Create a node representing a virtual copy.
-pub fn vcopy(branch: Inst, value: Value, set_id: u8, func: &Function) -> Node {
+pub fn vcopy(branch: Inst, value: Value, set_id: u8, func: &Function) -> Self {
 let def = branch.into();
 let ebb = func.layout.pp_ebb(def);
-Node {
+Self {
 def,
 ebb,
 is_vcopy: true,
@@ -891,8 +891,8 @@ struct VirtualCopies {

 impl VirtualCopies {
 /// Create an empty VirtualCopies struct.
-pub fn new() -> VirtualCopies {
-VirtualCopies {
+pub fn new() -> Self {
+Self {
 params: Vec::new(),
 branches: Vec::new(),
 filter: Vec::new(),
@@ -272,7 +272,7 @@ impl<'a> Context<'a> {
 Affinity::Stack => debug_assert!(abi.location.is_stack()),
 // This is a ghost value, unused in the function. Don't assign it to a location
 // either.
-Affinity::None => {}
+Affinity::Unassigned => {}
 }
 }

@@ -1126,8 +1126,8 @@ struct AvailableRegs {

 impl AvailableRegs {
 /// Initialize both the input and global sets from `regs`.
-pub fn new(regs: &RegisterSet) -> AvailableRegs {
-AvailableRegs {
+pub fn new(regs: &RegisterSet) -> Self {
+Self {
 input: regs.clone(),
 global: regs.clone(),
 }
@@ -32,9 +32,9 @@ pub struct Diversion {

 impl Diversion {
 /// Make a new diversion.
-pub fn new(value: Value, from: ValueLoc, to: ValueLoc) -> Diversion {
+pub fn new(value: Value, from: ValueLoc, to: ValueLoc) -> Self {
 debug_assert!(from.is_assigned() && to.is_assigned());
-Diversion { value, from, to }
+Self { value, from, to }
 }
 }

@@ -179,7 +179,7 @@ use entity::SparseMap;
 use flowgraph::ControlFlowGraph;
 use ir::dfg::ValueDef;
 use ir::{Ebb, Function, Inst, Layout, ProgramPoint, Value};
-use isa::{EncInfo, TargetIsa};
+use isa::{EncInfo, TargetIsa, OperandConstraint};
 use regalloc::affinity::Affinity;
 use regalloc::liverange::{LiveRange, LiveRangeContext, LiveRangeForest};
 use std::mem;
@@ -197,7 +197,7 @@ fn get_or_create<'a>(
 value: Value,
 isa: &TargetIsa,
 func: &Function,
-enc_info: &EncInfo,
+encinfo: &EncInfo,
 ) -> &'a mut LiveRange {
 // It would be better to use `get_mut()` here, but that leads to borrow checker fighting
 // which can probably only be resolved by non-lexical lifetimes.
@@ -211,7 +211,7 @@ fn get_or_create<'a>(
 def = inst.into();
 // Initialize the affinity from the defining instruction's result constraints.
 // Don't do this for call return values which are always tied to a single register.
-affinity = enc_info
+affinity = encinfo
 .operand_constraints(func.encodings[inst])
 .and_then(|rc| rc.outs.get(rnum))
 .map(Affinity::new)
@@ -385,8 +385,8 @@ impl Liveness {
 self.ranges.clear();

 // Get ISA data structures used for computing live range affinities.
-let enc_info = isa.encoding_info();
-let reg_info = isa.register_info();
+let encinfo = isa.encoding_info();
+let reginfo = isa.register_info();

 // The liveness computation needs to visit all uses, but the order doesn't matter.
 // TODO: Perhaps this traversal of the function could be combined with a dead code
@@ -397,7 +397,7 @@ impl Liveness {
 // TODO: If these parameters are really dead, we could remove them, except for the
 // entry block which must match the function signature.
 for &arg in func.dfg.ebb_params(ebb) {
-get_or_create(&mut self.ranges, arg, isa, func, &enc_info);
+get_or_create(&mut self.ranges, arg, isa, func, &encinfo);
 }

 for inst in func.layout.ebb_insts(ebb) {
@@ -408,20 +408,18 @@ impl Liveness {
 // TODO: When we implement DCE, we can use the absence of a live range to indicate
 // an unused value.
 for &def in func.dfg.inst_results(inst) {
-get_or_create(&mut self.ranges, def, isa, func, &enc_info);
+get_or_create(&mut self.ranges, def, isa, func, &encinfo);
 }

 // Iterator of constraints, one per value operand.
 let encoding = func.encodings[inst];
-let mut operand_constraints = enc_info
-.operand_constraints(encoding)
-.map(|c| c.ins)
-.unwrap_or(&[])
-.iter();
+let operand_constraint_slice: &[OperandConstraint] =
+encinfo.operand_constraints(encoding).map_or(&[], |c| c.ins);
+let mut operand_constraints = operand_constraint_slice.iter();

 for &arg in func.dfg.inst_args(inst) {
 // Get the live range, create it as a dead range if necessary.
-let lr = get_or_create(&mut self.ranges, arg, isa, func, &enc_info);
+let lr = get_or_create(&mut self.ranges, arg, isa, func, &encinfo);

 // Extend the live range to reach this use.
 extend_to_use(
@@ -438,7 +436,7 @@ impl Liveness {
 // operands described by `operand_constraints`. Variable arguments are either
 // EBB arguments or call/return ABI arguments.
 if let Some(constraint) = operand_constraints.next() {
-lr.affinity.merge(constraint, &reg_info);
+lr.affinity.merge(constraint, &reginfo);
 }
 }
 }
@@ -217,8 +217,8 @@ impl<PO: ProgramOrder> GenLiveRange<PO> {
 /// Create a new live range for `value` defined at `def`.
 ///
 /// The live range will be created as dead, but it can be extended with `extend_in_ebb()`.
-pub fn new(value: Value, def: ProgramPoint, affinity: Affinity) -> GenLiveRange<PO> {
-GenLiveRange {
+pub fn new(value: Value, def: ProgramPoint, affinity: Affinity) -> Self {
+Self {
 value,
 affinity,
 def_begin: def,
@@ -81,8 +81,8 @@ pub struct Pressure {

 impl Pressure {
 /// Create a new register pressure tracker.
-pub fn new(reginfo: &RegInfo, usable: &RegisterSet) -> Pressure {
-let mut p = Pressure {
+pub fn new(reginfo: &RegInfo, usable: &RegisterSet) -> Self {
+let mut p = Self {
 aliased: 0,
 toprc: Default::default(),
 };
@@ -103,7 +103,7 @@ impl RegisterSet {
 /// of `other`.
 ///
 /// This assumes that unused bits are 1.
-pub fn interferes_with(&self, other: &RegisterSet) -> bool {
+pub fn interferes_with(&self, other: &Self) -> bool {
 self.avail.iter().zip(&other.avail).any(
 |(&x, &y)| (x | y) != !0,
 )
@@ -111,7 +111,7 @@ impl RegisterSet {

 /// Intersect this set of registers with `other`. This has the effect of removing any register
 /// units from this set that are not in `other`.
-pub fn intersect(&mut self, other: &RegisterSet) {
+pub fn intersect(&mut self, other: &Self) {
 for (x, &y) in self.avail.iter_mut().zip(&other.avail) {
 *x &= y;
 }
@@ -219,7 +219,7 @@ impl<'a> Context<'a> {

 self.reloads.insert(ReloadedValue {
 stack: cand.value,
-reg: reg,
+reg,
 });
 cand.value = reg;

@@ -151,8 +151,8 @@ pub struct Variable {
 }

 impl Variable {
-fn new_live(value: Value, constraint: RegClass, from: RegUnit, is_output: bool) -> Variable {
-Variable {
+fn new_live(value: Value, constraint: RegClass, from: RegUnit, is_output: bool) -> Self {
+Self {
 value,
 constraint,
 from: Some(from),
@@ -164,8 +164,8 @@ impl Variable {
 }
 }

-fn new_def(value: Value, constraint: RegClass, is_global: bool) -> Variable {
-Variable {
+fn new_def(value: Value, constraint: RegClass, is_global: bool) -> Self {
+Self {
 value,
 constraint,
 from: None,
@@ -280,7 +280,7 @@ pub enum Move {

 impl Move {
 /// Create a register move from an assignment, but not for identity assignments.
-fn with_assignment(a: &Assignment) -> Option<Move> {
+fn with_assignment(a: &Assignment) -> Option<Self> {
 if a.from != a.to {
 Some(Move::Reg {
 value: a.value,
@@ -363,7 +363,7 @@ impl<'a> Context<'a> {
 self.cur.isa.regclass_for_abi_type(abi.value_type).into(),
 true,
 ),
-Affinity::None => panic!("Missing affinity for {}", arg),
+Affinity::Unassigned => panic!("Missing affinity for {}", arg),
 };
 let mut reguse = RegUse::new(arg, fixed_args + idx, rci);
 reguse.fixed = true;
@@ -393,10 +393,9 @@ impl<'a> Context<'a> {
 } else if ru.fixed {
 // This is a fixed register use which doesn't necessarily require a copy.
 // Make a copy only if this is not the first use of the value.
-self.reg_uses
-.get(i.wrapping_sub(1))
-.map(|ru2| ru2.value == ru.value)
-.unwrap_or(false)
+self.reg_uses.get(i.wrapping_sub(1)).map_or(false, |ru2| {
+ru2.value == ru.value
+})
 } else {
 false
 };
@@ -567,8 +566,8 @@ struct RegUse {
 }

 impl RegUse {
-fn new(value: Value, idx: usize, rci: RegClassIndex) -> RegUse {
-RegUse {
+fn new(value: Value, idx: usize, rci: RegClassIndex) -> Self {
+Self {
 value,
 opidx: idx as u16,
 rci,
@@ -101,10 +101,9 @@ impl VirtRegs {
 where
 'a: 'b,
 {
-self.get(*value).map(|vr| self.values(vr)).unwrap_or_else(
-|| {
-ref_slice(value)
-},
+self.get(*value).map_or_else(
+|| ref_slice(value),
+|vr| self.values(vr),
 )
 }

@@ -257,7 +256,7 @@ enum UFEntry {
 /// A singleton set is the same as a set with rank 0. It contains only the leader value.
 impl UFEntry {
 /// Decode a table entry.
-fn decode(x: i32) -> UFEntry {
+fn decode(x: i32) -> Self {
 if x < 0 {
 UFEntry::Link(Value::new((!x) as usize))
 } else {
@@ -45,7 +45,7 @@ pub enum CtonError {
 pub type CtonResult = Result<(), CtonError>;

 impl From<verifier::Error> for CtonError {
-fn from(e: verifier::Error) -> CtonError {
+fn from(e: verifier::Error) -> Self {
 CtonError::Verifier(e)
 }
 }
@@ -50,8 +50,8 @@ pub struct Builder {

 impl Builder {
 /// Create a new builder with defaults and names from the given template.
-pub fn new(tmpl: &'static detail::Template) -> Builder {
-Builder {
+pub fn new(tmpl: &'static detail::Template) -> Self {
+Self {
 template: tmpl,
 bytes: tmpl.defaults.into(),
 }
@@ -102,8 +102,7 @@ impl<'a> FlagsVerifier<'a> {
 if self.encinfo
 .as_ref()
 .and_then(|ei| ei.operand_constraints(self.func.encodings[inst]))
-.map(|c| c.clobbers_flags)
-.unwrap_or(false) && live_val.is_some()
+.map_or(false, |c| c.clobbers_flags) && live_val.is_some()
 {
 return err!(inst, "encoding clobbers live CPU flags in {}", live);
 }
@@ -78,7 +78,7 @@ impl<'a> LivenessVerifier<'a> {

 if encoding.is_legal() {
 // A legal instruction is not allowed to define ghost values.
-if lr.affinity.is_none() {
+if lr.affinity.is_unassigned() {
 return err!(
 inst,
 "{} is a ghost value defined by a real [{}] instruction",
@@ -86,7 +86,7 @@ impl<'a> LivenessVerifier<'a> {
 self.isa.encoding_info().display(encoding)
 );
 }
-} else if !lr.affinity.is_none() {
+} else if !lr.affinity.is_unassigned() {
 // A non-encoded instruction can only define ghost values.
 return err!(
 inst,
@@ -108,7 +108,7 @@ impl<'a> LivenessVerifier<'a> {
 }

 // A legal instruction is not allowed to depend on ghost values.
-if encoding.is_legal() && lr.affinity.is_none() {
+if encoding.is_legal() && lr.affinity.is_unassigned() {
 return err!(
 inst,
 "{} is a ghost value used by a real [{}] instruction",
@@ -200,7 +200,7 @@ impl<'a> Verifier<'a> {
 );
 }
 if is_last_inst && !is_terminator {
-return err!(ebb, "block does not end in a terminator instruction!");
+return err!(ebb, "block does not end in a terminator instruction");
 }

 // Instructions belong to the correct ebb.
@@ -237,9 +237,9 @@ impl<'a> Verifier<'a> {

 let fixed_results = inst_data.opcode().constraints().fixed_results();
 // var_results is 0 if we aren't a call instruction
-let var_results = dfg.call_signature(inst)
-.map(|sig| dfg.signatures[sig].returns.len())
-.unwrap_or(0);
+let var_results = dfg.call_signature(inst).map_or(0, |sig| {
+dfg.signatures[sig].returns.len()
+});
 let total_results = fixed_results + var_results;

 // All result values for multi-valued instructions are created
@@ -1156,7 +1156,7 @@ mod tests {
 macro_rules! assert_err_with_msg {
 ($e:expr, $msg:expr) => {
 match $e {
-Ok(_) => panic!("Expected an error!"),
+Ok(_) => panic!("Expected an error"),
 Err(Error { message, .. }) => {
 if !message.contains($msg) {
 #[cfg(feature = "std")]
@@ -22,7 +22,7 @@ pub fn write_function(w: &mut Write, func: &Function, isa: Option<&TargetIsa>) -
 let mut any = write_preamble(w, func, regs)?;
 for ebb in &func.layout {
 if any {
-writeln!(w, "")?;
+writeln!(w)?;
 }
 write_ebb(w, func, isa, ebb)?;
 any = true;
@@ -258,7 +258,7 @@ fn write_instruction(
 }

 write_operands(w, &func.dfg, isa, inst)?;
-writeln!(w, "")
+writeln!(w)
 }

 /// Write the operands of `inst` to `w` with a prepended space.
@@ -1,7 +1,7 @@
 [package]
 authors = ["The Cretonne Project Developers"]
 name = "cretonne-entity"
-version = "0.5.0"
+version = "0.5.1"
 description = "Data structures using entity references as mapping keys"
 license = "Apache-2.0"
 documentation = "https://cretonne.readthedocs.io/"
@@ -30,9 +30,20 @@
 //! `Vec`.

 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
 #![cfg_attr(feature = "cargo-clippy",
-allow(new_without_default, new_without_default_derive, redundant_field_names))]
+allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+float_arithmetic,
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+print_stdout,
+unicode_not_nfc,
+use_self,
+))]

 // Turns on no_std and alloc features if std is not available.
 #![cfg_attr(not(feature = "std"), no_std)]
@@ -91,7 +102,7 @@ macro_rules! entity_impl {

 impl $crate::__core::fmt::Display for $entity {
 fn fmt(&self, f: &mut $crate::__core::fmt::Formatter) -> $crate::__core::fmt::Result {
-write!(f, "{}{}", $display_prefix, self.0)
+write!(f, concat!($display_prefix, "{}"), self.0)
 }
 }

@@ -49,7 +49,7 @@ where
 pub fn with_default(default: V) -> Self {
 Self {
 elems: Vec::new(),
-default: default,
+default,
 unused: PhantomData,
 }
 }
@@ -28,7 +28,7 @@ impl<T: ReservedValue> PackedOption<T> {

 /// Returns `true` if the packed option is a `Some` value.
 pub fn is_some(&self) -> bool {
-!self.is_none()
+self.0 != T::reserved_value()
 }

 /// Expand the packed option into a normal `Option`.
@@ -62,14 +62,14 @@ impl<T: ReservedValue> PackedOption<T> {

 impl<T: ReservedValue> Default for PackedOption<T> {
 /// Create a default packed option representing `None`.
-fn default() -> PackedOption<T> {
+fn default() -> Self {
 PackedOption(T::reserved_value())
 }
 }

 impl<T: ReservedValue> From<T> for PackedOption<T> {
 /// Convert `t` into a packed `Some(x)`.
-fn from(t: T) -> PackedOption<T> {
+fn from(t: T) -> Self {
 debug_assert!(
 t != T::reserved_value(),
 "Can't make a PackedOption from the reserved value."
@@ -80,7 +80,7 @@ impl<T: ReservedValue> From<T> for PackedOption<T> {

 impl<T: ReservedValue> From<Option<T>> for PackedOption<T> {
 /// Convert an option into its packed equivalent.
-fn from(opt: Option<T>) -> PackedOption<T> {
+fn from(opt: Option<T>) -> Self {
 match opt {
 None => Self::default(),
 Some(t) => t.into(),
@@ -1,6 +1,6 @@
 [package]
 name = "cretonne-faerie"
-version = "0.5.0"
+version = "0.5.1"
 authors = ["The Cretonne Project Developers"]
 description = "Emit Cretonne output to native object files with Faerie"
 repository = "https://github.com/Cretonne/cretonne"
@@ -9,8 +9,8 @@ license = "Apache-2.0"
 readme = "README.md"

 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0" }
-cretonne-module = { path = "../module", version = "0.5.0" }
+cretonne-codegen = { path = "../codegen", version = "0.5.1" }
+cretonne-module = { path = "../module", version = "0.5.1" }
 faerie = "0.1.0"
 goblin = "0.0.14"
 failure = "0.1.1"
@@ -107,12 +107,14 @@ impl Backend for FaerieBackend {
 // that traps.
 let mut trap_sink = NullTrapSink {};

+unsafe {
 ctx.emit_to_memory(
+&*self.isa,
 code.as_mut_ptr(),
 &mut reloc_sink,
 &mut trap_sink,
-&*self.isa,
-);
+)
+};
 }

 self.artifact.define(name, code).expect(
@@ -270,7 +272,7 @@ impl<'a> RelocSink for FaerieRelocSink<'a> {
 &self.namespace.get_data_decl(name).name
 };
 let addend_i32 = addend as i32;
-debug_assert!(addend_i32 as i64 == addend);
+debug_assert!(i64::from(addend_i32) == addend);
 let raw_reloc = container::raw_relocation(reloc, self.format);
 self.artifact
 .link_with(
@@ -3,6 +3,20 @@
 //! Users of this module should not have to depend on faerie directly.

 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
+#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
+#![cfg_attr(feature = "cargo-clippy",
+allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+float_arithmetic,
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+print_stdout,
+unicode_not_nfc,
+use_self,
+))]

 extern crate cretonne_codegen;
 extern crate cretonne_module;
@@ -1,7 +1,7 @@
 [package]
 name = "cretonne-filetests"
 authors = ["The Cretonne Project Developers"]
-version = "0.5.0"
+version = "0.5.1"
 description = "Test driver and implementations of the filetest commands"
 license = "Apache-2.0"
 documentation = "http://cretonne.readthedocs.io/en/latest/testing.html#file-tests"
@@ -9,7 +9,7 @@ repository = "https://github.com/cretonne/cretonne"
 publish = false

 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0" }
-cretonne-reader = { path = "../reader", version = "0.5.0" }
+cretonne-codegen = { path = "../codegen", version = "0.5.1" }
+cretonne-reader = { path = "../reader", version = "0.5.1" }
 filecheck = "0.3.0"
 num_cpus = "1.8.0"
@@ -3,10 +3,20 @@
 //! This crate contains the main test driver as well as implementations of the
 //! available filetest commands.

+#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
 #![cfg_attr(feature="cargo-clippy", allow(
 type_complexity,
 // Rustfmt 0.9.0 is at odds with this lint:
 block_in_if_condition_stmt))]
+#![cfg_attr(feature="cargo-clippy", warn(
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+unicode_not_nfc,
+use_self,
+))]

 #[macro_use(dbg)]
 extern crate cretonne_codegen;
@@ -122,7 +122,7 @@ fn run_one_test<'a>(
 ) -> Result<()> {
 let (test, flags, isa) = tuple;
 let name = format!("{}({})", test.name(), func.name);
-dbg!("Test: {} {}", name, isa.map(TargetIsa::name).unwrap_or("-"));
+dbg!("Test: {} {}", name, isa.map_or("-", TargetIsa::name));

 context.flags = flags;
 context.isa = isa;
@@ -125,7 +125,7 @@ fn filecheck_text(func: &Function, domtree: &DominatorTree) -> result::Result<St
 for &ebb in domtree.cfg_postorder() {
 write!(s, " {}", ebb)?;
 }
-writeln!(s, "")?;
+writeln!(s)?;

 // Compute and print out a pre-order of the dominator tree.
 writeln!(s, "domtree_preorder {{")?;
@@ -140,7 +140,7 @@ fn filecheck_text(func: &Function, domtree: &DominatorTree) -> result::Result<St
 write!(s, " {}", ch)?;
 stack.push(ch);
 }
-writeln!(s, "")?;
+writeln!(s)?;
 // Reverse the children we just pushed so we'll pop them in order.
 stack[i..].reverse();
 }
@@ -1,7 +1,7 @@
 [package]
 authors = ["The Cretonne Project Developers"]
 name = "cretonne-frontend"
-version = "0.5.0"
+version = "0.5.1"
 description = "Cretonne IR builder helper"
 license = "Apache-2.0"
 documentation = "https://cretonne.readthedocs.io/"
@@ -9,7 +9,7 @@ repository = "https://github.com/cretonne/cretonne"
 readme = "README.md"

 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0", default-features = false }
+cretonne-codegen = { path = "../codegen", version = "0.5.1", default-features = false }

 [features]
 default = ["std"]
@@ -238,9 +238,9 @@ where
 ) -> FunctionBuilder<'a, Variable> {
 debug_assert!(func_ctx.is_empty());
 FunctionBuilder {
-func: func,
+func,
 srcloc: Default::default(),
-func_ctx: func_ctx,
+func_ctx,
 position: Position::default(),
 }
 }
@@ -128,7 +128,18 @@
 //! ```

 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
-#![cfg_attr(feature = "cargo-clippy", allow(new_without_default, redundant_field_names))]
+#![warn(unused_import_braces, unstable_features)]
+#![cfg_attr(feature = "cargo-clippy", allow(new_without_default))]
+#![cfg_attr(feature="cargo-clippy", warn(
+float_arithmetic,
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+print_stdout,
+unicode_not_nfc,
+use_self,
+))]
 #![cfg_attr(not(feature = "std"), no_std)]
 #![cfg_attr(not(feature = "std"), feature(alloc))]

@@ -361,7 +361,7 @@ where
 let block = self.blocks.push(BlockData::EbbHeader(EbbHeaderBlockData {
 predecessors: Vec::new(),
 sealed: false,
-ebb: ebb,
+ebb,
 undef_variables: Vec::new(),
 }));
 self.ebb_headers[ebb] = block.into();
@@ -1,6 +1,6 @@
 [package]
 name = "cretonne-module"
-version = "0.5.0"
+version = "0.5.1"
 authors = ["The Cretonne Project Developers"]
 description = "Support for linking functions and data with Cretonne"
 repository = "https://github.com/Cretonne/cretonne"
@@ -9,8 +9,8 @@ license = "Apache-2.0"
 readme = "README.md"

 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0" }
-cretonne-entity = { path = "../entity", version = "0.5.0" }
+cretonne-codegen = { path = "../codegen", version = "0.5.1" }
+cretonne-entity = { path = "../entity", version = "0.5.1" }

 [badges]
 maintenance = { status = "experimental" }
@@ -1,6 +1,20 @@
 //! Top-level lib.rs for `cretonne_module`.

 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
+#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
+#![cfg_attr(feature = "cargo-clippy",
+allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+float_arithmetic,
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+print_stdout,
+unicode_not_nfc,
+use_self,
+))]

 extern crate cretonne_codegen;
 #[macro_use]
@@ -36,7 +36,7 @@ pub enum Linkage {
 }

 impl Linkage {
-fn merge(a: Linkage, b: Linkage) -> Linkage {
+fn merge(a: Self, b: Self) -> Self {
 match a {
 Linkage::Export => Linkage::Export,
 Linkage::Preemptible => {
@@ -1,6 +1,6 @@
 [package]
 name = "cretonne-native"
-version = "0.5.0"
+version = "0.5.1"
 authors = ["The Cretonne Project Developers"]
 description = "Support for targeting the host with Cretonne"
 repository = "https://github.com/cretonne/cretonne"
@@ -8,7 +8,7 @@ license = "Apache-2.0"
 readme = "README.md"

 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0", default-features = false }
+cretonne-codegen = { path = "../codegen", version = "0.5.1", default-features = false }

 [target.'cfg(any(target_arch = "x86", target_arch = "x86_64"))'.dependencies]
 raw-cpuid = "3.0.0"
@@ -2,6 +2,20 @@
 //! Cretonne to generate code to run on the same machine.

 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
+#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
+#![cfg_attr(feature = "cargo-clippy",
+allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+float_arithmetic,
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+print_stdout,
+unicode_not_nfc,
+use_self,
+))]
 #![cfg_attr(not(feature = "std"), no_std)]

 extern crate cretonne_codegen;
@@ -1,7 +1,7 @@
 [package]
 authors = ["The Cretonne Project Developers"]
 name = "cretonne-reader"
-version = "0.5.0"
+version = "0.5.1"
 description = "Cretonne textual IR reader"
 license = "Apache-2.0"
 documentation = "https://cretonne.readthedocs.io/"
@@ -9,7 +9,7 @@ repository = "https://github.com/cretonne/cretonne"
 readme = "README.md"

 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0" }
+cretonne-codegen = { path = "../codegen", version = "0.5.1" }

 [badges]
 maintenance = { status = "experimental" }
@@ -4,6 +4,20 @@
 //! testing Cretonne, but is not essential for a JIT compiler.

 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
+#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
+#![cfg_attr(feature = "cargo-clippy",
+allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+float_arithmetic,
+mut_mut,
+nonminimal_bool,
+option_map_unwrap_or,
+option_map_unwrap_or_else,
+print_stdout,
+unicode_not_nfc,
+use_self,
+))]

 extern crate cretonne_codegen;

@@ -54,7 +54,7 @@ impl<'a> Display for TestCommand<'a> {
 for opt in &self.options {
 write!(f, " {}", opt)?;
 }
-writeln!(f, "")
+writeln!(f)
 }
 }

@@ -1,6 +1,6 @@
 [package]
 name = "cretonne-simplejit"
-version = "0.5.0"
+version = "0.5.1"
 authors = ["The Cretonne Project Developers"]
 description = "A simple JIT library backed by Cretonne"
 repository = "https://github.com/Cretonne/cretonne"
@@ -9,9 +9,9 @@ license = "Apache-2.0"
 readme = "README.md"

 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0" }
-cretonne-module = { path = "../module", version = "0.5.0" }
-cretonne-native = { path = "../native", version = "0.5.0" }
+cretonne-codegen = { path = "../codegen", version = "0.5.1" }
+cretonne-module = { path = "../module", version = "0.5.1" }
+cretonne-native = { path = "../native", version = "0.5.1" }
 region = "0.2.0"
 libc = "0.2.40"
 errno = "0.2.3"
@@ -103,7 +103,7 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
|
|||||||
// Ignore traps for now. For now, frontends should just avoid generating code
|
// Ignore traps for now. For now, frontends should just avoid generating code
|
||||||
// that traps.
|
// that traps.
|
||||||
let mut trap_sink = NullTrapSink {};
|
let mut trap_sink = NullTrapSink {};
|
||||||
ctx.emit_to_memory(ptr, &mut reloc_sink, &mut trap_sink, &*self.isa);
|
unsafe { ctx.emit_to_memory(&*self.isa, ptr, &mut reloc_sink, &mut trap_sink) };
|
||||||
|
|
||||||
Ok(Self::CompiledFunction {
|
Ok(Self::CompiledFunction {
|
||||||
code: ptr,
|
code: ptr,
|
||||||
@@ -238,14 +238,17 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
             match reloc {
                 Reloc::Abs4 => {
                     // TODO: Handle overflow.
+                    #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
                     unsafe { write_unaligned(at as *mut u32, what as u32) };
                 }
                 Reloc::Abs8 => {
+                    #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
                    unsafe { write_unaligned(at as *mut u64, what as u64) };
                 }
                 Reloc::X86PCRel4 => {
                     // TODO: Handle overflow.
                     let pcrel = ((what as isize) - (at as isize)) as i32;
+                    #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
                     unsafe { write_unaligned(at as *mut i32, pcrel) };
                 }
                 Reloc::X86GOTPCRel4 |
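Clippy's cast_ptr_alignment lint fires when a `*mut u8` is cast to a pointer type with stricter alignment, which is why the `allow` attributes are added here; the stores themselves are already safe in that respect because `write_unaligned` does not require an aligned destination. A minimal sketch (not the backend's code) of the same patch pattern:

    use std::ptr::write_unaligned;

    fn main() {
        // Write a 32-bit value at a deliberately unaligned position in a byte
        // buffer, as the absolute-relocation arms above do inside code memory.
        let mut code = vec![0u8; 8];
        let what: u32 = 0xDEAD_BEEF;
        let at = unsafe { code.as_mut_ptr().add(1) }; // not 4-byte aligned
        unsafe { write_unaligned(at as *mut u32, what) };
        let read_back = u32::from_ne_bytes([code[1], code[2], code[3], code[4]]);
        assert_eq!(read_back, what);
    }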
@@ -295,9 +298,11 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
             match reloc {
                 Reloc::Abs4 => {
                     // TODO: Handle overflow.
+                    #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
                     unsafe { write_unaligned(at as *mut u32, what as u32) };
                 }
                 Reloc::Abs8 => {
+                    #[cfg_attr(feature = "cargo-clippy", allow(cast_ptr_alignment))]
                     unsafe { write_unaligned(at as *mut u64, what as u64) };
                 }
                 Reloc::X86PCRel4 |
@@ -1,6 +1,20 @@
 //! Top-level lib.rs for `cretonne_simplejit`.
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
+#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
+#![cfg_attr(feature = "cargo-clippy",
+            allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+                float_arithmetic,
+                mut_mut,
+                nonminimal_bool,
+                option_map_unwrap_or,
+                option_map_unwrap_or_else,
+                print_stdout,
+                unicode_not_nfc,
+                use_self,
+                ))]
 
 extern crate cretonne_codegen;
 extern crate cretonne_module;
@@ -25,7 +25,7 @@ impl PtrLen {
             let err = libc::posix_memalign(&mut ptr, page_size, alloc_size);
             if err == 0 {
                 Ok(Self {
-                    ptr: mem::transmute(ptr),
+                    ptr: ptr as *mut u8,
                     len: alloc_size,
                 })
             } else {
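`posix_memalign` hands back a `*mut c_void`, and converting that to `*mut u8` only needs an `as` cast; `mem::transmute` works but makes a stronger, unchecked claim and hides the intent. A small illustration (not the crate's code) of the cast the new line uses:

    use std::os::raw::c_void;

    fn main() {
        let mut value: u64 = 0;
        // Erase the pointer type, then recover a byte pointer with plain casts.
        let erased: *mut c_void = &mut value as *mut u64 as *mut c_void;
        let bytes: *mut u8 = erased as *mut u8; // what `ptr as *mut u8` does above
        unsafe { *bytes = 0x2a };
        // The first byte of the native-order representation was written.
        assert_eq!(value.to_ne_bytes()[0], 0x2a);
    }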
@@ -1,7 +1,7 @@
 [package]
 authors = ["The Cretonne Project Developers"]
 name = "cretonne"
-version = "0.5.0"
+version = "0.5.1"
 description = "Umbrella for commonly-used cretonne crates"
 license = "Apache-2.0"
 documentation = "https://cretonne.readthedocs.io/"
@@ -10,8 +10,8 @@ readme = "README.md"
 keywords = ["compile", "compiler", "jit"]
 
 [dependencies]
-cretonne-codegen = { path = "../codegen", version = "0.5.0" }
-cretonne-frontend = { path = "../frontend", version = "0.5.0" }
+cretonne-codegen = { path = "../codegen", version = "0.5.1" }
+cretonne-frontend = { path = "../frontend", version = "0.5.1" }
 
 [badges]
 maintenance = { status = "experimental" }
@@ -1,7 +1,20 @@
 //! Cretonne umbrella crate, providing a convenient one-line dependency.
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
+#![cfg_attr(feature = "cargo-clippy",
+            allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+                float_arithmetic,
+                mut_mut,
+                nonminimal_bool,
+                option_map_unwrap_or,
+                option_map_unwrap_or_else,
+                print_stdout,
+                unicode_not_nfc,
+                use_self,
+                ))]
 
 pub extern crate cretonne_codegen;
 pub extern crate cretonne_frontend;
@@ -1,6 +1,6 @@
 [package]
 name = "cretonne-wasm"
-version = "0.5.0"
+version = "0.5.1"
 authors = ["The Cretonne Project Developers"]
 description = "Translator from WebAssembly to Cretonne IR"
 repository = "https://github.com/cretonne/cretonne"
@@ -9,9 +9,9 @@ readme = "README.md"
 keywords = ["webassembly", "wasm"]
 
 [dependencies]
-wasmparser = { version = "0.16.0", default_features = false }
-cretonne-codegen = { path = "../codegen", version = "0.5.0", default_features = false }
-cretonne-frontend = { path = "../frontend", version = "0.5.0", default_features = false }
+wasmparser = { version = "0.16.0", default-features = false }
+cretonne-codegen = { path = "../codegen", version = "0.5.1", default-features = false }
+cretonne-frontend = { path = "../frontend", version = "0.5.1", default-features = false }
 
 [dependencies.hashmap_core]
 version = "0.1.1"
@@ -949,7 +949,7 @@ fn get_heap_addr(
     heap: ir::Heap,
     addr32: ir::Value,
     offset: u32,
-    addr_ty: ir::Type,
+    addr_ty: Type,
     builder: &mut FunctionBuilder<Variable>,
 ) -> (ir::Value, i32) {
     use std::cmp::min;
@@ -985,7 +985,7 @@ fn get_heap_addr(
 fn translate_load<FE: FuncEnvironment + ?Sized>(
     offset: u32,
     opcode: ir::Opcode,
-    result_ty: ir::Type,
+    result_ty: Type,
     builder: &mut FunctionBuilder<Variable>,
     state: &mut TranslationState,
     environ: &mut FE,
@@ -225,7 +225,7 @@ impl<'dummy_environment> FuncEnvironment for DummyFuncEnvironment<'dummy_environ
         args.push(vmctx, &mut pos.func.dfg.value_lists);
 
         pos.ins()
-            .CallIndirect(ir::Opcode::CallIndirect, ir::types::VOID, sig_ref, args)
+            .CallIndirect(ir::Opcode::CallIndirect, VOID, sig_ref, args)
             .0
     }
 
@@ -247,9 +247,7 @@ impl<'dummy_environment> FuncEnvironment for DummyFuncEnvironment<'dummy_environ
         args.extend(call_args.iter().cloned(), &mut pos.func.dfg.value_lists);
         args.push(vmctx, &mut pos.func.dfg.value_lists);
 
-        pos.ins()
-            .Call(ir::Opcode::Call, ir::types::VOID, callee, args)
-            .0
+        pos.ins().Call(ir::Opcode::Call, VOID, callee, args).0
     }
 
     fn translate_grow_memory(
@@ -10,8 +10,20 @@
 //! The main function of this module is [`translate_module`](fn.translate_module.html).
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
+#![warn(unused_import_braces, unstable_features)]
 #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
-#![cfg_attr(feature = "cargo-clippy", allow(new_without_default, redundant_field_names))]
+#![cfg_attr(feature = "cargo-clippy",
+            allow(new_without_default, new_without_default_derive))]
+#![cfg_attr(feature="cargo-clippy", warn(
+                float_arithmetic,
+                mut_mut,
+                nonminimal_bool,
+                option_map_unwrap_or,
+                option_map_unwrap_or_else,
+                print_stdout,
+                unicode_not_nfc,
+                use_self,
+                ))]
 
 #![cfg_attr(not(feature = "std"), no_std)]
 #![cfg_attr(not(feature = "std"), feature(alloc))]
@@ -241,8 +241,8 @@ pub fn parse_global_section(
         }
         let global = Global {
             ty: type_to_type(&content_type).unwrap(),
-            mutability: mutability,
-            initializer: initializer,
+            mutability,
+            initializer,
         };
         environ.declare_global(global);
         match *parser.read() {
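The last hunk switches to Rust's field-init shorthand: when a binding has the same name as the struct field, `field: field` can be written as just `field`. A small illustration with assumed types (not the crate's Global):

    struct Global {
        mutability: bool,
        initializer: i64,
    }

    fn main() {
        let mutability = true;
        let initializer = 0i64;
        // Equivalent to `Global { mutability: mutability, initializer: initializer }`.
        let global = Global { mutability, initializer };
        assert!(global.mutability);
        assert_eq!(global.initializer, 0);
    }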