Use clippy (#276)

* cton-util: fix some clippy unnecessary pass-by-value warnings

* clippy: ignore too many arguments / cyclomatic complexity in module

since these functions take args coming from the command line, I don't
think this is actually a valid lint; morally, the arguments all come
from one structure

* cton-util: take care of remaining clippy warnings

* cton-reader: fix all non-suspicious clippy warnings

* cton-reader: disable clippy at site of suspicious lint

* cton-frontend: disable clippy at the site of an invalid lint

* cton-frontend: fix clippy warnings, or ignore benign ones

* clippy: ignore the camelcase word WebAssembly in docs

* cton-wasm: fix clippy complaints or ignore benign ones

* cton-wasm tests: fix clippy complaints

* cretonne: starting point turns off all clippy warnings

* cretonne: clippy fixes, or lower allow() to source of problem

* cretonne: more clippy fixes

* cretonne: fix or disable needless_lifetimes lint

This linter is buggy when the declared lifetime is used for another type
constraint.

* cretonne: fix clippy complaint about Pass::NoPass

* rustfmt

* fix prev minor api changes clippy suggested

* add clippy to test-all

* cton-filetests: clippy fixes

* simplify clippy reporting in test-all

* cretonne: document clippy allows better

* cretonne: fix some more clippy lints

* cretonne: fix clippy lints (mostly doc comments)

* cretonne: allow all needless_lifetimes clippy warnings

remove overrides at the false positives

* rustfmt
This commit is contained in:
Pat Hickey
2018-03-22 13:10:41 -07:00
committed by Dan Gohman
parent 2b3df1a506
commit 03ee007624
51 changed files with 310 additions and 245 deletions

10
check-clippy.sh Executable file
View File

@@ -0,0 +1,10 @@
#!/bin/bash
set -euo pipefail
# Usage: check-clippy.sh [--install]
if cargo install --list | tee /dev/null | grep -q "^clippy v0"; then
exit 0
else
exit 1
fi

7
clippy-all.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
set -euo pipefail
# Check all sources with clippy.
# In the cton-util crate (root dir) clippy will only work with nightly cargo -
# there is a bug where it will reject the commands passed to it by cargo 0.25.0
cargo +nightly clippy --all

1
cranelift/clippy.toml Normal file
View File

@@ -0,0 +1 @@
doc-valid-idents = [ "WebAssembly", "NaN", "SetCC" ]

View File

@@ -7,7 +7,7 @@ use cton_reader::parse_functions;
use CommandResult; use CommandResult;
use utils::read_to_string; use utils::read_to_string;
pub fn run(files: Vec<String>) -> CommandResult { pub fn run(files: &[String]) -> CommandResult {
for (i, f) in files.into_iter().enumerate() { for (i, f) in files.into_iter().enumerate() {
if i != 0 { if i != 0 {
println!(); println!();
@@ -17,7 +17,7 @@ pub fn run(files: Vec<String>) -> CommandResult {
Ok(()) Ok(())
} }
fn cat_one(filename: String) -> CommandResult { fn cat_one(filename: &str) -> CommandResult {
let buffer = read_to_string(&filename).map_err( let buffer = read_to_string(&filename).map_err(
|e| format!("{}: {}", filename, e), |e| format!("{}: {}", filename, e),
)?; )?;

View File

@@ -49,23 +49,23 @@ impl binemit::RelocSink for PrintRelocs {
pub fn run( pub fn run(
files: Vec<String>, files: Vec<String>,
flag_print: bool, flag_print: bool,
flag_set: Vec<String>, flag_set: &[String],
flag_isa: String, flag_isa: &str,
) -> Result<(), String> { ) -> Result<(), String> {
let parsed = parse_sets_and_isa(flag_set, flag_isa)?; let parsed = parse_sets_and_isa(flag_set, flag_isa)?;
for filename in files { for filename in files {
let path = Path::new(&filename); let path = Path::new(&filename);
let name = String::from(path.as_os_str().to_string_lossy()); let name = String::from(path.as_os_str().to_string_lossy());
handle_module(flag_print, path.to_path_buf(), name, parsed.as_fisa())?; handle_module(flag_print, &path.to_path_buf(), &name, parsed.as_fisa())?;
} }
Ok(()) Ok(())
} }
fn handle_module( fn handle_module(
flag_print: bool, flag_print: bool,
path: PathBuf, path: &PathBuf,
name: String, name: &str,
fisa: FlagsOrIsa, fisa: FlagsOrIsa,
) -> Result<(), String> { ) -> Result<(), String> {
let buffer = read_to_string(&path).map_err( let buffer = read_to_string(&path).map_err(

View File

@@ -86,15 +86,20 @@ fn cton_util() -> CommandResult {
// Find the sub-command to execute. // Find the sub-command to execute.
let result = if args.cmd_test { let result = if args.cmd_test {
cton_filetests::run(args.flag_verbose, args.arg_file).map(|_time| ()) cton_filetests::run(args.flag_verbose, &args.arg_file).map(|_time| ())
} else if args.cmd_cat { } else if args.cmd_cat {
cat::run(args.arg_file) cat::run(&args.arg_file)
} else if args.cmd_filecheck { } else if args.cmd_filecheck {
rsfilecheck::run(args.arg_file, args.flag_verbose) rsfilecheck::run(&args.arg_file, args.flag_verbose)
} else if args.cmd_print_cfg { } else if args.cmd_print_cfg {
print_cfg::run(args.arg_file) print_cfg::run(&args.arg_file)
} else if args.cmd_compile { } else if args.cmd_compile {
compile::run(args.arg_file, args.flag_print, args.flag_set, args.flag_isa) compile::run(
args.arg_file,
args.flag_print,
&args.flag_set,
&args.flag_isa,
)
} else if args.cmd_wasm { } else if args.cmd_wasm {
wasm::run( wasm::run(
args.arg_file, args.arg_file,
@@ -102,8 +107,8 @@ fn cton_util() -> CommandResult {
args.flag_just_decode, args.flag_just_decode,
args.flag_check_translation, args.flag_check_translation,
args.flag_print, args.flag_print,
args.flag_set, &args.flag_set,
args.flag_isa, &args.flag_isa,
args.flag_print_size, args.flag_print_size,
) )
} else { } else {

View File

@@ -8,7 +8,7 @@ use cretonne::cfg_printer::CFGPrinter;
use cton_reader::parse_functions; use cton_reader::parse_functions;
use utils::read_to_string; use utils::read_to_string;
pub fn run(files: Vec<String>) -> CommandResult { pub fn run(files: &[String]) -> CommandResult {
for (i, f) in files.into_iter().enumerate() { for (i, f) in files.into_iter().enumerate() {
if i != 0 { if i != 0 {
println!(); println!();
@@ -18,8 +18,8 @@ pub fn run(files: Vec<String>) -> CommandResult {
Ok(()) Ok(())
} }
fn print_cfg(filename: String) -> CommandResult { fn print_cfg(filename: &str) -> CommandResult {
let buffer = read_to_string(&filename).map_err( let buffer = read_to_string(filename).map_err(
|e| format!("{}: {}", filename, e), |e| format!("{}: {}", filename, e),
)?; )?;
let items = parse_functions(&buffer).map_err( let items = parse_functions(&buffer).map_err(

View File

@@ -7,7 +7,7 @@ use utils::read_to_string;
use filecheck::{CheckerBuilder, Checker, NO_VARIABLES}; use filecheck::{CheckerBuilder, Checker, NO_VARIABLES};
use std::io::{self, Read}; use std::io::{self, Read};
pub fn run(files: Vec<String>, verbose: bool) -> CommandResult { pub fn run(files: &[String], verbose: bool) -> CommandResult {
if files.is_empty() { if files.is_empty() {
return Err("No check files".to_string()); return Err("No check files".to_string());
} }

View File

@@ -41,10 +41,7 @@ impl OwnedFlagsOrIsa {
} }
/// Parse "set" and "isa" commands. /// Parse "set" and "isa" commands.
pub fn parse_sets_and_isa( pub fn parse_sets_and_isa(flag_set: &[String], flag_isa: &str) -> Result<OwnedFlagsOrIsa, String> {
flag_set: Vec<String>,
flag_isa: String,
) -> Result<OwnedFlagsOrIsa, String> {
let mut flag_builder = settings::builder(); let mut flag_builder = settings::builder();
parse_options( parse_options(
flag_set.iter().map(|x| x.as_str()), flag_set.iter().map(|x| x.as_str()),

View File

@@ -1,6 +1,7 @@
//! CLI tool to use the functions provided by the [cretonne-wasm](../cton_wasm/index.html) crate. //! CLI tool to use the functions provided by the [cretonne-wasm](../cton_wasm/index.html) crate.
//! //!
//! Reads Wasm binary files, translates the functions' code to Cretonne IL. //! Reads Wasm binary files, translates the functions' code to Cretonne IL.
#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments, cyclomatic_complexity))]
use cton_wasm::{translate_module, DummyEnvironment, ModuleEnvironment}; use cton_wasm::{translate_module, DummyEnvironment, ModuleEnvironment};
use std::path::PathBuf; use std::path::PathBuf;
@@ -38,8 +39,8 @@ pub fn run(
flag_just_decode: bool, flag_just_decode: bool,
flag_check_translation: bool, flag_check_translation: bool,
flag_print: bool, flag_print: bool,
flag_set: Vec<String>, flag_set: &[String],
flag_isa: String, flag_isa: &str,
flag_print_size: bool, flag_print_size: bool,
) -> Result<(), String> { ) -> Result<(), String> {
let parsed = parse_sets_and_isa(flag_set, flag_isa)?; let parsed = parse_sets_and_isa(flag_set, flag_isa)?;
@@ -53,8 +54,8 @@ pub fn run(
flag_check_translation, flag_check_translation,
flag_print, flag_print,
flag_print_size, flag_print_size,
path.to_path_buf(), &path.to_path_buf(),
name, &name,
parsed.as_fisa(), parsed.as_fisa(),
)?; )?;
} }
@@ -67,8 +68,8 @@ fn handle_module(
flag_check_translation: bool, flag_check_translation: bool,
flag_print: bool, flag_print: bool,
flag_print_size: bool, flag_print_size: bool,
path: PathBuf, path: &PathBuf,
name: String, name: &str,
fisa: FlagsOrIsa, fisa: FlagsOrIsa,
) -> Result<(), String> { ) -> Result<(), String> {
let mut terminal = term::stdout().unwrap(); let mut terminal = term::stdout().unwrap();
@@ -153,29 +154,27 @@ fn handle_module(
context.func = func.clone(); context.func = func.clone();
if flag_check_translation { if flag_check_translation {
context.verify(fisa).map_err(|err| { context.verify(fisa).map_err(|err| {
pretty_verifier_error(&context.func, fisa.isa, err) pretty_verifier_error(&context.func, fisa.isa, &err)
})?; })?;
} else { } else if let Some(isa) = fisa.isa {
if let Some(isa) = fisa.isa { let compiled_size = context.compile(isa).map_err(|err| {
let compiled_size = context.compile(isa).map_err(|err| { pretty_error(&context.func, fisa.isa, err)
pretty_error(&context.func, fisa.isa, err) })?;
})?; if flag_print_size {
if flag_print_size { println!(
println!( "Function #{} code size: {} bytes",
"Function #{} code size: {} bytes", func_index,
func_index, compiled_size
compiled_size );
); total_module_code_size += compiled_size;
total_module_code_size += compiled_size; println!(
println!( "Function #{} bytecode size: {} bytes",
"Function #{} bytecode size: {} bytes", func_index,
func_index, dummy_environ.func_bytecode_sizes[func_index]
dummy_environ.func_bytecode_sizes[func_index] );
);
}
} else {
return Err(String::from("compilation requires a target isa"));
} }
} else {
return Err(String::from("compilation requires a target isa"));
} }
if flag_print { if flag_print {
vprintln!(flag_verbose, ""); vprintln!(flag_verbose, "");
@@ -194,10 +193,7 @@ fn handle_module(
if !flag_check_translation && flag_print_size { if !flag_check_translation && flag_print_size {
println!("Total module code size: {} bytes", total_module_code_size); println!("Total module code size: {} bytes", total_module_code_size);
let total_bytecode_size = dummy_environ.func_bytecode_sizes.iter().fold( let total_bytecode_size: usize = dummy_environ.func_bytecode_sizes.iter().sum();
0,
|sum, x| sum + x,
);
println!("Total module bytecode size: {} bytes", total_bytecode_size); println!("Total module bytecode size: {} bytes", total_bytecode_size);
} }

View File

@@ -55,4 +55,12 @@ cargo test --all --release
banner "Rust documentation: $topdir/target/doc/cretonne/index.html" banner "Rust documentation: $topdir/target/doc/cretonne/index.html"
cargo doc cargo doc
# Run clippy if we have it.
banner "Rust linter"
if $topdir/check-clippy.sh; then
$topdir/clippy-all.sh --write-mode=diff
else
echo "\`cargo +nightly install clippy\` for optional rust linting"
fi
banner "OK" banner "OK"

View File

@@ -3,5 +3,5 @@ extern crate cton_filetests;
#[test] #[test]
fn filetests() { fn filetests() {
// Run all the filetests in the following directories. // Run all the filetests in the following directories.
cton_filetests::run(false, vec!["filetests".into(), "docs".into()]).expect("test harness"); cton_filetests::run(false, &["filetests".into(), "docs".into()]).expect("test harness");
} }

View File

@@ -28,7 +28,7 @@ def gen_enum_types(sgrp, fmt):
if not isinstance(setting, EnumSetting): if not isinstance(setting, EnumSetting):
continue continue
ty = camel_case(setting.name) ty = camel_case(setting.name)
fmt.doc_comment('Values for {}.'.format(setting)) fmt.doc_comment('Values for `{}`.'.format(setting))
fmt.line('#[derive(Debug, PartialEq, Eq)]') fmt.line('#[derive(Debug, PartialEq, Eq)]')
with fmt.indented('pub enum {} {{'.format(ty), '}'): with fmt.indented('pub enum {} {{'.format(ty), '}'):
for v in setting.values: for v in setting.values:

View File

@@ -437,7 +437,7 @@ impl<F: Forest> Path<F> {
// Discard the root node if it has shrunk to a single sub-tree. // Discard the root node if it has shrunk to a single sub-tree.
let mut ns = 0; let mut ns = 0;
while let &NodeData::Inner { size: 0, ref tree, .. } = &pool[self.node[ns]] { while let NodeData::Inner { size: 0, ref tree, .. } = pool[self.node[ns]] {
ns += 1; ns += 1;
self.node[ns] = tree[0]; self.node[ns] = tree[0];
} }
@@ -529,12 +529,11 @@ impl<F: Forest> Path<F> {
// current entry[level] was one off the end of the node, it will now point at a proper // current entry[level] was one off the end of the node, it will now point at a proper
// entry. // entry.
debug_assert!(usize::from(self.entry[level]) < pool[self.node[level]].entries()); debug_assert!(usize::from(self.entry[level]) < pool[self.node[level]].entries());
} else {
} else if usize::from(self.entry[level]) >= pool[self.node[level]].entries() {
// There's no right sibling at this level, so the node can't be rebalanced. // There's no right sibling at this level, so the node can't be rebalanced.
// Check if we are in an off-the-end position. // Check if we are in an off-the-end position.
if usize::from(self.entry[level]) >= pool[self.node[level]].entries() { self.size = 0;
self.size = 0;
}
} }
} }

View File

@@ -57,6 +57,7 @@ impl<F: Forest> NodePool<F> {
pub fn free_tree(&mut self, node: Node) { pub fn free_tree(&mut self, node: Node) {
if let NodeData::Inner { size, tree, .. } = self[node] { if let NodeData::Inner { size, tree, .. } = self[node] {
// Note that we have to capture `tree` by value to avoid borrow checker trouble. // Note that we have to capture `tree` by value to avoid borrow checker trouble.
#[cfg_attr(feature = "cargo-clippy", allow(needless_range_loop))]
for i in 0..usize::from(size + 1) { for i in 0..usize::from(size + 1) {
// Recursively free sub-trees. This recursion can never be deeper than `MAX_PATH`, // Recursively free sub-trees. This recursion can never be deeper than `MAX_PATH`,
// and since most trees have less than a handful of nodes, it is worthwhile to // and since most trees have less than a handful of nodes, it is worthwhile to

View File

@@ -76,14 +76,13 @@ pub fn relax_branches(func: &mut Function, isa: &TargetIsa) -> Result<CodeOffset
if let Some(range) = encinfo.branch_range(enc) { if let Some(range) = encinfo.branch_range(enc) {
if let Some(dest) = cur.func.dfg[inst].branch_destination() { if let Some(dest) = cur.func.dfg[inst].branch_destination() {
let dest_offset = cur.func.offsets[dest]; let dest_offset = cur.func.offsets[dest];
if !range.contains(offset, dest_offset) { // This could be an out-of-range branch.
// This is an out-of-range branch. // Relax it unless the destination offset has not been computed yet.
// Relax it unless the destination offset has not been computed yet. if !range.contains(offset, dest_offset) &&
if dest_offset != 0 || Some(dest) == cur.func.layout.entry_block() { (dest_offset != 0 || Some(dest) == cur.func.layout.entry_block())
offset += {
relax_branch(&mut cur, offset, dest_offset, &encinfo, isa); offset += relax_branch(&mut cur, offset, dest_offset, &encinfo, isa);
continue; continue;
}
} }
} }
} }

View File

@@ -132,12 +132,12 @@ impl Context {
} }
/// Run the locations verifier on the function. /// Run the locations verifier on the function.
pub fn verify_locations<'a>(&self, isa: &TargetIsa) -> verifier::Result { pub fn verify_locations(&self, isa: &TargetIsa) -> verifier::Result {
verifier::verify_locations(isa, &self.func, None) verifier::verify_locations(isa, &self.func, None)
} }
/// Run the locations verifier only if the `enable_verifier` setting is true. /// Run the locations verifier only if the `enable_verifier` setting is true.
pub fn verify_locations_if<'a>(&self, isa: &TargetIsa) -> CtonResult { pub fn verify_locations_if(&self, isa: &TargetIsa) -> CtonResult {
if isa.flags().enable_verifier() { if isa.flags().enable_verifier() {
self.verify_locations(isa).map_err(Into::into) self.verify_locations(isa).map_err(Into::into)
} else { } else {

View File

@@ -744,8 +744,9 @@ impl<'c, 'f> ir::InstInserterBase<'c> for &'c mut EncCursor<'f> {
if !self.srcloc.is_default() { if !self.srcloc.is_default() {
self.func.srclocs[inst] = self.srcloc; self.func.srclocs[inst] = self.srcloc;
} }
// Assign an encoding. // Assign an encoding.
// XXX Is there a way to describe this error to the user?
#[cfg_attr(feature = "cargo-clippy", allow(match_wild_err_arm))]
match self.isa.encode( match self.isa.encode(
&self.func.dfg, &self.func.dfg,
&self.func.dfg[inst], &self.func.dfg[inst],

View File

@@ -16,7 +16,7 @@ const TESTCASE_NAME_LENGTH: usize = 16;
/// to keep track of a symbol table. /// to keep track of a symbol table.
/// ///
/// External names are primarily used as keys by code using Cretonne to map /// External names are primarily used as keys by code using Cretonne to map
/// from a cretonne::ir::FuncRef or similar to additional associated data. /// from a `cretonne::ir::FuncRef` or similar to additional associated data.
/// ///
/// External names can also serve as a primitive testing and debugging tool. /// External names can also serve as a primitive testing and debugging tool.
/// In particular, many `.cton` test files use function names to identify /// In particular, many `.cton` test files use function names to identify

View File

@@ -428,7 +428,7 @@ impl Layout {
} }
/// Return an iterator over all EBBs in layout order. /// Return an iterator over all EBBs in layout order.
pub fn ebbs<'f>(&'f self) -> Ebbs<'f> { pub fn ebbs(&self) -> Ebbs {
Ebbs { Ebbs {
layout: self, layout: self,
next: self.first_ebb, next: self.first_ebb,
@@ -611,7 +611,7 @@ impl Layout {
} }
/// Iterate over the instructions in `ebb` in layout order. /// Iterate over the instructions in `ebb` in layout order.
pub fn ebb_insts<'f>(&'f self, ebb: Ebb) -> Insts<'f> { pub fn ebb_insts(&self, ebb: Ebb) -> Insts {
Insts { Insts {
layout: self, layout: self,
head: self.ebbs[ebb].first_inst.into(), head: self.ebbs[ebb].first_inst.into(),

View File

@@ -4,13 +4,44 @@
trivial_numeric_casts, trivial_numeric_casts,
unused_extern_crates)] unused_extern_crates)]
#![cfg_attr(feature="clippy",
plugin(clippy(conf_file="../../clippy.toml")))]
#![cfg_attr(feature="cargo-clippy", allow(
// Rustfmt 0.9.0 is at odds with this lint:
block_in_if_condition_stmt,
// Produces only a false positive:
while_let_loop,
// Produces many false positives, but did produce some valid lints, now fixed:
needless_lifetimes,
// Generated code makes some style transgressions, but readability doesn't suffer much:
many_single_char_names,
identity_op,
needless_borrow,
cast_lossless,
unreadable_literal,
assign_op_pattern,
empty_line_after_outer_attr,
// Hard to avoid in generated code:
cyclomatic_complexity,
too_many_arguments,
// Code generator doesn't have a way to collapse identical arms:
match_same_arms,
// These are relatively minor style issues, but would be easy to fix:
new_without_default,
new_without_default_derive,
should_implement_trait,
redundant_field_names,
useless_let_if_seq,
len_without_is_empty))]
pub use context::Context; pub use context::Context;
pub use legalizer::legalize_function; pub use legalizer::legalize_function;
pub use verifier::verify_function; pub use verifier::verify_function;
pub use write::write_function; pub use write::write_function;
/// Version number of the cretonne crate. /// Version number of the cretonne crate.
pub const VERSION: &'static str = env!("CARGO_PKG_VERSION"); pub const VERSION: &str = env!("CARGO_PKG_VERSION");
#[macro_use] #[macro_use]
pub mod dbg; pub mod dbg;

View File

@@ -150,6 +150,7 @@ fn remove_loop_invariant_instructions(
loop_values.insert(*val); loop_values.insert(*val);
} }
pos.goto_top(*ebb); pos.goto_top(*ebb);
#[cfg_attr(feature = "cargo-clippy", allow(block_in_if_condition_stmt))]
while let Some(inst) = pos.next_inst() { while let Some(inst) = pos.next_inst() {
if pos.func.dfg.has_results(inst) && if pos.func.dfg.has_results(inst) &&
pos.func.dfg.inst_args(inst).into_iter().all(|arg| { pos.func.dfg.inst_args(inst).into_iter().all(|arg| {

View File

@@ -6,7 +6,7 @@
/// The order of elements is not preserved, unless the slice is already partitioned. /// The order of elements is not preserved, unless the slice is already partitioned.
/// ///
/// Returns the number of elements where `p(t)` is true. /// Returns the number of elements where `p(t)` is true.
pub fn partition_slice<'a, T: 'a, F>(s: &'a mut [T], mut p: F) -> usize pub fn partition_slice<T, F>(s: &mut [T], mut p: F) -> usize
where where
F: FnMut(&T) -> bool, F: FnMut(&T) -> bool,
{ {

View File

@@ -7,7 +7,7 @@
//! bound is implemented by all the native integer types as well as `Imm64`. //! bound is implemented by all the native integer types as well as `Imm64`.
//! //!
//! Some of these predicates may be unused in certain ISA configurations, so we suppress the //! Some of these predicates may be unused in certain ISA configurations, so we suppress the
//! dead_code warning. //! dead code warning.
/// Check that `x` is the same as `y`. /// Check that `x` is the same as `y`.
#[allow(dead_code)] #[allow(dead_code)]

View File

@@ -114,7 +114,7 @@ fn package_up_divrem_info(
fn get_div_info(inst: Inst, dfg: &DataFlowGraph) -> Option<DivRemByConstInfo> { fn get_div_info(inst: Inst, dfg: &DataFlowGraph) -> Option<DivRemByConstInfo> {
let idata: &InstructionData = &dfg[inst]; let idata: &InstructionData = &dfg[inst];
if let &InstructionData::BinaryImm { opcode, arg, imm } = idata { if let InstructionData::BinaryImm { opcode, arg, imm } = *idata {
let (isSigned, isRem) = match opcode { let (isSigned, isRem) = match opcode {
Opcode::UdivImm => (false, false), Opcode::UdivImm => (false, false),
Opcode::UremImm => (false, true), Opcode::UremImm => (false, true),
@@ -132,7 +132,7 @@ fn get_div_info(inst: Inst, dfg: &DataFlowGraph) -> Option<DivRemByConstInfo> {
// that some previous constant propagation pass has pushed all such // that some previous constant propagation pass has pushed all such
// immediates to their use points, creating BinaryImm instructions // immediates to their use points, creating BinaryImm instructions
// instead? For now we take the conservative approach. // instead? For now we take the conservative approach.
if let &InstructionData::Binary { opcode, args } = idata { if let InstructionData::Binary { opcode, args } = *idata {
let (isSigned, isRem) = match opcode { let (isSigned, isRem) = match opcode {
Opcode::Udiv => (false, false), Opcode::Udiv => (false, false),
Opcode::Urem => (false, true), Opcode::Urem => (false, true),
@@ -484,7 +484,7 @@ fn get_const(value: Value, dfg: &DataFlowGraph) -> Option<i64> {
match dfg.value_def(value) { match dfg.value_def(value) {
ValueDef::Result(definingInst, resultNo) => { ValueDef::Result(definingInst, resultNo) => {
let definingIData: &InstructionData = &dfg[definingInst]; let definingIData: &InstructionData = &dfg[definingInst];
if let &InstructionData::UnaryImm { opcode, imm } = definingIData { if let InstructionData::UnaryImm { opcode, imm } = *definingIData {
if opcode == Opcode::Iconst && resultNo == 0 { if opcode == Opcode::Iconst && resultNo == 0 {
return Some(imm.into()); return Some(imm.into());
} }

View File

@@ -10,7 +10,7 @@ use std::fmt::Write;
pub fn pretty_verifier_error( pub fn pretty_verifier_error(
func: &ir::Function, func: &ir::Function,
isa: Option<&TargetIsa>, isa: Option<&TargetIsa>,
err: verifier::Error, err: &verifier::Error,
) -> String { ) -> String {
let mut msg = err.to_string(); let mut msg = err.to_string();
match err.location { match err.location {
@@ -26,7 +26,7 @@ pub fn pretty_verifier_error(
/// Pretty-print a Cretonne error. /// Pretty-print a Cretonne error.
pub fn pretty_error(func: &ir::Function, isa: Option<&TargetIsa>, err: CtonError) -> String { pub fn pretty_error(func: &ir::Function, isa: Option<&TargetIsa>, err: CtonError) -> String {
if let CtonError::Verifier(e) = err { if let CtonError::Verifier(e) = err {
pretty_verifier_error(func, isa, e) pretty_verifier_error(func, isa, &e)
} else { } else {
err.to_string() err.to_string()
} }

View File

@@ -1,6 +1,6 @@
//! Functions for converting a reference into a singleton slice. //! Functions for converting a reference into a singleton slice.
//! //!
//! See also the ref_slice crate on crates.io. //! See also the [`ref_slice` crate](https://crates.io/crates/ref_slice).
//! //!
//! We define the functions here to avoid external dependencies, and to ensure that they are //! We define the functions here to avoid external dependencies, and to ensure that they are
//! inlined in this crate. //! inlined in this crate.

View File

@@ -197,9 +197,9 @@ impl<'a> fmt::Display for DisplayAllocatableSet<'a> {
"{}", "{}",
bank.names bank.names
.get(offset as usize) .get(offset as usize)
.and_then(|name| name.chars().skip(1).next()) .and_then(|name| name.chars().nth(1))
.unwrap_or( .unwrap_or_else(
char::from_digit(u32::from(offset % 10), 10).unwrap(), || char::from_digit(u32::from(offset % 10), 10).unwrap(),
) )
)?; )?;
} }

View File

@@ -276,33 +276,31 @@ impl<PO: ProgramOrder> GenLiveRange<PO> {
} else { } else {
return first_time_livein; return first_time_livein;
} }
} else { } else if let Some((_, end)) = c.prev() {
// There's no interval beginning at `ebb`, but we could still be live-in at `ebb` with // There's no interval beginning at `ebb`, but we could still be live-in at `ebb` with
// a coalesced interval that begins before and ends after. // a coalesced interval that begins before and ends after.
if let Some((_, end)) = c.prev() { if order.cmp(end, ebb) == Ordering::Greater {
if order.cmp(end, ebb) == Ordering::Greater { // Yep, the previous interval overlaps `ebb`.
// Yep, the previous interval overlaps `ebb`. first_time_livein = false;
first_time_livein = false; if order.cmp(end, to) == Ordering::Less {
if order.cmp(end, to) == Ordering::Less { *c.value_mut().unwrap() = to;
*c.value_mut().unwrap() = to;
} else {
return first_time_livein;
}
} else { } else {
first_time_livein = true; return first_time_livein;
// The current interval does not overlap `ebb`, but it may still be possible to
// coalesce with it.
if order.is_ebb_gap(end, ebb) {
*c.value_mut().unwrap() = to;
} else {
c.insert(ebb, to);
}
} }
} else { } else {
// There is no existing interval before `ebb`.
first_time_livein = true; first_time_livein = true;
c.insert(ebb, to); // The current interval does not overlap `ebb`, but it may still be possible to
// coalesce with it.
if order.is_ebb_gap(end, ebb) {
*c.value_mut().unwrap() = to;
} else {
c.insert(ebb, to);
}
} }
} else {
// There is no existing interval before `ebb`.
first_time_livein = true;
c.insert(ebb, to);
} }
// Now `c` to left pointing at an interval that ends in `to`. // Now `c` to left pointing at an interval that ends in `to`.

View File

@@ -306,14 +306,12 @@ impl<'a> Context<'a> {
let args = self.cur.func.dfg.inst_args(inst); let args = self.cur.func.dfg.inst_args(inst);
for (argidx, (op, &arg)) in constraints.ins.iter().zip(args).enumerate() { for (argidx, (op, &arg)) in constraints.ins.iter().zip(args).enumerate() {
if op.kind != ConstraintKind::Stack { if op.kind != ConstraintKind::Stack && self.liveness[arg].affinity.is_stack() {
if self.liveness[arg].affinity.is_stack() { self.candidates.push(ReloadCandidate {
self.candidates.push(ReloadCandidate { argidx,
argidx, value: arg,
value: arg, regclass: op.regclass,
regclass: op.regclass, })
})
}
} }
} }

View File

@@ -299,6 +299,7 @@ impl Move {
} }
/// Get the "from" register and register class, if possible. /// Get the "from" register and register class, if possible.
#[cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))]
fn from_reg(&self) -> Option<(RegClass, RegUnit)> { fn from_reg(&self) -> Option<(RegClass, RegUnit)> {
match *self { match *self {
Move::Reg { rc, from, .. } | Move::Reg { rc, from, .. } |

View File

@@ -101,8 +101,10 @@ impl VirtRegs {
where where
'a: 'b, 'a: 'b,
{ {
self.get(*value).map(|vr| self.values(vr)).unwrap_or( self.get(*value).map(|vr| self.values(vr)).unwrap_or_else(
ref_slice(value), || {
ref_slice(value)
},
) )
} }
@@ -371,7 +373,7 @@ impl VirtRegs {
let vreg = self.get(leader).unwrap_or_else(|| { let vreg = self.get(leader).unwrap_or_else(|| {
// Allocate a vreg for `leader`, but leave it empty. // Allocate a vreg for `leader`, but leave it empty.
let vr = self.alloc(); let vr = self.alloc();
if let &mut Some(ref mut vec) = &mut new_vregs { if let Some(ref mut vec) = new_vregs {
vec.push(vr); vec.push(vr);
} }
self.value_vregs[leader] = vr.into(); self.value_vregs[leader] = vr.into();

View File

@@ -1,4 +1,4 @@
//! ScopedHashMap //! `ScopedHashMap`
//! //!
//! This module defines a struct `ScopedHashMap<K, V>` which defines a `HashMap`-like //! This module defines a struct `ScopedHashMap<K, V>` which defines a `HashMap`-like
//! container that has a concept of scopes that can be entered and exited, such that //! container that has a concept of scopes that can be entered and exited, such that

View File

@@ -11,7 +11,7 @@ pub use self::details::{TimingToken, PassTimes, take_current, add_to_current};
// //
// This macro defines: // This macro defines:
// //
// - A C-style enum containing all the pass names and a `NoPass` variant. // - A C-style enum containing all the pass names and a `None` variant.
// - A usize constant with the number of defined passes. // - A usize constant with the number of defined passes.
// - A const array of pass descriptions. // - A const array of pass descriptions.
// - A public function per pass used to start the timing of that pass. // - A public function per pass used to start the timing of that pass.
@@ -21,9 +21,9 @@ macro_rules! define_passes {
} => { } => {
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum $enum { $($pass,)+ NoPass } enum $enum { $($pass,)+ None}
const $num_passes: usize = $enum::NoPass as usize; const $num_passes: usize = $enum::None as usize;
const $descriptions: [&str; $num_passes] = [ $($desc),+ ]; const $descriptions: [&str; $num_passes] = [ $($desc),+ ];
@@ -164,7 +164,7 @@ mod details {
/// Information about passes in a single thread. /// Information about passes in a single thread.
thread_local!{ thread_local!{
static CURRENT_PASS: Cell<Pass> = Cell::new(Pass::NoPass); static CURRENT_PASS: Cell<Pass> = Cell::new(Pass::None);
static PASS_TIME: RefCell<PassTimes> = RefCell::new(Default::default()); static PASS_TIME: RefCell<PassTimes> = RefCell::new(Default::default());
} }
@@ -204,7 +204,7 @@ mod details {
} }
/// Add `timings` to the accumulated timings for the current thread. /// Add `timings` to the accumulated timings for the current thread.
pub fn add_to_current(times: PassTimes) { pub fn add_to_current(times: &PassTimes) {
PASS_TIME.with(|rc| for (a, b) in rc.borrow_mut().pass.iter_mut().zip( PASS_TIME.with(|rc| for (a, b) in rc.borrow_mut().pass.iter_mut().zip(
&times.pass, &times.pass,
) )
@@ -221,7 +221,7 @@ mod test {
#[test] #[test]
fn display() { fn display() {
assert_eq!(Pass::NoPass.to_string(), "<no pass>"); assert_eq!(Pass::None.to_string(), "<no pass>");
assert_eq!(Pass::regalloc.to_string(), "Register allocation"); assert_eq!(Pass::regalloc.to_string(), "Register allocation");
} }
} }

View File

@@ -86,16 +86,14 @@ impl<'a> LivenessVerifier<'a> {
self.isa.encoding_info().display(encoding) self.isa.encoding_info().display(encoding)
); );
} }
} else { } else if !lr.affinity.is_none() {
// A non-encoded instruction can only define ghost values. // A non-encoded instruction can only define ghost values.
if !lr.affinity.is_none() { return err!(
return err!( inst,
inst, "{} is a real {} value defined by a ghost instruction",
"{} is a real {} value defined by a ghost instruction", val,
val, lr.affinity.display(&self.isa.register_info())
lr.affinity.display(&self.isa.register_info()) );
);
}
} }
} }
@@ -109,16 +107,14 @@ impl<'a> LivenessVerifier<'a> {
return err!(inst, "{} is not live at this use", val); return err!(inst, "{} is not live at this use", val);
} }
if encoding.is_legal() { // A legal instruction is not allowed to depend on ghost values.
// A legal instruction is not allowed to depend on ghost values. if encoding.is_legal() && lr.affinity.is_none() {
if lr.affinity.is_none() { return err!(
return err!( inst,
inst, "{} is a ghost value used by a real [{}] instruction",
"{} is a ghost value used by a real [{}] instruction", val,
val, self.isa.encoding_info().display(encoding)
self.isa.encoding_info().display(encoding) );
);
}
} }
} }
} }

View File

@@ -34,7 +34,7 @@
//! For polymorphic opcodes, determine the controlling type variable first. //! For polymorphic opcodes, determine the controlling type variable first.
//! - Branches and jumps must pass arguments to destination EBBs that match the //! - Branches and jumps must pass arguments to destination EBBs that match the
//! expected types exactly. The number of arguments must match. //! expected types exactly. The number of arguments must match.
//! - All EBBs in a jump_table must take no arguments. //! - All EBBs in a jump table must take no arguments.
//! - Function calls are type checked against their signature. //! - Function calls are type checked against their signature.
//! - The entry block must take arguments that match the signature of the current //! - The entry block must take arguments that match the signature of the current
//! function. //! function.
@@ -871,50 +871,47 @@ impl<'a> Verifier<'a> {
// Check special-purpose type constraints that can't be expressed in the normal opcode // Check special-purpose type constraints that can't be expressed in the normal opcode
// constraints. // constraints.
fn typecheck_special(&self, inst: Inst, ctrl_type: Type) -> Result { fn typecheck_special(&self, inst: Inst, ctrl_type: Type) -> Result {
match self.func.dfg[inst] { if let ir::InstructionData::Unary { opcode, arg } = self.func.dfg[inst] {
ir::InstructionData::Unary { opcode, arg } => { let arg_type = self.func.dfg.value_type(arg);
let arg_type = self.func.dfg.value_type(arg); match opcode {
match opcode { Opcode::Bextend | Opcode::Uextend | Opcode::Sextend | Opcode::Fpromote => {
Opcode::Bextend | Opcode::Uextend | Opcode::Sextend | Opcode::Fpromote => { if arg_type.lane_count() != ctrl_type.lane_count() {
if arg_type.lane_count() != ctrl_type.lane_count() { return err!(
return err!( inst,
inst, "input {} and output {} must have same number of lanes",
"input {} and output {} must have same number of lanes", arg_type,
arg_type, ctrl_type
ctrl_type );
);
}
if arg_type.lane_bits() >= ctrl_type.lane_bits() {
return err!(
inst,
"input {} must be smaller than output {}",
arg_type,
ctrl_type
);
}
} }
Opcode::Breduce | Opcode::Ireduce | Opcode::Fdemote => { if arg_type.lane_bits() >= ctrl_type.lane_bits() {
if arg_type.lane_count() != ctrl_type.lane_count() { return err!(
return err!( inst,
inst, "input {} must be smaller than output {}",
"input {} and output {} must have same number of lanes", arg_type,
arg_type, ctrl_type
ctrl_type );
);
}
if arg_type.lane_bits() <= ctrl_type.lane_bits() {
return err!(
inst,
"input {} must be larger than output {}",
arg_type,
ctrl_type
);
}
} }
_ => {}
} }
Opcode::Breduce | Opcode::Ireduce | Opcode::Fdemote => {
if arg_type.lane_count() != ctrl_type.lane_count() {
return err!(
inst,
"input {} and output {} must have same number of lanes",
arg_type,
ctrl_type
);
}
if arg_type.lane_bits() <= ctrl_type.lane_bits() {
return err!(
inst,
"input {} must be larger than output {}",
arg_type,
ctrl_type
);
}
}
_ => {}
} }
_ => {}
} }
Ok(()) Ok(())
} }

View File

@@ -70,7 +70,7 @@ impl ConcurrentRunner {
assert!(self.request_tx.is_none(), "must shutdown before join"); assert!(self.request_tx.is_none(), "must shutdown before join");
for h in self.handles.drain(..) { for h in self.handles.drain(..) {
match h.join() { match h.join() {
Ok(t) => timing::add_to_current(t), Ok(t) => timing::add_to_current(&t),
Err(e) => println!("worker panicked: {:?}", e), Err(e) => println!("worker panicked: {:?}", e),
} }
} }

View File

@@ -3,6 +3,11 @@
//! This crate contains the main test driver as well as implementations of the //! This crate contains the main test driver as well as implementations of the
//! available filetest commands. //! available filetest commands.
#![cfg_attr(feature="cargo-clippy", allow(
type_complexity,
// Rustfmt 0.9.0 is at odds with this lint:
block_in_if_condition_stmt))]
#[macro_use(dbg)] #[macro_use(dbg)]
extern crate cretonne; extern crate cretonne;
extern crate cton_reader; extern crate cton_reader;
@@ -44,7 +49,7 @@ type TestResult = Result<time::Duration, String>;
/// Directories are scanned recursively for test cases ending in `.cton`. These test cases are /// Directories are scanned recursively for test cases ending in `.cton`. These test cases are
/// executed on background threads. /// executed on background threads.
/// ///
pub fn run(verbose: bool, files: Vec<String>) -> TestResult { pub fn run(verbose: bool, files: &[String]) -> TestResult {
let mut runner = TestRunner::new(verbose); let mut runner = TestRunner::new(verbose);
for path in files.iter().map(Path::new) { for path in files.iter().map(Path::new) {

View File

@@ -45,7 +45,7 @@ impl Display for QueueEntry {
f, f,
"{}.{:03} {}", "{}.{:03} {}",
dur.as_secs(), dur.as_secs(),
dur.subsec_nanos() / 1000000, dur.subsec_nanos() / 1_000_000,
p p
) )
} }
@@ -135,7 +135,7 @@ impl TestRunner {
// This lets us skip spurious extensionless files without statting everything // This lets us skip spurious extensionless files without statting everything
// needlessly. // needlessly.
if !dir.is_file() { if !dir.is_file() {
self.path_error(dir, err); self.path_error(&dir, &err);
} }
} }
Ok(entries) => { Ok(entries) => {
@@ -149,7 +149,7 @@ impl TestRunner {
// libstd/sys/unix/fs.rs seems to suggest that breaking now would // libstd/sys/unix/fs.rs seems to suggest that breaking now would
// be a good idea, or the iterator could keep returning the same // be a good idea, or the iterator could keep returning the same
// error forever. // error forever.
self.path_error(dir, err); self.path_error(&dir, &err);
break; break;
} }
Ok(entry) => { Ok(entry) => {
@@ -172,7 +172,7 @@ impl TestRunner {
} }
/// Report an error related to a path. /// Report an error related to a path.
fn path_error<E: Error>(&mut self, path: PathBuf, err: E) { fn path_error<E: Error>(&mut self, path: &PathBuf, err: &E) {
self.errors += 1; self.errors += 1;
println!("{}: {}", path.to_string_lossy(), err); println!("{}: {}", path.to_string_lossy(), err);
} }

View File

@@ -132,7 +132,7 @@ fn run_one_test<'a>(
if !context.verified && test.needs_verifier() { if !context.verified && test.needs_verifier() {
verify_function(&func, context.flags_or_isa()).map_err( verify_function(&func, context.flags_or_isa()).map_err(
|e| { |e| {
pretty_verifier_error(&func, isa, e) pretty_verifier_error(&func, isa, &e)
}, },
)?; )?;
context.verified = true; context.verified = true;

View File

@@ -1,4 +1,4 @@
//! SubTest trait. //! `SubTest` trait.
use std::result; use std::result;
use std::borrow::Cow; use std::borrow::Cow;

View File

@@ -400,7 +400,7 @@ where
/// ///
/// This can be used to insert SSA code that doesn't need to access locals and that doesn't /// This can be used to insert SSA code that doesn't need to access locals and that doesn't
/// need to know about `FunctionBuilder` at all. /// need to know about `FunctionBuilder` at all.
pub fn cursor<'f>(&'f mut self) -> FuncCursor<'f> { pub fn cursor(&mut self) -> FuncCursor {
self.ensure_inserted_ebb(); self.ensure_inserted_ebb();
FuncCursor::new(self.func) FuncCursor::new(self.func)
.with_srcloc(self.srcloc) .with_srcloc(self.srcloc)
@@ -548,6 +548,8 @@ where
/// Returns a displayable object for the function as it is. /// Returns a displayable object for the function as it is.
/// ///
/// Useful for debug purposes. Use it with `None` for standard printing. /// Useful for debug purposes. Use it with `None` for standard printing.
// Clippy thinks the lifetime that follows is needless, but rustc needs it
#[cfg_attr(feature = "cargo-clippy", allow(needless_lifetimes))]
pub fn display<'b, I: Into<Option<&'b TargetIsa>>>(&'b self, isa: I) -> DisplayFunction { pub fn display<'b, I: Into<Option<&'b TargetIsa>>>(&'b self, isa: I) -> DisplayFunction {
self.func.display(isa) self.func.display(isa)
} }

View File

@@ -131,6 +131,9 @@
trivial_numeric_casts, trivial_numeric_casts,
unused_extern_crates)] unused_extern_crates)]
#![cfg_attr(feature="cargo-clippy",
allow(new_without_default, redundant_field_names))]
extern crate cretonne; extern crate cretonne;
pub use frontend::{FunctionBuilderContext, FunctionBuilder}; pub use frontend::{FunctionBuilderContext, FunctionBuilder};

View File

@@ -53,7 +53,7 @@ pub struct LocatedToken<'a> {
} }
/// Wrap up a `Token` with the given location. /// Wrap up a `Token` with the given location.
fn token<'a>(token: Token<'a>, loc: Location) -> Result<LocatedToken<'a>, LocatedError> { fn token(token: Token, loc: Location) -> Result<LocatedToken, LocatedError> {
Ok(LocatedToken { Ok(LocatedToken {
token, token,
location: loc, location: loc,

View File

@@ -1,6 +1,6 @@
//! Cretonne file reader library. //! Cretonne file reader library.
//! //!
//! The cton_reader library supports reading .cton files. This functionality is needed for testing //! The `cton_reader` library supports reading .cton files. This functionality is needed for testing
//! Cretonne, but is not essential for a JIT compiler. //! Cretonne, but is not essential for a JIT compiler.
#![deny(missing_docs, #![deny(missing_docs,

View File

@@ -37,7 +37,7 @@ pub fn parse_functions(text: &str) -> Result<Vec<Function>> {
/// Parse the entire `text` as a test case file. /// Parse the entire `text` as a test case file.
/// ///
/// The returned `TestFile` contains direct references to substrings of `text`. /// The returned `TestFile` contains direct references to substrings of `text`.
pub fn parse_test<'a>(text: &'a str) -> Result<TestFile<'a>> { pub fn parse_test(text: &str) -> Result<TestFile> {
let _tt = timing::parse_text(); let _tt = timing::parse_text();
let mut parser = Parser::new(text); let mut parser = Parser::new(text);
// Gather the preamble comments. // Gather the preamble comments.
@@ -277,6 +277,10 @@ impl<'a> Parser<'a> {
// Get the current lookahead token, after making sure there is one. // Get the current lookahead token, after making sure there is one.
fn token(&mut self) -> Option<Token<'a>> { fn token(&mut self) -> Option<Token<'a>> {
// clippy says self.lookahead is immutable so this loop is either infinite or never
// running. I don't think this is true - self.lookahead is mutated in the loop body - so
// maybe this is a clippy bug? Either way, disable clippy for this.
#[cfg_attr(feature = "cargo-clippy", allow(while_immutable_condition))]
while self.lookahead == None { while self.lookahead == None {
match self.lex.next() { match self.lex.next() {
Some(Ok(lexer::LocatedToken { token, location })) => { Some(Ok(lexer::LocatedToken { token, location })) => {
@@ -957,7 +961,7 @@ impl<'a> Parser<'a> {
isa.register_info() isa.register_info()
.parse_regunit(name) .parse_regunit(name)
.map(ArgumentLoc::Reg) .map(ArgumentLoc::Reg)
.ok_or(self.error("invalid register name")) .ok_or_else(|| self.error("invalid register name"))
} else { } else {
err!(self.loc, "argument location requires exactly one isa") err!(self.loc, "argument location requires exactly one isa")
} }
@@ -1392,12 +1396,12 @@ impl<'a> Parser<'a> {
match self.token() { match self.token() {
Some(Token::Arrow) => { Some(Token::Arrow) => {
self.consume(); self.consume();
self.parse_value_alias(results, ctx)?; self.parse_value_alias(&results, ctx)?;
} }
Some(Token::Equal) => { Some(Token::Equal) => {
self.consume(); self.consume();
self.parse_instruction( self.parse_instruction(
results, &results,
srcloc, srcloc,
encoding, encoding,
result_locations, result_locations,
@@ -1408,7 +1412,7 @@ impl<'a> Parser<'a> {
_ if !results.is_empty() => return err!(self.loc, "expected -> or ="), _ if !results.is_empty() => return err!(self.loc, "expected -> or ="),
_ => { _ => {
self.parse_instruction( self.parse_instruction(
results, &results,
srcloc, srcloc,
encoding, encoding,
result_locations, result_locations,
@@ -1512,7 +1516,7 @@ impl<'a> Parser<'a> {
isa.register_info() isa.register_info()
.parse_regunit(name) .parse_regunit(name)
.map(ValueLoc::Reg) .map(ValueLoc::Reg)
.ok_or(self.error("invalid register value location")) .ok_or_else(|| self.error("invalid register value location"))
} else { } else {
err!(self.loc, "value location requires exactly one isa") err!(self.loc, "value location requires exactly one isa")
} }
@@ -1601,7 +1605,7 @@ impl<'a> Parser<'a> {
// //
// value_alias ::= [inst-results] "->" Value(v) // value_alias ::= [inst-results] "->" Value(v)
// //
fn parse_value_alias(&mut self, results: Vec<Value>, ctx: &mut Context) -> Result<()> { fn parse_value_alias(&mut self, results: &[Value], ctx: &mut Context) -> Result<()> {
if results.len() != 1 { if results.len() != 1 {
return err!(self.loc, "wrong number of aliases"); return err!(self.loc, "wrong number of aliases");
} }
@@ -1621,7 +1625,7 @@ impl<'a> Parser<'a> {
// //
fn parse_instruction( fn parse_instruction(
&mut self, &mut self,
results: Vec<Value>, results: &[Value],
srcloc: ir::SourceLoc, srcloc: ir::SourceLoc,
encoding: Option<Encoding>, encoding: Option<Encoding>,
result_locations: Option<Vec<ValueLoc>>, result_locations: Option<Vec<ValueLoc>>,
@@ -1629,7 +1633,7 @@ impl<'a> Parser<'a> {
ebb: Ebb, ebb: Ebb,
) -> Result<()> { ) -> Result<()> {
// Define the result values. // Define the result values.
for val in &results { for val in results {
ctx.map.def_value(*val, &self.loc)?; ctx.map.def_value(*val, &self.loc)?;
} }
@@ -1674,7 +1678,7 @@ impl<'a> Parser<'a> {
let num_results = ctx.function.dfg.make_inst_results_for_parser( let num_results = ctx.function.dfg.make_inst_results_for_parser(
inst, inst,
ctrl_typevar, ctrl_typevar,
&results, results,
); );
ctx.function.layout.append_inst(inst, ebb); ctx.function.layout.append_inst(inst, ebb);
ctx.map.def_entity(inst.into(), &opcode_loc).expect( ctx.map.def_entity(inst.into(), &opcode_loc).expect(
@@ -1784,11 +1788,9 @@ impl<'a> Parser<'a> {
opcode opcode
); );
} }
} else { // Treat it as a syntax error to speficy a typevar on a non-polymorphic opcode.
// Treat it as a syntax error to speficy a typevar on a non-polymorphic opcode. } else if ctrl_type != VOID {
if ctrl_type != VOID { return err!(self.loc, "{} does not take a typevar", opcode);
return err!(self.loc, "{} does not take a typevar", opcode);
}
} }
Ok(ctrl_type) Ok(ctrl_type)

View File

@@ -13,7 +13,7 @@ use lexer::split_entity_name;
use std::collections::HashMap; use std::collections::HashMap;
/// Mapping from entity names to source locations. /// Mapping from entity names to source locations.
#[derive(Debug)] #[derive(Debug, Default)]
pub struct SourceMap { pub struct SourceMap {
// Store locations for entities, including instructions. // Store locations for entities, including instructions.
locations: HashMap<AnyEntity, Location>, locations: HashMap<AnyEntity, Location>,

View File

@@ -36,6 +36,8 @@ use environ::{FuncEnvironment, GlobalValue};
use std::{i32, u32}; use std::{i32, u32};
use std::vec::Vec; use std::vec::Vec;
// Clippy warns about "flags: _" but its important to document that the flags field is ignored
#[cfg_attr(feature = "cargo-clippy", allow(unneeded_field_pattern))]
/// Translates wasm operators into Cretonne IL instructions. Returns `true` if it inserted /// Translates wasm operators into Cretonne IL instructions. Returns `true` if it inserted
/// a return. /// a return.
pub fn translate_operator<FE: FuncEnvironment + ?Sized>( pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
@@ -45,7 +47,7 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
environ: &mut FE, environ: &mut FE,
) { ) {
if !state.reachable { if !state.reachable {
return translate_unreachable_operator(op, builder, state); return translate_unreachable_operator(&op, builder, state);
} }
// This big match treats all Wasm code operators. // This big match treats all Wasm code operators.
@@ -198,9 +200,8 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
builder.switch_to_block(frame.following_code()); builder.switch_to_block(frame.following_code());
builder.seal_block(frame.following_code()); builder.seal_block(frame.following_code());
// If it is a loop we also have to seal the body loop block // If it is a loop we also have to seal the body loop block
match frame { if let ControlStackFrame::Loop { header, .. } = frame {
ControlStackFrame::Loop { header, .. } => builder.seal_block(header), builder.seal_block(header)
_ => {}
} }
state.stack.truncate(frame.original_stack_size()); state.stack.truncate(frame.original_stack_size());
state.stack.extend_from_slice( state.stack.extend_from_slice(
@@ -857,15 +858,17 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
} }
} }
// Clippy warns us of some fields we are deliberately ignoring
#[cfg_attr(feature = "cargo-clippy", allow(unneeded_field_pattern))]
/// Deals with a Wasm instruction located in an unreachable portion of the code. Most of them /// Deals with a Wasm instruction located in an unreachable portion of the code. Most of them
/// are dropped but special ones like `End` or `Else` signal the potential end of the unreachable /// are dropped but special ones like `End` or `Else` signal the potential end of the unreachable
/// portion so the translation state muts be updated accordingly. /// portion so the translation state muts be updated accordingly.
fn translate_unreachable_operator( fn translate_unreachable_operator(
op: Operator, op: &Operator,
builder: &mut FunctionBuilder<Variable>, builder: &mut FunctionBuilder<Variable>,
state: &mut TranslationState, state: &mut TranslationState,
) { ) {
match op { match *op {
Operator::If { ty: _ } => { Operator::If { ty: _ } => {
// Push a placeholder control stack entry. The if isn't reachable, // Push a placeholder control stack entry. The if isn't reachable,
// so we don't have any branches anywhere. // so we don't have any branches anywhere.
@@ -877,27 +880,25 @@ fn translate_unreachable_operator(
} }
Operator::Else => { Operator::Else => {
let i = state.control_stack.len() - 1; let i = state.control_stack.len() - 1;
match state.control_stack[i] { if let ControlStackFrame::If {
ControlStackFrame::If { branch_inst,
branch_inst, ref mut reachable_from_top,
ref mut reachable_from_top, ..
.. } = state.control_stack[i]
} => { {
if *reachable_from_top { if *reachable_from_top {
// We have a branch from the top of the if to the else. // We have a branch from the top of the if to the else.
state.reachable = true; state.reachable = true;
// And because there's an else, there can no longer be a // And because there's an else, there can no longer be a
// branch from the top directly to the end. // branch from the top directly to the end.
*reachable_from_top = false; *reachable_from_top = false;
// We change the target of the branch instruction // We change the target of the branch instruction
let else_ebb = builder.create_ebb(); let else_ebb = builder.create_ebb();
builder.change_jump_destination(branch_inst, else_ebb); builder.change_jump_destination(branch_inst, else_ebb);
builder.seal_block(else_ebb); builder.seal_block(else_ebb);
builder.switch_to_block(else_ebb); builder.switch_to_block(else_ebb);
}
} }
_ => {}
} }
} }
Operator::End => { Operator::End => {
@@ -1057,10 +1058,10 @@ fn translate_br_if(
builder.ins().brnz(val, br_destination, inputs); builder.ins().brnz(val, br_destination, inputs);
} }
fn translate_br_if_args<'state>( fn translate_br_if_args(
relative_depth: u32, relative_depth: u32,
state: &'state mut TranslationState, state: &mut TranslationState,
) -> (ir::Ebb, &'state [ir::Value]) { ) -> (ir::Ebb, &[ir::Value]) {
let i = state.control_stack.len() - 1 - (relative_depth as usize); let i = state.control_stack.len() - 1 - (relative_depth as usize);
let (return_count, br_destination) = { let (return_count, br_destination) = {
let frame = &mut state.control_stack[i]; let frame = &mut state.control_stack[i];

View File

@@ -121,7 +121,7 @@ impl DummyEnvironment {
} }
} }
/// The FuncEnvironment implementation for use by the `DummyEnvironment`. /// The `FuncEnvironment` implementation for use by the `DummyEnvironment`.
pub struct DummyFuncEnvironment<'dummy_environment> { pub struct DummyFuncEnvironment<'dummy_environment> {
pub mod_info: &'dummy_environment DummyModuleInfo, pub mod_info: &'dummy_environment DummyModuleInfo,
} }

View File

@@ -12,6 +12,10 @@
#![deny(missing_docs, #![deny(missing_docs,
trivial_numeric_casts, trivial_numeric_casts,
unused_extern_crates)] unused_extern_crates)]
#![cfg_attr(feature="clippy",
plugin(clippy(conf_file="../../clippy.toml")))]
#![cfg_attr(feature="cargo-clippy",
allow(new_without_default, redundant_field_names))]
extern crate wasmparser; extern crate wasmparser;
extern crate cton_frontend; extern crate cton_frontend;

View File

@@ -25,7 +25,7 @@ fn testsuite() {
// Ignore files starting with `.`, which could be editor temporary files // Ignore files starting with `.`, which could be editor temporary files
if let Some(stem) = p.path().file_stem() { if let Some(stem) = p.path().file_stem() {
if let Some(stemstr) = stem.to_str() { if let Some(stemstr) = stem.to_str() {
return !stemstr.starts_with("."); return !stemstr.starts_with('.');
} }
} }
false false
@@ -35,7 +35,7 @@ fn testsuite() {
let flags = Flags::new(&settings::builder()); let flags = Flags::new(&settings::builder());
for path in paths { for path in paths {
let path = path.path(); let path = path.path();
handle_module(path, &flags); handle_module(&path, &flags);
} }
} }
@@ -44,7 +44,7 @@ fn return_at_end() {
let mut flag_builder = settings::builder(); let mut flag_builder = settings::builder();
flag_builder.enable("return_at_end").unwrap(); flag_builder.enable("return_at_end").unwrap();
let flags = Flags::new(&flag_builder); let flags = Flags::new(&flag_builder);
handle_module(PathBuf::from("../../wasmtests/return_at_end.wat"), &flags); handle_module(&PathBuf::from("../../wasmtests/return_at_end.wat"), &flags);
} }
fn read_wasm_file(path: PathBuf) -> Result<Vec<u8>, io::Error> { fn read_wasm_file(path: PathBuf) -> Result<Vec<u8>, io::Error> {
@@ -54,7 +54,7 @@ fn read_wasm_file(path: PathBuf) -> Result<Vec<u8>, io::Error> {
Ok(buf) Ok(buf)
} }
fn handle_module(path: PathBuf, flags: &Flags) { fn handle_module(path: &PathBuf, flags: &Flags) {
let data = match path.extension() { let data = match path.extension() {
None => { None => {
panic!("the file extension is not wasm or wat"); panic!("the file extension is not wasm or wat");
@@ -103,7 +103,7 @@ fn handle_module(path: PathBuf, flags: &Flags) {
translate_module(&data, &mut dummy_environ).unwrap(); translate_module(&data, &mut dummy_environ).unwrap();
for func in &dummy_environ.info.function_bodies { for func in &dummy_environ.info.function_bodies {
verifier::verify_function(func, flags) verifier::verify_function(func, flags)
.map_err(|err| panic!(pretty_verifier_error(func, None, err))) .map_err(|err| panic!(pretty_verifier_error(func, None, &err)))
.unwrap(); .unwrap();
} }
} }