Use clippy (#276)

* cton-util: fix some clippy unnecessary pass-by-value warnings

* clippy: ignore too many arguments / cyclomatic complexity in module

Since these functions take arguments coming from the command line, I don't
think this is actually a valid lint; morally, the arguments all come
from one structure.

* cton-util: take care of remaining clippy warnings

* cton-reader: fix all non-suspicious clippy warnings

* cton-reader: disable clippy at site of suspicious lint

* cton-frontend: disable clippy at the site of an invalid lint

* cton-frontend: fix clippy warnings, or ignore benign ones

* clippy: ignore the camelcase word WebAssembly in docs

* cton-wasm: fix clippy complaints or ignore benign ones

* cton-wasm tests: fix clippy complaints

* cretonne: starting point turns off all clippy warnings

* cretonne: clippy fixes, or lower allow() to source of problem

* cretonne: more clippy fixes

* cretonne: fix or disable needless_lifetimes lint

this linter is buggy when the declared lifetime is used for another type
constraint.

* cretonne: fix clippy complaint about Pass::NoPass

* rustfmt

* fix prev minor api changes clippy suggested

* add clippy to test-all

* cton-filetests: clippy fixes

* simplify clippy reporting in test-all

* cretonne: document clippy allows better

* cretonne: fix some more clippy lints

* cretonne: fix clippy lints (mostly doc comments)

* cretonne: allow all needless_lifetimes clippy warnings

remove overrides at the false positives

* rustfmt
This commit is contained in:
Pat Hickey
2018-03-22 13:10:41 -07:00
committed by Dan Gohman
parent 2b3df1a506
commit 03ee007624
51 changed files with 310 additions and 245 deletions

10
check-clippy.sh Executable file
View File

@@ -0,0 +1,10 @@
#!/bin/bash
set -euo pipefail

# Usage: check-clippy.sh [--install]
#
# Exit with status 0 if the `clippy` cargo subcommand is installed,
# status 1 otherwise. Callers (e.g. test-all) use the exit status to
# decide whether to run the linter.
#
# NOTE(review): the `--install` flag is documented in the usage line but is
# not handled anywhere in this script — TODO confirm whether it should run
# `cargo +nightly install clippy` when clippy is missing.

# `cargo install --list` prints one "name vX.Y.Z:" header line per installed
# crate; grep -q succeeds iff a clippy 0.x entry is present. (The former
# `| tee /dev/null` stage in this pipeline was a pass-through no-op and has
# been removed.) set -e does not abort on a failing pipeline used as an
# `if` condition, so the else branch is reachable.
if cargo install --list | grep -q "^clippy v0"; then
    exit 0
else
    exit 1
fi

7
clippy-all.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
set -euo pipefail

# Check all sources with clippy.
#
# In the cton-util crate (root dir) clippy will only work with nightly cargo -
# there is a bug where it will reject the commands passed to it by cargo 0.25.0.
#
# Forward any extra command-line arguments to the clippy invocation: test-all
# calls this script as `clippy-all.sh --write-mode=diff`, and previously those
# arguments were silently dropped. The `${1+"$@"}` form expands to nothing
# (rather than tripping `set -u`) on older bash versions when no arguments
# are given.
cargo +nightly clippy --all ${1+"$@"}

1
cranelift/clippy.toml Normal file
View File

@@ -0,0 +1 @@
# Identifiers that clippy's `doc_markdown` lint accepts in doc comments
# without requiring backticks (they look like camel-case code words but
# are ordinary prose here).
doc-valid-idents = [ "WebAssembly", "NaN", "SetCC" ]

View File

@@ -7,7 +7,7 @@ use cton_reader::parse_functions;
use CommandResult;
use utils::read_to_string;
pub fn run(files: Vec<String>) -> CommandResult {
pub fn run(files: &[String]) -> CommandResult {
for (i, f) in files.into_iter().enumerate() {
if i != 0 {
println!();
@@ -17,7 +17,7 @@ pub fn run(files: Vec<String>) -> CommandResult {
Ok(())
}
fn cat_one(filename: String) -> CommandResult {
fn cat_one(filename: &str) -> CommandResult {
let buffer = read_to_string(&filename).map_err(
|e| format!("{}: {}", filename, e),
)?;

View File

@@ -49,23 +49,23 @@ impl binemit::RelocSink for PrintRelocs {
pub fn run(
files: Vec<String>,
flag_print: bool,
flag_set: Vec<String>,
flag_isa: String,
flag_set: &[String],
flag_isa: &str,
) -> Result<(), String> {
let parsed = parse_sets_and_isa(flag_set, flag_isa)?;
for filename in files {
let path = Path::new(&filename);
let name = String::from(path.as_os_str().to_string_lossy());
handle_module(flag_print, path.to_path_buf(), name, parsed.as_fisa())?;
handle_module(flag_print, &path.to_path_buf(), &name, parsed.as_fisa())?;
}
Ok(())
}
fn handle_module(
flag_print: bool,
path: PathBuf,
name: String,
path: &PathBuf,
name: &str,
fisa: FlagsOrIsa,
) -> Result<(), String> {
let buffer = read_to_string(&path).map_err(

View File

@@ -86,15 +86,20 @@ fn cton_util() -> CommandResult {
// Find the sub-command to execute.
let result = if args.cmd_test {
cton_filetests::run(args.flag_verbose, args.arg_file).map(|_time| ())
cton_filetests::run(args.flag_verbose, &args.arg_file).map(|_time| ())
} else if args.cmd_cat {
cat::run(args.arg_file)
cat::run(&args.arg_file)
} else if args.cmd_filecheck {
rsfilecheck::run(args.arg_file, args.flag_verbose)
rsfilecheck::run(&args.arg_file, args.flag_verbose)
} else if args.cmd_print_cfg {
print_cfg::run(args.arg_file)
print_cfg::run(&args.arg_file)
} else if args.cmd_compile {
compile::run(args.arg_file, args.flag_print, args.flag_set, args.flag_isa)
compile::run(
args.arg_file,
args.flag_print,
&args.flag_set,
&args.flag_isa,
)
} else if args.cmd_wasm {
wasm::run(
args.arg_file,
@@ -102,8 +107,8 @@ fn cton_util() -> CommandResult {
args.flag_just_decode,
args.flag_check_translation,
args.flag_print,
args.flag_set,
args.flag_isa,
&args.flag_set,
&args.flag_isa,
args.flag_print_size,
)
} else {

View File

@@ -8,7 +8,7 @@ use cretonne::cfg_printer::CFGPrinter;
use cton_reader::parse_functions;
use utils::read_to_string;
pub fn run(files: Vec<String>) -> CommandResult {
pub fn run(files: &[String]) -> CommandResult {
for (i, f) in files.into_iter().enumerate() {
if i != 0 {
println!();
@@ -18,8 +18,8 @@ pub fn run(files: Vec<String>) -> CommandResult {
Ok(())
}
fn print_cfg(filename: String) -> CommandResult {
let buffer = read_to_string(&filename).map_err(
fn print_cfg(filename: &str) -> CommandResult {
let buffer = read_to_string(filename).map_err(
|e| format!("{}: {}", filename, e),
)?;
let items = parse_functions(&buffer).map_err(

View File

@@ -7,7 +7,7 @@ use utils::read_to_string;
use filecheck::{CheckerBuilder, Checker, NO_VARIABLES};
use std::io::{self, Read};
pub fn run(files: Vec<String>, verbose: bool) -> CommandResult {
pub fn run(files: &[String], verbose: bool) -> CommandResult {
if files.is_empty() {
return Err("No check files".to_string());
}

View File

@@ -41,10 +41,7 @@ impl OwnedFlagsOrIsa {
}
/// Parse "set" and "isa" commands.
pub fn parse_sets_and_isa(
flag_set: Vec<String>,
flag_isa: String,
) -> Result<OwnedFlagsOrIsa, String> {
pub fn parse_sets_and_isa(flag_set: &[String], flag_isa: &str) -> Result<OwnedFlagsOrIsa, String> {
let mut flag_builder = settings::builder();
parse_options(
flag_set.iter().map(|x| x.as_str()),

View File

@@ -1,6 +1,7 @@
//! CLI tool to use the functions provided by the [cretonne-wasm](../cton_wasm/index.html) crate.
//!
//! Reads Wasm binary files, translates the functions' code to Cretonne IL.
#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments, cyclomatic_complexity))]
use cton_wasm::{translate_module, DummyEnvironment, ModuleEnvironment};
use std::path::PathBuf;
@@ -38,8 +39,8 @@ pub fn run(
flag_just_decode: bool,
flag_check_translation: bool,
flag_print: bool,
flag_set: Vec<String>,
flag_isa: String,
flag_set: &[String],
flag_isa: &str,
flag_print_size: bool,
) -> Result<(), String> {
let parsed = parse_sets_and_isa(flag_set, flag_isa)?;
@@ -53,8 +54,8 @@ pub fn run(
flag_check_translation,
flag_print,
flag_print_size,
path.to_path_buf(),
name,
&path.to_path_buf(),
&name,
parsed.as_fisa(),
)?;
}
@@ -67,8 +68,8 @@ fn handle_module(
flag_check_translation: bool,
flag_print: bool,
flag_print_size: bool,
path: PathBuf,
name: String,
path: &PathBuf,
name: &str,
fisa: FlagsOrIsa,
) -> Result<(), String> {
let mut terminal = term::stdout().unwrap();
@@ -153,10 +154,9 @@ fn handle_module(
context.func = func.clone();
if flag_check_translation {
context.verify(fisa).map_err(|err| {
pretty_verifier_error(&context.func, fisa.isa, err)
pretty_verifier_error(&context.func, fisa.isa, &err)
})?;
} else {
if let Some(isa) = fisa.isa {
} else if let Some(isa) = fisa.isa {
let compiled_size = context.compile(isa).map_err(|err| {
pretty_error(&context.func, fisa.isa, err)
})?;
@@ -176,7 +176,6 @@ fn handle_module(
} else {
return Err(String::from("compilation requires a target isa"));
}
}
if flag_print {
vprintln!(flag_verbose, "");
if let Some(start_func) = dummy_environ.info.start_func {
@@ -194,10 +193,7 @@ fn handle_module(
if !flag_check_translation && flag_print_size {
println!("Total module code size: {} bytes", total_module_code_size);
let total_bytecode_size = dummy_environ.func_bytecode_sizes.iter().fold(
0,
|sum, x| sum + x,
);
let total_bytecode_size: usize = dummy_environ.func_bytecode_sizes.iter().sum();
println!("Total module bytecode size: {} bytes", total_bytecode_size);
}

View File

@@ -55,4 +55,12 @@ cargo test --all --release
banner "Rust documentation: $topdir/target/doc/cretonne/index.html"
cargo doc
# Run clippy if we have it.
banner "Rust linter"
if $topdir/check-clippy.sh; then
$topdir/clippy-all.sh --write-mode=diff
else
echo "\`cargo +nightly install clippy\` for optional rust linting"
fi
banner "OK"

View File

@@ -3,5 +3,5 @@ extern crate cton_filetests;
#[test]
fn filetests() {
// Run all the filetests in the following directories.
cton_filetests::run(false, vec!["filetests".into(), "docs".into()]).expect("test harness");
cton_filetests::run(false, &["filetests".into(), "docs".into()]).expect("test harness");
}

View File

@@ -28,7 +28,7 @@ def gen_enum_types(sgrp, fmt):
if not isinstance(setting, EnumSetting):
continue
ty = camel_case(setting.name)
fmt.doc_comment('Values for {}.'.format(setting))
fmt.doc_comment('Values for `{}`.'.format(setting))
fmt.line('#[derive(Debug, PartialEq, Eq)]')
with fmt.indented('pub enum {} {{'.format(ty), '}'):
for v in setting.values:

View File

@@ -437,7 +437,7 @@ impl<F: Forest> Path<F> {
// Discard the root node if it has shrunk to a single sub-tree.
let mut ns = 0;
while let &NodeData::Inner { size: 0, ref tree, .. } = &pool[self.node[ns]] {
while let NodeData::Inner { size: 0, ref tree, .. } = pool[self.node[ns]] {
ns += 1;
self.node[ns] = tree[0];
}
@@ -529,14 +529,13 @@ impl<F: Forest> Path<F> {
// current entry[level] was one off the end of the node, it will now point at a proper
// entry.
debug_assert!(usize::from(self.entry[level]) < pool[self.node[level]].entries());
} else {
} else if usize::from(self.entry[level]) >= pool[self.node[level]].entries() {
// There's no right sibling at this level, so the node can't be rebalanced.
// Check if we are in an off-the-end position.
if usize::from(self.entry[level]) >= pool[self.node[level]].entries() {
self.size = 0;
}
}
}
/// The current node at `level` has become empty.
///

View File

@@ -57,6 +57,7 @@ impl<F: Forest> NodePool<F> {
pub fn free_tree(&mut self, node: Node) {
if let NodeData::Inner { size, tree, .. } = self[node] {
// Note that we have to capture `tree` by value to avoid borrow checker trouble.
#[cfg_attr(feature = "cargo-clippy", allow(needless_range_loop))]
for i in 0..usize::from(size + 1) {
// Recursively free sub-trees. This recursion can never be deeper than `MAX_PATH`,
// and since most trees have less than a handful of nodes, it is worthwhile to

View File

@@ -76,17 +76,16 @@ pub fn relax_branches(func: &mut Function, isa: &TargetIsa) -> Result<CodeOffset
if let Some(range) = encinfo.branch_range(enc) {
if let Some(dest) = cur.func.dfg[inst].branch_destination() {
let dest_offset = cur.func.offsets[dest];
if !range.contains(offset, dest_offset) {
// This is an out-of-range branch.
// This could be an out-of-range branch.
// Relax it unless the destination offset has not been computed yet.
if dest_offset != 0 || Some(dest) == cur.func.layout.entry_block() {
offset +=
relax_branch(&mut cur, offset, dest_offset, &encinfo, isa);
if !range.contains(offset, dest_offset) &&
(dest_offset != 0 || Some(dest) == cur.func.layout.entry_block())
{
offset += relax_branch(&mut cur, offset, dest_offset, &encinfo, isa);
continue;
}
}
}
}
offset += size;
}

View File

@@ -132,12 +132,12 @@ impl Context {
}
/// Run the locations verifier on the function.
pub fn verify_locations<'a>(&self, isa: &TargetIsa) -> verifier::Result {
pub fn verify_locations(&self, isa: &TargetIsa) -> verifier::Result {
verifier::verify_locations(isa, &self.func, None)
}
/// Run the locations verifier only if the `enable_verifier` setting is true.
pub fn verify_locations_if<'a>(&self, isa: &TargetIsa) -> CtonResult {
pub fn verify_locations_if(&self, isa: &TargetIsa) -> CtonResult {
if isa.flags().enable_verifier() {
self.verify_locations(isa).map_err(Into::into)
} else {

View File

@@ -744,8 +744,9 @@ impl<'c, 'f> ir::InstInserterBase<'c> for &'c mut EncCursor<'f> {
if !self.srcloc.is_default() {
self.func.srclocs[inst] = self.srcloc;
}
// Assign an encoding.
// XXX Is there a way to describe this error to the user?
#[cfg_attr(feature = "cargo-clippy", allow(match_wild_err_arm))]
match self.isa.encode(
&self.func.dfg,
&self.func.dfg[inst],

View File

@@ -16,7 +16,7 @@ const TESTCASE_NAME_LENGTH: usize = 16;
/// to keep track of a symbol table.
///
/// External names are primarily used as keys by code using Cretonne to map
/// from a cretonne::ir::FuncRef or similar to additional associated data.
/// from a `cretonne::ir::FuncRef` or similar to additional associated data.
///
/// External names can also serve as a primitive testing and debugging tool.
/// In particular, many `.cton` test files use function names to identify

View File

@@ -428,7 +428,7 @@ impl Layout {
}
/// Return an iterator over all EBBs in layout order.
pub fn ebbs<'f>(&'f self) -> Ebbs<'f> {
pub fn ebbs(&self) -> Ebbs {
Ebbs {
layout: self,
next: self.first_ebb,
@@ -611,7 +611,7 @@ impl Layout {
}
/// Iterate over the instructions in `ebb` in layout order.
pub fn ebb_insts<'f>(&'f self, ebb: Ebb) -> Insts<'f> {
pub fn ebb_insts(&self, ebb: Ebb) -> Insts {
Insts {
layout: self,
head: self.ebbs[ebb].first_inst.into(),

View File

@@ -4,13 +4,44 @@
trivial_numeric_casts,
unused_extern_crates)]
#![cfg_attr(feature="clippy",
plugin(clippy(conf_file="../../clippy.toml")))]
#![cfg_attr(feature="cargo-clippy", allow(
// Rustfmt 0.9.0 is at odds with this lint:
block_in_if_condition_stmt,
// Produces only a false positive:
while_let_loop,
// Produces many false positives, but did produce some valid lints, now fixed:
needless_lifetimes,
// Generated code makes some style transgressions, but readability doesn't suffer much:
many_single_char_names,
identity_op,
needless_borrow,
cast_lossless,
unreadable_literal,
assign_op_pattern,
empty_line_after_outer_attr,
// Hard to avoid in generated code:
cyclomatic_complexity,
too_many_arguments,
// Code generator doesn't have a way to collapse identical arms:
match_same_arms,
// These are relatively minor style issues, but would be easy to fix:
new_without_default,
new_without_default_derive,
should_implement_trait,
redundant_field_names,
useless_let_if_seq,
len_without_is_empty))]
pub use context::Context;
pub use legalizer::legalize_function;
pub use verifier::verify_function;
pub use write::write_function;
/// Version number of the cretonne crate.
pub const VERSION: &'static str = env!("CARGO_PKG_VERSION");
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
#[macro_use]
pub mod dbg;

View File

@@ -150,6 +150,7 @@ fn remove_loop_invariant_instructions(
loop_values.insert(*val);
}
pos.goto_top(*ebb);
#[cfg_attr(feature = "cargo-clippy", allow(block_in_if_condition_stmt))]
while let Some(inst) = pos.next_inst() {
if pos.func.dfg.has_results(inst) &&
pos.func.dfg.inst_args(inst).into_iter().all(|arg| {

View File

@@ -6,7 +6,7 @@
/// The order of elements is not preserved, unless the slice is already partitioned.
///
/// Returns the number of elements where `p(t)` is true.
pub fn partition_slice<'a, T: 'a, F>(s: &'a mut [T], mut p: F) -> usize
pub fn partition_slice<T, F>(s: &mut [T], mut p: F) -> usize
where
F: FnMut(&T) -> bool,
{

View File

@@ -7,7 +7,7 @@
//! bound is implemented by all the native integer types as well as `Imm64`.
//!
//! Some of these predicates may be unused in certain ISA configurations, so we suppress the
//! dead_code warning.
//! dead code warning.
/// Check that `x` is the same as `y`.
#[allow(dead_code)]

View File

@@ -114,7 +114,7 @@ fn package_up_divrem_info(
fn get_div_info(inst: Inst, dfg: &DataFlowGraph) -> Option<DivRemByConstInfo> {
let idata: &InstructionData = &dfg[inst];
if let &InstructionData::BinaryImm { opcode, arg, imm } = idata {
if let InstructionData::BinaryImm { opcode, arg, imm } = *idata {
let (isSigned, isRem) = match opcode {
Opcode::UdivImm => (false, false),
Opcode::UremImm => (false, true),
@@ -132,7 +132,7 @@ fn get_div_info(inst: Inst, dfg: &DataFlowGraph) -> Option<DivRemByConstInfo> {
// that some previous constant propagation pass has pushed all such
// immediates to their use points, creating BinaryImm instructions
// instead? For now we take the conservative approach.
if let &InstructionData::Binary { opcode, args } = idata {
if let InstructionData::Binary { opcode, args } = *idata {
let (isSigned, isRem) = match opcode {
Opcode::Udiv => (false, false),
Opcode::Urem => (false, true),
@@ -484,7 +484,7 @@ fn get_const(value: Value, dfg: &DataFlowGraph) -> Option<i64> {
match dfg.value_def(value) {
ValueDef::Result(definingInst, resultNo) => {
let definingIData: &InstructionData = &dfg[definingInst];
if let &InstructionData::UnaryImm { opcode, imm } = definingIData {
if let InstructionData::UnaryImm { opcode, imm } = *definingIData {
if opcode == Opcode::Iconst && resultNo == 0 {
return Some(imm.into());
}

View File

@@ -10,7 +10,7 @@ use std::fmt::Write;
pub fn pretty_verifier_error(
func: &ir::Function,
isa: Option<&TargetIsa>,
err: verifier::Error,
err: &verifier::Error,
) -> String {
let mut msg = err.to_string();
match err.location {
@@ -26,7 +26,7 @@ pub fn pretty_verifier_error(
/// Pretty-print a Cretonne error.
pub fn pretty_error(func: &ir::Function, isa: Option<&TargetIsa>, err: CtonError) -> String {
if let CtonError::Verifier(e) = err {
pretty_verifier_error(func, isa, e)
pretty_verifier_error(func, isa, &e)
} else {
err.to_string()
}

View File

@@ -1,6 +1,6 @@
//! Functions for converting a reference into a singleton slice.
//!
//! See also the ref_slice crate on crates.io.
//! See also the [`ref_slice` crate](https://crates.io/crates/ref_slice).
//!
//! We define the functions here to avoid external dependencies, and to ensure that they are
//! inlined in this crate.

View File

@@ -197,9 +197,9 @@ impl<'a> fmt::Display for DisplayAllocatableSet<'a> {
"{}",
bank.names
.get(offset as usize)
.and_then(|name| name.chars().skip(1).next())
.unwrap_or(
char::from_digit(u32::from(offset % 10), 10).unwrap(),
.and_then(|name| name.chars().nth(1))
.unwrap_or_else(
|| char::from_digit(u32::from(offset % 10), 10).unwrap(),
)
)?;
}

View File

@@ -276,10 +276,9 @@ impl<PO: ProgramOrder> GenLiveRange<PO> {
} else {
return first_time_livein;
}
} else {
} else if let Some((_, end)) = c.prev() {
// There's no interval beginning at `ebb`, but we could still be live-in at `ebb` with
// a coalesced interval that begins before and ends after.
if let Some((_, end)) = c.prev() {
if order.cmp(end, ebb) == Ordering::Greater {
// Yep, the previous interval overlaps `ebb`.
first_time_livein = false;
@@ -303,7 +302,6 @@ impl<PO: ProgramOrder> GenLiveRange<PO> {
first_time_livein = true;
c.insert(ebb, to);
}
}
// Now `c` is left pointing at an interval that ends in `to`.
debug_assert_eq!(c.value(), Some(to));

View File

@@ -306,8 +306,7 @@ impl<'a> Context<'a> {
let args = self.cur.func.dfg.inst_args(inst);
for (argidx, (op, &arg)) in constraints.ins.iter().zip(args).enumerate() {
if op.kind != ConstraintKind::Stack {
if self.liveness[arg].affinity.is_stack() {
if op.kind != ConstraintKind::Stack && self.liveness[arg].affinity.is_stack() {
self.candidates.push(ReloadCandidate {
argidx,
value: arg,
@@ -315,7 +314,6 @@ impl<'a> Context<'a> {
})
}
}
}
// If we only have the fixed arguments, we're done now.
let offset = constraints.ins.len();

View File

@@ -299,6 +299,7 @@ impl Move {
}
/// Get the "from" register and register class, if possible.
#[cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))]
fn from_reg(&self) -> Option<(RegClass, RegUnit)> {
match *self {
Move::Reg { rc, from, .. } |

View File

@@ -101,8 +101,10 @@ impl VirtRegs {
where
'a: 'b,
{
self.get(*value).map(|vr| self.values(vr)).unwrap_or(
ref_slice(value),
self.get(*value).map(|vr| self.values(vr)).unwrap_or_else(
|| {
ref_slice(value)
},
)
}
@@ -371,7 +373,7 @@ impl VirtRegs {
let vreg = self.get(leader).unwrap_or_else(|| {
// Allocate a vreg for `leader`, but leave it empty.
let vr = self.alloc();
if let &mut Some(ref mut vec) = &mut new_vregs {
if let Some(ref mut vec) = new_vregs {
vec.push(vr);
}
self.value_vregs[leader] = vr.into();

View File

@@ -1,4 +1,4 @@
//! ScopedHashMap
//! `ScopedHashMap`
//!
//! This module defines a struct `ScopedHashMap<K, V>` which defines a `HashMap`-like
//! container that has a concept of scopes that can be entered and exited, such that

View File

@@ -11,7 +11,7 @@ pub use self::details::{TimingToken, PassTimes, take_current, add_to_current};
//
// This macro defines:
//
// - A C-style enum containing all the pass names and a `NoPass` variant.
// - A C-style enum containing all the pass names and a `None` variant.
// - A usize constant with the number of defined passes.
// - A const array of pass descriptions.
// - A public function per pass used to start the timing of that pass.
@@ -21,9 +21,9 @@ macro_rules! define_passes {
} => {
#[allow(non_camel_case_types)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum $enum { $($pass,)+ NoPass }
enum $enum { $($pass,)+ None}
const $num_passes: usize = $enum::NoPass as usize;
const $num_passes: usize = $enum::None as usize;
const $descriptions: [&str; $num_passes] = [ $($desc),+ ];
@@ -164,7 +164,7 @@ mod details {
/// Information about passes in a single thread.
thread_local!{
static CURRENT_PASS: Cell<Pass> = Cell::new(Pass::NoPass);
static CURRENT_PASS: Cell<Pass> = Cell::new(Pass::None);
static PASS_TIME: RefCell<PassTimes> = RefCell::new(Default::default());
}
@@ -204,7 +204,7 @@ mod details {
}
/// Add `timings` to the accumulated timings for the current thread.
pub fn add_to_current(times: PassTimes) {
pub fn add_to_current(times: &PassTimes) {
PASS_TIME.with(|rc| for (a, b) in rc.borrow_mut().pass.iter_mut().zip(
&times.pass,
)
@@ -221,7 +221,7 @@ mod test {
#[test]
fn display() {
assert_eq!(Pass::NoPass.to_string(), "<no pass>");
assert_eq!(Pass::None.to_string(), "<no pass>");
assert_eq!(Pass::regalloc.to_string(), "Register allocation");
}
}

View File

@@ -86,9 +86,8 @@ impl<'a> LivenessVerifier<'a> {
self.isa.encoding_info().display(encoding)
);
}
} else {
} else if !lr.affinity.is_none() {
// A non-encoded instruction can only define ghost values.
if !lr.affinity.is_none() {
return err!(
inst,
"{} is a real {} value defined by a ghost instruction",
@@ -97,7 +96,6 @@ impl<'a> LivenessVerifier<'a> {
);
}
}
}
// Check the uses.
for &val in self.func.dfg.inst_args(inst) {
@@ -109,9 +107,8 @@ impl<'a> LivenessVerifier<'a> {
return err!(inst, "{} is not live at this use", val);
}
if encoding.is_legal() {
// A legal instruction is not allowed to depend on ghost values.
if lr.affinity.is_none() {
if encoding.is_legal() && lr.affinity.is_none() {
return err!(
inst,
"{} is a ghost value used by a real [{}] instruction",
@@ -122,7 +119,6 @@ impl<'a> LivenessVerifier<'a> {
}
}
}
}
Ok(())
}

View File

@@ -34,7 +34,7 @@
//! For polymorphic opcodes, determine the controlling type variable first.
//! - Branches and jumps must pass arguments to destination EBBs that match the
//! expected types exactly. The number of arguments must match.
//! - All EBBs in a jump_table must take no arguments.
//! - All EBBs in a jump table must take no arguments.
//! - Function calls are type checked against their signature.
//! - The entry block must take arguments that match the signature of the current
//! function.
@@ -871,8 +871,7 @@ impl<'a> Verifier<'a> {
// Check special-purpose type constraints that can't be expressed in the normal opcode
// constraints.
fn typecheck_special(&self, inst: Inst, ctrl_type: Type) -> Result {
match self.func.dfg[inst] {
ir::InstructionData::Unary { opcode, arg } => {
if let ir::InstructionData::Unary { opcode, arg } = self.func.dfg[inst] {
let arg_type = self.func.dfg.value_type(arg);
match opcode {
Opcode::Bextend | Opcode::Uextend | Opcode::Sextend | Opcode::Fpromote => {
@@ -914,8 +913,6 @@ impl<'a> Verifier<'a> {
_ => {}
}
}
_ => {}
}
Ok(())
}

View File

@@ -70,7 +70,7 @@ impl ConcurrentRunner {
assert!(self.request_tx.is_none(), "must shutdown before join");
for h in self.handles.drain(..) {
match h.join() {
Ok(t) => timing::add_to_current(t),
Ok(t) => timing::add_to_current(&t),
Err(e) => println!("worker panicked: {:?}", e),
}
}

View File

@@ -3,6 +3,11 @@
//! This crate contains the main test driver as well as implementations of the
//! available filetest commands.
#![cfg_attr(feature="cargo-clippy", allow(
type_complexity,
// Rustfmt 0.9.0 is at odds with this lint:
block_in_if_condition_stmt))]
#[macro_use(dbg)]
extern crate cretonne;
extern crate cton_reader;
@@ -44,7 +49,7 @@ type TestResult = Result<time::Duration, String>;
/// Directories are scanned recursively for test cases ending in `.cton`. These test cases are
/// executed on background threads.
///
pub fn run(verbose: bool, files: Vec<String>) -> TestResult {
pub fn run(verbose: bool, files: &[String]) -> TestResult {
let mut runner = TestRunner::new(verbose);
for path in files.iter().map(Path::new) {

View File

@@ -45,7 +45,7 @@ impl Display for QueueEntry {
f,
"{}.{:03} {}",
dur.as_secs(),
dur.subsec_nanos() / 1000000,
dur.subsec_nanos() / 1_000_000,
p
)
}
@@ -135,7 +135,7 @@ impl TestRunner {
// This lets us skip spurious extensionless files without statting everything
// needlessly.
if !dir.is_file() {
self.path_error(dir, err);
self.path_error(&dir, &err);
}
}
Ok(entries) => {
@@ -149,7 +149,7 @@ impl TestRunner {
// libstd/sys/unix/fs.rs seems to suggest that breaking now would
// be a good idea, or the iterator could keep returning the same
// error forever.
self.path_error(dir, err);
self.path_error(&dir, &err);
break;
}
Ok(entry) => {
@@ -172,7 +172,7 @@ impl TestRunner {
}
/// Report an error related to a path.
fn path_error<E: Error>(&mut self, path: PathBuf, err: E) {
fn path_error<E: Error>(&mut self, path: &PathBuf, err: &E) {
self.errors += 1;
println!("{}: {}", path.to_string_lossy(), err);
}

View File

@@ -132,7 +132,7 @@ fn run_one_test<'a>(
if !context.verified && test.needs_verifier() {
verify_function(&func, context.flags_or_isa()).map_err(
|e| {
pretty_verifier_error(&func, isa, e)
pretty_verifier_error(&func, isa, &e)
},
)?;
context.verified = true;

View File

@@ -1,4 +1,4 @@
//! SubTest trait.
//! `SubTest` trait.
use std::result;
use std::borrow::Cow;

View File

@@ -400,7 +400,7 @@ where
///
/// This can be used to insert SSA code that doesn't need to access locals and that doesn't
/// need to know about `FunctionBuilder` at all.
pub fn cursor<'f>(&'f mut self) -> FuncCursor<'f> {
pub fn cursor(&mut self) -> FuncCursor {
self.ensure_inserted_ebb();
FuncCursor::new(self.func)
.with_srcloc(self.srcloc)
@@ -548,6 +548,8 @@ where
/// Returns a displayable object for the function as it is.
///
/// Useful for debug purposes. Use it with `None` for standard printing.
// Clippy thinks the lifetime that follows is needless, but rustc needs it
#[cfg_attr(feature = "cargo-clippy", allow(needless_lifetimes))]
pub fn display<'b, I: Into<Option<&'b TargetIsa>>>(&'b self, isa: I) -> DisplayFunction {
self.func.display(isa)
}

View File

@@ -131,6 +131,9 @@
trivial_numeric_casts,
unused_extern_crates)]
#![cfg_attr(feature="cargo-clippy",
allow(new_without_default, redundant_field_names))]
extern crate cretonne;
pub use frontend::{FunctionBuilderContext, FunctionBuilder};

View File

@@ -53,7 +53,7 @@ pub struct LocatedToken<'a> {
}
/// Wrap up a `Token` with the given location.
fn token<'a>(token: Token<'a>, loc: Location) -> Result<LocatedToken<'a>, LocatedError> {
fn token(token: Token, loc: Location) -> Result<LocatedToken, LocatedError> {
Ok(LocatedToken {
token,
location: loc,

View File

@@ -1,6 +1,6 @@
//! Cretonne file reader library.
//!
//! The cton_reader library supports reading .cton files. This functionality is needed for testing
//! The `cton_reader` library supports reading .cton files. This functionality is needed for testing
//! Cretonne, but is not essential for a JIT compiler.
#![deny(missing_docs,

View File

@@ -37,7 +37,7 @@ pub fn parse_functions(text: &str) -> Result<Vec<Function>> {
/// Parse the entire `text` as a test case file.
///
/// The returned `TestFile` contains direct references to substrings of `text`.
pub fn parse_test<'a>(text: &'a str) -> Result<TestFile<'a>> {
pub fn parse_test(text: &str) -> Result<TestFile> {
let _tt = timing::parse_text();
let mut parser = Parser::new(text);
// Gather the preamble comments.
@@ -277,6 +277,10 @@ impl<'a> Parser<'a> {
// Get the current lookahead token, after making sure there is one.
fn token(&mut self) -> Option<Token<'a>> {
// clippy says self.lookahead is immutable so this loop is either infinite or never
// running. I don't think this is true - self.lookahead is mutated in the loop body - so
// maybe this is a clippy bug? Either way, disable clippy for this.
#[cfg_attr(feature = "cargo-clippy", allow(while_immutable_condition))]
while self.lookahead == None {
match self.lex.next() {
Some(Ok(lexer::LocatedToken { token, location })) => {
@@ -957,7 +961,7 @@ impl<'a> Parser<'a> {
isa.register_info()
.parse_regunit(name)
.map(ArgumentLoc::Reg)
.ok_or(self.error("invalid register name"))
.ok_or_else(|| self.error("invalid register name"))
} else {
err!(self.loc, "argument location requires exactly one isa")
}
@@ -1392,12 +1396,12 @@ impl<'a> Parser<'a> {
match self.token() {
Some(Token::Arrow) => {
self.consume();
self.parse_value_alias(results, ctx)?;
self.parse_value_alias(&results, ctx)?;
}
Some(Token::Equal) => {
self.consume();
self.parse_instruction(
results,
&results,
srcloc,
encoding,
result_locations,
@@ -1408,7 +1412,7 @@ impl<'a> Parser<'a> {
_ if !results.is_empty() => return err!(self.loc, "expected -> or ="),
_ => {
self.parse_instruction(
results,
&results,
srcloc,
encoding,
result_locations,
@@ -1512,7 +1516,7 @@ impl<'a> Parser<'a> {
isa.register_info()
.parse_regunit(name)
.map(ValueLoc::Reg)
.ok_or(self.error("invalid register value location"))
.ok_or_else(|| self.error("invalid register value location"))
} else {
err!(self.loc, "value location requires exactly one isa")
}
@@ -1601,7 +1605,7 @@ impl<'a> Parser<'a> {
//
// value_alias ::= [inst-results] "->" Value(v)
//
fn parse_value_alias(&mut self, results: Vec<Value>, ctx: &mut Context) -> Result<()> {
fn parse_value_alias(&mut self, results: &[Value], ctx: &mut Context) -> Result<()> {
if results.len() != 1 {
return err!(self.loc, "wrong number of aliases");
}
@@ -1621,7 +1625,7 @@ impl<'a> Parser<'a> {
//
fn parse_instruction(
&mut self,
results: Vec<Value>,
results: &[Value],
srcloc: ir::SourceLoc,
encoding: Option<Encoding>,
result_locations: Option<Vec<ValueLoc>>,
@@ -1629,7 +1633,7 @@ impl<'a> Parser<'a> {
ebb: Ebb,
) -> Result<()> {
// Define the result values.
for val in &results {
for val in results {
ctx.map.def_value(*val, &self.loc)?;
}
@@ -1674,7 +1678,7 @@ impl<'a> Parser<'a> {
let num_results = ctx.function.dfg.make_inst_results_for_parser(
inst,
ctrl_typevar,
&results,
results,
);
ctx.function.layout.append_inst(inst, ebb);
ctx.map.def_entity(inst.into(), &opcode_loc).expect(
@@ -1784,12 +1788,10 @@ impl<'a> Parser<'a> {
opcode
);
}
} else {
// Treat it as a syntax error to specify a typevar on a non-polymorphic opcode.
if ctrl_type != VOID {
} else if ctrl_type != VOID {
return err!(self.loc, "{} does not take a typevar", opcode);
}
}
Ok(ctrl_type)
}

View File

@@ -13,7 +13,7 @@ use lexer::split_entity_name;
use std::collections::HashMap;
/// Mapping from entity names to source locations.
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct SourceMap {
// Store locations for entities, including instructions.
locations: HashMap<AnyEntity, Location>,

View File

@@ -36,6 +36,8 @@ use environ::{FuncEnvironment, GlobalValue};
use std::{i32, u32};
use std::vec::Vec;
// Clippy warns about "flags: _" but its important to document that the flags field is ignored
#[cfg_attr(feature = "cargo-clippy", allow(unneeded_field_pattern))]
/// Translates wasm operators into Cretonne IL instructions. Returns `true` if it inserted
/// a return.
pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
@@ -45,7 +47,7 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
environ: &mut FE,
) {
if !state.reachable {
return translate_unreachable_operator(op, builder, state);
return translate_unreachable_operator(&op, builder, state);
}
// This big match treats all Wasm code operators.
@@ -198,9 +200,8 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
builder.switch_to_block(frame.following_code());
builder.seal_block(frame.following_code());
// If it is a loop we also have to seal the body loop block
match frame {
ControlStackFrame::Loop { header, .. } => builder.seal_block(header),
_ => {}
if let ControlStackFrame::Loop { header, .. } = frame {
builder.seal_block(header)
}
state.stack.truncate(frame.original_stack_size());
state.stack.extend_from_slice(
@@ -857,15 +858,17 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
}
}
// Clippy warns us of some fields we are deliberately ignoring
#[cfg_attr(feature = "cargo-clippy", allow(unneeded_field_pattern))]
/// Deals with a Wasm instruction located in an unreachable portion of the code. Most of them
/// are dropped but special ones like `End` or `Else` signal the potential end of the unreachable
/// portion so the translation state must be updated accordingly.
fn translate_unreachable_operator(
op: Operator,
op: &Operator,
builder: &mut FunctionBuilder<Variable>,
state: &mut TranslationState,
) {
match op {
match *op {
Operator::If { ty: _ } => {
// Push a placeholder control stack entry. The if isn't reachable,
// so we don't have any branches anywhere.
@@ -877,12 +880,12 @@ fn translate_unreachable_operator(
}
Operator::Else => {
let i = state.control_stack.len() - 1;
match state.control_stack[i] {
ControlStackFrame::If {
if let ControlStackFrame::If {
branch_inst,
ref mut reachable_from_top,
..
} => {
} = state.control_stack[i]
{
if *reachable_from_top {
// We have a branch from the top of the if to the else.
state.reachable = true;
@@ -897,8 +900,6 @@ fn translate_unreachable_operator(
builder.switch_to_block(else_ebb);
}
}
_ => {}
}
}
Operator::End => {
let stack = &mut state.stack;
@@ -1057,10 +1058,10 @@ fn translate_br_if(
builder.ins().brnz(val, br_destination, inputs);
}
fn translate_br_if_args<'state>(
fn translate_br_if_args(
relative_depth: u32,
state: &'state mut TranslationState,
) -> (ir::Ebb, &'state [ir::Value]) {
state: &mut TranslationState,
) -> (ir::Ebb, &[ir::Value]) {
let i = state.control_stack.len() - 1 - (relative_depth as usize);
let (return_count, br_destination) = {
let frame = &mut state.control_stack[i];

View File

@@ -121,7 +121,7 @@ impl DummyEnvironment {
}
}
/// The FuncEnvironment implementation for use by the `DummyEnvironment`.
/// The `FuncEnvironment` implementation for use by the `DummyEnvironment`.
pub struct DummyFuncEnvironment<'dummy_environment> {
pub mod_info: &'dummy_environment DummyModuleInfo,
}

View File

@@ -12,6 +12,10 @@
#![deny(missing_docs,
trivial_numeric_casts,
unused_extern_crates)]
#![cfg_attr(feature="clippy",
plugin(clippy(conf_file="../../clippy.toml")))]
#![cfg_attr(feature="cargo-clippy",
allow(new_without_default, redundant_field_names))]
extern crate wasmparser;
extern crate cton_frontend;

View File

@@ -25,7 +25,7 @@ fn testsuite() {
// Ignore files starting with `.`, which could be editor temporary files
if let Some(stem) = p.path().file_stem() {
if let Some(stemstr) = stem.to_str() {
return !stemstr.starts_with(".");
return !stemstr.starts_with('.');
}
}
false
@@ -35,7 +35,7 @@ fn testsuite() {
let flags = Flags::new(&settings::builder());
for path in paths {
let path = path.path();
handle_module(path, &flags);
handle_module(&path, &flags);
}
}
@@ -44,7 +44,7 @@ fn return_at_end() {
let mut flag_builder = settings::builder();
flag_builder.enable("return_at_end").unwrap();
let flags = Flags::new(&flag_builder);
handle_module(PathBuf::from("../../wasmtests/return_at_end.wat"), &flags);
handle_module(&PathBuf::from("../../wasmtests/return_at_end.wat"), &flags);
}
fn read_wasm_file(path: PathBuf) -> Result<Vec<u8>, io::Error> {
@@ -54,7 +54,7 @@ fn read_wasm_file(path: PathBuf) -> Result<Vec<u8>, io::Error> {
Ok(buf)
}
fn handle_module(path: PathBuf, flags: &Flags) {
fn handle_module(path: &PathBuf, flags: &Flags) {
let data = match path.extension() {
None => {
panic!("the file extension is not wasm or wat");
@@ -103,7 +103,7 @@ fn handle_module(path: PathBuf, flags: &Flags) {
translate_module(&data, &mut dummy_environ).unwrap();
for func in &dummy_environ.info.function_bodies {
verifier::verify_function(func, flags)
.map_err(|err| panic!(pretty_verifier_error(func, None, err)))
.map_err(|err| panic!(pretty_verifier_error(func, None, &err)))
.unwrap();
}
}