Fix clippy warnings (#1168)

Author: yjh
Date: 2019-10-24 23:54:31 +08:00
Committed by: Sean Stangl
Parent: 2b6ea31621
Commit: 1176e4f178
7 changed files with 40 additions and 54 deletions

View File

@@ -581,7 +581,7 @@ impl Apply {
         pred
     }

-    pub fn rust_builder(&self, defined_vars: &Vec<VarIndex>, var_pool: &VarPool) -> String {
+    pub fn rust_builder(&self, defined_vars: &[VarIndex], var_pool: &VarPool) -> String {
         let mut args = self
             .args
             .iter()
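
For context, taking `&[VarIndex]` instead of `&Vec<VarIndex>` is the rewrite clippy's `ptr_arg` lint asks for: a slice parameter accepts vectors, arrays, and sub-slices alike, while `&Vec<T>` only accepts a borrowed `Vec`. A minimal standalone sketch of the pattern (the `sum_*` names are invented for illustration, not taken from the Cranelift sources):

    // Flagged by clippy::ptr_arg: the function only needs read access to the
    // elements, never Vec-specific capacity or ownership.
    fn sum_old(indices: &Vec<u32>) -> u32 {
        indices.iter().sum()
    }

    // Preferred: &[u32] accepts &Vec<u32>, arrays, and sub-slices alike,
    // thanks to deref coercion at the call site.
    fn sum_new(indices: &[u32]) -> u32 {
        indices.iter().sum()
    }

    fn main() {
        let v = vec![1, 2, 3];
        assert_eq!(sum_old(&v), sum_new(&v));
        assert_eq!(sum_new(&[4, 5]), 9);
    }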

View File

@@ -202,13 +202,7 @@ impl Mutator for ReplaceInstWithConst {
         // Copy result SSA names into our own vector; otherwise we couldn't mutably borrow pos
         // in the loop below.
-        let results = pos
-            .func
-            .dfg
-            .inst_results(prev_inst)
-            .iter()
-            .cloned()
-            .collect::<Vec<_>>();
+        let results = pos.func.dfg.inst_results(prev_inst).to_vec();

         // Detach results from the previous instruction, since we're going to reuse them.
         pos.func.dfg.clear_results(prev_inst);
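
On a slice, `.iter().cloned().collect::<Vec<_>>()` is just a longhand copy; `to_vec()` does the same thing in one call (clippy reports this as `iter_cloned_collect`, if I recall the lint name correctly). A sketch with made-up data:

    fn main() {
        let results: &[u32] = &[10, 20, 30];

        // Verbose form: iterate, clone each element, collect into a Vec.
        let copied_a: Vec<u32> = results.iter().cloned().collect();

        // Equivalent and clearer: slices provide to_vec() directly.
        let copied_b = results.to_vec();

        assert_eq!(copied_a, copied_b);
    }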
@@ -390,15 +384,12 @@ impl Mutator for RemoveUnusedEntities {
         let mut signatures_usage_map = HashMap::new();
         for ebb in func.layout.ebbs() {
             for inst in func.layout.ebb_insts(ebb) {
-                match func.dfg[inst] {
-                    // Add new cases when there are new instruction formats taking a `SigRef`.
-                    InstructionData::CallIndirect { sig_ref, .. } => {
-                        signatures_usage_map
-                            .entry(sig_ref)
-                            .or_insert_with(Vec::new)
-                            .push(SigRefUser::Instruction(inst));
-                    }
-                    _ => {}
+                // Add new cases when there are new instruction formats taking a `SigRef`.
+                if let InstructionData::CallIndirect { sig_ref, .. } = func.dfg[inst] {
+                    signatures_usage_map
+                        .entry(sig_ref)
+                        .or_insert_with(Vec::new)
+                        .push(SigRefUser::Instruction(inst));
                 }
             }
         }
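
This hunk and the next one apply the same rewrite: a `match` whose only interesting arm is a single pattern, with `_ => {}` swallowing everything else, reads better as `if let`, which is what clippy's `single_match` lint suggests. A minimal sketch with an invented enum:

    enum Shape {
        Circle(f64),
        Square,
    }

    fn log_circle(shape: &Shape) {
        // Before: a match with a catch-all arm that does nothing.
        match shape {
            Shape::Circle(r) => println!("circle with radius {}", r),
            _ => {}
        }

        // After: if let expresses "only this one case matters".
        if let Shape::Circle(r) = shape {
            println!("circle with radius {}", r);
        }
    }

    fn main() {
        log_circle(&Shape::Circle(1.5));
        log_circle(&Shape::Square);
    }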
@@ -500,15 +491,14 @@ impl Mutator for RemoveUnusedEntities {
         let mut global_value_usage_map = HashMap::new();
         for ebb in func.layout.ebbs() {
             for inst in func.layout.ebb_insts(ebb) {
-                match func.dfg[inst] {
-                    // Add new cases when there are new instruction formats taking a `GlobalValue`.
-                    InstructionData::UnaryGlobalValue { global_value, .. } => {
-                        global_value_usage_map
-                            .entry(global_value)
-                            .or_insert_with(Vec::new)
-                            .push(inst);
-                    }
-                    _ => {}
+                // Add new cases when there are new instruction formats taking a `GlobalValue`.
+                if let InstructionData::UnaryGlobalValue { global_value, .. } =
+                    func.dfg[inst]
+                {
+                    global_value_usage_map
+                        .entry(global_value)
+                        .or_insert_with(Vec::new)
+                        .push(inst);
                 }
             }
         }
@@ -519,8 +509,9 @@ impl Mutator for RemoveUnusedEntities {
                     // These can create cyclic references, which cause complications. Just skip
                     // the global value removal for now.
                     // FIXME Handle them in a better way.
-                    GlobalValueData::Load { base: _, .. }
-                    | GlobalValueData::IAddImm { base: _, .. } => return None,
+                    GlobalValueData::Load { .. } | GlobalValueData::IAddImm { .. } => {
+                        return None
+                    }
                 }
             }
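
In those arms the explicit `base: _` adds nothing, because `..` already ignores any remaining fields, so the shorter patterns are equivalent. A sketch with an invented enum that only mirrors the shape of the real type:

    // The fields exist only to mirror the shape of the real type.
    #[allow(dead_code)]
    enum GlobalValue {
        Load { base: u32, offset: i32 },
        IAddImm { base: u32, imm: i64 },
        Symbol,
    }

    fn skip(gv: &GlobalValue) -> bool {
        match gv {
            // `..` already covers `base`, so spelling out `base: _` is redundant.
            GlobalValue::Load { .. } | GlobalValue::IAddImm { .. } => true,
            _ => false,
        }
    }

    fn main() {
        assert!(skip(&GlobalValue::Load { base: 1, offset: 8 }));
        assert!(!skip(&GlobalValue::Symbol));
    }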
@@ -647,11 +638,11 @@ impl Mutator for MergeBlocks {
         // we'll start back to this EBB.
         self.prev_ebb = Some(pred.ebb);

-        return Some((
+        Some((
             func,
             format!("merged {} and {}", pred.ebb, ebb),
             ProgressStatus::ExpandedOrShrinked,
-        ));
+        ))
     }

     fn did_crash(&mut self) {
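
Dropping the `return` here is the usual fix for clippy's `needless_return` lint: when the value is the last expression in the function, leaving off `return` and the trailing semicolon is the idiomatic form. A tiny sketch:

    // Flagged: explicit return as the final statement.
    fn double_old(x: i32) -> Option<i32> {
        return Some(x * 2);
    }

    // Preferred: the tail expression is the function's value.
    fn double_new(x: i32) -> Option<i32> {
        Some(x * 2)
    }

    fn main() {
        assert_eq!(double_old(21), double_new(21));
    }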
@@ -701,9 +692,9 @@ fn reduce(
     match context.check_for_crash(&func) {
         CheckResult::Succeed => {
-            return Err(format!(
-                "Given function compiled successfully or gave a verifier error."
-            ));
+            return Err(
+                "Given function compiled successfully or gave a verifier error.".to_string(),
+            );
         }
         CheckResult::Crash(_) => {}
     }
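
`format!` with a bare literal and no interpolation only allocates a copy of the string, so `.to_string()` says the same thing more directly; clippy calls this `useless_format`. Sketch:

    fn describe_old() -> Result<(), String> {
        // Flagged: format! with no arguments just copies the literal.
        Err(format!("something went wrong"))
    }

    fn describe_new() -> Result<(), String> {
        Err("something went wrong".to_string())
    }

    fn main() {
        assert_eq!(describe_old(), describe_new());
    }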
@@ -926,8 +917,8 @@ mod tests {
     #[test]
     fn test_reduce() {
-        const TEST: &'static str = include_str!("../tests/bugpoint_test.clif");
-        const EXPECTED: &'static str = include_str!("../tests/bugpoint_test_expected.clif");
+        const TEST: &str = include_str!("../tests/bugpoint_test.clif");
+        const EXPECTED: &str = include_str!("../tests/bugpoint_test_expected.clif");

         let test_file = parse_test(TEST, ParseOptions::default()).unwrap();
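
References in `const` and `static` items already carry the `'static` lifetime implicitly, so writing it out is redundant; clippy reports this as `redundant_static_lifetimes`. Sketch:

    // Flagged: the 'static is implied for a const item.
    const GREETING_OLD: &'static str = "hello";

    // Preferred: same meaning, less noise.
    const GREETING_NEW: &str = "hello";

    fn main() {
        assert_eq!(GREETING_OLD, GREETING_NEW);
    }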

View File

@@ -8,7 +8,7 @@ use crate::CommandResult;
 use cranelift_reader::parse_functions;

 pub fn run(files: &[String]) -> CommandResult {
-    for (i, f) in files.into_iter().enumerate() {
+    for (i, f) in files.iter().enumerate() {
         if i != 0 {
             println!();
         }
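
Calling `.into_iter()` on a shared slice reference consumes nothing: it yields the same `&String` items that `.iter()` does, so the explicit `.iter()` is clearer (clippy flags this under `into_iter_on_ref`, as far as I recall). Sketch with invented file names:

    fn print_all(files: &[String]) {
        // .into_iter() on &[String] would yield &String anyway; .iter() says so explicitly.
        for (i, f) in files.iter().enumerate() {
            println!("{}: {}", i, f);
        }
    }

    fn main() {
        let files = vec!["a.clif".to_string(), "b.clif".to_string()];
        print_all(&files);
    }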

View File

@@ -25,12 +25,7 @@ impl binemit::RelocSink for PrintRelocs {
         offset: binemit::CodeOffset,
     ) {
         if self.flag_print {
-            write!(
-                &mut self.text,
-                "reloc_ebb: {} {} at {}\n",
-                r, offset, where_
-            )
-            .unwrap();
+            writeln!(&mut self.text, "reloc_ebb: {} {} at {}", r, offset, where_).unwrap();
         }
     }
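
The remaining hunks in this file follow the same pattern: `write!` with a hard-coded `\n` at the end of the format string becomes `writeln!`, which is what clippy's `write_with_newline` lint recommends. A standalone sketch writing into a `String`:

    use std::fmt::Write;

    fn main() {
        let mut text = String::new();

        // Flagged: a hard-coded newline inside write!.
        write!(&mut text, "trap: {} at {}\n", "code", 4).unwrap();

        // Preferred: writeln! appends the newline itself.
        writeln!(&mut text, "trap: {} at {}", "code", 4).unwrap();

        assert_eq!(text.lines().count(), 2);
    }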
@@ -42,9 +37,9 @@ impl binemit::RelocSink for PrintRelocs {
         addend: binemit::Addend,
     ) {
         if self.flag_print {
-            write!(
+            writeln!(
                 &mut self.text,
-                "reloc_external: {} {} {} at {}\n",
+                "reloc_external: {} {} {} at {}",
                 r, name, addend, where_
             )
             .unwrap();
@@ -53,7 +48,7 @@ impl binemit::RelocSink for PrintRelocs {
     fn reloc_jt(&mut self, where_: binemit::CodeOffset, r: binemit::Reloc, jt: ir::JumpTable) {
         if self.flag_print {
-            write!(&mut self.text, "reloc_jt: {} {} at {}\n", r, jt, where_).unwrap();
+            writeln!(&mut self.text, "reloc_jt: {} {} at {}", r, jt, where_).unwrap();
         }
     }
@@ -64,9 +59,9 @@ impl binemit::RelocSink for PrintRelocs {
         constant: ir::ConstantOffset,
     ) {
         if self.flag_print {
-            write!(
+            writeln!(
                 &mut self.text,
-                "reloc_constant: {} {} at {}\n",
+                "reloc_constant: {} {} at {}",
                 reloc, constant, code_offset
             )
             .unwrap();
@@ -91,7 +86,7 @@ impl PrintTraps {
 impl binemit::TrapSink for PrintTraps {
     fn trap(&mut self, offset: binemit::CodeOffset, _srcloc: ir::SourceLoc, code: ir::TrapCode) {
         if self.flag_print {
-            write!(&mut self.text, "trap: {} at {}\n", code, offset).unwrap();
+            writeln!(&mut self.text, "trap: {} at {}", code, offset).unwrap();
         }
     }
 }
@@ -113,7 +108,7 @@ impl PrintStackmaps {
 impl binemit::StackmapSink for PrintStackmaps {
     fn add_stackmap(&mut self, offset: binemit::CodeOffset, _: binemit::Stackmap) {
         if self.flag_print {
-            write!(&mut self.text, "add_stackmap at {}\n", offset).unwrap();
+            writeln!(&mut self.text, "add_stackmap at {}", offset).unwrap();
         }
     }
 }

View File

@@ -9,7 +9,7 @@ use cranelift_codegen::cfg_printer::CFGPrinter;
 use cranelift_reader::parse_functions;

 pub fn run(files: &[String]) -> CommandResult {
-    for (i, f) in files.into_iter().enumerate() {
+    for (i, f) in files.iter().enumerate() {
         if i != 0 {
             println!();
         }

View File

@@ -52,7 +52,7 @@ fn iterate_files(files: Vec<String>) -> impl Iterator<Item = PathBuf> {
         .filter(|f| match f {
             Ok(d) => {
                 // filter out hidden files (starting with .)
-                !d.file_name().to_str().map_or(false, |s| s.starts_with("."))
+                !d.file_name().to_str().map_or(false, |s| s.starts_with('.'))
                     // filter out directories
                     && !d.file_type().is_dir()
             }
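
When the pattern passed to `starts_with` (or similar string methods) is a single character, clippy's `single_char_pattern` lint prefers a `char` literal over a one-byte string, which also lets the standard library take a slightly cheaper matching path. Sketch:

    fn is_hidden(name: &str) -> bool {
        // "." works too, but '.' is what clippy suggests for a single character.
        name.starts_with('.')
    }

    fn main() {
        assert!(is_hidden(".git"));
        assert!(!is_hidden("lib.rs"));
    }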
@@ -96,7 +96,7 @@ fn create_target_isa(isa_spec: &IsaSpec) -> Result<Box<dyn TargetIsa>, String> {
         let builder = host_isa_builder()?;
         Ok(builder.finish(flags.clone()))
     } else {
-        Err(String::from("A target ISA was specified in the file but should not have been--only the host ISA can be used for running CLIF files"))?
+        Err(String::from("A target ISA was specified in the file but should not have been--only the host ISA can be used for running CLIF files"))
     }
 }
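
Applying `?` to a value that is already `Err` just re-wraps the error through `From` and returns; since the surrounding function returns `Result<_, String>`, the plain `Err(...)` expression is all that is needed. Clippy reports this pattern (as `try_err`, if I have the lint name right). A sketch under that assumption, with invented names:

    fn pick_isa_old(host_only: bool) -> Result<&'static str, String> {
        if host_only {
            Ok("host")
        } else {
            // Flagged: Err(..)? returns early with the same error it just built.
            Err(String::from("only the host ISA is supported"))?
        }
    }

    fn pick_isa_new(host_only: bool) -> Result<&'static str, String> {
        if host_only {
            Ok("host")
        } else {
            Err(String::from("only the host ISA is supported"))
        }
    }

    fn main() {
        assert_eq!(pick_isa_old(false), pick_isa_new(false));
        assert_eq!(pick_isa_new(true), Ok("host"));
    }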

View File

@@ -49,8 +49,8 @@ impl OwnedFlagsOrIsa {
     /// Produce a FlagsOrIsa reference.
     pub fn as_fisa(&self) -> FlagsOrIsa {
         match *self {
-            OwnedFlagsOrIsa::Flags(ref flags) => FlagsOrIsa::from(flags),
-            OwnedFlagsOrIsa::Isa(ref isa) => FlagsOrIsa::from(&**isa),
+            Self::Flags(ref flags) => FlagsOrIsa::from(flags),
+            Self::Isa(ref isa) => FlagsOrIsa::from(&**isa),
         }
     }
 }
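
Inside an `impl` block, `Self` names the type being implemented, so clippy's `use_self` lint prefers it to repeating the full type name in every arm. Sketch with an invented enum:

    enum Mode {
        Flags(u32),
        Isa(String),
    }

    impl Mode {
        fn describe(&self) -> String {
            match *self {
                // Within the impl, Self::Flags means the same as Mode::Flags.
                Self::Flags(bits) => format!("flags: {:#x}", bits),
                Self::Isa(ref name) => format!("isa: {}", name),
            }
        }
    }

    fn main() {
        println!("{}", Mode::Flags(0b1010).describe());
        println!("{}", Mode::Isa("x86_64".to_string()).describe());
    }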