diff --git a/cranelift/src/cton-util.rs b/cranelift/src/cton-util.rs
index 681ab5b8bc..fea4d879be 100644
--- a/cranelift/src/cton-util.rs
+++ b/cranelift/src/cton-util.rs
@@ -51,7 +51,11 @@ pub type CommandResult = Result<(), String>;
 fn cton_util() -> CommandResult {
     // Parse comand line arguments.
     let args: Args = Docopt::new(USAGE)
-        .and_then(|d| d.help(true).version(Some(format!("Cretonne {}", VERSION))).decode())
+        .and_then(|d| {
+            d.help(true)
+                .version(Some(format!("Cretonne {}", VERSION)))
+                .decode()
+        })
         .unwrap_or_else(|e| e.exit());
 
     // Find the sub-command to execute.
diff --git a/cranelift/src/filetest/concurrent.rs b/cranelift/src/filetest/concurrent.rs
index effe454d38..9ea1114000 100644
--- a/cranelift/src/filetest/concurrent.rs
+++ b/cranelift/src/filetest/concurrent.rs
@@ -119,10 +119,11 @@ fn worker_thread(thread_num: usize,
 
             // Tell them we're starting this job.
             // The receiver should always be present for this as long as we have jobs.
-            replies.send(Reply::Starting {
-                    jobid: jobid,
-                    thread_num: thread_num,
-                })
+            replies
+                .send(Reply::Starting {
+                          jobid: jobid,
+                          thread_num: thread_num,
+                      })
                 .unwrap();
 
             let result = catch_unwind(|| runone::run(path.as_path())).unwrap_or_else(|e| {
@@ -141,10 +142,11 @@ fn worker_thread(thread_num: usize,
                 dbg!("FAIL: {}", msg);
             }
 
-            replies.send(Reply::Done {
-                    jobid: jobid,
-                    result: result,
-                })
+            replies
+                .send(Reply::Done {
+                          jobid: jobid,
+                          result: result,
+                      })
                 .unwrap();
         }
     })
diff --git a/cranelift/src/filetest/legalizer.rs b/cranelift/src/filetest/legalizer.rs
index 39854769ac..1a5f774664 100644
--- a/cranelift/src/filetest/legalizer.rs
+++ b/cranelift/src/filetest/legalizer.rs
@@ -41,7 +41,8 @@ impl SubTest for TestLegalizer {
         comp_ctx.flowgraph();
         comp_ctx.legalize(isa);
 
-        comp_ctx.verify(isa).map_err(|e| pretty_verifier_error(&comp_ctx.func, e))?;
+        comp_ctx.verify(isa)
+            .map_err(|e| pretty_verifier_error(&comp_ctx.func, e))?;
 
         let mut text = String::new();
         write_function(&mut text, &comp_ctx.func, Some(isa)).map_err(|e| e.to_string())?;
diff --git a/cranelift/src/filetest/regalloc.rs b/cranelift/src/filetest/regalloc.rs
index 5f70f51100..daf7adcb07 100644
--- a/cranelift/src/filetest/regalloc.rs
+++ b/cranelift/src/filetest/regalloc.rs
@@ -47,7 +47,8 @@ impl SubTest for TestRegalloc {
         // TODO: Should we have an option to skip legalization?
         comp_ctx.legalize(isa);
         comp_ctx.regalloc(isa);
-        comp_ctx.verify(isa).map_err(|e| pretty_verifier_error(&comp_ctx.func, e))?;
+        comp_ctx.verify(isa)
+            .map_err(|e| pretty_verifier_error(&comp_ctx.func, e))?;
 
         let mut text = String::new();
         write_function(&mut text, &comp_ctx.func, Some(isa)).map_err(|e| e.to_string())?;
diff --git a/cranelift/src/filetest/runner.rs b/cranelift/src/filetest/runner.rs
index 91331a0299..1993ca5877 100644
--- a/cranelift/src/filetest/runner.rs
+++ b/cranelift/src/filetest/runner.rs
@@ -104,10 +104,11 @@ impl TestRunner {
     ///
     /// Any problems reading `file` as a test case file will be reported as a test failure.
     pub fn push_test<P: Into<PathBuf>>(&mut self, file: P) {
-        self.tests.push(QueueEntry {
-            path: file.into(),
-            state: State::New,
-        });
+        self.tests
+            .push(QueueEntry {
+                      path: file.into(),
+                      state: State::New,
+                  });
     }
 
     /// Begin running tests concurrently.
@@ -206,7 +207,9 @@ impl TestRunner {
         }
 
         // Check for any asynchronous replies without blocking.
-        while let Some(reply) = self.threads.as_mut().and_then(ConcurrentRunner::try_get) {
+        while let Some(reply) = self.threads
+                  .as_mut()
+                  .and_then(ConcurrentRunner::try_get) {
             self.handle_reply(reply);
         }
     }
@@ -303,12 +306,12 @@ impl TestRunner {
             return;
         }
 
-        for t in self.tests.iter().filter(|entry| match **entry {
-            QueueEntry { state: State::Done(Ok(dur)), .. } => {
-                dur > cut
-            }
-            _ => false,
-        }) {
+        for t in self.tests
+            .iter()
+            .filter(|entry| match **entry {
+                QueueEntry { state: State::Done(Ok(dur)), .. } => dur > cut,
+                _ => false,
+            }) {
             println!("slow: {}", t)
         }
 
diff --git a/cranelift/src/filetest/runone.rs b/cranelift/src/filetest/runone.rs
index ec61c98240..bc16725d74 100644
--- a/cranelift/src/filetest/runone.rs
+++ b/cranelift/src/filetest/runone.rs
@@ -120,5 +120,6 @@ fn run_one_test<'a>(tuple: (&'a SubTest, &'a Flags, Option<&'a TargetIsa>),
         context.verified = true;
     }
 
-    test.run(func, context).map_err(|e| format!("{}: {}", name, e))
+    test.run(func, context)
+        .map_err(|e| format!("{}: {}", name, e))
 }
diff --git a/cranelift/src/filetest/subtest.rs b/cranelift/src/filetest/subtest.rs
index ede013ef9f..923439d490 100644
--- a/cranelift/src/filetest/subtest.rs
+++ b/cranelift/src/filetest/subtest.rs
@@ -76,11 +76,13 @@ impl<'a> filecheck::VariableMap for Context<'a> {
 /// Run filecheck on `text`, using directives extracted from `context`.
 pub fn run_filecheck(text: &str, context: &Context) -> Result<()> {
     let checker = build_filechecker(context)?;
-    if checker.check(&text, context).map_err(|e| format!("filecheck: {}", e))? {
+    if checker.check(&text, context)
+           .map_err(|e| format!("filecheck: {}", e))? {
         Ok(())
     } else {
         // Filecheck mismatch. Emit an explanation as output.
-        let (_, explain) = checker.explain(&text, context).map_err(|e| format!("explain: {}", e))?;
+        let (_, explain) = checker.explain(&text, context)
+            .map_err(|e| format!("explain: {}", e))?;
         Err(format!("filecheck failed:\n{}{}", checker, explain))
     }
 }
@@ -90,10 +92,12 @@ pub fn build_filechecker(context: &Context) -> Result<Checker> {
     let mut builder = CheckerBuilder::new();
     // Preamble comments apply to all functions.
     for comment in context.preamble_comments {
-        builder.directive(comment.text).map_err(|e| format!("filecheck: {}", e))?;
+        builder.directive(comment.text)
+            .map_err(|e| format!("filecheck: {}", e))?;
     }
     for comment in &context.details.comments {
-        builder.directive(comment.text).map_err(|e| format!("filecheck: {}", e))?;
+        builder.directive(comment.text)
+            .map_err(|e| format!("filecheck: {}", e))?;
     }
     let checker = builder.finish();
     if checker.is_empty() {
diff --git a/cranelift/src/rsfilecheck.rs b/cranelift/src/rsfilecheck.rs
index 734c067a36..79d913bce9 100644
--- a/cranelift/src/rsfilecheck.rs
+++ b/cranelift/src/rsfilecheck.rs
@@ -18,10 +18,12 @@ pub fn run(files: Vec<String>, verbose: bool) -> CommandResult {
     }
 
     let mut buffer = String::new();
-    io::stdin().read_to_string(&mut buffer).map_err(|e| format!("stdin: {}", e))?;
+    io::stdin().read_to_string(&mut buffer)
+        .map_err(|e| format!("stdin: {}", e))?;
 
     if verbose {
-        let (success, explain) = checker.explain(&buffer, NO_VARIABLES).map_err(|e| e.to_string())?;
+        let (success, explain) = checker.explain(&buffer, NO_VARIABLES)
+            .map_err(|e| e.to_string())?;
         print!("{}", explain);
         if success {
             println!("OK");
@@ -29,10 +31,12 @@ pub fn run(files: Vec<String>, verbose: bool) -> CommandResult {
         } else {
             Err("Check failed".to_string())
         }
-    } else if checker.check(&buffer, NO_VARIABLES).map_err(|e| e.to_string())? {
+    } else if checker.check(&buffer, NO_VARIABLES)
+                  .map_err(|e| e.to_string())? {
         Ok(())
     } else {
-        let (_, explain) = checker.explain(&buffer, NO_VARIABLES).map_err(|e| e.to_string())?;
+        let (_, explain) = checker.explain(&buffer, NO_VARIABLES)
+            .map_err(|e| e.to_string())?;
         print!("{}", explain);
         Err("Check failed".to_string())
     }
@@ -41,6 +45,7 @@ pub fn run(files: Vec<String>, verbose: bool) -> CommandResult {
 fn read_checkfile(filename: &str) -> Result<Checker, String> {
     let buffer = read_to_string(&filename).map_err(|e| format!("{}: {}", filename, e))?;
     let mut builder = CheckerBuilder::new();
-    builder.text(&buffer).map_err(|e| format!("{}: {}", filename, e))?;
+    builder.text(&buffer)
+        .map_err(|e| format!("{}: {}", filename, e))?;
     Ok(builder.finish())
 }
diff --git a/cranelift/test-all.sh b/cranelift/test-all.sh
index 7c4938902f..522cf594a2 100755
--- a/cranelift/test-all.sh
+++ b/cranelift/test-all.sh
@@ -30,7 +30,7 @@ function banner() {
 # rustfmt is installed.
 #
 # This version should always be bumped to the newest version available.
-RUSTFMT_VERSION="0.8.0"
+RUSTFMT_VERSION="0.8.1"
 
 if cargo install --list | grep -q "^rustfmt v$RUSTFMT_VERSION"; then
     banner "Rust formatting"
diff --git a/cranelift/tests/cfg_traversal.rs b/cranelift/tests/cfg_traversal.rs
index f7fb119463..903b1a1ec1 100644
--- a/cranelift/tests/cfg_traversal.rs
+++ b/cranelift/tests/cfg_traversal.rs
@@ -9,7 +9,10 @@ use self::cton_reader::parse_functions;
 fn test_reverse_postorder_traversal(function_source: &str, ebb_order: Vec<u32>) {
     let func = &parse_functions(function_source).unwrap()[0];
     let cfg = ControlFlowGraph::with_function(&func);
-    let ebbs = ebb_order.iter().map(|n| Ebb::with_number(*n).unwrap()).collect::<Vec<Ebb>>();
+    let ebbs = ebb_order
+        .iter()
+        .map(|n| Ebb::with_number(*n).unwrap())
+        .collect::<Vec<Ebb>>();
 
     let mut postorder_ebbs = cfg.postorder_ebbs();
     let mut postorder_map = EntityMap::with_capacity(postorder_ebbs.len());
diff --git a/lib/cretonne/src/abi.rs b/lib/cretonne/src/abi.rs
index 14a034b8fb..32210fecfb 100644
--- a/lib/cretonne/src/abi.rs
+++ b/lib/cretonne/src/abi.rs
@@ -108,7 +108,10 @@ pub fn legalize_args(args: &mut Vec, aa: &mut AA)
             }
             // Split this argument into two smaller ones. Then revisit both.
             ArgAction::Convert(conv) => {
-                let new_arg = ArgumentType { value_type: conv.apply(arg.value_type), ..arg };
+                let new_arg = ArgumentType {
+                    value_type: conv.apply(arg.value_type),
+                    ..arg
+                };
                 args[argno].value_type = new_arg.value_type;
                 if conv.is_split() {
                     args.insert(argno + 1, new_arg);
diff --git a/lib/cretonne/src/context.rs b/lib/cretonne/src/context.rs
index 2d5631f62c..051adc47aa 100644
--- a/lib/cretonne/src/context.rs
+++ b/lib/cretonne/src/context.rs
@@ -69,6 +69,7 @@ impl Context {
 
     /// Run the register allocator.
     pub fn regalloc(&mut self, isa: &TargetIsa) {
-        self.regalloc.run(isa, &mut self.func, &self.cfg, &self.domtree);
+        self.regalloc
+            .run(isa, &mut self.func, &self.cfg, &self.domtree);
     }
 }
diff --git a/lib/cretonne/src/dominator_tree.rs b/lib/cretonne/src/dominator_tree.rs
index b014e85ec1..13d1175d8a 100644
--- a/lib/cretonne/src/dominator_tree.rs
+++ b/lib/cretonne/src/dominator_tree.rs
@@ -88,7 +88,8 @@ impl DominatorTree {
         // Run a finger up the dominator tree from b until we see a.
         // Do nothing if b is unreachable.
while rpo_a < self.nodes[ebb_b].rpo_number { - b = self.idom(ebb_b).expect("Shouldn't meet unreachable here."); + b = self.idom(ebb_b) + .expect("Shouldn't meet unreachable here."); ebb_b = layout.inst_ebb(b).expect("Dominator got removed."); } @@ -209,8 +210,9 @@ impl DominatorTree { .filter(|&(ebb, _)| self.is_reachable(ebb)); // The RPO must visit at least one predecessor before this node. - let mut idom = - reachable_preds.next().expect("EBB node must have one reachable predecessor"); + let mut idom = reachable_preds + .next() + .expect("EBB node must have one reachable predecessor"); for pred in reachable_preds { idom = self.common_dominator(idom, pred, layout); diff --git a/lib/cretonne/src/entity_list.rs b/lib/cretonne/src/entity_list.rs index 1a05464c21..0b90265470 100644 --- a/lib/cretonne/src/entity_list.rs +++ b/lib/cretonne/src/entity_list.rs @@ -135,7 +135,9 @@ impl ListPool { // The `wrapping_sub` handles the special case 0, which is the empty list. This way, the // cost of the bounds check that we have to pay anyway is co-opted to handle the special // case of the empty list. - self.data.get(idx.wrapping_sub(1)).map(|len| len.index()) + self.data + .get(idx.wrapping_sub(1)) + .map(|len| len.index()) } /// Allocate a storage block with a size given by `sclass`. diff --git a/lib/cretonne/src/ir/builder.rs b/lib/cretonne/src/ir/builder.rs index 8880351d9b..fb5c4ba20a 100644 --- a/lib/cretonne/src/ir/builder.rs +++ b/lib/cretonne/src/ir/builder.rs @@ -65,10 +65,7 @@ impl<'c, 'fc, 'fd> InsertBuilder<'c, 'fc, 'fd> { pub fn new(dfg: &'fd mut DataFlowGraph, pos: &'c mut Cursor<'fc>) -> InsertBuilder<'c, 'fc, 'fd> { - InsertBuilder { - dfg: dfg, - pos: pos, - } + InsertBuilder { dfg: dfg, pos: pos } } } @@ -182,8 +179,9 @@ impl<'f> InstBuilderBase<'f> for ReplaceBuilder<'f> { // Normally, make_inst_results() would also set the first result type, but we're not // going to call that, so set it manually. - *self.dfg[self.inst].first_type_mut() = - self.dfg.compute_result_type(self.inst, 0, ctrl_typevar).unwrap_or_default(); + *self.dfg[self.inst].first_type_mut() = self.dfg + .compute_result_type(self.inst, 0, ctrl_typevar) + .unwrap_or_default(); } (self.inst, self.dfg) diff --git a/lib/cretonne/src/ir/dfg.rs b/lib/cretonne/src/ir/dfg.rs index f7d214182b..22ce7a116d 100644 --- a/lib/cretonne/src/ir/dfg.rs +++ b/lib/cretonne/src/ir/dfg.rs @@ -356,25 +356,37 @@ impl DataFlowGraph { /// Get the fixed value arguments on `inst` as a slice. pub fn inst_fixed_args(&self, inst: Inst) -> &[Value] { - let fixed_args = self[inst].opcode().constraints().fixed_value_arguments(); + let fixed_args = self[inst] + .opcode() + .constraints() + .fixed_value_arguments(); &self.inst_args(inst)[..fixed_args] } /// Get the fixed value arguments on `inst` as a mutable slice. pub fn inst_fixed_args_mut(&mut self, inst: Inst) -> &mut [Value] { - let fixed_args = self[inst].opcode().constraints().fixed_value_arguments(); + let fixed_args = self[inst] + .opcode() + .constraints() + .fixed_value_arguments(); &mut self.inst_args_mut(inst)[..fixed_args] } /// Get the variable value arguments on `inst` as a slice. pub fn inst_variable_args(&self, inst: Inst) -> &[Value] { - let fixed_args = self[inst].opcode().constraints().fixed_value_arguments(); + let fixed_args = self[inst] + .opcode() + .constraints() + .fixed_value_arguments(); &self.inst_args(inst)[fixed_args..] } /// Get the variable value arguments on `inst` as a mutable slice. 
pub fn inst_variable_args_mut(&mut self, inst: Inst) -> &mut [Value] { - let fixed_args = self[inst].opcode().constraints().fixed_value_arguments(); + let fixed_args = self[inst] + .opcode() + .constraints() + .fixed_value_arguments(); &mut self.inst_args_mut(inst)[fixed_args..] } @@ -444,8 +456,8 @@ impl DataFlowGraph { // Update the second_result pointer in `inst`. if head.is_some() { *self.insts[inst] - .second_result_mut() - .expect("instruction format doesn't allow multiple results") = head.into(); + .second_result_mut() + .expect("instruction format doesn't allow multiple results") = head.into(); } *self.insts[inst].first_type_mut() = first_type.unwrap_or_default(); @@ -505,7 +517,12 @@ impl DataFlowGraph { (inst, 1) } ExpandedValue::Table(idx) => { - if let ValueData::Inst { num, inst, ref mut next, .. } = self.extended_values[idx] { + if let ValueData::Inst { + num, + inst, + ref mut next, + .. + } = self.extended_values[idx] { assert!(next.is_none(), "last_res is not the last result"); *next = res.into(); assert!(num < u16::MAX, "Too many arguments to EBB"); @@ -518,8 +535,12 @@ impl DataFlowGraph { // Now update `res` itself. if let ExpandedValue::Table(idx) = res.expand() { - if let ValueData::Inst { ref mut num, ref mut inst, ref mut next, .. } = - self.extended_values[idx] { + if let ValueData::Inst { + ref mut num, + ref mut inst, + ref mut next, + .. + } = self.extended_values[idx] { *num = res_num; *inst = res_inst; *next = None.into(); @@ -565,7 +586,8 @@ impl DataFlowGraph { /// /// Returns the new `Inst` reference where the original instruction has been moved. pub fn redefine_first_value(&mut self, pos: &mut Cursor) -> Inst { - let orig = pos.current_inst().expect("Cursor must point at an instruction"); + let orig = pos.current_inst() + .expect("Cursor must point at an instruction"); let data = self[orig].clone(); // After cloning, any secondary values are attached to both copies. Don't do that, we only // want them on the new clone. @@ -630,12 +652,13 @@ impl DataFlowGraph { } // Not a fixed result, try to extract a return type from the call signature. - self.call_signature(inst).and_then(|sigref| { - self.signatures[sigref] - .return_types - .get(result_idx - fixed_results) - .map(|&arg| arg.value_type) - }) + self.call_signature(inst) + .and_then(|sigref| { + self.signatures[sigref] + .return_types + .get(result_idx - fixed_results) + .map(|&arg| arg.value_type) + }) } } @@ -815,8 +838,12 @@ impl DataFlowGraph { // Now update `arg` itself. let arg_ebb = ebb; if let ExpandedValue::Table(idx) = arg.expand() { - if let ValueData::Arg { ref mut num, ebb, ref mut next, .. } = - self.extended_values[idx] { + if let ValueData::Arg { + ref mut num, + ebb, + ref mut next, + .. 
+ } = self.extended_values[idx] { *num = arg_num; *next = None.into(); assert_eq!(arg_ebb, ebb, "{} should already belong to EBB", arg); diff --git a/lib/cretonne/src/ir/entities.rs b/lib/cretonne/src/ir/entities.rs index 42d109bf7a..876598d13a 100644 --- a/lib/cretonne/src/ir/entities.rs +++ b/lib/cretonne/src/ir/entities.rs @@ -286,14 +286,18 @@ mod tests { assert_eq!(Value::table_with_number(1).unwrap().to_string(), "vx1"); assert_eq!(Value::direct_with_number(u32::MAX / 2), None); - assert_eq!(match Value::direct_with_number(u32::MAX / 2 - 1).unwrap().expand() { + assert_eq!(match Value::direct_with_number(u32::MAX / 2 - 1) + .unwrap() + .expand() { ExpandedValue::Direct(i) => i.index() as u32, _ => u32::MAX, }, u32::MAX / 2 - 1); assert_eq!(Value::table_with_number(u32::MAX / 2), None); - assert_eq!(match Value::table_with_number(u32::MAX / 2 - 1).unwrap().expand() { + assert_eq!(match Value::table_with_number(u32::MAX / 2 - 1) + .unwrap() + .expand() { ExpandedValue::Table(i) => i as u32, _ => u32::MAX, }, diff --git a/lib/cretonne/src/ir/extfunc.rs b/lib/cretonne/src/ir/extfunc.rs index bc293d1169..43daaed192 100644 --- a/lib/cretonne/src/ir/extfunc.rs +++ b/lib/cretonne/src/ir/extfunc.rs @@ -221,7 +221,8 @@ mod tests { assert_eq!(sig.to_string(), "(i32)"); sig.return_types.push(ArgumentType::new(F32)); assert_eq!(sig.to_string(), "(i32) -> f32"); - sig.argument_types.push(ArgumentType::new(I32.by(4).unwrap())); + sig.argument_types + .push(ArgumentType::new(I32.by(4).unwrap())); assert_eq!(sig.to_string(), "(i32, i32x4) -> f32"); sig.return_types.push(ArgumentType::new(B8)); assert_eq!(sig.to_string(), "(i32, i32x4) -> f32, b8"); diff --git a/lib/cretonne/src/ir/instructions.rs b/lib/cretonne/src/ir/instructions.rs index d3ca0bc62f..1c292f473a 100644 --- a/lib/cretonne/src/ir/instructions.rs +++ b/lib/cretonne/src/ir/instructions.rs @@ -304,15 +304,21 @@ impl InstructionData { /// here. pub fn analyze_branch<'a>(&'a self, pool: &'a ValueListPool) -> BranchInfo<'a> { match self { - &InstructionData::Jump { destination, ref args, .. } => { - BranchInfo::SingleDest(destination, &args.as_slice(pool)) - } - &InstructionData::Branch { destination, ref args, .. } => { - BranchInfo::SingleDest(destination, &args.as_slice(pool)[1..]) - } - &InstructionData::BranchIcmp { destination, ref args, .. } => { - BranchInfo::SingleDest(destination, &args.as_slice(pool)[2..]) - } + &InstructionData::Jump { + destination, + ref args, + .. + } => BranchInfo::SingleDest(destination, &args.as_slice(pool)), + &InstructionData::Branch { + destination, + ref args, + .. + } => BranchInfo::SingleDest(destination, &args.as_slice(pool)[1..]), + &InstructionData::BranchIcmp { + destination, + ref args, + .. + } => BranchInfo::SingleDest(destination, &args.as_slice(pool)[2..]), &InstructionData::BranchTable { table, .. 
} => BranchInfo::Table(table), _ => BranchInfo::NotABranch, } @@ -601,9 +607,21 @@ impl OperandConstraint { Same => Bound(ctrl_type), LaneOf => Bound(ctrl_type.lane_type()), AsBool => Bound(ctrl_type.as_bool()), - HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")), - DoubleWidth => Bound(ctrl_type.double_width().expect("invalid type for double_width")), - HalfVector => Bound(ctrl_type.half_vector().expect("invalid type for half_vector")), + HalfWidth => { + Bound(ctrl_type + .half_width() + .expect("invalid type for half_width")) + } + DoubleWidth => { + Bound(ctrl_type + .double_width() + .expect("invalid type for double_width")) + } + HalfVector => { + Bound(ctrl_type + .half_vector() + .expect("invalid type for half_vector")) + } DoubleVector => Bound(ctrl_type.by(2).expect("invalid type for double_vector")), } } diff --git a/lib/cretonne/src/ir/jumptable.rs b/lib/cretonne/src/ir/jumptable.rs index e7f98a329d..dd9bc5c187 100644 --- a/lib/cretonne/src/ir/jumptable.rs +++ b/lib/cretonne/src/ir/jumptable.rs @@ -66,15 +66,14 @@ impl JumpTableData { /// /// This returns an iterator that skips any empty slots in the table. pub fn entries<'a>(&'a self) -> Entries { - Entries(self.table - .iter() - .cloned() - .enumerate()) + Entries(self.table.iter().cloned().enumerate()) } /// Checks if any of the entries branch to `ebb`. pub fn branches_to(&self, ebb: Ebb) -> bool { - self.table.iter().any(|target_ebb| target_ebb.expand() == Some(ebb)) + self.table + .iter() + .any(|target_ebb| target_ebb.expand() == Some(ebb)) } /// Access the whole table as a mutable slice. @@ -109,10 +108,7 @@ impl Display for JumpTableData { Some(first) => write!(fmt, "jump_table {}", first)?, } - for dest in self.table - .iter() - .skip(1) - .map(|e| e.expand()) { + for dest in self.table.iter().skip(1).map(|e| e.expand()) { match dest { None => write!(fmt, ", 0")?, Some(ebb) => write!(fmt, ", {}", ebb)?, diff --git a/lib/cretonne/src/ir/layout.rs b/lib/cretonne/src/ir/layout.rs index c34d6a0aba..4f5d2355b2 100644 --- a/lib/cretonne/src/ir/layout.rs +++ b/lib/cretonne/src/ir/layout.rs @@ -125,7 +125,10 @@ impl Layout { /// Get the last sequence number in `ebb`. fn last_ebb_seq(&self, ebb: Ebb) -> SequenceNumber { // Get the seq of the last instruction if it exists, otherwise use the EBB header seq. - self.ebbs[ebb].last_inst.map(|inst| self.insts[inst].seq).unwrap_or(self.ebbs[ebb].seq) + self.ebbs[ebb] + .last_inst + .map(|inst| self.insts[inst].seq) + .unwrap_or(self.ebbs[ebb].seq) } /// Assign a valid sequence number to `ebb` such that the numbers are still monotonic. This may @@ -134,7 +137,10 @@ impl Layout { assert!(self.is_ebb_inserted(ebb)); // Get the sequence number immediately before `ebb`, or 0. - let prev_seq = self.ebbs[ebb].prev.map(|prev_ebb| self.last_ebb_seq(prev_ebb)).unwrap_or(0); + let prev_seq = self.ebbs[ebb] + .prev + .map(|prev_ebb| self.last_ebb_seq(prev_ebb)) + .unwrap_or(0); // Get the sequence number immediately following `ebb`. let next_seq = if let Some(inst) = self.ebbs[ebb].first_inst.expand() { @@ -159,7 +165,8 @@ impl Layout { /// Assign a valid sequence number to `inst` such that the numbers are still monotonic. This may /// require renumbering. fn assign_inst_seq(&mut self, inst: Inst) { - let ebb = self.inst_ebb(inst).expect("inst must be inserted before assigning an seq"); + let ebb = self.inst_ebb(inst) + .expect("inst must be inserted before assigning an seq"); // Get the sequence number immediately before `inst`. 
let prev_seq = match self.insts[inst].prev.expand() { @@ -436,8 +443,8 @@ impl Layout { /// Insert `inst` before the instruction `before` in the same EBB. pub fn insert_inst(&mut self, inst: Inst, before: Inst) { assert_eq!(self.inst_ebb(inst), None); - let ebb = - self.inst_ebb(before).expect("Instruction before insertion point not in the layout"); + let ebb = self.inst_ebb(before) + .expect("Instruction before insertion point not in the layout"); let after = self.insts[before].prev; { let inst_node = self.insts.ensure(inst); @@ -485,8 +492,8 @@ impl Layout { /// i4 /// ``` pub fn split_ebb(&mut self, new_ebb: Ebb, before: Inst) { - let old_ebb = - self.inst_ebb(before).expect("The `before` instruction must be in the layout"); + let old_ebb = self.inst_ebb(before) + .expect("The `before` instruction must be in the layout"); assert!(!self.is_ebb_inserted(new_ebb)); // Insert new_ebb after old_ebb. @@ -786,8 +793,9 @@ impl<'f> Cursor<'f> { self.pos = At(next); Some(next) } else { - self.pos = - After(self.layout.inst_ebb(inst).expect("current instruction removed?")); + self.pos = After(self.layout + .inst_ebb(inst) + .expect("current instruction removed?")); None } } @@ -837,8 +845,9 @@ impl<'f> Cursor<'f> { self.pos = At(prev); Some(prev) } else { - self.pos = - Before(self.layout.inst_ebb(inst).expect("current instruction removed?")); + self.pos = Before(self.layout + .inst_ebb(inst) + .expect("current instruction removed?")); None } } diff --git a/lib/cretonne/src/ir/types.rs b/lib/cretonne/src/ir/types.rs index 1321636add..550f25804b 100644 --- a/lib/cretonne/src/ir/types.rs +++ b/lib/cretonne/src/ir/types.rs @@ -348,12 +348,7 @@ mod tests { assert_eq!(big.bits(), 64 * 256); assert_eq!(big.half_vector().unwrap().to_string(), "f64x128"); - assert_eq!(B1.by(2) - .unwrap() - .half_vector() - .unwrap() - .to_string(), - "b1"); + assert_eq!(B1.by(2).unwrap().half_vector().unwrap().to_string(), "b1"); assert_eq!(I32.half_vector(), None); assert_eq!(VOID.half_vector(), None); @@ -383,12 +378,7 @@ mod tests { assert_eq!(B1.by(8).unwrap().to_string(), "b1x8"); assert_eq!(B8.by(1).unwrap().to_string(), "b8"); assert_eq!(B16.by(256).unwrap().to_string(), "b16x256"); - assert_eq!(B32.by(4) - .unwrap() - .by(2) - .unwrap() - .to_string(), - "b32x8"); + assert_eq!(B32.by(4).unwrap().by(2).unwrap().to_string(), "b32x8"); assert_eq!(B64.by(8).unwrap().to_string(), "b64x8"); assert_eq!(I8.by(64).unwrap().to_string(), "i8x64"); assert_eq!(F64.by(2).unwrap().to_string(), "f64x2"); diff --git a/lib/cretonne/src/isa/enc_tables.rs b/lib/cretonne/src/isa/enc_tables.rs index 71a3eba2e0..cfb0066b8c 100644 --- a/lib/cretonne/src/isa/enc_tables.rs +++ b/lib/cretonne/src/isa/enc_tables.rs @@ -90,13 +90,13 @@ pub fn lookup_enclist(ctrl_typevar: Type, Legalize::Expand }) .and_then(|l1idx| { - let l1ent = &level1_table[l1idx]; - let l2off = l1ent.offset.into() as usize; - let l2tab = &level2_table[l2off..l2off + (1 << l1ent.log2len)]; - probe(l2tab, opcode, opcode as usize) - .map(|l2idx| l2tab[l2idx].offset.into() as usize) - .ok_or(Legalize::Expand) - }) + let l1ent = &level1_table[l1idx]; + let l2off = l1ent.offset.into() as usize; + let l2tab = &level2_table[l2off..l2off + (1 << l1ent.log2len)]; + probe(l2tab, opcode, opcode as usize) + .map(|l2idx| l2tab[l2idx].offset.into() as usize) + .ok_or(Legalize::Expand) + }) } /// Encoding list entry. 
diff --git a/lib/cretonne/src/isa/riscv/binemit.rs b/lib/cretonne/src/isa/riscv/binemit.rs index 057fac11aa..213d8a132e 100644 --- a/lib/cretonne/src/isa/riscv/binemit.rs +++ b/lib/cretonne/src/isa/riscv/binemit.rs @@ -235,7 +235,11 @@ fn put_sb(bits: u16, rs1: RegUnit, rs2: RegUnit, sink: &m } fn recipe_sb(func: &Function, inst: Inst, sink: &mut CS) { - if let InstructionData::BranchIcmp { destination, ref args, .. } = func.dfg[inst] { + if let InstructionData::BranchIcmp { + destination, + ref args, + .. + } = func.dfg[inst] { let args = &args.as_slice(&func.dfg.value_lists)[0..2]; sink.reloc_ebb(RelocKind::Branch.into(), destination); put_sb(func.encodings[inst].bits(), @@ -248,7 +252,11 @@ fn recipe_sb(func: &Function, inst: Inst, sink: &mut CS) } fn recipe_sbzero(func: &Function, inst: Inst, sink: &mut CS) { - if let InstructionData::Branch { destination, ref args, .. } = func.dfg[inst] { + if let InstructionData::Branch { + destination, + ref args, + .. + } = func.dfg[inst] { let args = &args.as_slice(&func.dfg.value_lists)[0..1]; sink.reloc_ebb(RelocKind::Branch.into(), destination); put_sb(func.encodings[inst].bits(), diff --git a/lib/cretonne/src/legalizer/boundary.rs b/lib/cretonne/src/legalizer/boundary.rs index 2e3766057d..19e06b7ba4 100644 --- a/lib/cretonne/src/legalizer/boundary.rs +++ b/lib/cretonne/src/legalizer/boundary.rs @@ -107,7 +107,8 @@ fn legalize_inst_results(dfg: &mut DataFlowGraph, -> Inst where ResType: FnMut(&DataFlowGraph, usize) -> ArgumentType { - let mut call = pos.current_inst().expect("Cursor must point to a call instruction"); + let mut call = pos.current_inst() + .expect("Cursor must point to a call instruction"); // We theoretically allow for call instructions that return a number of fixed results before // the call return values. In practice, it doesn't happen. @@ -364,10 +365,13 @@ fn legalize_inst_arguments(dfg: &mut DataFlowGraph, mut get_abi_type: ArgType) where ArgType: FnMut(&DataFlowGraph, usize) -> ArgumentType { - let inst = pos.current_inst().expect("Cursor must point to a call instruction"); + let inst = pos.current_inst() + .expect("Cursor must point to a call instruction"); // Lift the value list out of the call instruction so we modify it. - let mut vlist = dfg[inst].take_value_list().expect("Call must have a value list"); + let mut vlist = dfg[inst] + .take_value_list() + .expect("Call must have a value list"); // The value list contains all arguments to the instruction, including the callee on an // indirect call which isn't part of the call arguments that must match the ABI signature. @@ -405,7 +409,9 @@ fn legalize_inst_arguments(dfg: &mut DataFlowGraph, let mut abi_arg = 0; for old_arg in 0..have_args { - let old_value = vlist.get(old_arg_offset + old_arg, &dfg.value_lists).unwrap(); + let old_value = vlist + .get(old_arg_offset + old_arg, &dfg.value_lists) + .unwrap(); let mut put_arg = |dfg: &mut DataFlowGraph, arg| { let abi_type = get_abi_type(dfg, abi_arg); if dfg.value_type(arg) == abi_type.value_type { @@ -435,7 +441,8 @@ fn legalize_inst_arguments(dfg: &mut DataFlowGraph, /// /// Returns `true` if any instructions were inserted. pub fn handle_call_abi(dfg: &mut DataFlowGraph, cfg: &ControlFlowGraph, pos: &mut Cursor) -> bool { - let mut inst = pos.current_inst().expect("Cursor must point to a call instruction"); + let mut inst = pos.current_inst() + .expect("Cursor must point to a call instruction"); // Start by checking if the argument types already match the signature. 
let sig_ref = match check_call_signature(dfg, inst) { @@ -475,7 +482,8 @@ pub fn handle_return_abi(dfg: &mut DataFlowGraph, pos: &mut Cursor, sig: &Signature) -> bool { - let inst = pos.current_inst().expect("Cursor must point to a return instruction"); + let inst = pos.current_inst() + .expect("Cursor must point to a return instruction"); // Check if the returned types already match the signature. if check_return_signature(dfg, inst, sig) { diff --git a/lib/cretonne/src/legalizer/split.rs b/lib/cretonne/src/legalizer/split.rs index 3dda54abe7..d1f55a6266 100644 --- a/lib/cretonne/src/legalizer/split.rs +++ b/lib/cretonne/src/legalizer/split.rs @@ -125,7 +125,9 @@ fn split_any(dfg: &mut DataFlowGraph, "Predecessor not a branch: {}", dfg.display_inst(inst)); let fixed_args = branch_opc.constraints().fixed_value_arguments(); - let mut args = dfg[inst].take_value_list().expect("Branches must have value lists."); + let mut args = dfg[inst] + .take_value_list() + .expect("Branches must have value lists."); let num_args = args.len(&dfg.value_lists); // Get the old value passed to the EBB argument we're repairing. let old_arg = args.get(fixed_args + repair.num, &dfg.value_lists) @@ -142,12 +144,14 @@ fn split_any(dfg: &mut DataFlowGraph, let (lo, hi) = split_value(dfg, pos, old_arg, repair.concat, &mut repairs); // The `lo` part replaces the original argument. - *args.get_mut(fixed_args + repair.num, &mut dfg.value_lists).unwrap() = lo; + *args.get_mut(fixed_args + repair.num, &mut dfg.value_lists) + .unwrap() = lo; // The `hi` part goes at the end. Since multiple repairs may have been scheduled to the // same EBB, there could be multiple arguments missing. if num_args > fixed_args + repair.hi_num { - *args.get_mut(fixed_args + repair.hi_num, &mut dfg.value_lists).unwrap() = hi; + *args.get_mut(fixed_args + repair.hi_num, &mut dfg.value_lists) + .unwrap() = hi; } else { // We need to append one or more arguments. If we're adding more than one argument, // there must be pending repairs on the stack that will fill in the correct values diff --git a/lib/cretonne/src/partition_slice.rs b/lib/cretonne/src/partition_slice.rs index 0986613040..9626b5fd37 100644 --- a/lib/cretonne/src/partition_slice.rs +++ b/lib/cretonne/src/partition_slice.rs @@ -35,10 +35,7 @@ mod tests { fn check(x: &[u32], want: &[u32]) { assert_eq!(x.len(), want.len()); - let want_count = want.iter() - .cloned() - .filter(|&x| x % 10 == 0) - .count(); + let want_count = want.iter().cloned().filter(|&x| x % 10 == 0).count(); let mut v = Vec::new(); v.extend(x.iter().cloned()); let count = partition_slice(&mut v[..], |&x| x % 10 == 0); diff --git a/lib/cretonne/src/regalloc/coloring.rs b/lib/cretonne/src/regalloc/coloring.rs index 9bc10c30b3..b8c08bd278 100644 --- a/lib/cretonne/src/regalloc/coloring.rs +++ b/lib/cretonne/src/regalloc/coloring.rs @@ -232,7 +232,9 @@ impl<'a> Context<'a> { if let Affinity::Reg(rc_index) = lv.affinity { let regclass = self.reginfo.rc(rc_index); // TODO: Fall back to a top-level super-class. Sub-classes are only hints. 
- let regunit = regs.iter(regclass).next().expect("Out of registers for arguments"); + let regunit = regs.iter(regclass) + .next() + .expect("Out of registers for arguments"); regs.take(regclass, regunit); *locations.ensure(lv.value) = ValueLoc::Reg(regunit); } diff --git a/lib/cretonne/src/regalloc/context.rs b/lib/cretonne/src/regalloc/context.rs index 195c480372..2c59ab9797 100644 --- a/lib/cretonne/src/regalloc/context.rs +++ b/lib/cretonne/src/regalloc/context.rs @@ -52,6 +52,7 @@ impl Context { // TODO: Second pass: Spilling. // Third pass: Reload and coloring. - self.coloring.run(isa, func, domtree, &mut self.liveness, &mut self.tracker); + self.coloring + .run(isa, func, domtree, &mut self.liveness, &mut self.tracker); } } diff --git a/lib/cretonne/src/regalloc/live_value_tracker.rs b/lib/cretonne/src/regalloc/live_value_tracker.rs index 19ed62e8fb..75b18ef019 100644 --- a/lib/cretonne/src/regalloc/live_value_tracker.rs +++ b/lib/cretonne/src/regalloc/live_value_tracker.rs @@ -68,11 +68,12 @@ impl LiveValueVec { /// Add a new live value to `values`. fn push(&mut self, value: Value, endpoint: Inst, affinity: Affinity) { - self.values.push(LiveValue { - value: value, - endpoint: endpoint, - affinity: affinity, - }); + self.values + .push(LiveValue { + value: value, + endpoint: endpoint, + affinity: affinity, + }); } /// Remove all elements. @@ -163,11 +164,14 @@ impl LiveValueTracker { // If the immediate dominator exits, we must have a stored list for it. This is a // requirement to the order EBBs are visited: All dominators must have been processed // before the current EBB. - let idom_live_list = - self.idom_sets.get(&idom).expect("No stored live set for dominator"); + let idom_live_list = self.idom_sets + .get(&idom) + .expect("No stored live set for dominator"); // Get just the values that are live-in to `ebb`. for &value in idom_live_list.as_slice(&self.idom_pool) { - let lr = liveness.get(value).expect("Immediate dominator value has no live range"); + let lr = liveness + .get(value) + .expect("Immediate dominator value has no live range"); // Check if this value is live-in here. if let Some(endpoint) = lr.livein_local_end(ebb, program_order) { @@ -179,7 +183,9 @@ impl LiveValueTracker { // Now add all the live arguments to `ebb`. let first_arg = self.live.values.len(); for value in dfg.ebb_args(ebb) { - let lr = liveness.get(value).expect("EBB argument value has no live range"); + let lr = liveness + .get(value) + .expect("EBB argument value has no live range"); assert_eq!(lr.def(), ebb.into()); match lr.def_local_end().into() { ExpandedProgramPoint::Inst(endpoint) => { @@ -259,16 +265,15 @@ impl LiveValueTracker { /// Save the current set of live values so it is associated with `idom`. fn save_idom_live_set(&mut self, idom: Inst) { - let values = self.live - .values - .iter() - .map(|lv| lv.value); + let values = self.live.values.iter().map(|lv| lv.value); let pool = &mut self.idom_pool; // If there already is a set saved for `idom`, just keep it. 
- self.idom_sets.entry(idom).or_insert_with(|| { - let mut list = ValueList::default(); - list.extend(values, pool); - list - }); + self.idom_sets + .entry(idom) + .or_insert_with(|| { + let mut list = ValueList::default(); + list.extend(values, pool); + list + }); } } diff --git a/lib/cretonne/src/regalloc/liveness.rs b/lib/cretonne/src/regalloc/liveness.rs index 40faff0591..3344c5b841 100644 --- a/lib/cretonne/src/regalloc/liveness.rs +++ b/lib/cretonne/src/regalloc/liveness.rs @@ -205,7 +205,8 @@ fn get_or_create<'a>(lrset: &'a mut LiveRangeSet, def = inst.into(); // Initialize the affinity from the defining instruction's result constraints. // Don't do this for call return values which are always tied to a single register. - affinity = recipe_constraints.get(func.encodings[inst].recipe()) + affinity = recipe_constraints + .get(func.encodings[inst].recipe()) .and_then(|rc| rc.outs.get(rnum)) .map(Affinity::new) .unwrap_or_default(); @@ -315,7 +316,8 @@ impl Liveness { let recipe = func.encodings[inst].recipe(); // Iterator of constraints, one per value operand. // TODO: Should we fail here if the instruction doesn't have a valid encoding? - let mut operand_constraints = recipe_constraints.get(recipe) + let mut operand_constraints = recipe_constraints + .get(recipe) .map(|c| c.ins) .unwrap_or(&[]) .iter(); diff --git a/lib/cretonne/src/regalloc/liverange.rs b/lib/cretonne/src/regalloc/liverange.rs index 116ccce4aa..94c16cb9ae 100644 --- a/lib/cretonne/src/regalloc/liverange.rs +++ b/lib/cretonne/src/regalloc/liverange.rs @@ -221,14 +221,16 @@ impl LiveRange { /// Return `Ok(n)` if `liveins[n]` already contains `ebb`. /// Otherwise, return `Err(n)` with the index where such an interval should be inserted. fn find_ebb_interval(&self, ebb: Ebb, order: &PO) -> Result { - self.liveins.binary_search_by(|intv| order.cmp(intv.begin, ebb)).or_else(|n| { - // The interval at `n-1` may cover `ebb`. - if n > 0 && order.cmp(self.liveins[n - 1].end, ebb) == Ordering::Greater { - Ok(n - 1) - } else { - Err(n) - } - }) + self.liveins + .binary_search_by(|intv| order.cmp(intv.begin, ebb)) + .or_else(|n| { + // The interval at `n-1` may cover `ebb`. + if n > 0 && order.cmp(self.liveins[n - 1].end, ebb) == Ordering::Greater { + Ok(n - 1) + } else { + Err(n) + } + }) } /// Extend the local interval for `ebb` so it reaches `to` which must belong to `ebb`. @@ -307,11 +309,12 @@ impl LiveRange { } // Cannot coalesce; insert new interval (false, false) => { - self.liveins.insert(n, - Interval { - begin: ebb, - end: to, - }); + self.liveins + .insert(n, + Interval { + begin: ebb, + end: to, + }); } } @@ -361,7 +364,9 @@ impl LiveRange { /// answer, but it is also possible that an even later program point is returned. So don't /// depend on the returned `Inst` to belong to `ebb`. 
pub fn livein_local_end(&self, ebb: Ebb, order: &PO) -> Option { - self.find_ebb_interval(ebb, order).ok().map(|n| self.liveins[n].end) + self.find_ebb_interval(ebb, order) + .ok() + .map(|n| self.liveins[n].end) } } diff --git a/lib/cretonne/src/verifier.rs b/lib/cretonne/src/verifier.rs index 589f117e73..776755d4c4 100644 --- a/lib/cretonne/src/verifier.rs +++ b/lib/cretonne/src/verifier.rs @@ -177,8 +177,9 @@ impl<'a> Verifier<'a> { let fixed_results = inst_data.opcode().constraints().fixed_results(); // var_results is 0 if we aren't a call instruction - let var_results = - dfg.call_signature(inst).map(|sig| dfg.signatures[sig].return_types.len()).unwrap_or(0); + let var_results = dfg.call_signature(inst) + .map(|sig| dfg.signatures[sig].return_types.len()) + .unwrap_or(0); let total_results = fixed_results + var_results; if total_results == 0 { @@ -218,9 +219,21 @@ impl<'a> Verifier<'a> { &MultiAry { ref args, .. } => { self.verify_value_list(inst, args)?; } - &Jump { destination, ref args, .. } | - &Branch { destination, ref args, .. } | - &BranchIcmp { destination, ref args, .. } => { + &Jump { + destination, + ref args, + .. + } | + &Branch { + destination, + ref args, + .. + } | + &BranchIcmp { + destination, + ref args, + .. + } => { self.verify_ebb(inst, destination)?; self.verify_value_list(inst, args)?; } @@ -265,10 +278,7 @@ impl<'a> Verifier<'a> { } fn verify_sig_ref(&self, inst: Inst, s: SigRef) -> Result<()> { - if !self.func - .dfg - .signatures - .is_valid(s) { + if !self.func.dfg.signatures.is_valid(s) { err!(inst, "invalid signature reference {}", s) } else { Ok(()) @@ -276,10 +286,7 @@ impl<'a> Verifier<'a> { } fn verify_func_ref(&self, inst: Inst, f: FuncRef) -> Result<()> { - if !self.func - .dfg - .ext_funcs - .is_valid(f) { + if !self.func.dfg.ext_funcs.is_valid(f) { err!(inst, "invalid function reference {}", f) } else { Ok(()) @@ -326,7 +333,8 @@ impl<'a> Verifier<'a> { def_inst); } // Defining instruction dominates the instruction that uses the value. - if !self.domtree.dominates(def_inst, loc_inst, &self.func.layout) { + if !self.domtree + .dominates(def_inst, loc_inst, &self.func.layout) { return err!(loc_inst, "uses value from non-dominating {}", def_inst); } } @@ -343,7 +351,8 @@ impl<'a> Verifier<'a> { ebb); } // The defining EBB dominates the instruction using this value. 
- if !self.domtree.ebb_dominates(ebb, loc_inst, &self.func.layout) { + if !self.domtree + .ebb_dominates(ebb, loc_inst, &self.func.layout) { return err!(loc_inst, "uses value arg from non-dominating {}", ebb); } } @@ -378,10 +387,7 @@ impl<'a> Verifier<'a> { return err!(ebb, "entry block arguments must match function signature"); } - for (i, arg) in self.func - .dfg - .ebb_args(ebb) - .enumerate() { + for (i, arg) in self.func.dfg.ebb_args(ebb).enumerate() { let arg_type = self.func.dfg.value_type(arg); if arg_type != expected_types[i].value_type { return err!(ebb, @@ -452,11 +458,7 @@ impl<'a> Verifier<'a> { fn typecheck_fixed_args(&self, inst: Inst, ctrl_type: Type) -> Result<()> { let constraints = self.func.dfg[inst].opcode().constraints(); - for (i, &arg) in self.func - .dfg - .inst_fixed_args(inst) - .iter() - .enumerate() { + for (i, &arg) in self.func.dfg.inst_fixed_args(inst).iter().enumerate() { let arg_type = self.func.dfg.value_type(arg); match constraints.value_argument_constraint(i, ctrl_type) { ResolvedConstraint::Bound(expected_type) => { @@ -510,13 +512,17 @@ impl<'a> Verifier<'a> { match self.func.dfg[inst].analyze_call(&self.func.dfg.value_lists) { CallInfo::Direct(func_ref, _) => { let sig_ref = self.func.dfg.ext_funcs[func_ref].signature; - let arg_types = - self.func.dfg.signatures[sig_ref].argument_types.iter().map(|a| a.value_type); + let arg_types = self.func.dfg.signatures[sig_ref] + .argument_types + .iter() + .map(|a| a.value_type); self.typecheck_variable_args_iterator(inst, arg_types)?; } CallInfo::Indirect(sig_ref, _) => { - let arg_types = - self.func.dfg.signatures[sig_ref].argument_types.iter().map(|a| a.value_type); + let arg_types = self.func.dfg.signatures[sig_ref] + .argument_types + .iter() + .map(|a| a.value_type); self.typecheck_variable_args_iterator(inst, arg_types)?; } CallInfo::NotACall => {} @@ -673,10 +679,11 @@ mod tests { let mut func = Function::new(); let ebb0 = func.dfg.make_ebb(); func.layout.append_ebb(ebb0); - let nullary_with_bad_opcode = func.dfg.make_inst(InstructionData::Nullary { - opcode: Opcode::Jump, - ty: types::VOID, - }); + let nullary_with_bad_opcode = func.dfg + .make_inst(InstructionData::Nullary { + opcode: Opcode::Jump, + ty: types::VOID, + }); func.layout.append_inst(nullary_with_bad_opcode, ebb0); let verifier = Verifier::new(&func); assert_err_with_msg!(verifier.run(), "instruction format"); diff --git a/lib/cretonne/src/write.rs b/lib/cretonne/src/write.rs index e1ac42ac09..e2cd0fe3f5 100644 --- a/lib/cretonne/src/write.rs +++ b/lib/cretonne/src/write.rs @@ -262,7 +262,11 @@ pub fn write_operands(w: &mut Write, dfg: &DataFlowGraph, inst: Inst) -> Result IntCompare { cond, args, .. } => write!(w, " {}, {}, {}", cond, args[0], args[1]), IntCompareImm { cond, arg, imm, .. } => write!(w, " {}, {}, {}", cond, arg, imm), FloatCompare { cond, args, .. } => write!(w, " {}, {}, {}", cond, args[0], args[1]), - Jump { destination, ref args, .. } => { + Jump { + destination, + ref args, + .. + } => { if args.is_empty() { write!(w, " {}", destination) } else { @@ -272,7 +276,11 @@ pub fn write_operands(w: &mut Write, dfg: &DataFlowGraph, inst: Inst) -> Result DisplayValues(args.as_slice(pool))) } } - Branch { destination, ref args, .. } => { + Branch { + destination, + ref args, + .. 
+ } => { let args = args.as_slice(pool); write!(w, " {}, {}", args[0], destination)?; if args.len() > 1 { @@ -280,7 +288,12 @@ pub fn write_operands(w: &mut Write, dfg: &DataFlowGraph, inst: Inst) -> Result } Ok(()) } - BranchIcmp { cond, destination, ref args, .. } => { + BranchIcmp { + cond, + destination, + ref args, + .. + } => { let args = args.as_slice(pool); write!(w, " {}, {}, {}, {}", cond, args[0], args[1], destination)?; if args.len() > 2 { diff --git a/lib/filecheck/src/checker.rs b/lib/filecheck/src/checker.rs index c854f3b6b8..031a50d9e3 100644 --- a/lib/filecheck/src/checker.rs +++ b/lib/filecheck/src/checker.rs @@ -183,11 +183,13 @@ impl Checker { continue; } Directive::Regex(ref var, ref rx) => { - state.vars.insert(var.clone(), - VarDef { - value: Value::Regex(Cow::Borrowed(rx)), - offset: 0, - }); + state + .vars + .insert(var.clone(), + VarDef { + value: Value::Regex(Cow::Borrowed(rx)), + offset: 0, + }); continue; } }; @@ -208,10 +210,14 @@ impl Checker { state.recorder.directive(not_idx); if let Some((s, e)) = rx.find(&text[not_begin..match_begin]) { // Matched `not:` pattern. - state.recorder.matched_not(rx.as_str(), (not_begin + s, not_begin + e)); + state + .recorder + .matched_not(rx.as_str(), (not_begin + s, not_begin + e)); return Ok(false); } else { - state.recorder.missed_not(rx.as_str(), (not_begin, match_begin)); + state + .recorder + .missed_not(rx.as_str(), (not_begin, match_begin)); } } } @@ -410,9 +416,11 @@ mod tests { Ok(true)); assert_eq!(b.directive("regex: X = tommy").map_err(e2s), Err("expected '=' after variable 'X' in regex: X = tommy".to_string())); - assert_eq!(b.directive("[arm]not: patt $x $(y) here").map_err(e2s), + assert_eq!(b.directive("[arm]not: patt $x $(y) here") + .map_err(e2s), Ok(true)); - assert_eq!(b.directive("[x86]sameln: $x $(y=[^]]*) there").map_err(e2s), + assert_eq!(b.directive("[x86]sameln: $x $(y=[^]]*) there") + .map_err(e2s), Ok(true)); // Windows line ending sneaking in. assert_eq!(b.directive("regex: Y=foo\r").map_err(e2s), Ok(true)); diff --git a/lib/filecheck/src/explain.rs b/lib/filecheck/src/explain.rs index 04a6cd233e..49780321b5 100644 --- a/lib/filecheck/src/explain.rs +++ b/lib/filecheck/src/explain.rs @@ -119,7 +119,8 @@ impl<'a> Display for Explainer<'a> { m.regex)?; // Emit any variable definitions. 
- if let Ok(found) = self.vardefs.binary_search_by_key(&m.directive, |v| v.directive) { + if let Ok(found) = self.vardefs + .binary_search_by_key(&m.directive, |v| v.directive) { let mut first = found; while first > 0 && self.vardefs[first - 1].directive == m.directive { first -= 1; @@ -147,50 +148,55 @@ impl<'a> Recorder for Explainer<'a> { } fn matched_check(&mut self, regex: &str, matched: MatchRange) { - self.matches.push(Match { - directive: self.directive, - is_match: true, - is_not: false, - regex: regex.to_owned(), - range: matched, - }); + self.matches + .push(Match { + directive: self.directive, + is_match: true, + is_not: false, + regex: regex.to_owned(), + range: matched, + }); } fn matched_not(&mut self, regex: &str, matched: MatchRange) { - self.matches.push(Match { - directive: self.directive, - is_match: true, - is_not: true, - regex: regex.to_owned(), - range: matched, - }); + self.matches + .push(Match { + directive: self.directive, + is_match: true, + is_not: true, + regex: regex.to_owned(), + range: matched, + }); } fn missed_check(&mut self, regex: &str, searched: MatchRange) { - self.matches.push(Match { - directive: self.directive, - is_match: false, - is_not: false, - regex: regex.to_owned(), - range: searched, - }); + self.matches + .push(Match { + directive: self.directive, + is_match: false, + is_not: false, + regex: regex.to_owned(), + range: searched, + }); } fn missed_not(&mut self, regex: &str, searched: MatchRange) { - self.matches.push(Match { - directive: self.directive, - is_match: false, - is_not: true, - regex: regex.to_owned(), - range: searched, - }); + self.matches + .push(Match { + directive: self.directive, + is_match: false, + is_not: true, + regex: regex.to_owned(), + range: searched, + }); } fn defined_var(&mut self, varname: &str, value: &str) { - self.vardefs.push(VarDef { - directive: self.directive, - varname: varname.to_owned(), - value: value.to_owned(), - }); + self.vardefs + .push(VarDef { + directive: self.directive, + varname: varname.to_owned(), + value: value.to_owned(), + }); } } diff --git a/lib/reader/src/parser.rs b/lib/reader/src/parser.rs index e8d88dde89..ca98bd98f2 100644 --- a/lib/reader/src/parser.rs +++ b/lib/reader/src/parser.rs @@ -107,7 +107,8 @@ impl<'a> Context<'a> { // Get the index of a recipe name if it exists. fn find_recipe_index(&self, recipe_name: &str) -> Option { if let Some(unique_isa) = self.unique_isa { - unique_isa.recipe_names() + unique_isa + .recipe_names() .iter() .position(|&name| name == recipe_name) .map(|idx| idx as u16) @@ -118,17 +119,14 @@ impl<'a> Context<'a> { // Allocate a new stack slot and add a mapping number -> StackSlot. fn add_ss(&mut self, number: u32, data: StackSlotData, loc: &Location) -> Result<()> { - self.map.def_ss(number, self.function.stack_slots.push(data), loc) + self.map + .def_ss(number, self.function.stack_slots.push(data), loc) } // Allocate a new signature and add a mapping number -> SigRef. fn add_sig(&mut self, number: u32, data: Signature, loc: &Location) -> Result<()> { - self.map.def_sig(number, - self.function - .dfg - .signatures - .push(data), - loc) + self.map + .def_sig(number, self.function.dfg.signatures.push(data), loc) } // Resolve a reference to a signature. @@ -141,12 +139,8 @@ impl<'a> Context<'a> { // Allocate a new external function and add a mapping number -> FuncRef. 
fn add_fn(&mut self, number: u32, data: ExtFuncData, loc: &Location) -> Result<()> { - self.map.def_fn(number, - self.function - .dfg - .ext_funcs - .push(data), - loc) + self.map + .def_fn(number, self.function.dfg.ext_funcs.push(data), loc) } // Resolve a reference to a function. @@ -159,7 +153,8 @@ impl<'a> Context<'a> { // Allocate a new jump table and add a mapping number -> JumpTable. fn add_jt(&mut self, number: u32, data: JumpTableData, loc: &Location) -> Result<()> { - self.map.def_jt(number, self.function.jump_tables.push(data), loc) + self.map + .def_jt(number, self.function.jump_tables.push(data), loc) } // Resolve a reference to a jump table. @@ -238,19 +233,34 @@ impl<'a> Context<'a> { } InstructionData::MultiAry { ref mut args, .. } => { - self.map.rewrite_values(args.as_mut_slice(value_lists), loc)?; + self.map + .rewrite_values(args.as_mut_slice(value_lists), loc)?; } - InstructionData::Jump { ref mut destination, ref mut args, .. } | - InstructionData::Branch { ref mut destination, ref mut args, .. } | - InstructionData::BranchIcmp { ref mut destination, ref mut args, .. } => { + InstructionData::Jump { + ref mut destination, + ref mut args, + .. + } | + InstructionData::Branch { + ref mut destination, + ref mut args, + .. + } | + InstructionData::BranchIcmp { + ref mut destination, + ref mut args, + .. + } => { self.map.rewrite_ebb(destination, loc)?; - self.map.rewrite_values(args.as_mut_slice(value_lists), loc)?; + self.map + .rewrite_values(args.as_mut_slice(value_lists), loc)?; } InstructionData::Call { ref mut args, .. } | InstructionData::IndirectCall { ref mut args, .. } => { - self.map.rewrite_values(args.as_mut_slice(value_lists), loc)?; + self.map + .rewrite_values(args.as_mut_slice(value_lists), loc)?; } } } @@ -307,10 +317,11 @@ impl<'a> Parser<'a> { Token::Comment(text) => { // Gather comments, associate them with `comment_entity`. if let Some(entity) = self.comment_entity { - self.comments.push(Comment { - entity: entity, - text: text, - }); + self.comments + .push(Comment { + entity: entity, + text: text, + }); } } _ => self.lookahead = Some(token), @@ -464,7 +475,8 @@ impl<'a> Parser<'a> { self.consume(); // Lexer just gives us raw text that looks like an integer. // Parse it as a u8 to check for overflow and other issues. - text.parse().map_err(|_| self.error("expected u8 decimal immediate")) + text.parse() + .map_err(|_| self.error("expected u8 decimal immediate")) } else { err!(self.loc, err_msg) } @@ -477,7 +489,8 @@ impl<'a> Parser<'a> { self.consume(); // Lexer just gives us raw text that looks like an integer. // Parse it as a u32 to check for overflow and other issues. 
- text.parse().map_err(|_| self.error("expected u32 decimal immediate")) + text.parse() + .map_err(|_| self.error("expected u32 decimal immediate")) } else { err!(self.loc, err_msg) } @@ -714,7 +727,9 @@ impl<'a> Parser<'a> { sig.return_types = self.parse_argument_list(unique_isa)?; } - if sig.argument_types.iter().all(|a| a.location.is_assigned()) { + if sig.argument_types + .iter() + .all(|a| a.location.is_assigned()) { sig.compute_argument_bytes(); } @@ -816,35 +831,23 @@ impl<'a> Parser<'a> { match self.token() { Some(Token::StackSlot(..)) => { self.gather_comments(ctx.function.stack_slots.next_key()); - self.parse_stack_slot_decl().and_then(|(num, dat)| { - ctx.add_ss(num, dat, &self.loc) - }) + self.parse_stack_slot_decl() + .and_then(|(num, dat)| ctx.add_ss(num, dat, &self.loc)) } Some(Token::SigRef(..)) => { - self.gather_comments(ctx.function - .dfg - .signatures - .next_key()); - self.parse_signature_decl(ctx.unique_isa).and_then(|(num, dat)| { - ctx.add_sig(num, - dat, - &self.loc) - }) + self.gather_comments(ctx.function.dfg.signatures.next_key()); + self.parse_signature_decl(ctx.unique_isa) + .and_then(|(num, dat)| ctx.add_sig(num, dat, &self.loc)) } Some(Token::FuncRef(..)) => { - self.gather_comments(ctx.function - .dfg - .ext_funcs - .next_key()); - self.parse_function_decl(ctx).and_then(|(num, dat)| { - ctx.add_fn(num, dat, &self.loc) - }) + self.gather_comments(ctx.function.dfg.ext_funcs.next_key()); + self.parse_function_decl(ctx) + .and_then(|(num, dat)| ctx.add_fn(num, dat, &self.loc)) } Some(Token::JumpTable(..)) => { self.gather_comments(ctx.function.jump_tables.next_key()); - self.parse_jump_table_decl().and_then(|(num, dat)| { - ctx.add_jt(num, dat, &self.loc) - }) + self.parse_jump_table_decl() + .and_then(|(num, dat)| ctx.add_jt(num, dat, &self.loc)) } // More to come.. _ => return Ok(()), @@ -861,7 +864,8 @@ impl<'a> Parser<'a> { self.match_identifier("stack_slot", "expected 'stack_slot'")?; // stack-slot-decl ::= StackSlot(ss) "=" "stack_slot" * Bytes {"," stack-slot-flag} - let bytes: i64 = self.match_imm64("expected byte-size in stack_slot decl")?.into(); + let bytes: i64 = self.match_imm64("expected byte-size in stack_slot decl")? 
+ .into(); if bytes < 0 { return err!(self.loc, "negative stack slot size"); } @@ -903,11 +907,10 @@ impl<'a> Parser<'a> { let data = match self.token() { Some(Token::Identifier("function")) => { let (loc, name, sig) = self.parse_function_spec(ctx.unique_isa)?; - let sigref = ctx.function - .dfg - .signatures - .push(sig); - ctx.map.def_entity(sigref.into(), &loc).expect("duplicate SigRef entities created"); + let sigref = ctx.function.dfg.signatures.push(sig); + ctx.map + .def_entity(sigref.into(), &loc) + .expect("duplicate SigRef entities created"); ExtFuncData { name: name, signature: sigref, @@ -1224,7 +1227,9 @@ impl<'a> Parser<'a> { let inst = ctx.function.dfg.make_inst(inst_data); let num_results = ctx.function.dfg.make_inst_results(inst, ctrl_typevar); ctx.function.layout.append_inst(inst, ebb); - ctx.map.def_entity(inst.into(), &opcode_loc).expect("duplicate inst references created"); + ctx.map + .def_entity(inst.into(), &opcode_loc) + .expect("duplicate inst references created"); if let Some(encoding) = encoding { *ctx.function.encodings.ensure(inst) = encoding; @@ -1282,18 +1287,21 @@ impl<'a> Parser<'a> { inst_data: &InstructionData) -> Result { let constraints = opcode.constraints(); - let ctrl_type = match explicit_ctrl_type { - Some(t) => t, - None => { - if constraints.use_typevar_operand() { - // This is an opcode that supports type inference, AND there was no explicit - // type specified. Look up `ctrl_value` to see if it was defined already. - // TBD: If it is defined in another block, the type should have been specified - // explicitly. It is unfortunate that the correctness of IL depends on the - // layout of the blocks. - let ctrl_src_value = inst_data.typevar_operand(&ctx.function.dfg.value_lists) - .expect("Constraints <-> Format inconsistency"); - ctx.function.dfg.value_type(match ctx.map.get_value(ctrl_src_value) { + let ctrl_type = + match explicit_ctrl_type { + Some(t) => t, + None => { + if constraints.use_typevar_operand() { + // This is an opcode that supports type inference, AND there was no + // explicit type specified. Look up `ctrl_value` to see if it was defined + // already. + // TBD: If it is defined in another block, the type should have been + // specified explicitly. It is unfortunate that the correctness of IL + // depends on the layout of the blocks. + let ctrl_src_value = inst_data + .typevar_operand(&ctx.function.dfg.value_lists) + .expect("Constraints <-> Format inconsistency"); + ctx.function.dfg.value_type(match ctx.map.get_value(ctrl_src_value) { Some(v) => v, None => { if let Some(v) = ctx.aliases @@ -1308,19 +1316,19 @@ impl<'a> Parser<'a> { } } }) - } else if constraints.is_polymorphic() { - // This opcode does not support type inference, so the explicit type variable - // is required. - return err!(self.loc, - "type variable required for polymorphic opcode, e.g. '{}.{}'", - opcode, - constraints.ctrl_typeset().unwrap().example()); - } else { - // This is a non-polymorphic opcode. No typevar needed. - VOID + } else if constraints.is_polymorphic() { + // This opcode does not support type inference, so the explicit type + // variable is required. + return err!(self.loc, + "type variable required for polymorphic opcode, e.g. '{}.{}'", + opcode, + constraints.ctrl_typeset().unwrap().example()); + } else { + // This is a non-polymorphic opcode. No typevar needed. + VOID + } } - } - }; + }; // Verify that `ctrl_type` is valid for the controlling type variable. We don't want to // attempt deriving types from an incorrect basis. 
@@ -1629,7 +1637,8 @@ impl<'a> Parser<'a> { InstructionFormat::BranchTable => { let arg = self.match_value("expected SSA value operand")?; self.match_token(Token::Comma, "expected ',' between operands")?; - let table = self.match_jt().and_then(|num| ctx.get_jt(num, &self.loc))?; + let table = self.match_jt() + .and_then(|num| ctx.get_jt(num, &self.loc))?; InstructionData::BranchTable { opcode: opcode, ty: VOID, @@ -1679,7 +1688,8 @@ mod tests { assert_eq!(v4.to_string(), "v0"); let vx3 = details.map.lookup_str("vx3").unwrap(); assert_eq!(vx3.to_string(), "vx0"); - let aliased_to = func.dfg.resolve_aliases(Value::table_with_number(0).unwrap()); + let aliased_to = func.dfg + .resolve_aliases(Value::table_with_number(0).unwrap()); assert_eq!(aliased_to.to_string(), "v0"); } @@ -1696,11 +1706,20 @@ mod tests { "(i8 uext inreg, f32, f64) -> i32 sext, f64"); // `void` is not recognized as a type by the lexer. It should not appear in files. - assert_eq!(Parser::new("() -> void").parse_signature(None).unwrap_err().to_string(), + assert_eq!(Parser::new("() -> void") + .parse_signature(None) + .unwrap_err() + .to_string(), "1: expected argument type"); - assert_eq!(Parser::new("i8 -> i8").parse_signature(None).unwrap_err().to_string(), + assert_eq!(Parser::new("i8 -> i8") + .parse_signature(None) + .unwrap_err() + .to_string(), "1: expected function signature: ( args... )"); - assert_eq!(Parser::new("(i8 -> i8").parse_signature(None).unwrap_err().to_string(), + assert_eq!(Parser::new("(i8 -> i8") + .parse_signature(None) + .unwrap_err() + .to_string(), "1: expected ')' after function arguments"); } diff --git a/lib/reader/src/testcommand.rs b/lib/reader/src/testcommand.rs index 6d9dd9969e..a147487c77 100644 --- a/lib/reader/src/testcommand.rs +++ b/lib/reader/src/testcommand.rs @@ -40,7 +40,10 @@ impl<'a> TestCommand<'a> { let cmd = parts.next().unwrap_or(""); TestCommand { command: cmd, - options: parts.filter(|s| !s.is_empty()).map(TestOption::new).collect(), + options: parts + .filter(|s| !s.is_empty()) + .map(TestOption::new) + .collect(), } } }