diff --git a/cranelift/codegen/src/machinst/blockorder.rs b/cranelift/codegen/src/machinst/blockorder.rs
index 847f2a6b66..9c5060bc46 100644
--- a/cranelift/codegen/src/machinst/blockorder.rs
+++ b/cranelift/codegen/src/machinst/blockorder.rs
@@ -1,7 +1,6 @@
 //! Computation of basic block order in emitted code.
 
 use crate::machinst::*;
-use regalloc::{BlockIx, Function};
 
 /// Simple reverse postorder-based block order emission.
 ///
@@ -10,15 +9,13 @@ use regalloc::{BlockIx, Function};
 struct BlockRPO {
     visited: Vec<bool>,
     postorder: Vec<BlockIndex>,
-    deferred_last: Option<BlockIndex>,
 }
 
 impl BlockRPO {
     fn new<I: VCodeInst>(vcode: &VCode<I>) -> BlockRPO {
         BlockRPO {
             visited: vec![false; vcode.num_blocks()],
-            postorder: vec![],
-            deferred_last: None,
+            postorder: Vec::with_capacity(vcode.num_blocks()),
         }
     }
 
@@ -29,22 +26,15 @@ impl BlockRPO {
                 self.visit(vcode, *succ);
             }
         }
-
-        for i in vcode.block_insns(BlockIx::new(block)) {
-            if vcode.get_insn(i).is_epilogue_placeholder() {
-                debug_assert!(self.deferred_last.is_none());
-                self.deferred_last = Some(block);
-                return;
-            }
+        if Some(block) != vcode.fallthrough_return_block {
+            self.postorder.push(block);
         }
-
-        self.postorder.push(block);
     }
 
-    fn rpo(self) -> Vec<BlockIndex> {
+    fn rpo<I: VCodeInst>(self, vcode: &VCode<I>) -> Vec<BlockIndex> {
         let mut rpo = self.postorder;
         rpo.reverse();
-        if let Some(block) = self.deferred_last {
+        if let Some(block) = vcode.fallthrough_return_block {
             rpo.push(block);
         }
         rpo
@@ -55,5 +45,5 @@ impl BlockRPO {
 pub fn compute_final_block_order<I: VCodeInst>(vcode: &VCode<I>) -> Vec<BlockIndex> {
     let mut rpo = BlockRPO::new(vcode);
     rpo.visit(vcode, vcode.entry());
-    rpo.rpo()
+    rpo.rpo(vcode)
 }
diff --git a/cranelift/codegen/src/machinst/lower.rs b/cranelift/codegen/src/machinst/lower.rs
index c9078fbc7c..8a1791aa87 100644
--- a/cranelift/codegen/src/machinst/lower.rs
+++ b/cranelift/codegen/src/machinst/lower.rs
@@ -332,6 +332,7 @@ impl<'func, I: VCodeInst> Lower<'func, I> {
                 GenerateReturn::Yes
             } else {
                 debug_assert!(last_insn_opcode == Opcode::FallthroughReturn);
+                self.vcode.set_fallthrough_return_block(*bb);
                 GenerateReturn::No
             };
             self.gen_retval_setup(gen_ret);
diff --git a/cranelift/codegen/src/machinst/vcode.rs b/cranelift/codegen/src/machinst/vcode.rs
index 5b36213bf8..836be33941 100644
--- a/cranelift/codegen/src/machinst/vcode.rs
+++ b/cranelift/codegen/src/machinst/vcode.rs
@@ -18,8 +18,7 @@
 //! backend pipeline.
 
 use crate::entity::SecondaryMap;
-use crate::ir;
-use crate::ir::SourceLoc;
+use crate::ir::{self, Block, SourceLoc};
 use crate::machinst::*;
 use crate::settings;
 
@@ -101,6 +100,9 @@ pub struct VCode<I: VCodeInst> {
 
     /// ABI object.
     abi: Box<dyn ABIBody<I>>,
+
+    /// The block targeted by fallthrough_returns, if there's one.
+    pub fallthrough_return_block: Option<BlockIndex>,
 }
 
 /// A builder for a VCode function body. This builder is designed for the
@@ -153,6 +155,16 @@ impl<I: VCodeInst> VCodeBuilder<I> {
         &mut *self.vcode.abi
    }
 
+    /// Set the fallthrough_return target block for this function. There must be at most one per
+    /// function.
+    pub fn set_fallthrough_return_block(&mut self, bb: Block) {
+        debug_assert!(
+            self.vcode.fallthrough_return_block.is_none(),
+            "a function must have at most one fallthrough-return instruction"
+        );
+        self.vcode.fallthrough_return_block = Some(self.bb_to_bindex(bb));
+    }
+
     /// Set the type of a VReg.
     pub fn set_vreg_type(&mut self, vreg: VirtualReg, ty: Type) {
         while self.vcode.vreg_types.len() <= vreg.get_index() {
@@ -315,6 +327,7 @@ impl<I: VCodeInst> VCode<I> {
             final_block_offsets: vec![],
             code_size: 0,
             abi,
+            fallthrough_return_block: None,
         }
     }
 