diff --git a/cranelift/codegen/src/isa/aarch64/abi.rs b/cranelift/codegen/src/isa/aarch64/abi.rs
index c71096d485..afdab0d2a4 100644
--- a/cranelift/codegen/src/isa/aarch64/abi.rs
+++ b/cranelift/codegen/src/isa/aarch64/abi.rs
@@ -1179,44 +1179,40 @@ impl AArch64ABICall {
     }
 }
 
-fn adjust_stack(amount: u64, is_sub: bool) -> Vec<Inst> {
-    if amount > 0 {
-        let sp_adjustment = if is_sub {
-            amount as i64
-        } else {
-            -(amount as i64)
-        };
-        let adj_meta_insn = Inst::VirtualSPOffsetAdj {
-            offset: sp_adjustment,
-        };
+fn adjust_stack<C: LowerCtx<I = Inst>>(ctx: &mut C, amount: u64, is_sub: bool) {
+    if amount == 0 {
+        return;
+    }
 
-        let alu_op = if is_sub { ALUOp::Sub64 } else { ALUOp::Add64 };
-        if let Some(imm12) = Imm12::maybe_from_u64(amount) {
-            vec![
-                adj_meta_insn,
-                Inst::AluRRImm12 {
-                    alu_op,
-                    rd: writable_stack_reg(),
-                    rn: stack_reg(),
-                    imm12,
-                },
-            ]
-        } else {
-            let const_load = Inst::LoadConst64 {
-                rd: writable_spilltmp_reg(),
-                const_data: amount,
-            };
-            let adj = Inst::AluRRRExtend {
-                alu_op,
-                rd: writable_stack_reg(),
-                rn: stack_reg(),
-                rm: spilltmp_reg(),
-                extendop: ExtendOp::UXTX,
-            };
-            vec![adj_meta_insn, const_load, adj]
-        }
+    let sp_adjustment = if is_sub {
+        amount as i64
     } else {
-        vec![]
+        -(amount as i64)
+    };
+    ctx.emit(Inst::VirtualSPOffsetAdj {
+        offset: sp_adjustment,
+    });
+
+    let alu_op = if is_sub { ALUOp::Sub64 } else { ALUOp::Add64 };
+    if let Some(imm12) = Imm12::maybe_from_u64(amount) {
+        ctx.emit(Inst::AluRRImm12 {
+            alu_op,
+            rd: writable_stack_reg(),
+            rn: stack_reg(),
+            imm12,
+        })
+    } else {
+        ctx.emit(Inst::LoadConst64 {
+            rd: writable_spilltmp_reg(),
+            const_data: amount,
+        });
+        ctx.emit(Inst::AluRRRExtend {
+            alu_op,
+            rd: writable_stack_reg(),
+            rn: stack_reg(),
+            rm: spilltmp_reg(),
+            extendop: ExtendOp::UXTX,
+        });
     }
 }
 
@@ -1227,64 +1223,82 @@ impl ABICall for AArch64ABICall {
         self.sig.args.len()
     }
 
-    fn gen_stack_pre_adjust(&self) -> Vec<Inst> {
-        adjust_stack(self.sig.stack_arg_space as u64, /* is_sub = */ true)
+    fn emit_stack_pre_adjust<C: LowerCtx<I = Inst>>(&self, ctx: &mut C) {
+        adjust_stack(
+            ctx,
+            self.sig.stack_arg_space as u64,
+            /* is_sub = */ true,
+        )
     }
 
-    fn gen_stack_post_adjust(&self) -> Vec<Inst> {
-        adjust_stack(self.sig.stack_arg_space as u64, /* is_sub = */ false)
+    fn emit_stack_post_adjust<C: LowerCtx<I = Inst>>(&self, ctx: &mut C) {
+        adjust_stack(
+            ctx,
+            self.sig.stack_arg_space as u64,
+            /* is_sub = */ false,
+        )
     }
 
-    fn gen_copy_reg_to_arg(&self, idx: usize, from_reg: Reg) -> Vec<Inst> {
+    fn emit_copy_reg_to_arg<C: LowerCtx<I = Inst>>(
+        &self,
+        ctx: &mut C,
+        idx: usize,
+        from_reg: Reg,
+    ) {
         match &self.sig.args[idx] {
-            &ABIArg::Reg(reg, ty) => vec![Inst::gen_move(
+            &ABIArg::Reg(reg, ty) => ctx.emit(Inst::gen_move(
                 Writable::from_reg(reg.to_reg()),
                 from_reg,
                 ty,
-            )],
-            &ABIArg::Stack(off, ty) => vec![store_stack(MemArg::SPOffset(off), from_reg, ty)],
+            )),
+            &ABIArg::Stack(off, ty) => ctx.emit(store_stack(MemArg::SPOffset(off), from_reg, ty)),
         }
     }
 
-    fn gen_copy_retval_to_reg(&self, idx: usize, into_reg: Writable<Reg>) -> Inst {
+    fn emit_copy_retval_to_reg<C: LowerCtx<I = Inst>>(
+        &self,
+        ctx: &mut C,
+        idx: usize,
+        into_reg: Writable<Reg>,
+    ) {
         match &self.sig.rets[idx] {
-            &ABIArg::Reg(reg, ty) => Inst::gen_move(into_reg, reg.to_reg(), ty),
+            &ABIArg::Reg(reg, ty) => ctx.emit(Inst::gen_move(into_reg, reg.to_reg(), ty)),
             _ => unimplemented!(),
         }
     }
 
-    fn gen_call(&self) -> Vec<Inst> {
+    fn emit_call<C: LowerCtx<I = Inst>>(&self, ctx: &mut C) {
         let (uses, defs) = (self.uses.clone(), self.defs.clone());
         match &self.dest {
-            &CallDest::ExtName(ref name, RelocDistance::Near) => vec![Inst::Call {
+            &CallDest::ExtName(ref name, RelocDistance::Near) => ctx.emit(Inst::Call {
                 dest: name.clone(),
                 uses,
                 defs,
                 loc: self.loc,
                 opcode: self.opcode,
-            }],
-            &CallDest::ExtName(ref name, RelocDistance::Far) => vec![
-                Inst::LoadExtName {
+            }),
+            &CallDest::ExtName(ref name, RelocDistance::Far) => {
+                ctx.emit(Inst::LoadExtName {
                     rd: writable_spilltmp_reg(),
                     name: name.clone(),
                     offset: 0,
                     srcloc: self.loc,
-                },
-                Inst::CallInd {
+                });
+                ctx.emit(Inst::CallInd {
                     rn: spilltmp_reg(),
                     uses,
                     defs,
                     loc: self.loc,
                     opcode: self.opcode,
-                },
-            ],
-            &CallDest::Reg(reg) => vec![Inst::CallInd {
+                });
+            }
+            &CallDest::Reg(reg) => ctx.emit(Inst::CallInd {
                 rn: reg,
                 uses,
                 defs,
                 loc: self.loc,
                 opcode: self.opcode,
-            }],
+            }),
         }
     }
 }
diff --git a/cranelift/codegen/src/isa/aarch64/lower_inst.rs b/cranelift/codegen/src/isa/aarch64/lower_inst.rs
index bc2944f2b9..f27a5166ee 100644
--- a/cranelift/codegen/src/isa/aarch64/lower_inst.rs
+++ b/cranelift/codegen/src/isa/aarch64/lower_inst.rs
@@ -1285,26 +1285,18 @@ pub(crate) fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(ctx: &mut C, insn: IRIns
                 _ => unreachable!(),
             };
 
-            for inst in abi.gen_stack_pre_adjust().into_iter() {
-                ctx.emit(inst);
-            }
+            abi.emit_stack_pre_adjust(ctx);
             assert!(inputs.len() == abi.num_args());
             for (i, input) in inputs.iter().enumerate() {
                 let arg_reg = input_to_reg(ctx, *input, NarrowValueMode::None);
-                for inst in abi.gen_copy_reg_to_arg(i, arg_reg) {
-                    ctx.emit(inst);
-                }
-            }
-            for inst in abi.gen_call().into_iter() {
-                ctx.emit(inst);
+                abi.emit_copy_reg_to_arg(ctx, i, arg_reg);
             }
+            abi.emit_call(ctx);
             for (i, output) in outputs.iter().enumerate() {
                 let retval_reg = output_to_reg(ctx, *output);
-                ctx.emit(abi.gen_copy_retval_to_reg(i, retval_reg));
-            }
-            for inst in abi.gen_stack_post_adjust().into_iter() {
-                ctx.emit(inst);
+                abi.emit_copy_retval_to_reg(ctx, i, retval_reg);
             }
+            abi.emit_stack_post_adjust(ctx);
         }
 
         Opcode::GetPinnedReg => {
diff --git a/cranelift/codegen/src/machinst/abi.rs b/cranelift/codegen/src/machinst/abi.rs
index 83aa158662..ae04e127cf 100644
--- a/cranelift/codegen/src/machinst/abi.rs
+++ b/cranelift/codegen/src/machinst/abi.rs
@@ -135,19 +135,29 @@ pub trait ABICall {
     /// Get the number of arguments expected.
     fn num_args(&self) -> usize;
 
-    /// Copy an argument value from a source register, prior to the call.
-    fn gen_copy_reg_to_arg(&self, idx: usize, from_reg: Reg) -> Vec<Self::I>;
+    /// Emit a copy of an argument value from a source register, prior to the call.
+    fn emit_copy_reg_to_arg<C: LowerCtx<I = Self::I>>(
+        &self,
+        ctx: &mut C,
+        idx: usize,
+        from_reg: Reg,
+    );
 
-    /// Copy a return value into a destination register, after the call returns.
-    fn gen_copy_retval_to_reg(&self, idx: usize, into_reg: Writable<Reg>) -> Self::I;
+    /// Emit a copy of a return value into a destination register, after the call returns.
+    fn emit_copy_retval_to_reg<C: LowerCtx<I = Self::I>>(
+        &self,
+        ctx: &mut C,
+        idx: usize,
+        into_reg: Writable<Reg>,
+    );
 
-    /// Pre-adjust the stack, prior to argument copies and call.
-    fn gen_stack_pre_adjust(&self) -> Vec<Self::I>;
+    /// Emit code to pre-adjust the stack, prior to argument copies and call.
+    fn emit_stack_pre_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C);
 
-    /// Post-adjust the satck, after call return and return-value copies.
-    fn gen_stack_post_adjust(&self) -> Vec<Self::I>;
+    /// Emit code to post-adjust the stack, after call return and return-value copies.
+    fn emit_stack_post_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C);
 
-    /// Generate the call itself.
+    /// Emit the call itself.
     ///
     /// The returned instruction should have proper use- and def-sets according
     /// to the argument registers, return-value registers, and clobbered
@@ -157,5 +167,5 @@ pub trait ABICall {
     /// registers are also logically defs, but should never be read; their
     /// values are "defined" (to the regalloc) but "undefined" in every other
     /// sense.)
-    fn gen_call(&self) -> Vec<Self::I>;
+    fn emit_call<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C);
 }
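Note: the following is a minimal, self-contained sketch of the emit-into-context pattern this patch adopts, in which the ABICall helpers stop returning buffers of instructions and instead emit directly into the lowering context. The types below (EmitCtx, VecCtx, and the two Inst variants) are simplified stand-ins invented for illustration; they are not Cranelift's actual LowerCtx, Inst, or ABICall definitions.

// Sketch only: stand-in types, not Cranelift's real LowerCtx/Inst/ABICall.

/// Stand-in machine instruction.
#[derive(Debug)]
enum Inst {
    StackAdjust { bytes: i64 },
    Call { name: String },
}

/// Stand-in for the lowering context: anything that can accept emitted insts.
trait EmitCtx {
    fn emit(&mut self, inst: Inst);
}

/// A context that simply collects instructions into a buffer.
struct VecCtx {
    insts: Vec<Inst>,
}

impl EmitCtx for VecCtx {
    fn emit(&mut self, inst: Inst) {
        self.insts.push(inst);
    }
}

/// Old style: build and return a Vec, which every caller must then re-emit.
fn gen_call(name: &str) -> Vec<Inst> {
    vec![
        Inst::StackAdjust { bytes: 16 },
        Inst::Call { name: name.to_string() },
    ]
}

/// New style: emit directly into the context; no temporary Vec, no caller loop.
fn emit_call<C: EmitCtx>(ctx: &mut C, name: &str) {
    ctx.emit(Inst::StackAdjust { bytes: 16 });
    ctx.emit(Inst::Call { name: name.to_string() });
}

fn main() {
    let mut ctx = VecCtx { insts: Vec::new() };

    // Old style: the caller iterates over the returned buffer and re-emits.
    for inst in gen_call("callee_a").into_iter() {
        ctx.emit(inst);
    }

    // New style: a single call, mirroring `abi.emit_call(ctx)` in the diff above.
    emit_call(&mut ctx, "callee_b");

    println!("{:?}", ctx.insts);
}

The practical effect, visible in the lower_inst.rs hunk, is that call lowering no longer allocates and drains temporary instruction vectors; each helper receives the context once and emits in place.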