machinst: Avoid a lot of short-lived allocations in ABICall;

This commit is contained in:
Benjamin Bouvier
2020-05-06 18:05:14 +02:00
parent 4f919c6460
commit 9215b610ef
3 changed files with 96 additions and 80 deletions

View File

@@ -1179,44 +1179,40 @@ impl AArch64ABICall {
} }
} }
fn adjust_stack(amount: u64, is_sub: bool) -> Vec<Inst> { fn adjust_stack<C: LowerCtx<I = Inst>>(ctx: &mut C, amount: u64, is_sub: bool) {
if amount > 0 { if amount == 0 {
return;
}
let sp_adjustment = if is_sub { let sp_adjustment = if is_sub {
amount as i64 amount as i64
} else { } else {
-(amount as i64) -(amount as i64)
}; };
let adj_meta_insn = Inst::VirtualSPOffsetAdj { ctx.emit(Inst::VirtualSPOffsetAdj {
offset: sp_adjustment, offset: sp_adjustment,
}; });
let alu_op = if is_sub { ALUOp::Sub64 } else { ALUOp::Add64 }; let alu_op = if is_sub { ALUOp::Sub64 } else { ALUOp::Add64 };
if let Some(imm12) = Imm12::maybe_from_u64(amount) { if let Some(imm12) = Imm12::maybe_from_u64(amount) {
vec![ ctx.emit(Inst::AluRRImm12 {
adj_meta_insn,
Inst::AluRRImm12 {
alu_op, alu_op,
rd: writable_stack_reg(), rd: writable_stack_reg(),
rn: stack_reg(), rn: stack_reg(),
imm12, imm12,
}, })
]
} else { } else {
let const_load = Inst::LoadConst64 { ctx.emit(Inst::LoadConst64 {
rd: writable_spilltmp_reg(), rd: writable_spilltmp_reg(),
const_data: amount, const_data: amount,
}; });
let adj = Inst::AluRRRExtend { ctx.emit(Inst::AluRRRExtend {
alu_op, alu_op,
rd: writable_stack_reg(), rd: writable_stack_reg(),
rn: stack_reg(), rn: stack_reg(),
rm: spilltmp_reg(), rm: spilltmp_reg(),
extendop: ExtendOp::UXTX, extendop: ExtendOp::UXTX,
}; });
vec![adj_meta_insn, const_load, adj]
}
} else {
vec![]
} }
} }
@@ -1227,64 +1223,82 @@ impl ABICall for AArch64ABICall {
self.sig.args.len() self.sig.args.len()
} }
fn gen_stack_pre_adjust(&self) -> Vec<Inst> { fn emit_stack_pre_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C) {
adjust_stack(self.sig.stack_arg_space as u64, /* is_sub = */ true) adjust_stack(
ctx,
self.sig.stack_arg_space as u64,
/* is_sub = */ true,
)
} }
fn gen_stack_post_adjust(&self) -> Vec<Inst> { fn emit_stack_post_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C) {
adjust_stack(self.sig.stack_arg_space as u64, /* is_sub = */ false) adjust_stack(
ctx,
self.sig.stack_arg_space as u64,
/* is_sub = */ false,
)
} }
fn gen_copy_reg_to_arg(&self, idx: usize, from_reg: Reg) -> Vec<Inst> { fn emit_copy_reg_to_arg<C: LowerCtx<I = Self::I>>(
&self,
ctx: &mut C,
idx: usize,
from_reg: Reg,
) {
match &self.sig.args[idx] { match &self.sig.args[idx] {
&ABIArg::Reg(reg, ty) => vec![Inst::gen_move( &ABIArg::Reg(reg, ty) => ctx.emit(Inst::gen_move(
Writable::from_reg(reg.to_reg()), Writable::from_reg(reg.to_reg()),
from_reg, from_reg,
ty, ty,
)], )),
&ABIArg::Stack(off, ty) => vec![store_stack(MemArg::SPOffset(off), from_reg, ty)], &ABIArg::Stack(off, ty) => ctx.emit(store_stack(MemArg::SPOffset(off), from_reg, ty)),
} }
} }
fn gen_copy_retval_to_reg(&self, idx: usize, into_reg: Writable<Reg>) -> Inst { fn emit_copy_retval_to_reg<C: LowerCtx<I = Self::I>>(
&self,
ctx: &mut C,
idx: usize,
into_reg: Writable<Reg>,
) {
match &self.sig.rets[idx] { match &self.sig.rets[idx] {
&ABIArg::Reg(reg, ty) => Inst::gen_move(into_reg, reg.to_reg(), ty), &ABIArg::Reg(reg, ty) => ctx.emit(Inst::gen_move(into_reg, reg.to_reg(), ty)),
_ => unimplemented!(), _ => unimplemented!(),
} }
} }
fn gen_call(&self) -> Vec<Inst> { fn emit_call<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C) {
let (uses, defs) = (self.uses.clone(), self.defs.clone()); let (uses, defs) = (self.uses.clone(), self.defs.clone());
match &self.dest { match &self.dest {
&CallDest::ExtName(ref name, RelocDistance::Near) => vec![Inst::Call { &CallDest::ExtName(ref name, RelocDistance::Near) => ctx.emit(Inst::Call {
dest: name.clone(), dest: name.clone(),
uses, uses,
defs, defs,
loc: self.loc, loc: self.loc,
opcode: self.opcode, opcode: self.opcode,
}], }),
&CallDest::ExtName(ref name, RelocDistance::Far) => vec![ &CallDest::ExtName(ref name, RelocDistance::Far) => {
Inst::LoadExtName { ctx.emit(Inst::LoadExtName {
rd: writable_spilltmp_reg(), rd: writable_spilltmp_reg(),
name: name.clone(), name: name.clone(),
offset: 0, offset: 0,
srcloc: self.loc, srcloc: self.loc,
}, });
Inst::CallInd { ctx.emit(Inst::CallInd {
rn: spilltmp_reg(), rn: spilltmp_reg(),
uses, uses,
defs, defs,
loc: self.loc, loc: self.loc,
opcode: self.opcode, opcode: self.opcode,
}, });
], }
&CallDest::Reg(reg) => vec![Inst::CallInd { &CallDest::Reg(reg) => ctx.emit(Inst::CallInd {
rn: reg, rn: reg,
uses, uses,
defs, defs,
loc: self.loc, loc: self.loc,
opcode: self.opcode, opcode: self.opcode,
}], }),
} }
} }
} }

View File

@@ -1285,26 +1285,18 @@ pub(crate) fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(ctx: &mut C, insn: IRIns
_ => unreachable!(), _ => unreachable!(),
}; };
for inst in abi.gen_stack_pre_adjust().into_iter() { abi.emit_stack_pre_adjust(ctx);
ctx.emit(inst);
}
assert!(inputs.len() == abi.num_args()); assert!(inputs.len() == abi.num_args());
for (i, input) in inputs.iter().enumerate() { for (i, input) in inputs.iter().enumerate() {
let arg_reg = input_to_reg(ctx, *input, NarrowValueMode::None); let arg_reg = input_to_reg(ctx, *input, NarrowValueMode::None);
for inst in abi.gen_copy_reg_to_arg(i, arg_reg) { abi.emit_copy_reg_to_arg(ctx, i, arg_reg);
ctx.emit(inst);
}
}
for inst in abi.gen_call().into_iter() {
ctx.emit(inst);
} }
abi.emit_call(ctx);
for (i, output) in outputs.iter().enumerate() { for (i, output) in outputs.iter().enumerate() {
let retval_reg = output_to_reg(ctx, *output); let retval_reg = output_to_reg(ctx, *output);
ctx.emit(abi.gen_copy_retval_to_reg(i, retval_reg)); abi.emit_copy_retval_to_reg(ctx, i, retval_reg);
}
for inst in abi.gen_stack_post_adjust().into_iter() {
ctx.emit(inst);
} }
abi.emit_stack_post_adjust(ctx);
} }
Opcode::GetPinnedReg => { Opcode::GetPinnedReg => {

View File

@@ -135,19 +135,29 @@ pub trait ABICall {
/// Get the number of arguments expected. /// Get the number of arguments expected.
fn num_args(&self) -> usize; fn num_args(&self) -> usize;
/// Copy an argument value from a source register, prior to the call. /// Emit a copy of an argument value from a source register, prior to the call.
fn gen_copy_reg_to_arg(&self, idx: usize, from_reg: Reg) -> Vec<Self::I>; fn emit_copy_reg_to_arg<C: LowerCtx<I = Self::I>>(
&self,
ctx: &mut C,
idx: usize,
from_reg: Reg,
);
/// Copy a return value into a destination register, after the call returns. /// Emit a copy of a return value into a destination register, after the call returns.
fn gen_copy_retval_to_reg(&self, idx: usize, into_reg: Writable<Reg>) -> Self::I; fn emit_copy_retval_to_reg<C: LowerCtx<I = Self::I>>(
&self,
ctx: &mut C,
idx: usize,
into_reg: Writable<Reg>,
);
/// Pre-adjust the stack, prior to argument copies and call. /// Emit code to pre-adjust the stack, prior to argument copies and call.
fn gen_stack_pre_adjust(&self) -> Vec<Self::I>; fn emit_stack_pre_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C);
/// Post-adjust the stack, after call return and return-value copies. /// Emit code to post-adjust the stack, after call return and return-value copies.
fn gen_stack_post_adjust(&self) -> Vec<Self::I>; fn emit_stack_post_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C);
/// Generate the call itself. /// Emit the call itself.
/// ///
/// The returned instruction should have proper use- and def-sets according /// The returned instruction should have proper use- and def-sets according
/// to the argument registers, return-value registers, and clobbered /// to the argument registers, return-value registers, and clobbered
@@ -157,5 +167,5 @@ pub trait ABICall {
/// registers are also logically defs, but should never be read; their /// registers are also logically defs, but should never be read; their
/// values are "defined" (to the regalloc) but "undefined" in every other /// values are "defined" (to the regalloc) but "undefined" in every other
/// sense.) /// sense.)
fn gen_call(&self) -> Vec<Self::I>; fn emit_call<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C);
} }