Reftypes part two: add support for stackmaps.

This commit adds support for generating stackmaps at safepoints to the
new backend framework and to the AArch64 backend in particular. It has
been tested to work with SpiderMonkey.
Chris Fallin
2020-06-29 15:49:18 -07:00
parent b93e8c296d
commit 08353fcc14
17 changed files with 597 additions and 143 deletions

@@ -90,12 +90,13 @@
//! - Return v1 in memory at `[P+8]`.
//! - Return v0 in memory at `[P+16]`.
use crate::binemit::Stackmap;
use crate::ir;
use crate::ir::types;
use crate::ir::types::*;
use crate::ir::{ArgumentExtension, StackSlot};
use crate::isa;
use crate::isa::aarch64::{inst::*, lower::ty_bits};
use crate::isa::aarch64::{inst::EmitState, inst::*, lower::ty_bits};
use crate::machinst::*;
use crate::settings;
use crate::{CodegenError, CodegenResult};
@@ -372,7 +373,10 @@ pub struct AArch64ABIBody {
clobbered: Set<Writable<RealReg>>,
/// Total number of spillslots, from regalloc.
spillslots: Option<usize>,
/// Total frame size.
/// "Total frame size", as defined by "distance between FP and nominal-SP".
/// Some items are pushed below nominal SP, so the function may actually use
/// more stack than this would otherwise imply. It is simply the initial
/// frame/allocation size needed for stackslots and spillslots.
total_frame_size: Option<u32>,
/// The register holding the return-area pointer, if needed.
ret_area_ptr: Option<Writable<Reg>>,
@@ -811,6 +815,35 @@ fn get_caller_saves(call_conv: isa::CallConv) -> Vec<Writable<Reg>> {
caller_saved
}
fn gen_sp_adjust_insts<F: FnMut(Inst)>(adj: u64, is_sub: bool, mut f: F) {
let alu_op = if is_sub { ALUOp::Sub64 } else { ALUOp::Add64 };
if let Some(imm12) = Imm12::maybe_from_u64(adj) {
let adj_inst = Inst::AluRRImm12 {
alu_op,
rd: writable_stack_reg(),
rn: stack_reg(),
imm12,
};
f(adj_inst);
} else {
let tmp = writable_spilltmp_reg();
let const_inst = Inst::LoadConst64 {
rd: tmp,
const_data: adj,
};
let adj_inst = Inst::AluRRRExtend {
alu_op,
rd: writable_stack_reg(),
rn: stack_reg(),
rm: tmp.to_reg(),
extendop: ExtendOp::UXTX,
};
f(const_inst);
f(adj_inst);
}
}
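
The helper above consolidates the SP-adjustment pattern that previously appeared inline in both the prologue and the call-site code (see the hunks below): AArch64's add/sub-immediate form takes a 12-bit value, optionally shifted left by 12 bits, so any adjustment `Imm12::maybe_from_u64` cannot encode is first materialized in the spill temporary. A minimal standalone sketch of that encodability split — `encode_imm12` is a hypothetical stand-in, not the real `Imm12` API:

```rust
/// Sketch of the AArch64 add/sub-immediate encodability check that
/// `gen_sp_adjust_insts` relies on: a 12-bit value, optionally LSL #12.
fn encode_imm12(value: u64) -> Option<(u16, bool)> {
    if value < 0x1000 {
        // Fits directly: 12-bit immediate, no shift.
        Some((value as u16, false))
    } else if (value & 0xfff) == 0 && value < 0x100_0000 {
        // Low 12 bits clear and value < 2^24: fits with the LSL #12 form.
        Some(((value >> 12) as u16, true))
    } else {
        // Too large: caller must materialize it in a temp register and
        // use the register-register form instead.
        None
    }
}

fn main() {
    assert_eq!(encode_imm12(16), Some((16, false))); // sub sp, sp, #16
    assert_eq!(encode_imm12(0x5000), Some((5, true))); // lsl #12 form
    assert_eq!(encode_imm12(0x123456), None); // LoadConst64 + AluRRRExtend
}
```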
impl ABIBody for AArch64ABIBody {
type I = Inst;
@@ -1025,6 +1058,29 @@ impl ABIBody for AArch64ABIBody {
store_stack(MemArg::NominalSPOffset(sp_off, ty), from_reg, ty)
}
fn spillslots_to_stackmap(&self, slots: &[SpillSlot], state: &EmitState) -> Stackmap {
assert!(state.virtual_sp_offset >= 0);
trace!(
"spillslots_to_stackmap: slots = {:?}, state = {:?}",
slots,
state
);
// The map covers one bit per 8-byte word, from the real SP at this
// program point (nominal SP minus the virtual offset) up to FP.
let map_size = (state.virtual_sp_offset + state.nominal_sp_to_fp) as u32;
let map_words = (map_size + 7) / 8;
let mut bits = std::iter::repeat(false)
.take(map_words as usize)
.collect::<Vec<bool>>();
// Spill slots begin just above the IR stack slots; both are offset from
// real SP by the current virtual-SP delta.
let first_spillslot_word =
((self.stackslots_size + state.virtual_sp_offset as u32) / 8) as usize;
for &slot in slots {
let slot = slot.get() as usize;
bits[first_spillslot_word + slot] = true;
}
Stackmap::from_slice(&bits[..])
}
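
For orientation, the bitmap built above has one bit per 8-byte stack word and covers everything from the real SP at the safepoint up to FP: IR stack slots occupy the words just above the virtual-SP delta, and regalloc spill slots follow them. A self-contained sketch of the same indexing with assumed sizes (plain types, not the real `Stackmap`):

```rust
/// Standalone model of the spillslot -> stackmap-bit computation in
/// `spillslots_to_stackmap` (assumed field values and types).
fn spillslots_to_bits(
    slots: &[usize],        // spill-slot indices holding references
    stackslots_size: u32,   // bytes of IR stack slots above the SP delta
    virtual_sp_offset: i64, // real SP is this many bytes below nominal SP
    nominal_sp_to_fp: i64,  // bytes from nominal SP up to FP
) -> Vec<bool> {
    let map_size = (virtual_sp_offset + nominal_sp_to_fp) as u32;
    let map_words = (map_size + 7) / 8; // one bit per 8-byte word
    let mut bits = vec![false; map_words as usize];
    let first_spillslot_word =
        ((stackslots_size + virtual_sp_offset as u32) / 8) as usize;
    for &slot in slots {
        bits[first_spillslot_word + slot] = true;
    }
    bits
}

fn main() {
    // 32 bytes of stackslots, SP pushed down 16 bytes, FP 64 bytes above
    // nominal SP: spill slot 1 lands at word (32 + 16) / 8 + 1 = 7.
    let bits = spillslots_to_bits(&[1], 32, 16, 64);
    assert_eq!(bits.len(), 10); // (16 + 64 + 7) / 8 words
    assert!(bits[7]);
}
```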
fn gen_prologue(&mut self) -> Vec<Inst> {
let mut insts = vec![];
if !self.call_conv.extends_baldrdash() {
@@ -1060,6 +1116,9 @@ impl ABIBody for AArch64ABIBody {
}
let total_stacksize = (total_stacksize + 15) & !15; // 16-align the stack.
let mut total_sp_adjust = 0;
let mut nominal_sp_to_real_sp = 0;
if !self.call_conv.extends_baldrdash() {
// Leaf functions with zero stack don't need a stack check if one's
// specified, otherwise always insert the stack check.
@@ -1070,42 +1129,28 @@ impl ABIBody for AArch64ABIBody {
}
}
if total_stacksize > 0 {
// sub sp, sp, #total_stacksize
if let Some(imm12) = Imm12::maybe_from_u64(total_stacksize as u64) {
let sub_inst = Inst::AluRRImm12 {
alu_op: ALUOp::Sub64,
rd: writable_stack_reg(),
rn: stack_reg(),
imm12,
};
insts.push(sub_inst);
} else {
let tmp = writable_spilltmp_reg();
let const_inst = Inst::LoadConst64 {
rd: tmp,
const_data: total_stacksize as u64,
};
let sub_inst = Inst::AluRRRExtend {
alu_op: ALUOp::Sub64,
rd: writable_stack_reg(),
rn: stack_reg(),
rm: tmp.to_reg(),
extendop: ExtendOp::UXTX,
};
insts.push(const_inst);
insts.push(sub_inst);
}
total_sp_adjust += total_stacksize as u64;
}
}
// N.B.: "nominal SP", which we use to refer to stackslots
// and spillslots, is *here* (the value of SP at this program point).
// N.B.: "nominal SP", which we use to refer to stackslots and
// spillslots, is right here.
//
// If we push any clobbers below, we emit a virtual-SP adjustment
// meta-instruction so that the nominal-SP references behave as if SP
// were still at this point. See documentation for
// [crate::isa::aarch64::abi](this module) for more details on
// stackframe layout and nominal-SP maintenance.
if total_sp_adjust > 0 {
// sub sp, sp, #total_stacksize
gen_sp_adjust_insts(
total_sp_adjust,
/* is_sub = */ true,
|inst| insts.push(inst),
);
}
// Save clobbered registers.
let (clobbered_int, clobbered_vec) =
get_callee_saves(self.call_conv, self.clobbered.to_vec());
@@ -1149,10 +1194,11 @@ impl ABIBody for AArch64ABIBody {
srcloc: None,
});
}
nominal_sp_to_real_sp += clobber_size as i64;
if clobber_size > 0 {
insts.push(Inst::VirtualSPOffsetAdj {
offset: clobber_size as i64,
offset: nominal_sp_to_real_sp,
});
}
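
The `VirtualSPOffsetAdj` meta-instruction emitted here is what keeps nominal-SP addressing honest: once the clobber pushes move the real SP below the nominal point, every `MemArg::NominalSPOffset` reference must be shifted by the current delta at emission time. A simplified model of that resolution (the real work happens in `mem_finalize` against `EmitState`):

```rust
/// Simplified model: `VirtualSPOffsetAdj` records how far real SP sits
/// below nominal SP, and every NominalSPOffset access adds that delta.
struct EmitStateModel {
    virtual_sp_offset: i64,
}

impl EmitStateModel {
    /// What `mem_finalize` effectively computes for NominalSPOffset args.
    fn resolve_nominal_sp(&self, nominal_off: i64) -> i64 {
        nominal_off + self.virtual_sp_offset
    }
}

fn main() {
    let mut state = EmitStateModel { virtual_sp_offset: 0 };
    // Prologue pushes 48 bytes of clobbers below nominal SP:
    state.virtual_sp_offset += 48; // VirtualSPOffsetAdj { offset: 48 }
    // A stackslot at nominal-SP offset 8 is now at real SP + 56.
    assert_eq!(state.resolve_nominal_sp(8), 56);
}
```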
@@ -1246,6 +1292,10 @@ impl ABIBody for AArch64ABIBody {
.expect("frame size not computed before prologue generation")
}
fn stack_args_size(&self) -> u32 {
self.sig.stack_arg_space as u32
}
fn get_spillslot_size(&self, rc: RegClass, ty: Type) -> u32 {
// We allocate in terms of 8-byte slots.
match (rc, ty) {
@@ -1256,15 +1306,30 @@ impl ABIBody for AArch64ABIBody {
}
}
fn gen_spill(&self, to_slot: SpillSlot, from_reg: RealReg, ty: Type) -> Inst {
fn gen_spill(&self, to_slot: SpillSlot, from_reg: RealReg, ty: Option<Type>) -> Inst {
let ty = ty_from_ty_hint_or_reg_class(from_reg.to_reg(), ty);
self.store_spillslot(to_slot, ty, from_reg.to_reg())
}
fn gen_reload(&self, to_reg: Writable<RealReg>, from_slot: SpillSlot, ty: Type) -> Inst {
fn gen_reload(
&self,
to_reg: Writable<RealReg>,
from_slot: SpillSlot,
ty: Option<Type>,
) -> Inst {
let ty = ty_from_ty_hint_or_reg_class(to_reg.to_reg().to_reg(), ty);
self.load_spillslot(from_slot, ty, to_reg.map(|r| r.to_reg()))
}
}
fn ty_from_ty_hint_or_reg_class(r: Reg, ty: Option<Type>) -> Type {
match (ty, r.get_class()) {
(Some(t), _) => t,
(None, RegClass::I64) => I64,
_ => panic!("Unexpected register class!"),
}
}
enum CallDest {
ExtName(ir::ExternalName, RelocDistance),
Reg(Reg),
@@ -1343,7 +1408,7 @@ impl AArch64ABICall {
}
}
fn adjust_stack<C: LowerCtx<I = Inst>>(ctx: &mut C, amount: u64, is_sub: bool) {
fn adjust_stack_and_nominal_sp<C: LowerCtx<I = Inst>>(ctx: &mut C, amount: u64, is_sub: bool) {
if amount == 0 {
return;
}
@@ -1357,27 +1422,9 @@ fn adjust_stack<C: LowerCtx<I = Inst>>(ctx: &mut C, amount: u64, is_sub: bool) {
offset: sp_adjustment,
});
let alu_op = if is_sub { ALUOp::Sub64 } else { ALUOp::Add64 };
if let Some(imm12) = Imm12::maybe_from_u64(amount) {
ctx.emit(Inst::AluRRImm12 {
alu_op,
rd: writable_stack_reg(),
rn: stack_reg(),
imm12,
})
} else {
ctx.emit(Inst::LoadConst64 {
rd: writable_spilltmp_reg(),
const_data: amount,
});
ctx.emit(Inst::AluRRRExtend {
alu_op,
rd: writable_stack_reg(),
rn: stack_reg(),
rm: spilltmp_reg(),
extendop: ExtendOp::UXTX,
});
}
gen_sp_adjust_insts(amount, is_sub, |inst| {
ctx.emit(inst);
});
}
impl ABICall for AArch64ABICall {
@@ -1393,12 +1440,12 @@ impl ABICall for AArch64ABICall {
fn emit_stack_pre_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C) {
let off = self.sig.stack_arg_space + self.sig.stack_ret_space;
adjust_stack(ctx, off as u64, /* is_sub = */ true)
adjust_stack_and_nominal_sp(ctx, off as u64, /* is_sub = */ true)
}
fn emit_stack_post_adjust<C: LowerCtx<I = Self::I>>(&self, ctx: &mut C) {
let off = self.sig.stack_arg_space + self.sig.stack_ret_space;
adjust_stack(ctx, off as u64, /* is_sub = */ false)
adjust_stack_and_nominal_sp(ctx, off as u64, /* is_sub = */ false)
}
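
The rename from `adjust_stack` makes the contract explicit: every SP move around a call is paired with a `VirtualSPOffsetAdj`, so `emit_stack_pre_adjust` and `emit_stack_post_adjust` form a balanced bracket and nominal-SP references stay valid while outgoing arguments sit on the stack. A worked model of that bracket with assumed sizes (the real code reads `sig.stack_arg_space` and `sig.stack_ret_space`):

```rust
fn main() {
    let stack_arg_space = 16i64;
    let stack_ret_space = 32i64;
    let off = stack_arg_space + stack_ret_space;

    let mut virtual_sp_offset = 0i64;

    // emit_stack_pre_adjust: sub sp, sp, #48, and record that nominal SP
    // is now 48 bytes above real SP.
    virtual_sp_offset += off;
    assert_eq!(virtual_sp_offset, 48);

    // ... write args at [sp], emit the call, read returns ...

    // emit_stack_post_adjust: add sp, sp, #48, and undo the record.
    virtual_sp_offset -= off;
    assert_eq!(virtual_sp_offset, 0);
}
```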
fn emit_copy_reg_to_arg<C: LowerCtx<I = Self::I>>(
@@ -1453,7 +1500,7 @@ impl ABICall for AArch64ABICall {
self.emit_copy_reg_to_arg(ctx, i, rd.to_reg());
}
match &self.dest {
&CallDest::ExtName(ref name, RelocDistance::Near) => ctx.emit(Inst::Call {
&CallDest::ExtName(ref name, RelocDistance::Near) => ctx.emit_safepoint(Inst::Call {
info: Box::new(CallInfo {
dest: name.clone(),
uses,
@@ -1469,7 +1516,7 @@ impl ABICall for AArch64ABICall {
offset: 0,
srcloc: self.loc,
});
ctx.emit(Inst::CallInd {
ctx.emit_safepoint(Inst::CallInd {
info: Box::new(CallIndInfo {
rn: spilltmp_reg(),
uses,
@@ -1479,7 +1526,7 @@ impl ABICall for AArch64ABICall {
}),
});
}
&CallDest::Reg(reg) => ctx.emit(Inst::CallInd {
&CallDest::Reg(reg) => ctx.emit_safepoint(Inst::CallInd {
info: Box::new(CallIndInfo {
rn: reg,
uses,

@@ -1,6 +1,6 @@
//! AArch64 ISA: binary code emission.
use crate::binemit::{CodeOffset, Reloc};
use crate::binemit::{CodeOffset, Reloc, Stackmap};
use crate::ir::constant::ConstantData;
use crate::ir::types::*;
use crate::ir::TrapCode;
@@ -376,7 +376,37 @@ fn enc_vec_lanes(q: u32, u: u32, size: u32, opcode: u32, rd: Writable<Reg>, rn:
/// State carried between emissions of a sequence of instructions.
#[derive(Default, Clone, Debug)]
pub struct EmitState {
virtual_sp_offset: i64,
/// Addend to convert nominal-SP offsets to real-SP offsets at the current
/// program point.
pub(crate) virtual_sp_offset: i64,
/// Offset of FP from nominal-SP.
pub(crate) nominal_sp_to_fp: i64,
/// Safepoint stackmap for upcoming instruction, as provided to `pre_safepoint()`.
stackmap: Option<Stackmap>,
}
impl MachInstEmitState<Inst> for EmitState {
fn new(abi: &dyn ABIBody<I = Inst>) -> Self {
EmitState {
virtual_sp_offset: 0,
nominal_sp_to_fp: abi.frame_size() as i64,
stackmap: None,
}
}
fn pre_safepoint(&mut self, stackmap: Stackmap) {
self.stackmap = Some(stackmap);
}
}
impl EmitState {
fn take_stackmap(&mut self) -> Option<Stackmap> {
self.stackmap.take()
}
fn clear_post_insn(&mut self) {
self.stackmap = None;
}
}
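
These methods form a one-shot handoff: the emission driver calls `pre_safepoint()` with the stackmap computed for an upcoming safepoint, the instruction's own `emit()` case consumes it with `take_stackmap()` and attaches it to the buffer (the `4` passed to `add_stackmap` in the hunks below is the fixed AArch64 instruction size in bytes), and `clear_post_insn()` runs after every instruction so a stale map can never attach to the wrong one. A standalone sketch of the protocol with simplified types:

```rust
/// Toy stand-ins for Stackmap and EmitState, just to show the handoff.
struct StackmapModel(Vec<bool>);

#[derive(Default)]
struct EmitStateModel {
    stackmap: Option<StackmapModel>,
}

impl EmitStateModel {
    // Called by the emission driver just before a safepoint instruction.
    fn pre_safepoint(&mut self, map: StackmapModel) {
        self.stackmap = Some(map);
    }
    // Called by the safepoint instruction's own emit case (Call/CallInd/Udf).
    fn take_stackmap(&mut self) -> Option<StackmapModel> {
        self.stackmap.take()
    }
    // Called unconditionally after every instruction, so a map meant for
    // one instruction can never leak onto the next.
    fn clear_post_insn(&mut self) {
        self.stackmap = None;
    }
}

fn main() {
    let mut state = EmitStateModel::default();
    state.pre_safepoint(StackmapModel(vec![true, false]));
    // The safepoint's emit case consumes it exactly once...
    assert!(state.take_stackmap().is_some());
    state.clear_post_insn();
    // ...and a following non-safepoint instruction sees nothing.
    assert!(state.take_stackmap().is_none());
}
```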
impl MachInstEmit for Inst {
@@ -1463,6 +1493,9 @@ impl MachInstEmit for Inst {
// Noop; this is just a placeholder for epilogues.
}
&Inst::Call { ref info } => {
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(4, s);
}
sink.add_reloc(info.loc, Reloc::Arm64Call, &info.dest, 0);
sink.put4(enc_jump26(0b100101, 0));
if info.opcode.is_call() {
@@ -1470,6 +1503,9 @@ impl MachInstEmit for Inst {
}
}
&Inst::CallInd { ref info } => {
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(4, s);
}
sink.put4(0b1101011_0001_11111_000000_00000_00000 | (machreg_to_gpr(info.rn) << 5));
if info.opcode.is_call() {
sink.add_call_site(info.loc, info.opcode);
@@ -1525,6 +1561,9 @@ impl MachInstEmit for Inst {
&Inst::Udf { trap_info } => {
let (srcloc, code) = trap_info;
sink.add_trap(srcloc, code);
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(4, s);
}
sink.put4(0xd4a00000);
}
&Inst::Adr { rd, off } => {
@@ -1709,7 +1748,7 @@ impl MachInstEmit for Inst {
debug!(
"virtual sp offset adjusted by {} -> {}",
offset,
state.virtual_sp_offset + offset
state.virtual_sp_offset + offset,
);
state.virtual_sp_offset += offset;
}
@@ -1728,5 +1767,11 @@ impl MachInstEmit for Inst {
let end_off = sink.cur_offset();
debug_assert!((end_off - start_off) <= Inst::worst_case_size());
state.clear_post_insn();
}
fn pretty_print(&self, mb_rru: Option<&RealRegUniverse>, state: &mut EmitState) -> String {
self.print_with_state(mb_rru, state)
}
}

@@ -1346,11 +1346,11 @@ fn aarch64_get_regs(inst: &Inst, collector: &mut RegUsageCollector) {
collector.add_use(rn);
}
&Inst::Jump { .. } | &Inst::Ret | &Inst::EpiloguePlaceholder => {}
&Inst::Call { ref info } => {
&Inst::Call { ref info, .. } => {
collector.add_uses(&*info.uses);
collector.add_defs(&*info.defs);
}
&Inst::CallInd { ref info } => {
&Inst::CallInd { ref info, .. } => {
collector.add_uses(&*info.uses);
collector.add_defs(&*info.defs);
collector.add_use(info.rn);
@@ -2137,13 +2137,21 @@ impl MachInst for Inst {
// feasible for other reasons).
44
}
fn ref_type_rc(_: &settings::Flags) -> RegClass {
RegClass::I64
}
}
//=============================================================================
// Pretty-printing of instructions.
fn mem_finalize_for_show(mem: &MemArg, mb_rru: Option<&RealRegUniverse>) -> (String, MemArg) {
let (mem_insts, mem) = mem_finalize(0, mem, &mut Default::default());
fn mem_finalize_for_show(
mem: &MemArg,
mb_rru: Option<&RealRegUniverse>,
state: &EmitState,
) -> (String, MemArg) {
let (mem_insts, mem) = mem_finalize(0, mem, state);
let mut mem_str = mem_insts
.into_iter()
.map(|inst| inst.show_rru(mb_rru))
@@ -2158,6 +2166,12 @@ fn mem_finalize_for_show(mem: &MemArg, mb_rru: Option<&RealRegUniverse>) -> (Str
impl ShowWithRRU for Inst {
fn show_rru(&self, mb_rru: Option<&RealRegUniverse>) -> String {
self.pretty_print(mb_rru, &mut EmitState::default())
}
}
impl Inst {
fn print_with_state(&self, mb_rru: Option<&RealRegUniverse>, state: &mut EmitState) -> String {
fn op_name_size(alu_op: ALUOp) -> (&'static str, OperandSize) {
match alu_op {
ALUOp::Add32 => ("add", OperandSize::Size32),
@@ -2344,7 +2358,7 @@ impl ShowWithRRU for Inst {
srcloc: _srcloc,
..
} => {
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let is_unscaled = match &mem {
&MemArg::Unscaled(..) => true,
@@ -2392,7 +2406,7 @@ impl ShowWithRRU for Inst {
srcloc: _srcloc,
..
} => {
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let is_unscaled = match &mem {
&MemArg::Unscaled(..) => true,
@@ -2576,39 +2590,39 @@ impl ShowWithRRU for Inst {
}
&Inst::FpuLoad32 { rd, ref mem, .. } => {
let rd = show_freg_sized(rd.to_reg(), mb_rru, ScalarSize::Size32);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let mem = mem.show_rru(mb_rru);
format!("{}ldr {}, {}", mem_str, rd, mem)
}
&Inst::FpuLoad64 { rd, ref mem, .. } => {
let rd = show_freg_sized(rd.to_reg(), mb_rru, ScalarSize::Size64);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let mem = mem.show_rru(mb_rru);
format!("{}ldr {}, {}", mem_str, rd, mem)
}
&Inst::FpuLoad128 { rd, ref mem, .. } => {
let rd = rd.to_reg().show_rru(mb_rru);
let rd = "q".to_string() + &rd[1..];
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let mem = mem.show_rru(mb_rru);
format!("{}ldr {}, {}", mem_str, rd, mem)
}
&Inst::FpuStore32 { rd, ref mem, .. } => {
let rd = show_freg_sized(rd, mb_rru, ScalarSize::Size32);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let mem = mem.show_rru(mb_rru);
format!("{}str {}, {}", mem_str, rd, mem)
}
&Inst::FpuStore64 { rd, ref mem, .. } => {
let rd = show_freg_sized(rd, mb_rru, ScalarSize::Size64);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let mem = mem.show_rru(mb_rru);
format!("{}str {}, {}", mem_str, rd, mem)
}
&Inst::FpuStore128 { rd, ref mem, .. } => {
let rd = rd.show_rru(mb_rru);
let rd = "q".to_string() + &rd[1..];
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru);
let (mem_str, mem) = mem_finalize_for_show(mem, mb_rru, state);
let mem = mem.show_rru(mb_rru);
format!("{}str {}, {}", mem_str, rd, mem)
}
@@ -2978,7 +2992,7 @@ impl ShowWithRRU for Inst {
// this logic between `emit()` and `show_rru()` -- a separate 1-to-N
// expansion stage (i.e., legalization, but without the slow edit-in-place
// of the existing legalization framework).
let (mem_insts, mem) = mem_finalize(0, mem, &EmitState::default());
let (mem_insts, mem) = mem_finalize(0, mem, state);
let mut ret = String::new();
for inst in mem_insts.into_iter() {
ret.push_str(&inst.show_rru(mb_rru));
@@ -3025,7 +3039,10 @@ impl ShowWithRRU for Inst {
}
ret
}
&Inst::VirtualSPOffsetAdj { offset } => format!("virtual_sp_offset_adjust {}", offset),
&Inst::VirtualSPOffsetAdj { offset } => {
state.virtual_sp_offset += offset;
format!("virtual_sp_offset_adjust {}", offset)
}
&Inst::EmitIsland { needed_space } => format!("emit_island {}", needed_space),
}
}
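
Making the pretty-printer stateful via `print_with_state` lets it run the same `mem_finalize` logic as emission, which is why this arm now updates `virtual_sp_offset`: the disassembly of a nominal-SP access must show the same real-SP offset the emitted code will use. A toy illustration of the idea (not the real `show_rru` output format):

```rust
/// Sketch of why the printer threads EmitState: rendering a
/// NominalSPOffset access needs the current virtual-SP delta,
/// exactly as emission does.
fn show_nominal_sp_load(nominal_off: i64, virtual_sp_offset: i64) -> String {
    format!("ldr x0, [sp, #{}]", nominal_off + virtual_sp_offset)
}

fn main() {
    // Before any adjustment, nominal offset 8 prints as real offset 8;
    // after `virtual_sp_offset_adjust 48`, the same access prints as 56.
    assert_eq!(show_nominal_sp_load(8, 0), "ldr x0, [sp, #8]");
    assert_eq!(show_nominal_sp_load(8, 48), "ldr x0, [sp, #56]");
}
```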

@@ -1394,7 +1394,7 @@ pub(crate) fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
Opcode::Trap | Opcode::ResumableTrap => {
let trap_info = (ctx.srcloc(insn), inst_trapcode(ctx.data(insn)).unwrap());
ctx.emit(Inst::Udf { trap_info })
ctx.emit_safepoint(Inst::Udf { trap_info });
}
Opcode::Trapif | Opcode::Trapff => {
@@ -1432,10 +1432,11 @@ pub(crate) fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
trap_info,
kind: CondBrKind::Cond(cond),
});
ctx.emit_safepoint(Inst::Udf { trap_info })
}
Opcode::Safepoint => {
panic!("safepoint support not implemented!");
panic!("safepoint instructions not used by new backend's safepoints!");
}
Opcode::Trapz | Opcode::Trapnz | Opcode::ResumableTrapnz => {

@@ -5,6 +5,7 @@ use log::trace;
use regalloc::{RealReg, Reg, RegClass, Set, SpillSlot, Writable};
use std::mem;
use crate::binemit::Stackmap;
use crate::ir::{self, types, types::*, ArgumentExtension, StackSlot, Type};
use crate::isa::{self, x64::inst::*};
use crate::machinst::*;
@@ -415,6 +416,10 @@ impl ABIBody for X64ABIBody {
)
}
fn spillslots_to_stackmap(&self, _slots: &[SpillSlot], _state: &EmitState) -> Stackmap {
unimplemented!("spillslots_to_stackmap")
}
fn gen_prologue(&mut self) -> Vec<Inst> {
let r_rsp = regs::rsp();
@@ -553,6 +558,10 @@ impl ABIBody for X64ABIBody {
.expect("frame size not computed before prologue generation") as u32
}
fn stack_args_size(&self) -> u32 {
unimplemented!("I need to be computed!")
}
fn get_spillslot_size(&self, rc: RegClass, ty: Type) -> u32 {
// We allocate in terms of 8-byte slots.
match (rc, ty) {
@@ -563,15 +572,30 @@ impl ABIBody for X64ABIBody {
}
}
fn gen_spill(&self, to_slot: SpillSlot, from_reg: RealReg, ty: Type) -> Inst {
fn gen_spill(&self, to_slot: SpillSlot, from_reg: RealReg, ty: Option<Type>) -> Inst {
let ty = ty_from_ty_hint_or_reg_class(from_reg.to_reg(), ty);
self.store_spillslot(to_slot, ty, from_reg.to_reg())
}
fn gen_reload(&self, to_reg: Writable<RealReg>, from_slot: SpillSlot, ty: Type) -> Inst {
fn gen_reload(
&self,
to_reg: Writable<RealReg>,
from_slot: SpillSlot,
ty: Option<Type>,
) -> Inst {
let ty = ty_from_ty_hint_or_reg_class(to_reg.to_reg().to_reg(), ty);
self.load_spillslot(from_slot, ty, to_reg.map(|r| r.to_reg()))
}
}
fn ty_from_ty_hint_or_reg_class(r: Reg, ty: Option<Type>) -> Type {
match (ty, r.get_class()) {
(Some(t), _) => t,
(None, RegClass::I64) => I64,
_ => panic!("Unexpected register class!"),
}
}
fn get_caller_saves(call_conv: isa::CallConv) -> Vec<Writable<Reg>> {
let mut caller_saved = Vec::new();

@@ -1258,6 +1258,10 @@ impl MachInst for Inst {
15
}
fn ref_type_rc(_: &settings::Flags) -> RegClass {
RegClass::I64
}
type LabelUse = LabelUse;
}
@@ -1273,6 +1277,18 @@ impl MachInstEmit for Inst {
fn emit(&self, sink: &mut MachBuffer<Inst>, flags: &settings::Flags, state: &mut Self::State) {
emit::emit(self, sink, flags, state);
}
fn pretty_print(&self, mb_rru: Option<&RealRegUniverse>, _: &mut Self::State) -> String {
self.show_rru(mb_rru)
}
}
impl MachInstEmitState<Inst> for EmitState {
fn new(_: &dyn ABIBody<I = Inst>) -> Self {
EmitState {
virtual_sp_offset: 0,
}
}
}
/// A label-use (internal relocation) in generated code.