Merge pull request #1614 from cfallin/aarch64-regalloc-dense-maps

Use new regalloc.rs version with dense vreg->rreg maps.
Chris Fallin
2020-04-29 12:29:39 -07:00
committed by GitHub
5 changed files with 186 additions and 191 deletions
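
In short, the diff below bumps the `regalloc` dependency from 0.0.20 to 0.0.21 and replaces the old per-instruction register-mapping interface, which took two `Map<VirtualReg, RealReg>` hash maps (`pre_map`/`post_map`), with a single `RegUsageMapper` queried via `get_use`, `get_def`, and `get_mod`. The sketch below only illustrates that shape; it is not code from regalloc.rs. `VReg`, `RReg`, and `DenseMapper` are simplified stand-ins, and the vreg-indexed `Vec` storage is an assumption based on the "dense vreg->rreg maps" wording in the title.

```rust
// Toy illustration of the interface change in this PR (NOT the actual
// regalloc.rs types): mapping used to consult two hash maps keyed by virtual
// register; with regalloc 0.0.21 it consults one mapper that answers
// use/def/mod queries, here backed by dense, vreg-index-ordered storage.

/// Stand-in for a virtual register: just an index.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct VReg(u32);

/// Stand-in for a real (physical) register.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct RReg(u8);

/// A dense mapper: one slot per virtual register, indexed by `VReg.0`.
/// Lookups are O(1) array indexing instead of hashing; `None` marks vregs
/// with no assignment at this program point.
struct DenseMapper {
    uses: Vec<Option<RReg>>, // mapping valid *before* the instruction
    defs: Vec<Option<RReg>>, // mapping valid *after* the instruction
}

impl DenseMapper {
    fn get_use(&self, v: VReg) -> Option<RReg> {
        self.uses.get(v.0 as usize).copied().flatten()
    }
    fn get_def(&self, v: VReg) -> Option<RReg> {
        self.defs.get(v.0 as usize).copied().flatten()
    }
    /// A "mod" register is both read and written, so it must map to the
    /// same real register before and after the instruction.
    fn get_mod(&self, v: VReg) -> Option<RReg> {
        let (u, d) = (self.get_use(v), self.get_def(v));
        debug_assert_eq!(u, d);
        u
    }
}

fn main() {
    let m = DenseMapper {
        uses: vec![Some(RReg(0)), Some(RReg(1)), None],
        defs: vec![Some(RReg(0)), Some(RReg(2)), None],
    };
    // v1 is read from register 1 and its new value lands in register 2.
    assert_eq!(m.get_use(VReg(1)), Some(RReg(1)));
    assert_eq!(m.get_def(VReg(1)), Some(RReg(2)));
    // v0 is a "mod": same register on both sides of the instruction.
    assert_eq!(m.get_mod(VReg(0)), Some(RReg(0)));
    println!("dense mapper sketch ok");
}
```

The `get_vreg_count_estimate` hook added to `VCode` at the end of the diff presumably exists so the allocator can size such vreg-indexed tables up front.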

Cargo.lock

@@ -1538,9 +1538,9 @@ dependencies = [
[[package]]
name = "regalloc"
version = "0.0.20"
version = "0.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75d8659dfee0d5b4c06022ab25e3efda0737a2a1798f9c33d0c0a147a740db86"
checksum = "b27b256b41986ac5141b37b8bbba85d314fbf546c182eb255af6720e07e4f804"
dependencies = [
"log",
"rustc-hash",


@@ -24,7 +24,7 @@ gimli = { version = "0.20.0", default-features = false, features = ["write"], op
smallvec = { version = "1.0.0" }
thiserror = "1.0.4"
byteorder = { version = "1.3.2", default-features = false }
regalloc = "0.0.20"
regalloc = "0.0.21"
# It is a goal of the cranelift-codegen crate to have minimal external dependencies.
# Please don't add any unless they are essential to the task of creating binary
# machine code. Integration tests that need external dependencies can be


@@ -9,9 +9,8 @@ use crate::ir::{ExternalName, Opcode, SourceLoc, TrapCode, Type};
use crate::machinst::*;
use crate::{settings, CodegenError, CodegenResult};
use regalloc::Map as RegallocMap;
use regalloc::{RealReg, RealRegUniverse, Reg, RegClass, SpillSlot, VirtualReg, Writable};
use regalloc::{RegUsageCollector, Set};
use regalloc::{RealRegUniverse, Reg, RegClass, SpillSlot, VirtualReg, Writable};
use regalloc::{RegUsageCollector, RegUsageMapper, Set};
use alloc::vec::Vec;
use core::convert::TryFrom;
@@ -1136,70 +1135,71 @@ fn aarch64_get_regs(inst: &Inst, collector: &mut RegUsageCollector) {
//=============================================================================
// Instructions: map_regs
fn aarch64_map_regs(
inst: &mut Inst,
pre_map: &RegallocMap<VirtualReg, RealReg>,
post_map: &RegallocMap<VirtualReg, RealReg>,
) {
fn map(m: &RegallocMap<VirtualReg, RealReg>, r: &mut Reg) {
fn aarch64_map_regs(inst: &mut Inst, mapper: &RegUsageMapper) {
fn map_use(m: &RegUsageMapper, r: &mut Reg) {
if r.is_virtual() {
let new = m.get(&r.to_virtual_reg()).cloned().unwrap().to_reg();
let new = m.get_use(r.to_virtual_reg()).unwrap().to_reg();
*r = new;
}
}
fn map_wr(m: &RegallocMap<VirtualReg, RealReg>, r: &mut Writable<Reg>) {
let mut reg = r.to_reg();
map(m, &mut reg);
*r = Writable::from_reg(reg);
fn map_def(m: &RegUsageMapper, r: &mut Writable<Reg>) {
if r.to_reg().is_virtual() {
let new = m.get_def(r.to_reg().to_virtual_reg()).unwrap().to_reg();
*r = Writable::from_reg(new);
}
}
fn map_mem(u: &RegallocMap<VirtualReg, RealReg>, mem: &mut MemArg) {
fn map_mod(m: &RegUsageMapper, r: &mut Writable<Reg>) {
if r.to_reg().is_virtual() {
let new = m.get_mod(r.to_reg().to_virtual_reg()).unwrap().to_reg();
*r = Writable::from_reg(new);
}
}
fn map_mem(m: &RegUsageMapper, mem: &mut MemArg) {
// N.B.: we take only the pre-map here, but this is OK because the
// only addressing modes that update registers (pre/post-increment on
// AArch64) both read and write registers, so they are "mods" rather
// than "defs", so must be the same in both the pre- and post-map.
match mem {
&mut MemArg::Unscaled(ref mut reg, ..) => map(u, reg),
&mut MemArg::UnsignedOffset(ref mut reg, ..) => map(u, reg),
&mut MemArg::Unscaled(ref mut reg, ..) => map_use(m, reg),
&mut MemArg::UnsignedOffset(ref mut reg, ..) => map_use(m, reg),
&mut MemArg::RegReg(ref mut r1, ref mut r2) => {
map(u, r1);
map(u, r2);
map_use(m, r1);
map_use(m, r2);
}
&mut MemArg::RegScaled(ref mut r1, ref mut r2, ..) => {
map(u, r1);
map(u, r2);
map_use(m, r1);
map_use(m, r2);
}
&mut MemArg::RegScaledExtended(ref mut r1, ref mut r2, ..) => {
map(u, r1);
map(u, r2);
map_use(m, r1);
map_use(m, r2);
}
&mut MemArg::Label(..) => {}
&mut MemArg::PreIndexed(ref mut r, ..) => map_wr(u, r),
&mut MemArg::PostIndexed(ref mut r, ..) => map_wr(u, r),
&mut MemArg::PreIndexed(ref mut r, ..) => map_mod(m, r),
&mut MemArg::PostIndexed(ref mut r, ..) => map_mod(m, r),
&mut MemArg::FPOffset(..) | &mut MemArg::SPOffset(..) => {}
};
}
fn map_pairmem(u: &RegallocMap<VirtualReg, RealReg>, mem: &mut PairMemArg) {
fn map_pairmem(m: &RegUsageMapper, mem: &mut PairMemArg) {
match mem {
&mut PairMemArg::SignedOffset(ref mut reg, ..) => map(u, reg),
&mut PairMemArg::PreIndexed(ref mut reg, ..) => map_wr(u, reg),
&mut PairMemArg::PostIndexed(ref mut reg, ..) => map_wr(u, reg),
&mut PairMemArg::SignedOffset(ref mut reg, ..) => map_use(m, reg),
&mut PairMemArg::PreIndexed(ref mut reg, ..) => map_def(m, reg),
&mut PairMemArg::PostIndexed(ref mut reg, ..) => map_def(m, reg),
}
}
fn map_br(u: &RegallocMap<VirtualReg, RealReg>, br: &mut CondBrKind) {
fn map_br(m: &RegUsageMapper, br: &mut CondBrKind) {
match br {
&mut CondBrKind::Zero(ref mut reg) => map(u, reg),
&mut CondBrKind::NotZero(ref mut reg) => map(u, reg),
&mut CondBrKind::Zero(ref mut reg) => map_use(m, reg),
&mut CondBrKind::NotZero(ref mut reg) => map_use(m, reg),
&mut CondBrKind::Cond(..) => {}
};
}
let u = pre_map; // For brevity below.
let d = post_map;
match inst {
&mut Inst::AluRRR {
ref mut rd,
@@ -1207,9 +1207,9 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::AluRRRR {
ref mut rd,
@@ -1218,34 +1218,34 @@ fn aarch64_map_regs(
ref mut ra,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map(u, ra);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
map_use(mapper, ra);
}
&mut Inst::AluRRImm12 {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::AluRRImmLogic {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::AluRRImmShift {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::AluRRRShift {
ref mut rd,
@@ -1253,9 +1253,9 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::AluRRRExtend {
ref mut rd,
@@ -1263,65 +1263,65 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::BitRR {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::ULoad8 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::SLoad8 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::ULoad16 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::SLoad16 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::ULoad32 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::SLoad32 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::ULoad64 {
@@ -1329,40 +1329,40 @@ fn aarch64_map_regs(
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::Store8 {
ref mut rd,
ref mut mem,
..
} => {
map(u, rd);
map_mem(u, mem);
map_use(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::Store16 {
ref mut rd,
ref mut mem,
..
} => {
map(u, rd);
map_mem(u, mem);
map_use(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::Store32 {
ref mut rd,
ref mut mem,
..
} => {
map(u, rd);
map_mem(u, mem);
map_use(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::Store64 {
ref mut rd,
ref mut mem,
..
} => {
map(u, rd);
map_mem(u, mem);
map_use(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::StoreP64 {
@@ -1370,41 +1370,41 @@ fn aarch64_map_regs(
ref mut rt2,
ref mut mem,
} => {
map(u, rt);
map(u, rt2);
map_pairmem(u, mem);
map_use(mapper, rt);
map_use(mapper, rt2);
map_pairmem(mapper, mem);
}
&mut Inst::LoadP64 {
ref mut rt,
ref mut rt2,
ref mut mem,
} => {
map_wr(d, rt);
map_wr(d, rt2);
map_pairmem(u, mem);
map_def(mapper, rt);
map_def(mapper, rt2);
map_pairmem(mapper, mem);
}
&mut Inst::Mov {
ref mut rd,
ref mut rm,
} => {
map_wr(d, rd);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rm);
}
&mut Inst::Mov32 {
ref mut rd,
ref mut rm,
} => {
map_wr(d, rd);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rm);
}
&mut Inst::MovZ { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::MovN { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::MovK { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::CSel {
ref mut rd,
@@ -1412,30 +1412,30 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::CSet { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::CCmpImm { ref mut rn, .. } => {
map(u, rn);
map_use(mapper, rn);
}
&mut Inst::FpuMove64 {
ref mut rd,
ref mut rn,
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::FpuRR {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::FpuRRR {
ref mut rd,
@@ -1443,9 +1443,9 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::FpuRRRR {
ref mut rd,
@@ -1454,94 +1454,94 @@ fn aarch64_map_regs(
ref mut ra,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map(u, ra);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
map_use(mapper, ra);
}
&mut Inst::FpuCmp32 {
ref mut rn,
ref mut rm,
} => {
map(u, rn);
map(u, rm);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::FpuCmp64 {
ref mut rn,
ref mut rm,
} => {
map(u, rn);
map(u, rm);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::FpuLoad32 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::FpuLoad64 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::FpuLoad128 {
ref mut rd,
ref mut mem,
..
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::FpuStore32 {
ref mut rd,
ref mut mem,
..
} => {
map(u, rd);
map_mem(u, mem);
map_use(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::FpuStore64 {
ref mut rd,
ref mut mem,
..
} => {
map(u, rd);
map_mem(u, mem);
map_use(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::FpuStore128 {
ref mut rd,
ref mut mem,
..
} => {
map(u, rd);
map_mem(u, mem);
map_use(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::LoadFpuConst32 { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::LoadFpuConst64 { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::FpuToInt {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::IntToFpu {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::FpuCSel32 {
ref mut rd,
@@ -1549,9 +1549,9 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::FpuCSel64 {
ref mut rd,
@@ -1559,31 +1559,31 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::FpuRound {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::MovToVec64 {
ref mut rd,
ref mut rn,
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::MovFromVec64 {
ref mut rd,
ref mut rn,
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::VecRRR {
ref mut rd,
@@ -1591,26 +1591,26 @@ fn aarch64_map_regs(
ref mut rm,
..
} => {
map_wr(d, rd);
map(u, rn);
map(u, rm);
map_def(mapper, rd);
map_use(mapper, rn);
map_use(mapper, rm);
}
&mut Inst::MovToNZCV { ref mut rn } => {
map(u, rn);
map_use(mapper, rn);
}
&mut Inst::MovFromNZCV { ref mut rd } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::CondSet { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::Extend {
ref mut rd,
ref mut rn,
..
} => {
map_wr(d, rd);
map(u, rn);
map_def(mapper, rd);
map_use(mapper, rn);
}
&mut Inst::Jump { .. } => {}
&mut Inst::Call {
@@ -1621,12 +1621,12 @@ fn aarch64_map_regs(
// TODO: add `map_mut()` to regalloc.rs's Set.
let new_uses = uses.map(|r| {
let mut r = *r;
map(u, &mut r);
map_use(mapper, &mut r);
r
});
let new_defs = defs.map(|r| {
let mut r = *r;
map_wr(d, &mut r);
map_def(mapper, &mut r);
r
});
*uses = new_uses;
@@ -1642,33 +1642,33 @@ fn aarch64_map_regs(
// TODO: add `map_mut()` to regalloc.rs's Set.
let new_uses = uses.map(|r| {
let mut r = *r;
map(u, &mut r);
map_use(mapper, &mut r);
r
});
let new_defs = defs.map(|r| {
let mut r = *r;
map_wr(d, &mut r);
map_def(mapper, &mut r);
r
});
*uses = new_uses;
*defs = new_defs;
map(u, rn);
map_use(mapper, rn);
}
&mut Inst::CondBr { ref mut kind, .. } => {
map_br(u, kind);
map_br(mapper, kind);
}
&mut Inst::CondBrLowered { ref mut kind, .. } => {
map_br(u, kind);
map_br(mapper, kind);
}
&mut Inst::CondBrLoweredCompound { ref mut kind, .. } => {
map_br(u, kind);
map_br(mapper, kind);
}
&mut Inst::IndirectBr { ref mut rn, .. } => {
map(u, rn);
map_use(mapper, rn);
}
&mut Inst::Nop0 | &mut Inst::Nop4 | &mut Inst::Brk | &mut Inst::Udf { .. } => {}
&mut Inst::Adr { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::Word4 { .. } | &mut Inst::Word8 { .. } => {}
&mut Inst::JTSequence {
@@ -1677,28 +1677,28 @@ fn aarch64_map_regs(
ref mut rtmp2,
..
} => {
map(u, ridx);
map_wr(d, rtmp1);
map_wr(d, rtmp2);
map_use(mapper, ridx);
map_def(mapper, rtmp1);
map_def(mapper, rtmp2);
}
&mut Inst::LoadConst64 { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::LoadExtName { ref mut rd, .. } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::LoadAddr {
ref mut rd,
ref mut mem,
} => {
map_wr(d, rd);
map_mem(u, mem);
map_def(mapper, rd);
map_mem(mapper, mem);
}
&mut Inst::GetPinnedReg { ref mut rd } => {
map_wr(d, rd);
map_def(mapper, rd);
}
&mut Inst::SetPinnedReg { ref mut rm } => {
map(u, rm);
map_use(mapper, rm);
}
}
}
@@ -1711,12 +1711,8 @@ impl MachInst for Inst {
aarch64_get_regs(self, collector)
}
fn map_regs(
&mut self,
pre_map: &RegallocMap<VirtualReg, RealReg>,
post_map: &RegallocMap<VirtualReg, RealReg>,
) {
aarch64_map_regs(self, pre_map, post_map);
fn map_regs(&mut self, mapper: &RegUsageMapper) {
aarch64_map_regs(self, mapper);
}
fn is_move(&self) -> Option<(Writable<Reg>, Reg)> {


@@ -105,9 +105,10 @@ use crate::settings::Flags;
use alloc::boxed::Box;
use alloc::vec::Vec;
use core::fmt::Debug;
use regalloc::Map as RegallocMap;
use regalloc::RegUsageCollector;
use regalloc::{RealReg, RealRegUniverse, Reg, RegClass, SpillSlot, VirtualReg, Writable};
use regalloc::{
RealReg, RealRegUniverse, Reg, RegClass, RegUsageMapper, SpillSlot, VirtualReg, Writable,
};
use std::string::String;
use target_lexicon::Triple;
@@ -136,11 +137,7 @@ pub trait MachInst: Clone + Debug {
/// Map virtual registers to physical registers using the given virt->phys
/// maps corresponding to the program points prior to, and after, this instruction.
fn map_regs(
&mut self,
pre_map: &RegallocMap<VirtualReg, RealReg>,
post_map: &RegallocMap<VirtualReg, RealReg>,
);
fn map_regs(&mut self, maps: &RegUsageMapper);
/// If this is a simple move, return the (source, destination) tuple of registers.
fn is_move(&self) -> Option<(Writable<Reg>, Reg)>;


@@ -25,7 +25,9 @@ use crate::settings;
use regalloc::Function as RegallocFunction;
use regalloc::Set as RegallocSet;
use regalloc::{BlockIx, InstIx, Range, RegAllocResult, RegClass, RegUsageCollector};
use regalloc::{
BlockIx, InstIx, Range, RegAllocResult, RegClass, RegUsageCollector, RegUsageMapper,
};
use alloc::boxed::Box;
use alloc::vec::Vec;
@@ -647,18 +649,18 @@ impl<I: VCodeInst> RegallocFunction for VCode<I> {
insn.get_regs(collector)
}
fn map_regs(
insn: &mut I,
pre_map: &RegallocMap<VirtualReg, RealReg>,
post_map: &RegallocMap<VirtualReg, RealReg>,
) {
insn.map_regs(pre_map, post_map);
fn map_regs(insn: &mut I, mapper: &RegUsageMapper) {
insn.map_regs(mapper);
}
fn is_move(&self, insn: &I) -> Option<(Writable<Reg>, Reg)> {
insn.is_move()
}
fn get_vreg_count_estimate(&self) -> Option<usize> {
Some(self.vreg_types.len())
}
fn get_spillslot_size(&self, regclass: RegClass, vreg: VirtualReg) -> u32 {
let ty = self.vreg_type(vreg);
self.abi.get_spillslot_size(regclass, ty)