Handle fixed stack slots in the move resolver (#78)

Fixed stack slots are treated as `PReg`s by most of the register
allocator, but need some additional handling in the move resolver to avoid
generating stack-to-stack moves.
Amanieu d'Antras
2022-09-20 03:27:24 +08:00
committed by GitHub
parent 1495c1e342
commit 520cafa129
5 changed files with 75 additions and 23 deletions
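
For context: a "fixed stack slot" is a PReg listed in MachineEnv::fixed_stack_slots whose storage is actually a memory location, so Allocation::is_stack() alone cannot identify it, and a move between such a PReg and a spill slot is really a memory-to-memory move that most ISAs cannot encode as a single instruction. Below is a minimal sketch, not part of this diff, of the classification each component now performs; the helper name is invented for illustration:

use regalloc2::{Allocation, MachineEnv};

// An allocation refers to memory if it is a spill slot, or if it is a
// PReg that the MachineEnv declares to be a fixed stack slot.
fn refers_to_stack(env: &MachineEnv, alloc: Allocation) -> bool {
    if let Some(reg) = alloc.as_reg() {
        env.fixed_stack_slots.contains(&reg)
    } else {
        alloc.is_stack()
    }
}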

View File

@@ -10,6 +10,15 @@ use regalloc2::fuzzing::moves::{MoveAndScratchResolver, ParallelMoves};
 use regalloc2::{Allocation, PReg, RegClass, SpillSlot};
 use std::collections::{HashMap, HashSet};
 
+fn is_stack_alloc(alloc: Allocation) -> bool {
+    // Treat registers 20..=29 as fixed stack slots.
+    if let Some(reg) = alloc.as_reg() {
+        reg.index() >= 20
+    } else {
+        alloc.is_stack()
+    }
+}
+
 #[derive(Clone, Debug)]
 struct TestCase {
     moves: Vec<(Allocation, Allocation)>,
@@ -82,7 +91,8 @@ fuzz_target!(|testcase: TestCase| {
         Allocation::stack(SpillSlot::new(slot, RegClass::Int))
     };
     let preferred_victim = PReg::new(0, RegClass::Int);
-    let scratch_resolver = MoveAndScratchResolver::new(get_reg, get_stackslot, preferred_victim);
+    let scratch_resolver =
+        MoveAndScratchResolver::new(get_reg, get_stackslot, is_stack_alloc, preferred_victim);
     let moves = scratch_resolver.compute(moves);
     log::trace!("resolved moves: {:?}", moves);
@@ -97,7 +107,7 @@ fuzz_target!(|testcase: TestCase| {
     // Simulate the sequence of moves.
     let mut locations: HashMap<Allocation, Allocation> = HashMap::new();
     for (src, dst, _) in moves {
-        if src.is_stack() && dst.is_stack() {
+        if is_stack_alloc(src) && is_stack_alloc(dst) {
             panic!("Stack-to-stack move!");
         }

View File

@@ -97,7 +97,7 @@
 use crate::{
     Allocation, AllocationKind, Block, Edit, Function, Inst, InstOrEdit, InstPosition, MachineEnv,
-    Operand, OperandConstraint, OperandKind, OperandPos, Output, PReg, VReg,
+    Operand, OperandConstraint, OperandKind, OperandPos, Output, PReg, PRegSet, VReg,
 };
 use fxhash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
@@ -169,6 +169,10 @@ pub enum CheckerError {
         alloc: Allocation,
         vregs: FxHashSet<VReg>,
     },
+    StackToStackMove {
+        into: Allocation,
+        from: Allocation,
+    },
 }
 
 /// Abstract state for an allocation.
@@ -560,7 +564,20 @@ impl CheckerState {
                     }
                 }
            }
-            &CheckerInst::ParallelMove { .. } | &CheckerInst::Move { .. } => {
+            &CheckerInst::Move { into, from } => {
+                // Ensure that the allocator never returns stack-to-stack moves.
+                let is_stack = |alloc: Allocation| {
+                    if let Some(reg) = alloc.as_reg() {
+                        checker.stack_pregs.contains(reg)
+                    } else {
+                        alloc.is_stack()
+                    }
+                };
+                if is_stack(into) && is_stack(from) {
+                    return Err(CheckerError::StackToStackMove { into, from });
+                }
+            }
+            &CheckerInst::ParallelMove { .. } => {
                 // This doesn't need verification; we just update
                 // according to the move semantics in the step
                 // function below.
@@ -813,6 +830,7 @@ pub struct Checker<'a, F: Function> {
     edge_insts: FxHashMap<(Block, Block), Vec<CheckerInst>>,
     reftyped_vregs: FxHashSet<VReg>,
     machine_env: &'a MachineEnv,
+    stack_pregs: PRegSet,
 }
 
 impl<'a, F: Function> Checker<'a, F> {
@@ -841,6 +859,11 @@ impl<'a, F: Function> Checker<'a, F> {
         bb_in.insert(f.entry_block(), CheckerState::initial_with_pinned_vregs(f));
 
+        let mut stack_pregs = PRegSet::empty();
+        for &preg in &machine_env.fixed_stack_slots {
+            stack_pregs.add(preg);
+        }
+
         Checker {
             f,
             bb_in,
@@ -848,6 +871,7 @@ impl<'a, F: Function> Checker<'a, F> {
             edge_insts,
             reftyped_vregs,
             machine_env,
+            stack_pregs,
         }
     }

View File

@@ -39,16 +39,6 @@ impl<'a, F: Function> Env<'a, F> {
         pos == self.cfginfo.block_exit[block.index()]
     }
 
-    fn allocation_is_stack(&self, alloc: Allocation) -> bool {
-        if alloc.is_stack() {
-            true
-        } else if let Some(preg) = alloc.as_reg() {
-            self.pregs[preg.index()].is_stack
-        } else {
-            false
-        }
-    }
-
     pub fn insert_move(
         &mut self,
         pos: ProgPoint,
@@ -1054,10 +1044,21 @@ impl<'a, F: Function> Env<'a, F> {
             // below.
             Allocation::stack(SpillSlot::new(SpillSlot::MAX - idx, regclass))
         };
+        let is_stack_alloc = |alloc: Allocation| {
+            if let Some(preg) = alloc.as_reg() {
+                self.pregs[preg.index()].is_stack
+            } else {
+                alloc.is_stack()
+            }
+        };
         let preferred_victim = self.preferred_victim_by_class[regclass as usize];
-        let scratch_resolver =
-            MoveAndScratchResolver::new(get_reg, get_stackslot, preferred_victim);
+        let scratch_resolver = MoveAndScratchResolver::new(
+            get_reg,
+            get_stackslot,
+            is_stack_alloc,
+            preferred_victim,
+        );
         let resolved = scratch_resolver.compute(resolved);

View File

@@ -180,6 +180,13 @@ impl PRegSet {
         Self { bits: 0 }
     }
 
+    /// Returns whether the given register is part of the set.
+    pub fn contains(&self, reg: PReg) -> bool {
+        let bit = reg.index();
+        debug_assert!(bit < 128);
+        self.bits & (1u128 << bit) != 0
+    }
+
     /// Add a physical register (PReg) to the set, returning the new value.
     pub const fn with(self, reg: PReg) -> Self {
         let bit = reg.index();

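A quick usage sketch of the new PRegSet::contains alongside the existing empty and add used by the checker above; the register indices are arbitrary, and this assumes the methods are public as shown in the hunks:

use regalloc2::{PReg, PRegSet, RegClass};

fn main() {
    // Build a set of stack PRegs the way the checker now does.
    let mut stack_pregs = PRegSet::empty();
    stack_pregs.add(PReg::new(20, RegClass::Int));

    assert!(stack_pregs.contains(PReg::new(20, RegClass::Int)));
    assert!(!stack_pregs.contains(PReg::new(3, RegClass::Int)));
}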
View File

@@ -285,11 +285,11 @@ impl<T> MoveVecWithScratch<T> {
     }
 
     /// Do any moves go from stack to stack?
-    pub fn stack_to_stack(&self) -> bool {
+    pub fn stack_to_stack(&self, is_stack_alloc: impl Fn(Allocation) -> bool) -> bool {
         match self {
             MoveVecWithScratch::NoScratch(moves) | MoveVecWithScratch::Scratch(moves) => moves
                 .iter()
-                .any(|(src, dst, _)| src.is_stack() && dst.is_stack()),
+                .any(|&(src, dst, _)| is_stack_alloc(src) && is_stack_alloc(dst)),
         }
     }
 }
@@ -320,10 +320,11 @@ impl<T> MoveVecWithScratch<T> {
 /// Sometimes move elision will be able to clean this up a bit. But,
 /// for simplicity reasons, let's keep the concerns separated! So we
 /// always do the full expansion above.
-pub struct MoveAndScratchResolver<GetReg, GetStackSlot>
+pub struct MoveAndScratchResolver<GetReg, GetStackSlot, IsStackAlloc>
 where
     GetReg: FnMut() -> Option<Allocation>,
     GetStackSlot: FnMut() -> Allocation,
+    IsStackAlloc: Fn(Allocation) -> bool,
 {
     /// Scratch register for stack-to-stack move expansion.
     stack_stack_scratch_reg: Option<Allocation>,
@@ -335,6 +336,8 @@ where
     find_free_reg: GetReg,
     /// Closure that gets us a stackslot, if needed.
     get_stackslot: GetStackSlot,
+    /// Closure to determine whether an `Allocation` refers to a stack slot.
+    is_stack_alloc: IsStackAlloc,
     /// The victim PReg to evict to another stackslot at every
     /// stack-to-stack move if a free PReg is not otherwise
     /// available. Provided by caller and statically chosen. This is a
@@ -342,17 +345,24 @@ where
     victim: PReg,
 }
 
-impl<GetReg, GetStackSlot> MoveAndScratchResolver<GetReg, GetStackSlot>
+impl<GetReg, GetStackSlot, IsStackAlloc> MoveAndScratchResolver<GetReg, GetStackSlot, IsStackAlloc>
 where
     GetReg: FnMut() -> Option<Allocation>,
     GetStackSlot: FnMut() -> Allocation,
+    IsStackAlloc: Fn(Allocation) -> bool,
 {
-    pub fn new(find_free_reg: GetReg, get_stackslot: GetStackSlot, victim: PReg) -> Self {
+    pub fn new(
+        find_free_reg: GetReg,
+        get_stackslot: GetStackSlot,
+        is_stack_alloc: IsStackAlloc,
+        victim: PReg,
+    ) -> Self {
         Self {
             stack_stack_scratch_reg: None,
             stack_stack_scratch_reg_save: None,
             find_free_reg,
             get_stackslot,
+            is_stack_alloc,
             victim,
         }
     }
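
For reference, a sketch of the new four-argument constructor in use, modeled on the fuzz target earlier in this commit; it assumes the fuzzing feature (for the re-exported module path) and an arbitrary numbering in which registers 20..=29 are fixed stack slots, register 31 is free, and spill slot 100 is unused:

use regalloc2::fuzzing::moves::MoveAndScratchResolver;
use regalloc2::{Allocation, PReg, RegClass, SpillSlot};

fn main() {
    let get_reg = || Some(Allocation::reg(PReg::new(31, RegClass::Int)));
    let get_stackslot = || Allocation::stack(SpillSlot::new(100, RegClass::Int));
    let is_stack_alloc = |alloc: Allocation| {
        if let Some(reg) = alloc.as_reg() {
            reg.index() >= 20
        } else {
            alloc.is_stack()
        }
    };
    let preferred_victim = PReg::new(0, RegClass::Int);
    let resolver =
        MoveAndScratchResolver::new(get_reg, get_stackslot, is_stack_alloc, preferred_victim);
    // `resolver.compute(moves)` then rewrites a MoveVecWithScratch into
    // a scratch-free sequence containing no stack-to-stack moves.
    let _ = resolver;
}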
@@ -360,7 +370,7 @@ where
     pub fn compute<T: Debug + Copy>(mut self, moves: MoveVecWithScratch<T>) -> MoveVec<T> {
         // First, do we have a vec with no stack-to-stack moves or use
         // of a scratch register? Fast return if so.
-        if !moves.needs_scratch() && !moves.stack_to_stack() {
+        if !moves.needs_scratch() && !moves.stack_to_stack(&self.is_stack_alloc) {
            return moves.without_scratch().unwrap();
        }
@@ -373,7 +383,7 @@ where
         let moves = moves.with_scratch(scratch);
         for &(src, dst, data) in &moves {
             // Do we have a stack-to-stack move? If so, resolve.
-            if src.is_stack() && dst.is_stack() {
+            if (self.is_stack_alloc)(src) && (self.is_stack_alloc)(dst) {
                 trace!("scratch resolver: stack to stack: {:?} -> {:?}", src, dst);
                 // Lazily allocate a stack-to-stack scratch.
                 if self.stack_stack_scratch_reg.is_none() {
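
For intuition, the expansion this loop performs on a stack-to-stack move, sketched from the field docs above; register and slot names are illustrative:

// Input parallel-move entry where both sides are stack allocations:
//     stack0 -> stack1
//
// If `find_free_reg` yields a register (say r31), it becomes the
// stack-to-stack scratch:
//     stack0 -> r31              // load into scratch
//     r31    -> stack1           // store to destination
//
// If no register is free, the preferred victim (say r0) is evicted to
// an extra slot from `get_stackslot` and restored afterwards:
//     r0         -> extra_slot   // save victim
//     stack0     -> r0
//     r0         -> stack1
//     extra_slot -> r0           // restore victim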