Add support for reftypes/stackmaps and Stack constraints, and misc API changes.

The main enhancement in this commit is support for reference types and
stackmaps. This requires tracking whether each VReg is a "reference"
(pointer) type. At certain instructions designated as "safepoints", the
regalloc will (i) ensure that all references are in spillslots rather
than in registers, and (ii) provide a list of exactly which spillslots
have live references at that program point. This can be used by, e.g., a
GC to trace and possibly modify pointers. The stackmap of spillslots is
precise: it includes all live references, and *only* live references.
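
As a rough illustration of how a client might consume this output, the sketch
below groups the stackmap entries by instruction, mirroring the preprocessing
the checker in this commit performs on `Output::safepoint_slots` (the helper
name is hypothetical):

```rust
use std::collections::HashMap;

// Hypothetical helper: collect, for each safepoint instruction, the spillslots
// that hold live references, from the allocator's sorted `safepoint_slots` list.
fn stackmaps_per_safepoint(out: &Output) -> HashMap<Inst, Vec<SpillSlot>> {
    let mut map: HashMap<Inst, Vec<SpillSlot>> = HashMap::new();
    for &(progpoint, slot) in &out.safepoint_slots {
        // Each entry names one spillslot containing a live reference at that
        // safepoint; `progpoint.inst` identifies the safepoint instruction,
        // just as in the checker's preprocessing.
        map.entry(progpoint.inst).or_insert_with(Vec::new).push(slot);
    }
    map
}
```

A GC can then walk the slot list for each safepoint to trace (and, for a moving
collector, rewrite) the references held in those slots.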

This commit also brings in some API tweaks as part of the in-progress
Cranelift glue. In particular, it makes `Allocation`s and `Operand`s
mutually disjoint by placing the type tag in the same bitfield position
in both and choosing non-overlapping tag values. This allows instructions
to carry an `Operand` for each register slot and then overwrite each one
in place with an `Allocation`. The `OperandOrAllocation` type does the
necessary magic to make this look like an enum while staying within 32 bits.
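
A minimal sketch of that in-place editing pattern, using only the methods added
in this commit (the `InstSlots` wrapper and `apply_allocs` name are
illustrative, not part of the crate):

```rust
// Hypothetical client-side storage: one 32-bit word per register slot,
// holding an Operand before allocation and an Allocation afterward.
struct InstSlots {
    slots: Vec<OperandOrAllocation>,
}

impl InstSlots {
    fn new(operands: &[Operand]) -> Self {
        InstSlots {
            slots: operands
                .iter()
                .map(|&op| OperandOrAllocation::from_operand(op))
                .collect(),
        }
    }

    // After register allocation, overwrite each Operand in place with its
    // Allocation. `replace_with_alloc` keeps the def/use bit, and the
    // disjoint tag ranges let `is_operand()`/`is_allocation()` distinguish
    // the two states of the same word later.
    fn apply_allocs(&mut self, allocs: &[Allocation]) {
        for (slot, &alloc) in self.slots.iter_mut().zip(allocs.iter()) {
            slot.replace_with_alloc(alloc);
        }
    }
}
```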
Chris Fallin
2021-04-17 21:28:26 -07:00
parent 33ac6cb41d
commit a08b0121a0
6 changed files with 686 additions and 91 deletions

View File

@@ -21,6 +21,7 @@ impl Arbitrary for TestCase {
reducible: false, reducible: false,
block_params: true, block_params: true,
always_local_uses: false, always_local_uses: false,
reftypes: true,
})?, })?,
}) })
} }

View File

@@ -48,6 +48,18 @@ impl BitVec {
} }
} }
pub fn assign(&mut self, other: &Self) {
if other.bits.len() > 0 {
self.ensure_idx(other.bits.len() - 1);
}
for i in 0..other.bits.len() {
self.bits[i] = other.bits[i];
}
for i in other.bits.len()..self.bits.len() {
self.bits[i] = 0;
}
}
#[inline(always)] #[inline(always)]
pub fn get(&mut self, idx: usize) -> bool { pub fn get(&mut self, idx: usize) -> bool {
let word = idx / BITS_PER_WORD; let word = idx / BITS_PER_WORD;
@@ -59,16 +71,21 @@ impl BitVec {
} }
} }
pub fn or(&mut self, other: &Self) { pub fn or(&mut self, other: &Self) -> bool {
if other.bits.is_empty() { if other.bits.is_empty() {
return; return false;
} }
let last_idx = other.bits.len() - 1; let last_idx = other.bits.len() - 1;
self.ensure_idx(last_idx); self.ensure_idx(last_idx);
let mut changed = false;
for (self_word, other_word) in self.bits.iter_mut().zip(other.bits.iter()) { for (self_word, other_word) in self.bits.iter_mut().zip(other.bits.iter()) {
if *other_word & !*self_word != 0 {
changed = true;
}
*self_word |= *other_word; *self_word |= *other_word;
} }
changed
} }
pub fn and(&mut self, other: &Self) { pub fn and(&mut self, other: &Self) {
@@ -91,6 +108,29 @@ impl BitVec {
} }
} }
impl std::cmp::PartialEq for BitVec {
fn eq(&self, other: &Self) -> bool {
let limit = std::cmp::min(self.bits.len(), other.bits.len());
for i in 0..limit {
if self.bits[i] != other.bits[i] {
return false;
}
}
for i in limit..self.bits.len() {
if self.bits[i] != 0 {
return false;
}
}
for i in limit..other.bits.len() {
if other.bits[i] != 0 {
return false;
}
}
true
}
}
impl std::cmp::Eq for BitVec {}
pub struct SetBitsIter<'a> { pub struct SetBitsIter<'a> {
words: &'a [u64], words: &'a [u64],
word_idx: usize, word_idx: usize,
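
The `or` method's new boolean return value (true when any new bit was set) is
what drives the worklist-based liveness fixpoint added later in this commit. A
rough sketch of that usage pattern, with illustrative `live_in`/`live_out`/
`preds` names and the per-block transfer function omitted:

```rust
use std::collections::VecDeque;

// Backward-dataflow skeleton: re-enqueue a predecessor only when or()
// reports that its live-out set actually grew, so the loop terminates
// once a fixpoint is reached.
fn propagate(live_in: &mut [BitVec], live_out: &mut [BitVec], preds: &[Vec<usize>]) {
    let mut workqueue: VecDeque<usize> = (0..live_in.len()).collect();
    while let Some(block) = workqueue.pop_front() {
        // Transfer function elided: in the real pass, the block's uses and
        // defs are applied here to turn live_out[block] into live_in[block].
        live_in[block].assign(&live_out[block]);
        for &pred in &preds[block] {
            if live_out[pred].or(&live_in[block]) {
                workqueue.push_back(pred);
            }
        }
    }
}
```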

View File

@@ -67,10 +67,10 @@
use crate::{ use crate::{
Allocation, AllocationKind, Block, Edit, Function, Inst, InstPosition, Operand, OperandKind, Allocation, AllocationKind, Block, Edit, Function, Inst, InstPosition, Operand, OperandKind,
OperandPolicy, OperandPos, Output, ProgPoint, VReg, OperandPolicy, OperandPos, Output, PReg, ProgPoint, SpillSlot, VReg,
}; };
use std::collections::{HashMap, VecDeque}; use std::collections::{HashMap, HashSet, VecDeque};
use std::default::Default; use std::default::Default;
use std::hash::Hash; use std::hash::Hash;
use std::result::Result; use std::result::Result;
@@ -127,6 +127,20 @@ pub enum CheckerError {
alloc: Allocation, alloc: Allocation,
expected_alloc: Allocation, expected_alloc: Allocation,
}, },
AllocationIsNotStack {
inst: Inst,
op: Operand,
alloc: Allocation,
},
ConflictedValueInStackmap {
inst: Inst,
slot: SpillSlot,
},
NonRefValueInStackmap {
inst: Inst,
slot: SpillSlot,
vreg: VReg,
},
} }
/// Abstract state for an allocation. /// Abstract state for an allocation.
@@ -162,8 +176,10 @@ impl CheckerValue {
(_, &CheckerValue::Unknown) => *self, (_, &CheckerValue::Unknown) => *self,
(&CheckerValue::Conflicted, _) => *self, (&CheckerValue::Conflicted, _) => *self,
(_, &CheckerValue::Conflicted) => *other, (_, &CheckerValue::Conflicted) => *other,
(&CheckerValue::Reg(r1, ref1), &CheckerValue::Reg(r2, ref2)) if r1 == r2 => { (&CheckerValue::Reg(r1, ref1), &CheckerValue::Reg(r2, ref2))
CheckerValue::Reg(r1, ref1 || ref2) if r1 == r2 && ref1 == ref2 =>
{
CheckerValue::Reg(r1, ref1)
} }
_ => { _ => {
log::debug!("{:?} and {:?} meet to Conflicted", self, other); log::debug!("{:?} and {:?} meet to Conflicted", self, other);
@@ -192,7 +208,8 @@ impl std::fmt::Display for CheckerValue {
match self { match self {
CheckerValue::Unknown => write!(f, "?"), CheckerValue::Unknown => write!(f, "?"),
CheckerValue::Conflicted => write!(f, "!"), CheckerValue::Conflicted => write!(f, "!"),
CheckerValue::Reg(r, _) => write!(f, "{}", r), CheckerValue::Reg(r, false) => write!(f, "{}", r),
CheckerValue::Reg(r, true) => write!(f, "{}/ref", r),
} }
} }
} }
@@ -305,13 +322,38 @@ impl CheckerState {
self.check_val(inst, *op, *alloc, val, allocs)?; self.check_val(inst, *op, *alloc, val, allocs)?;
} }
} }
&CheckerInst::Safepoint { inst, ref slots } => {
for &slot in slots {
let alloc = Allocation::stack(slot);
let val = self
.allocations
.get(&alloc)
.cloned()
.unwrap_or(Default::default());
debug!(
"checker: checkinst {:?}: safepoint slot {}, checker value {:?}",
checkinst, slot, val
);
match val {
CheckerValue::Unknown => {}
CheckerValue::Conflicted => {
return Err(CheckerError::ConflictedValueInStackmap { inst, slot });
}
CheckerValue::Reg(vreg, false) => {
return Err(CheckerError::NonRefValueInStackmap { inst, slot, vreg });
}
CheckerValue::Reg(_, true) => {}
}
}
}
_ => {} _ => {}
} }
Ok(()) Ok(())
} }
/// Update according to instruction. /// Update according to instruction.
fn update(&mut self, checkinst: &CheckerInst) { fn update<'a, F: Function>(&mut self, checkinst: &CheckerInst, checker: &Checker<'a, F>) {
match checkinst { match checkinst {
&CheckerInst::Move { into, from } => { &CheckerInst::Move { into, from } => {
let val = self let val = self
@@ -328,14 +370,19 @@ impl CheckerState {
&CheckerInst::Op { &CheckerInst::Op {
ref operands, ref operands,
ref allocs, ref allocs,
ref clobbers,
.. ..
} => { } => {
for (op, alloc) in operands.iter().zip(allocs.iter()) { for (op, alloc) in operands.iter().zip(allocs.iter()) {
if op.kind() != OperandKind::Def { if op.kind() != OperandKind::Def {
continue; continue;
} }
let reftyped = checker.reftyped_vregs.contains(&op.vreg());
self.allocations self.allocations
.insert(*alloc, CheckerValue::Reg(op.vreg(), false)); .insert(*alloc, CheckerValue::Reg(op.vreg(), reftyped));
}
for clobber in clobbers {
self.allocations.remove(&Allocation::reg(*clobber));
} }
} }
&CheckerInst::BlockParams { &CheckerInst::BlockParams {
@@ -344,8 +391,20 @@ impl CheckerState {
.. ..
} => { } => {
for (vreg, alloc) in vregs.iter().zip(allocs.iter()) { for (vreg, alloc) in vregs.iter().zip(allocs.iter()) {
let reftyped = checker.reftyped_vregs.contains(vreg);
self.allocations self.allocations
.insert(*alloc, CheckerValue::Reg(*vreg, false)); .insert(*alloc, CheckerValue::Reg(*vreg, reftyped));
}
}
&CheckerInst::Safepoint { ref slots, .. } => {
for (alloc, value) in &mut self.allocations {
if let CheckerValue::Reg(_, true) = *value {
if alloc.is_reg() {
*value = CheckerValue::Conflicted;
} else if alloc.is_stack() && !slots.contains(&alloc.as_stack().unwrap()) {
*value = CheckerValue::Conflicted;
}
}
} }
} }
} }
@@ -365,6 +424,11 @@ impl CheckerState {
return Err(CheckerError::AllocationIsNotReg { inst, op, alloc }); return Err(CheckerError::AllocationIsNotReg { inst, op, alloc });
} }
} }
OperandPolicy::Stack => {
if alloc.kind() != AllocationKind::Stack {
return Err(CheckerError::AllocationIsNotStack { inst, op, alloc });
}
}
OperandPolicy::FixedReg(preg) => { OperandPolicy::FixedReg(preg) => {
if alloc != Allocation::reg(preg) { if alloc != Allocation::reg(preg) {
return Err(CheckerError::AllocationIsNotFixedReg { inst, op, alloc }); return Err(CheckerError::AllocationIsNotFixedReg { inst, op, alloc });
@@ -402,6 +466,7 @@ pub(crate) enum CheckerInst {
inst: Inst, inst: Inst,
operands: Vec<Operand>, operands: Vec<Operand>,
allocs: Vec<Allocation>, allocs: Vec<Allocation>,
clobbers: Vec<PReg>,
}, },
/// The top of a block with blockparams. We define the given vregs /// The top of a block with blockparams. We define the given vregs
@@ -411,6 +476,10 @@ pub(crate) enum CheckerInst {
vregs: Vec<VReg>, vregs: Vec<VReg>,
allocs: Vec<Allocation>, allocs: Vec<Allocation>,
}, },
/// A safepoint, with the given SpillSlots specified as containing
/// reftyped values. All other reftyped values become invalid.
Safepoint { inst: Inst, slots: Vec<SpillSlot> },
} }
#[derive(Debug)] #[derive(Debug)]
@@ -418,6 +487,7 @@ pub struct Checker<'a, F: Function> {
f: &'a F, f: &'a F,
bb_in: HashMap<Block, CheckerState>, bb_in: HashMap<Block, CheckerState>,
bb_insts: HashMap<Block, Vec<CheckerInst>>, bb_insts: HashMap<Block, Vec<CheckerInst>>,
reftyped_vregs: HashSet<VReg>,
} }
impl<'a, F: Function> Checker<'a, F> { impl<'a, F: Function> Checker<'a, F> {
@@ -428,6 +498,7 @@ impl<'a, F: Function> Checker<'a, F> {
pub fn new(f: &'a F) -> Checker<'a, F> { pub fn new(f: &'a F) -> Checker<'a, F> {
let mut bb_in = HashMap::new(); let mut bb_in = HashMap::new();
let mut bb_insts = HashMap::new(); let mut bb_insts = HashMap::new();
let mut reftyped_vregs = HashSet::new();
for block in 0..f.blocks() { for block in 0..f.blocks() {
let block = Block::new(block); let block = Block::new(block);
@@ -435,13 +506,31 @@ impl<'a, F: Function> Checker<'a, F> {
bb_insts.insert(block, vec![]); bb_insts.insert(block, vec![]);
} }
Checker { f, bb_in, bb_insts } for &vreg in f.reftype_vregs() {
reftyped_vregs.insert(vreg);
}
Checker {
f,
bb_in,
bb_insts,
reftyped_vregs,
}
} }
/// Build the list of checker instructions based on the given func /// Build the list of checker instructions based on the given func
/// and allocation results. /// and allocation results.
pub fn prepare(&mut self, out: &Output) { pub fn prepare(&mut self, out: &Output) {
debug!("checker: out = {:?}", out); debug!("checker: out = {:?}", out);
// Preprocess safepoint stack-maps into per-inst vecs.
let mut safepoint_slots: HashMap<Inst, Vec<SpillSlot>> = HashMap::new();
for &(progpoint, slot) in &out.safepoint_slots {
safepoint_slots
.entry(progpoint.inst)
.or_insert_with(|| vec![])
.push(slot);
}
// For each original instruction, create an `Op`. // For each original instruction, create an `Op`.
let mut last_inst = None; let mut last_inst = None;
let mut insert_idx = 0; let mut insert_idx = 0;
@@ -454,13 +543,23 @@ impl<'a, F: Function> Checker<'a, F> {
// Any inserted edits before instruction. // Any inserted edits before instruction.
self.handle_edits(block, out, &mut insert_idx, ProgPoint::before(inst)); self.handle_edits(block, out, &mut insert_idx, ProgPoint::before(inst));
// If this is a safepoint, then check the spillslots at this point.
if self.f.is_safepoint(inst) {
let slots = safepoint_slots.remove(&inst).unwrap_or_else(|| vec![]);
let checkinst = CheckerInst::Safepoint { inst, slots };
self.bb_insts.get_mut(&block).unwrap().push(checkinst);
}
// Instruction itself. // Instruction itself.
let operands: Vec<_> = self.f.inst_operands(inst).iter().cloned().collect(); let operands: Vec<_> = self.f.inst_operands(inst).iter().cloned().collect();
let allocs: Vec<_> = out.inst_allocs(inst).iter().cloned().collect(); let allocs: Vec<_> = out.inst_allocs(inst).iter().cloned().collect();
let clobbers: Vec<_> = self.f.inst_clobbers(inst).iter().cloned().collect();
let checkinst = CheckerInst::Op { let checkinst = CheckerInst::Op {
inst, inst,
operands, operands,
allocs, allocs,
clobbers,
}; };
debug!("checker: adding inst {:?}", checkinst); debug!("checker: adding inst {:?}", checkinst);
self.bb_insts.get_mut(&block).unwrap().push(checkinst); self.bb_insts.get_mut(&block).unwrap().push(checkinst);
@@ -511,7 +610,7 @@ impl<'a, F: Function> Checker<'a, F> {
let mut state = self.bb_in.get(&block).cloned().unwrap(); let mut state = self.bb_in.get(&block).cloned().unwrap();
debug!("analyze: block {} has state {:?}", block.index(), state); debug!("analyze: block {} has state {:?}", block.index(), state);
for inst in self.bb_insts.get(&block).unwrap() { for inst in self.bb_insts.get(&block).unwrap() {
state.update(inst); state.update(inst, self);
debug!("analyze: inst {:?} -> state {:?}", inst, state); debug!("analyze: inst {:?} -> state {:?}", inst, state);
} }
@@ -546,7 +645,7 @@ impl<'a, F: Function> Checker<'a, F> {
debug!("Checker error: {:?}", e); debug!("Checker error: {:?}", e);
errors.push(e); errors.push(e);
} }
state.update(inst); state.update(inst, self);
if let Err(e) = state.check(InstPosition::After, inst) { if let Err(e) = state.check(InstPosition::After, inst) {
debug!("Checker error: {:?}", e); debug!("Checker error: {:?}", e);
errors.push(e); errors.push(e);
@@ -575,6 +674,9 @@ impl<'a, F: Function> Checker<'a, F> {
} }
debug!(" {{ {} }}", s.join(", ")) debug!(" {{ {} }}", s.join(", "))
} }
for vreg in self.f.reftype_vregs() {
debug!(" REF: {}", vreg);
}
for bb in 0..self.f.blocks() { for bb in 0..self.f.blocks() {
let bb = Block::new(bb); let bb = Block::new(bb);
debug!("block{}:", bb.index()); debug!("block{}:", bb.index());
@@ -587,8 +689,15 @@ impl<'a, F: Function> Checker<'a, F> {
inst, inst,
ref operands, ref operands,
ref allocs, ref allocs,
ref clobbers,
} => { } => {
debug!(" inst{}: {:?} ({:?})", inst.index(), operands, allocs); debug!(
" inst{}: {:?} ({:?}) clobbers:{:?}",
inst.index(),
operands,
allocs,
clobbers
);
} }
&CheckerInst::Move { from, into } => { &CheckerInst::Move { from, into } => {
debug!(" {} -> {}", from, into); debug!(" {} -> {}", from, into);
@@ -604,8 +713,15 @@ impl<'a, F: Function> Checker<'a, F> {
} }
debug!(" blockparams: {}", args.join(", ")); debug!(" blockparams: {}", args.join(", "));
} }
&CheckerInst::Safepoint { ref slots, .. } => {
let mut slotargs = vec![];
for &slot in slots {
slotargs.push(format!("{}", slot));
}
debug!(" safepoint: {}", slotargs.join(", "));
}
} }
state.update(inst); state.update(inst, &self);
print_state(&state); print_state(&state);
} }
} }

View File

@@ -20,6 +20,7 @@ pub struct InstData {
op: InstOpcode, op: InstOpcode,
operands: Vec<Operand>, operands: Vec<Operand>,
clobbers: Vec<PReg>, clobbers: Vec<PReg>,
is_safepoint: bool,
} }
impl InstData { impl InstData {
@@ -32,6 +33,7 @@ impl InstData {
op: InstOpcode::Op, op: InstOpcode::Op,
operands, operands,
clobbers: vec![], clobbers: vec![],
is_safepoint: false,
} }
} }
pub fn branch(uses: &[usize]) -> InstData { pub fn branch(uses: &[usize]) -> InstData {
@@ -43,6 +45,7 @@ impl InstData {
op: InstOpcode::Branch, op: InstOpcode::Branch,
operands, operands,
clobbers: vec![], clobbers: vec![],
is_safepoint: false,
} }
} }
pub fn ret() -> InstData { pub fn ret() -> InstData {
@@ -50,6 +53,7 @@ impl InstData {
op: InstOpcode::Ret, op: InstOpcode::Ret,
operands: vec![], operands: vec![],
clobbers: vec![], clobbers: vec![],
is_safepoint: false,
} }
} }
} }
@@ -62,6 +66,7 @@ pub struct Func {
block_succs: Vec<Vec<Block>>, block_succs: Vec<Vec<Block>>,
block_params: Vec<Vec<VReg>>, block_params: Vec<Vec<VReg>>,
num_vregs: usize, num_vregs: usize,
reftype_vregs: Vec<VReg>,
} }
impl Function for Func { impl Function for Func {
@@ -106,8 +111,12 @@ impl Function for Func {
self.insts[insn.index()].op == InstOpcode::Branch self.insts[insn.index()].op == InstOpcode::Branch
} }
fn is_safepoint(&self, _: Inst) -> bool { fn is_safepoint(&self, insn: Inst) -> bool {
false self.insts[insn.index()].is_safepoint
}
fn reftype_vregs(&self) -> &[VReg] {
&self.reftype_vregs[..]
} }
fn is_move(&self, _: Inst) -> Option<(VReg, VReg)> { fn is_move(&self, _: Inst) -> Option<(VReg, VReg)> {
@@ -153,6 +162,7 @@ impl FuncBuilder {
insts: vec![], insts: vec![],
blocks: vec![], blocks: vec![],
num_vregs: 0, num_vregs: 0,
reftype_vregs: vec![],
}, },
insts_per_block: vec![], insts_per_block: vec![],
} }
@@ -250,6 +260,7 @@ pub struct Options {
pub reducible: bool, pub reducible: bool,
pub block_params: bool, pub block_params: bool,
pub always_local_uses: bool, pub always_local_uses: bool,
pub reftypes: bool,
} }
impl std::default::Default for Options { impl std::default::Default for Options {
@@ -262,6 +273,7 @@ impl std::default::Default for Options {
reducible: false, reducible: false,
block_params: true, block_params: true,
always_local_uses: false, always_local_uses: false,
reftypes: false,
} }
} }
} }
@@ -355,6 +367,9 @@ impl Func {
let vreg = VReg::new(builder.f.num_vregs, RegClass::Int); let vreg = VReg::new(builder.f.num_vregs, RegClass::Int);
builder.f.num_vregs += 1; builder.f.num_vregs += 1;
vregs.push(vreg); vregs.push(vreg);
if opts.reftypes && bool::arbitrary(u)? {
builder.f.reftype_vregs.push(vreg);
}
} }
vregs_by_block.push(vregs.clone()); vregs_by_block.push(vregs.clone());
vregs_by_block_to_be_defined.push(vec![]); vregs_by_block_to_be_defined.push(vec![]);
@@ -428,17 +443,28 @@ impl Func {
op.kind(), op.kind(),
OperandPos::After, OperandPos::After,
); );
// Make sure reused input is a Reg.
let op = operands[reused];
operands[reused] =
Operand::new(op.vreg(), OperandPolicy::Reg, op.kind(), OperandPos::Before);
} else if opts.fixed_regs && bool::arbitrary(u)? { } else if opts.fixed_regs && bool::arbitrary(u)? {
// Pick an operand and make it a fixed reg. let mut fixed = vec![];
let fixed_reg = PReg::new(u.int_in_range(0..=30)?, RegClass::Int); for _ in 0..u.int_in_range(0..=operands.len() - 1)? {
let i = u.int_in_range(0..=(operands.len() - 1))?; // Pick an operand and make it a fixed reg.
let op = operands[i]; let fixed_reg = PReg::new(u.int_in_range(0..=30)?, RegClass::Int);
operands[i] = Operand::new( if fixed.contains(&fixed_reg) {
op.vreg(), break;
OperandPolicy::FixedReg(fixed_reg), }
op.kind(), fixed.push(fixed_reg);
op.pos(), let i = u.int_in_range(0..=(operands.len() - 1))?;
); let op = operands[i];
operands[i] = Operand::new(
op.vreg(),
OperandPolicy::FixedReg(fixed_reg),
op.kind(),
op.pos(),
);
}
} else if opts.clobbers && bool::arbitrary(u)? { } else if opts.clobbers && bool::arbitrary(u)? {
for _ in 0..u.int_in_range(0..=5)? { for _ in 0..u.int_in_range(0..=5)? {
let reg = u.int_in_range(0..=30)?; let reg = u.int_in_range(0..=30)?;
@@ -448,6 +474,13 @@ impl Func {
clobbers.push(PReg::new(reg, RegClass::Int)); clobbers.push(PReg::new(reg, RegClass::Int));
} }
} }
let is_safepoint = opts.reftypes
&& operands
.iter()
.all(|op| !builder.f.reftype_vregs.contains(&op.vreg()))
&& bool::arbitrary(u)?;
let op = *u.choose(&[InstOpcode::Op, InstOpcode::Call])?; let op = *u.choose(&[InstOpcode::Op, InstOpcode::Call])?;
builder.add_inst( builder.add_inst(
Block::new(block), Block::new(block),
@@ -455,6 +488,7 @@ impl Func {
op, op,
operands, operands,
clobbers, clobbers,
is_safepoint,
}, },
); );
avail.push(vreg); avail.push(vreg);
@@ -493,6 +527,9 @@ impl Func {
impl std::fmt::Debug for Func { impl std::fmt::Debug for Func {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{{\n")?; write!(f, "{{\n")?;
for vreg in self.reftype_vregs() {
write!(f, " REF: {}\n", vreg)?;
}
for (i, blockrange) in self.blocks.iter().enumerate() { for (i, blockrange) in self.blocks.iter().enumerate() {
let succs = self.block_succs[i] let succs = self.block_succs[i]
.iter() .iter()
@@ -513,6 +550,9 @@ impl std::fmt::Debug for Func {
i, params, succs, preds i, params, succs, preds
)?; )?;
for inst in blockrange.iter() { for inst in blockrange.iter() {
if self.is_safepoint(inst) {
write!(f, " -- SAFEPOINT --\n")?;
}
write!( write!(
f, f,
" inst{}: {:?} ops:{:?} clobber:{:?}\n", " inst{}: {:?} ops:{:?} clobber:{:?}\n",

View File

@@ -26,18 +26,13 @@
* *
* - reused-input reg: don't allocate register for input that is reused. * - reused-input reg: don't allocate register for input that is reused.
* *
* - more fuzzing:
* - test with *multiple* fixed-reg constraints on one vreg (same
* inst, different insts)
*
* - modify CL to generate SSA VCode * - modify CL to generate SSA VCode
* - lower blockparams to blockparams directly * - lower blockparams to blockparams directly
* - use temps properly (`alloc_tmp()` vs `alloc_reg()`) * - use temps properly (`alloc_tmp()` vs `alloc_reg()`)
* *
* - produce stackmaps * - "Fixed-stack location": negative spillslot numbers?
* - stack constraint (also: unify this with stack-args? spillslot vs user stackslot?) *
* - vreg reffyness * - Rematerialization
* - if reffy vreg, add to stackmap lists during reification scan
*/ */
#![allow(dead_code, unused_imports)] #![allow(dead_code, unused_imports)]
@@ -54,7 +49,7 @@ use crate::{
use log::debug; use log::debug;
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::{BTreeMap, BinaryHeap}; use std::collections::{BTreeMap, BinaryHeap, HashMap, HashSet, VecDeque};
use std::fmt::Debug; use std::fmt::Debug;
#[cfg(not(debug))] #[cfg(not(debug))]
@@ -185,6 +180,8 @@ struct Use {
next_use: UseIndex, next_use: UseIndex,
} }
const SLOT_NONE: usize = usize::MAX;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct Def { struct Def {
operand: Operand, operand: Operand,
@@ -241,6 +238,7 @@ struct VRegData {
def: DefIndex, def: DefIndex,
blockparam: Block, blockparam: Block,
first_range: LiveRangeIndex, first_range: LiveRangeIndex,
is_ref: bool,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -307,7 +305,8 @@ struct Env<'a, F: Function> {
pregs: Vec<PRegData>, pregs: Vec<PRegData>,
allocation_queue: PrioQueue, allocation_queue: PrioQueue,
hot_code: LiveRangeSet, hot_code: LiveRangeSet,
clobbers: Vec<Inst>, // Sorted list of insts with clobbers. clobbers: Vec<Inst>, // Sorted list of insts with clobbers.
safepoints: Vec<Inst>, // Sorted list of safepoint insts.
spilled_bundles: Vec<LiveBundleIndex>, spilled_bundles: Vec<LiveBundleIndex>,
spillslots: Vec<SpillSlotData>, spillslots: Vec<SpillSlotData>,
@@ -322,8 +321,8 @@ struct Env<'a, F: Function> {
// will insert a copy from wherever the VReg's primary allocation // will insert a copy from wherever the VReg's primary allocation
// was to the approprate PReg. // was to the approprate PReg.
// //
// (progpoint, copy-from-preg, copy-to-preg) // (progpoint, copy-from-preg, copy-to-preg, to-slot)
multi_fixed_reg_fixups: Vec<(ProgPoint, PRegIndex, PRegIndex)>, multi_fixed_reg_fixups: Vec<(ProgPoint, PRegIndex, PRegIndex, usize)>,
inserted_moves: Vec<InsertedMove>, inserted_moves: Vec<InsertedMove>,
@@ -332,6 +331,7 @@ struct Env<'a, F: Function> {
allocs: Vec<Allocation>, allocs: Vec<Allocation>,
inst_alloc_offsets: Vec<u32>, inst_alloc_offsets: Vec<u32>,
num_spillslots: u32, num_spillslots: u32,
safepoint_slots: Vec<(ProgPoint, SpillSlot)>,
stats: Stats, stats: Stats,
@@ -462,13 +462,16 @@ fn spill_weight_from_policy(policy: OperandPolicy) -> u32 {
enum Requirement { enum Requirement {
Fixed(PReg), Fixed(PReg),
Register(RegClass), Register(RegClass),
Stack(RegClass),
Any(RegClass), Any(RegClass),
} }
impl Requirement { impl Requirement {
fn class(self) -> RegClass { fn class(self) -> RegClass {
match self { match self {
Requirement::Fixed(preg) => preg.class(), Requirement::Fixed(preg) => preg.class(),
Requirement::Register(class) | Requirement::Any(class) => class, Requirement::Register(class) | Requirement::Any(class) | Requirement::Stack(class) => {
class
}
} }
} }
@@ -478,6 +481,7 @@ impl Requirement {
} }
match (self, other) { match (self, other) {
(other, Requirement::Any(_)) | (Requirement::Any(_), other) => Some(other), (other, Requirement::Any(_)) | (Requirement::Any(_), other) => Some(other),
(Requirement::Stack(_), Requirement::Stack(_)) => Some(self),
(Requirement::Register(_), Requirement::Fixed(preg)) (Requirement::Register(_), Requirement::Fixed(preg))
| (Requirement::Fixed(preg), Requirement::Register(_)) => { | (Requirement::Fixed(preg), Requirement::Register(_)) => {
Some(Requirement::Fixed(preg)) Some(Requirement::Fixed(preg))
@@ -491,6 +495,7 @@ impl Requirement {
match op.policy() { match op.policy() {
OperandPolicy::FixedReg(preg) => Requirement::Fixed(preg), OperandPolicy::FixedReg(preg) => Requirement::Fixed(preg),
OperandPolicy::Reg | OperandPolicy::Reuse(_) => Requirement::Register(op.class()), OperandPolicy::Reg | OperandPolicy::Reuse(_) => Requirement::Register(op.class()),
OperandPolicy::Stack => Requirement::Stack(op.class()),
_ => Requirement::Any(op.class()), _ => Requirement::Any(op.class()),
} }
} }
@@ -575,6 +580,7 @@ impl<'a, F: Function> Env<'a, F> {
pregs: vec![], pregs: vec![],
allocation_queue: PrioQueue::new(), allocation_queue: PrioQueue::new(),
clobbers: vec![], clobbers: vec![],
safepoints: vec![],
hot_code: LiveRangeSet::new(), hot_code: LiveRangeSet::new(),
spilled_bundles: vec![], spilled_bundles: vec![],
spillslots: vec![], spillslots: vec![],
@@ -586,6 +592,7 @@ impl<'a, F: Function> Env<'a, F> {
allocs: vec![], allocs: vec![],
inst_alloc_offsets: vec![], inst_alloc_offsets: vec![],
num_spillslots: 0, num_spillslots: 0,
safepoint_slots: vec![],
stats: Stats::default(), stats: Stats::default(),
@@ -610,8 +617,12 @@ impl<'a, F: Function> Env<'a, F> {
def: DefIndex::invalid(), def: DefIndex::invalid(),
first_range: LiveRangeIndex::invalid(), first_range: LiveRangeIndex::invalid(),
blockparam: Block::invalid(), blockparam: Block::invalid(),
is_ref: false,
}); });
} }
for v in self.func.reftype_vregs() {
self.vregs[v.vreg()].is_ref = true;
}
// Create allocations too. // Create allocations too.
for inst in 0..self.func.insts() { for inst in 0..self.func.insts() {
let start = self.allocs.len() as u32; let start = self.allocs.len() as u32;
@@ -994,12 +1005,20 @@ impl<'a, F: Function> Env<'a, F> {
if self.func.inst_clobbers(inst).len() > 0 { if self.func.inst_clobbers(inst).len() > 0 {
self.clobbers.push(inst); self.clobbers.push(inst);
} }
if self.func.is_safepoint(inst) {
self.safepoints.push(inst);
}
// Mark clobbers with CodeRanges on PRegs. // Mark clobbers with CodeRanges on PRegs.
for i in 0..self.func.inst_clobbers(inst).len() { for i in 0..self.func.inst_clobbers(inst).len() {
// don't borrow `self` // don't borrow `self`
let clobber = self.func.inst_clobbers(inst)[i]; let clobber = self.func.inst_clobbers(inst)[i];
// Clobber range is at After point only: an
// instruction can still take an input in a reg
// that it later clobbers. (In other words, the
// clobber is like a normal def that never gets
// used.)
let range = CodeRange { let range = CodeRange {
from: ProgPoint::before(inst), from: ProgPoint::after(inst),
to: ProgPoint::before(inst.next()), to: ProgPoint::before(inst.next()),
}; };
self.add_liverange_to_preg(range, clobber); self.add_liverange_to_preg(range, clobber);
@@ -1089,7 +1108,7 @@ impl<'a, F: Function> Env<'a, F> {
// If this is a branch, extend `pos` to // If this is a branch, extend `pos` to
// the end of the block. (Branch uses are // the end of the block. (Branch uses are
// blockparams and need to be live at the // blockparams and need to be live at the
// end of the block. // end of the block.)
if self.func.is_branch(inst) { if self.func.is_branch(inst) {
pos = self.cfginfo.block_exit[block.index()]; pos = self.cfginfo.block_exit[block.index()];
} }
@@ -1242,7 +1261,73 @@ impl<'a, F: Function> Env<'a, F> {
self.liveins[block.index()] = live; self.liveins[block.index()] = live;
} }
// Do a cleanup pass: if there are any LiveRanges with self.safepoints.sort();
// Insert safepoint virtual stack uses, if needed.
for vreg in self.func.reftype_vregs() {
let vreg = VRegIndex::new(vreg.vreg());
let mut iter = self.vregs[vreg.index()].first_range;
let mut safepoint_idx = 0;
while iter.is_valid() {
let rangedata = &self.ranges[iter.index()];
let range = rangedata.range;
while safepoint_idx < self.safepoints.len()
&& ProgPoint::before(self.safepoints[safepoint_idx]) < range.from
{
safepoint_idx += 1;
}
while safepoint_idx < self.safepoints.len()
&& range.contains_point(ProgPoint::before(self.safepoints[safepoint_idx]))
{
// Create a virtual use.
let pos = ProgPoint::before(self.safepoints[safepoint_idx]);
let operand = Operand::new(
self.vregs[vreg.index()].reg,
OperandPolicy::Stack,
OperandKind::Use,
OperandPos::Before,
);
// Create the actual use object.
let u = UseIndex(self.uses.len() as u32);
self.uses.push(Use {
operand,
pos,
slot: SLOT_NONE,
next_use: UseIndex::invalid(),
});
// Create/extend the LiveRange and add the use to the range.
let range = CodeRange {
from: pos,
to: pos.next(),
};
let lr = self.add_liverange_to_vreg(
VRegIndex::new(operand.vreg().vreg()),
range,
&mut num_ranges,
);
vreg_ranges[operand.vreg().vreg()] = lr;
log::debug!(
"Safepoint-induced stack use of {:?} at {:?} -> {:?} -> {:?}",
operand,
pos,
u,
lr
);
self.insert_use_into_liverange_and_update_stats(lr, u);
safepoint_idx += 1;
}
if safepoint_idx >= self.safepoints.len() {
break;
}
iter = self.ranges[iter.index()].next_in_reg;
}
}
// Do a fixed-reg cleanup pass: if there are any LiveRanges with
// multiple uses (or defs) at the same ProgPoint and there is // multiple uses (or defs) at the same ProgPoint and there is
// more than one FixedReg constraint at that ProgPoint, we // more than one FixedReg constraint at that ProgPoint, we
// need to record all but one of them in a special fixup list // need to record all but one of them in a special fixup list
@@ -1264,11 +1349,13 @@ impl<'a, F: Function> Env<'a, F> {
let mut first_preg: SmallVec<[PRegIndex; 16]> = smallvec![]; let mut first_preg: SmallVec<[PRegIndex; 16]> = smallvec![];
let mut extra_clobbers: SmallVec<[(PReg, Inst); 8]> = smallvec![]; let mut extra_clobbers: SmallVec<[(PReg, Inst); 8]> = smallvec![];
let mut fixup_multi_fixed_vregs = |pos: ProgPoint, let mut fixup_multi_fixed_vregs = |pos: ProgPoint,
slot: usize,
op: &mut Operand, op: &mut Operand,
fixups: &mut Vec<( fixups: &mut Vec<(
ProgPoint, ProgPoint,
PRegIndex, PRegIndex,
PRegIndex, PRegIndex,
usize,
)>| { )>| {
if last_point.is_some() && Some(pos) != last_point { if last_point.is_some() && Some(pos) != last_point {
seen_fixed_for_vreg.clear(); seen_fixed_for_vreg.clear();
@@ -1289,7 +1376,7 @@ impl<'a, F: Function> Env<'a, F> {
{ {
let orig_preg = first_preg[idx]; let orig_preg = first_preg[idx];
log::debug!(" -> duplicate; switching to policy Reg"); log::debug!(" -> duplicate; switching to policy Reg");
fixups.push((pos, orig_preg, preg_idx)); fixups.push((pos, orig_preg, preg_idx, slot));
*op = Operand::new(op.vreg(), OperandPolicy::Reg, op.kind(), op.pos()); *op = Operand::new(op.vreg(), OperandPolicy::Reg, op.kind(), op.pos());
extra_clobbers.push((preg, pos.inst)); extra_clobbers.push((preg, pos.inst));
} else { } else {
@@ -1302,8 +1389,10 @@ impl<'a, F: Function> Env<'a, F> {
if self.ranges[iter.index()].def.is_valid() { if self.ranges[iter.index()].def.is_valid() {
let def_idx = self.vregs[vreg].def; let def_idx = self.vregs[vreg].def;
let pos = self.defs[def_idx.index()].pos; let pos = self.defs[def_idx.index()].pos;
let slot = self.defs[def_idx.index()].slot;
fixup_multi_fixed_vregs( fixup_multi_fixed_vregs(
pos, pos,
slot,
&mut self.defs[def_idx.index()].operand, &mut self.defs[def_idx.index()].operand,
&mut self.multi_fixed_reg_fixups, &mut self.multi_fixed_reg_fixups,
); );
@@ -1312,8 +1401,10 @@ impl<'a, F: Function> Env<'a, F> {
let mut use_iter = self.ranges[iter.index()].first_use; let mut use_iter = self.ranges[iter.index()].first_use;
while use_iter.is_valid() { while use_iter.is_valid() {
let pos = self.uses[use_iter.index()].pos; let pos = self.uses[use_iter.index()].pos;
let slot = self.uses[use_iter.index()].slot;
fixup_multi_fixed_vregs( fixup_multi_fixed_vregs(
pos, pos,
slot,
&mut self.uses[use_iter.index()].operand, &mut self.uses[use_iter.index()].operand,
&mut self.multi_fixed_reg_fixups, &mut self.multi_fixed_reg_fixups,
); );
@@ -1916,13 +2007,17 @@ impl<'a, F: Function> Env<'a, F> {
let bundledata = &self.bundles[bundle.index()]; let bundledata = &self.bundles[bundle.index()];
let first_range = &self.ranges[bundledata.first_range.index()]; let first_range = &self.ranges[bundledata.first_range.index()];
log::debug!("recompute bundle properties: bundle {:?}", bundle);
if first_range.vreg.is_invalid() { if first_range.vreg.is_invalid() {
log::debug!(" -> no vreg; minimal and fixed");
minimal = true; minimal = true;
fixed = true; fixed = true;
} else { } else {
if first_range.def.is_valid() { if first_range.def.is_valid() {
let def_data = &self.defs[first_range.def.index()]; let def_data = &self.defs[first_range.def.index()];
if let OperandPolicy::FixedReg(_) = def_data.operand.policy() { if let OperandPolicy::FixedReg(_) = def_data.operand.policy() {
log::debug!(" -> fixed def {:?}", first_range.def);
fixed = true; fixed = true;
} }
} }
@@ -1930,6 +2025,7 @@ impl<'a, F: Function> Env<'a, F> {
while use_iter.is_valid() { while use_iter.is_valid() {
let use_data = &self.uses[use_iter.index()]; let use_data = &self.uses[use_iter.index()];
if let OperandPolicy::FixedReg(_) = use_data.operand.policy() { if let OperandPolicy::FixedReg(_) = use_data.operand.policy() {
log::debug!(" -> fixed use {:?}", use_iter);
fixed = true; fixed = true;
break; break;
} }
@@ -1939,16 +2035,22 @@ impl<'a, F: Function> Env<'a, F> {
// the range covers only one instruction. Note that it // the range covers only one instruction. Note that it
// could cover just one ProgPoint, i.e. X.Before..X.After, // could cover just one ProgPoint, i.e. X.Before..X.After,
// or two ProgPoints, i.e. X.Before..X+1.Before. // or two ProgPoints, i.e. X.Before..X+1.Before.
log::debug!(" -> first range has range {:?}", first_range.range);
log::debug!(
" -> first range has next in bundle {:?}",
first_range.next_in_bundle
);
minimal = first_range.next_in_bundle.is_invalid() minimal = first_range.next_in_bundle.is_invalid()
&& first_range.range.from.inst == first_range.range.to.prev().inst; && first_range.range.from.inst == first_range.range.to.prev().inst;
log::debug!(" -> minimal: {}", minimal);
} }
let spill_weight = if minimal { let spill_weight = if minimal {
if fixed { if fixed {
log::debug!(" -> fixed and minimal: 2000000"); log::debug!(" -> fixed and minimal: spill weight 2000000");
2_000_000 2_000_000
} else { } else {
log::debug!(" -> non-fixed and minimal: 1000000"); log::debug!(" -> non-fixed and minimal: spill weight 1000000");
1_000_000 1_000_000
} }
} else { } else {
@@ -1957,15 +2059,20 @@ impl<'a, F: Function> Env<'a, F> {
while range.is_valid() { while range.is_valid() {
let range_data = &self.ranges[range.index()]; let range_data = &self.ranges[range.index()];
if range_data.def.is_valid() { if range_data.def.is_valid() {
log::debug!(" -> has def (2000)"); log::debug!(" -> has def (spill weight +2000)");
total += 2000; total += 2000;
} }
log::debug!(" -> uses spill weight: {}", range_data.uses_spill_weight); log::debug!(" -> uses spill weight: +{}", range_data.uses_spill_weight);
total += range_data.uses_spill_weight; total += range_data.uses_spill_weight;
range = range_data.next_in_bundle; range = range_data.next_in_bundle;
} }
if self.bundles[bundle.index()].prio > 0 { if self.bundles[bundle.index()].prio > 0 {
log::debug!(
" -> dividing by prio {}; final weight {}",
self.bundles[bundle.index()].prio,
total / self.bundles[bundle.index()].prio
);
total / self.bundles[bundle.index()].prio total / self.bundles[bundle.index()].prio
} else { } else {
total total
@@ -2646,6 +2753,15 @@ impl<'a, F: Function> Env<'a, F> {
lowest_cost_conflict_set.unwrap_or(smallvec![]) lowest_cost_conflict_set.unwrap_or(smallvec![])
} }
Requirement::Stack(_) => {
// If we must be on the stack, put ourselves on
// the spillset's list immediately.
self.spillsets[self.bundles[bundle.index()].spillset.index()]
.bundles
.push(bundle);
return;
}
Requirement::Any(_) => { Requirement::Any(_) => {
// If a register is not *required*, spill now (we'll retry // If a register is not *required*, spill now (we'll retry
// allocation on spilled bundles later). // allocation on spilled bundles later).
@@ -2657,8 +2773,9 @@ impl<'a, F: Function> Env<'a, F> {
log::debug!(" -> conflict set {:?}", conflicting_bundles); log::debug!(" -> conflict set {:?}", conflicting_bundles);
// If we have already tried evictions once before and are still unsuccessful, give up // If we have already tried evictions once before and are
// and move on to splitting as long as this is not a minimal bundle. // still unsuccessful, give up and move on to splitting as
// long as this is not a minimal bundle.
if attempts >= 2 && !self.minimal_bundle(bundle) { if attempts >= 2 && !self.minimal_bundle(bundle) {
break; break;
} }
@@ -3324,7 +3441,11 @@ impl<'a, F: Function> Env<'a, F> {
debug_assert!(range.contains_point(usedata.pos)); debug_assert!(range.contains_point(usedata.pos));
let inst = usedata.pos.inst; let inst = usedata.pos.inst;
let slot = usedata.slot; let slot = usedata.slot;
self.set_alloc(inst, slot, alloc); // Safepoints add virtual uses with no slots;
// avoid these.
if slot != SLOT_NONE {
self.set_alloc(inst, slot, alloc);
}
use_iter = self.uses[use_iter.index()].next_use; use_iter = self.uses[use_iter.index()].next_use;
} }
@@ -3425,7 +3546,7 @@ impl<'a, F: Function> Env<'a, F> {
} }
// Handle multi-fixed-reg constraints by copying. // Handle multi-fixed-reg constraints by copying.
for (progpoint, from_preg, to_preg) in for (progpoint, from_preg, to_preg, slot) in
std::mem::replace(&mut self.multi_fixed_reg_fixups, vec![]) std::mem::replace(&mut self.multi_fixed_reg_fixups, vec![])
{ {
log::debug!( log::debug!(
@@ -3440,6 +3561,11 @@ impl<'a, F: Function> Env<'a, F> {
Allocation::reg(self.pregs[from_preg.index()].reg), Allocation::reg(self.pregs[from_preg.index()].reg),
Allocation::reg(self.pregs[to_preg.index()].reg), Allocation::reg(self.pregs[to_preg.index()].reg),
); );
self.set_alloc(
progpoint.inst,
slot,
Allocation::reg(self.pregs[to_preg.index()].reg),
);
} }
// Handle outputs that reuse inputs: copy beforehand, then set // Handle outputs that reuse inputs: copy beforehand, then set
@@ -3633,7 +3759,155 @@ impl<'a, F: Function> Env<'a, F> {
self.edits.push((pos.to_index(), prio, edit)); self.edits.push((pos.to_index(), prio, edit));
} }
fn compute_stackmaps(&mut self) {} fn compute_stackmaps(&mut self) {
// For each ref-typed vreg, iterate through ranges and find
// safepoints in-range. Add the SpillSlot to the stackmap.
//
// Note that unlike in the rest of the allocator, we cannot
// overapproximate here: we cannot list a vreg's alloc at a
// certain program point in the metadata if it is not yet
// live. Because arbitrary block order and irreducible control
// flow could result in us encountering an (overapproximated,
// not actually live) vreg range for a reftyped value when
// scanning in block order, we need to do a fixpoint liveness
// analysis here for reftyped vregs only. We only perform this
// analysis if there are reftyped vregs present, so it will
// not add to allocation runtime otherwise.
if self.func.reftype_vregs().is_empty() {
return;
}
let mut reftype_vreg_map = BitVec::new();
for vreg in self.func.reftype_vregs() {
reftype_vreg_map.set(vreg.vreg(), true);
}
let mut live_reftypes_block_start: Vec<BitVec> = vec![];
let mut live_reftypes_block_end: Vec<BitVec> = vec![];
for _ in 0..self.func.blocks() {
live_reftypes_block_start.push(BitVec::new());
live_reftypes_block_end.push(BitVec::new());
}
let mut safepoints_per_vreg: HashMap<usize, HashSet<Inst>> = HashMap::new();
for &vreg in self.func.reftype_vregs() {
safepoints_per_vreg.insert(vreg.vreg(), HashSet::new());
}
let mut workqueue = VecDeque::new();
let mut workqueue_set = HashSet::new();
let mut visited = HashSet::new();
// Backward analysis: start at return blocks.
for block in 0..self.func.blocks() {
let block = Block::new(block);
if self.func.is_ret(self.func.block_insns(block).last()) {
workqueue.push_back(block);
workqueue_set.insert(block);
}
}
// While workqueue is not empty, scan a block backward.
while !workqueue.is_empty() {
let block = workqueue.pop_back().unwrap();
workqueue_set.remove(&block);
visited.insert(block);
let live = &mut live_reftypes_block_start[block.index()];
live.assign(&live_reftypes_block_end[block.index()]);
for inst in self.func.block_insns(block).rev().iter() {
for pos in &[OperandPos::After, OperandPos::Before] {
for op in self.func.inst_operands(inst) {
if !reftype_vreg_map.get(op.vreg().vreg()) {
continue;
}
if op.pos() != OperandPos::Both && op.pos() != *pos {
continue;
}
match op.kind() {
OperandKind::Def => {
live.set(op.vreg().vreg(), false);
}
OperandKind::Use => {
live.set(op.vreg().vreg(), true);
}
}
}
}
if self.func.is_safepoint(inst) {
for vreg in live.iter() {
let safepoints = safepoints_per_vreg.get_mut(&vreg).unwrap();
safepoints.insert(inst);
}
}
}
for blockparam in self.func.block_params(block) {
if !reftype_vreg_map.get(blockparam.vreg()) {
continue;
}
live.set(blockparam.vreg(), false);
}
for &pred in self.func.block_preds(block) {
if live_reftypes_block_end[pred.index()].or(live) || !visited.contains(&pred) {
if !workqueue_set.contains(&pred) {
workqueue.push_back(pred);
workqueue_set.insert(pred);
}
}
}
}
// Now we have `safepoints_per_vreg`. All we have to do is,
// for each vreg in this map, step through the LiveRanges
// along with a sorted list of safepoints; and for each
// safepoint in the current range, emit the allocation into
// the `safepoint_slots` list.
log::debug!("safepoints_per_vreg = {:?}", safepoints_per_vreg);
for vreg in self.func.reftype_vregs() {
log::debug!("generating safepoint info for vreg {}", vreg);
let vreg = VRegIndex::new(vreg.vreg());
let mut safepoints: Vec<ProgPoint> = safepoints_per_vreg
.get(&vreg.index())
.unwrap()
.iter()
.map(|&inst| ProgPoint::before(inst))
.collect();
safepoints.sort();
log::debug!(" -> live over safepoints: {:?}", safepoints);
let mut safepoint_idx = 0;
let mut iter = self.vregs[vreg.index()].first_range;
while iter.is_valid() {
let rangedata = &self.ranges[iter.index()];
let range = rangedata.range;
let alloc = self.get_alloc_for_range(iter);
log::debug!(" -> range {:?}: alloc {}", range, alloc);
while safepoint_idx < safepoints.len() && safepoints[safepoint_idx] < range.to {
if safepoints[safepoint_idx] < range.from {
safepoint_idx += 1;
continue;
}
log::debug!(" -> covers safepoint {:?}", safepoints[safepoint_idx]);
let slot = alloc
.as_stack()
.expect("Reference-typed value not in spillslot at safepoint");
self.safepoint_slots.push((safepoints[safepoint_idx], slot));
safepoint_idx += 1;
}
iter = rangedata.next_in_reg;
}
}
self.safepoint_slots.sort();
log::debug!("final safepoint slots info: {:?}", self.safepoint_slots);
}
pub(crate) fn init(&mut self) -> Result<(), RegAllocError> { pub(crate) fn init(&mut self) -> Result<(), RegAllocError> {
self.create_pregs_and_vregs(); self.create_pregs_and_vregs();
@@ -3769,6 +4043,8 @@ pub fn run<F: Function>(func: &F, mach_env: &MachineEnv) -> Result<Output, RegAl
allocs: env.allocs, allocs: env.allocs,
inst_alloc_offsets: env.inst_alloc_offsets, inst_alloc_offsets: env.inst_alloc_offsets,
num_spillslots: env.num_spillslots as usize, num_spillslots: env.num_spillslots as usize,
debug_locations: vec![],
safepoint_slots: env.safepoint_slots,
stats: env.stats, stats: env.stats,
}) })
} }

View File

@@ -201,10 +201,12 @@ impl std::fmt::Display for SpillSlot {
/// `LAllocation` in Ion). /// `LAllocation` in Ion).
#[derive(Clone, Copy, PartialEq, Eq)] #[derive(Clone, Copy, PartialEq, Eq)]
pub struct Operand { pub struct Operand {
/// Bit-pack into 31 bits. This allows a `Reg` to encode an /// Bit-pack into 32 bits. Note that `policy` overlaps with `kind`
/// `Operand` or an `Allocation` in 32 bits. /// in `Allocation` and we use mutually disjoint tag-value ranges
/// so that clients, if they wish, can track just one `u32` per
/// register slot and edit it in-place after allocation.
/// ///
/// op-or-alloc:1 pos:2 kind:1 policy:2 class:1 preg:5 vreg:20 /// policy:3 kind:1 pos:2 class:1 preg:5 vreg:20
bits: u32, bits: u32,
} }
@@ -214,13 +216,14 @@ impl Operand {
let (preg_field, policy_field): (u32, u32) = match policy { let (preg_field, policy_field): (u32, u32) = match policy {
OperandPolicy::Any => (0, 0), OperandPolicy::Any => (0, 0),
OperandPolicy::Reg => (0, 1), OperandPolicy::Reg => (0, 1),
OperandPolicy::Stack => (0, 2),
OperandPolicy::FixedReg(preg) => { OperandPolicy::FixedReg(preg) => {
assert_eq!(preg.class(), vreg.class()); assert_eq!(preg.class(), vreg.class());
(preg.hw_enc() as u32, 2) (preg.hw_enc() as u32, 3)
} }
OperandPolicy::Reuse(which) => { OperandPolicy::Reuse(which) => {
assert!(which <= PReg::MAX); assert!(which <= PReg::MAX);
(which as u32, 3) (which as u32, 4)
} }
}; };
let class_field = vreg.class() as u8 as u32; let class_field = vreg.class() as u8 as u32;
@@ -230,9 +233,9 @@ impl Operand {
bits: vreg.vreg() as u32 bits: vreg.vreg() as u32
| (preg_field << 20) | (preg_field << 20)
| (class_field << 25) | (class_field << 25)
| (policy_field << 26) | (pos_field << 26)
| (kind_field << 28) | (kind_field << 28)
| (pos_field << 29), | (policy_field << 29),
} }
} }
@@ -322,7 +325,7 @@ impl Operand {
#[inline(always)] #[inline(always)]
pub fn pos(self) -> OperandPos { pub fn pos(self) -> OperandPos {
let pos_field = (self.bits >> 29) & 3; let pos_field = (self.bits >> 26) & 3;
match pos_field { match pos_field {
0 => OperandPos::Before, 0 => OperandPos::Before,
1 => OperandPos::After, 1 => OperandPos::After,
@@ -333,13 +336,14 @@ impl Operand {
#[inline(always)] #[inline(always)]
pub fn policy(self) -> OperandPolicy { pub fn policy(self) -> OperandPolicy {
let policy_field = (self.bits >> 26) & 3; let policy_field = (self.bits >> 29) & 7;
let preg_field = ((self.bits >> 20) as usize) & PReg::MAX; let preg_field = ((self.bits >> 20) as usize) & PReg::MAX;
match policy_field { match policy_field {
0 => OperandPolicy::Any, 0 => OperandPolicy::Any,
1 => OperandPolicy::Reg, 1 => OperandPolicy::Reg,
2 => OperandPolicy::FixedReg(PReg::new(preg_field, self.class())), 2 => OperandPolicy::Stack,
3 => OperandPolicy::Reuse(preg_field), 3 => OperandPolicy::FixedReg(PReg::new(preg_field, self.class())),
4 => OperandPolicy::Reuse(preg_field),
_ => unreachable!(), _ => unreachable!(),
} }
} }
@@ -357,15 +361,7 @@ impl Operand {
impl std::fmt::Debug for Operand { impl std::fmt::Debug for Operand {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!( std::fmt::Display::fmt(self, f)
f,
"Operand(vreg = {:?}, class = {:?}, kind = {:?}, pos = {:?}, policy = {:?})",
self.vreg().vreg(),
self.class(),
self.kind(),
self.pos(),
self.policy()
)
} }
} }
@@ -373,10 +369,14 @@ impl std::fmt::Display for Operand {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!( write!(
f, f,
"{:?}@{:?}: {} {}", "{:?}@{:?}: {}{} {}",
self.kind(), self.kind(),
self.pos(), self.pos(),
self.vreg(), self.vreg(),
match self.class() {
RegClass::Int => "i",
RegClass::Float => "f",
},
self.policy() self.policy()
) )
} }
@@ -388,6 +388,8 @@ pub enum OperandPolicy {
Any, Any,
/// Operand must be in a register. Register is read-only for Uses. /// Operand must be in a register. Register is read-only for Uses.
Reg, Reg,
/// Operand must be on the stack.
Stack,
/// Operand must be in a fixed register. /// Operand must be in a fixed register.
FixedReg(PReg), FixedReg(PReg),
/// On defs only: reuse a use's register. Which use is given by `preg` field. /// On defs only: reuse a use's register. Which use is given by `preg` field.
@@ -399,6 +401,7 @@ impl std::fmt::Display for OperandPolicy {
match self { match self {
Self::Any => write!(f, "any"), Self::Any => write!(f, "any"),
Self::Reg => write!(f, "reg"), Self::Reg => write!(f, "reg"),
Self::Stack => write!(f, "stack"),
Self::FixedReg(preg) => write!(f, "fixed({})", preg), Self::FixedReg(preg) => write!(f, "fixed({})", preg),
Self::Reuse(idx) => write!(f, "reuse({})", idx), Self::Reuse(idx) => write!(f, "reuse({})", idx),
} }
@@ -422,20 +425,21 @@ pub enum OperandPos {
/// Operand. /// Operand.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Allocation { pub struct Allocation {
/// Bit-pack in 31 bits: /// Bit-pack in 32 bits. Note that `kind` overlaps with the
/// `policy` field in `Operand`, and we are careful to use
/// disjoint ranges of values in this field for each type. We also
/// leave the def-or-use bit (`kind` for `Operand`) unused here so
/// that the client may use it to mark `Allocation`s on
/// instructions as read or write when it edits instructions
/// (which is sometimes useful for post-allocation analyses).
/// ///
/// op-or-alloc:1 kind:2 index:29 /// kind:3 unused:1 index:28
bits: u32, bits: u32,
} }
impl std::fmt::Debug for Allocation { impl std::fmt::Debug for Allocation {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!( std::fmt::Display::fmt(self, f)
f,
"Allocation(kind = {:?}, index = {})",
self.kind(),
self.index()
)
} }
} }
@@ -452,6 +456,7 @@ impl std::fmt::Display for Allocation {
impl Allocation { impl Allocation {
#[inline(always)] #[inline(always)]
pub(crate) fn new(kind: AllocationKind, index: usize) -> Self { pub(crate) fn new(kind: AllocationKind, index: usize) -> Self {
assert!(index < (1 << 28));
Self { Self {
bits: ((kind as u8 as u32) << 29) | (index as u32), bits: ((kind as u8 as u32) << 29) | (index as u32),
} }
@@ -474,17 +479,32 @@ impl Allocation {
#[inline(always)] #[inline(always)]
pub fn kind(self) -> AllocationKind { pub fn kind(self) -> AllocationKind {
match (self.bits >> 29) & 3 { match (self.bits >> 29) & 7 {
0 => AllocationKind::None, 5 => AllocationKind::None,
1 => AllocationKind::Reg, 6 => AllocationKind::Reg,
2 => AllocationKind::Stack, 7 => AllocationKind::Stack,
_ => unreachable!(), _ => unreachable!(),
} }
} }
#[inline(always)]
pub fn is_none(self) -> bool {
self.kind() == AllocationKind::None
}
#[inline(always)]
pub fn is_reg(self) -> bool {
self.kind() == AllocationKind::Reg
}
#[inline(always)]
pub fn is_stack(self) -> bool {
self.kind() == AllocationKind::Stack
}
#[inline(always)] #[inline(always)]
pub fn index(self) -> usize { pub fn index(self) -> usize {
(self.bits & ((1 << 29) - 1)) as usize (self.bits & ((1 << 28) - 1)) as usize
} }
#[inline(always)] #[inline(always)]
@@ -516,12 +536,14 @@ impl Allocation {
} }
} }
// N.B.: These values must be *disjoint* with the values used to
// encode `OperandPolicy`, because they share a 3-bit field.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)] #[repr(u8)]
pub enum AllocationKind { pub enum AllocationKind {
None = 0, None = 5,
Reg = 1, Reg = 6,
Stack = 2, Stack = 7,
} }
impl Allocation { impl Allocation {
@@ -535,6 +557,59 @@ impl Allocation {
} }
} }
/// A helper that wraps either an `Operand` or an `Allocation` and is
/// able to tell which it is based on the tag bits.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct OperandOrAllocation {
bits: u32,
}
impl OperandOrAllocation {
pub fn from_operand(operand: Operand) -> Self {
Self {
bits: operand.bits(),
}
}
pub fn from_alloc(alloc: Allocation) -> Self {
Self { bits: alloc.bits() }
}
pub fn is_operand(&self) -> bool {
(self.bits >> 29) <= 4
}
pub fn is_allocation(&self) -> bool {
(self.bits >> 29) >= 5
}
pub fn as_operand(&self) -> Option<Operand> {
if self.is_operand() {
Some(Operand::from_bits(self.bits))
} else {
None
}
}
pub fn as_allocation(&self) -> Option<Allocation> {
if self.is_allocation() {
Some(Allocation::from_bits(self.bits & !(1 << 28)))
} else {
None
}
}
pub fn kind(&self) -> OperandKind {
let kind_field = (self.bits >> 28) & 1;
match kind_field {
0 => OperandKind::Def,
1 => OperandKind::Use,
_ => unreachable!(),
}
}
/// Replaces the Operand with an Allocation, keeping the def/use bit.
pub fn replace_with_alloc(&mut self, alloc: Allocation) {
self.bits &= 1 << 28;
self.bits |= alloc.bits;
}
}
/// A trait defined by the regalloc client to provide access to its /// A trait defined by the regalloc client to provide access to its
/// machine-instruction / CFG representation. /// machine-instruction / CFG representation.
pub trait Function { pub trait Function {
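
To make the tag disjointness concrete, here is an illustrative sanity check
(not part of the commit) built only from the constructors and accessors shown
above; the test name is hypothetical:

```rust
#[test]
fn operand_and_allocation_tags_are_disjoint() {
    // The new Stack policy round-trips through the re-packed bitfield.
    let v = VReg::new(5, RegClass::Int);
    let op = Operand::new(v, OperandPolicy::Stack, OperandKind::Use, OperandPos::Before);
    assert!(matches!(op.policy(), OperandPolicy::Stack));

    // Operand tags occupy 0..=4 and Allocation tags 5..=7 in the shared
    // 3-bit field, so the wrapper can always tell which one a word holds.
    assert!(OperandOrAllocation::from_operand(op).is_operand());
    let alloc = Allocation::reg(PReg::new(3, RegClass::Int));
    assert!(OperandOrAllocation::from_alloc(alloc).is_allocation());
}
```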
@@ -576,7 +651,9 @@ pub trait Function {
fn is_branch(&self, insn: Inst) -> bool; fn is_branch(&self, insn: Inst) -> bool;
/// Determine whether an instruction is a safepoint and requires a stackmap. /// Determine whether an instruction is a safepoint and requires a stackmap.
fn is_safepoint(&self, insn: Inst) -> bool; fn is_safepoint(&self, _: Inst) -> bool {
false
}
/// Determine whether an instruction is a move; if so, return the /// Determine whether an instruction is a move; if so, return the
/// vregs for (src, dst). /// vregs for (src, dst).
@@ -598,6 +675,40 @@ pub trait Function {
/// course better if it is exact. /// course better if it is exact.
fn num_vregs(&self) -> usize; fn num_vregs(&self) -> usize;
/// Get the VRegs that are pointer/reference types. This has the
/// following effects for each such vreg:
///
/// - At all safepoint instructions, the vreg will be in a
/// SpillSlot, not in a register.
/// - The vreg *may not* be used as a register operand on
/// safepoint instructions: this is because a vreg can only live
/// in one place at a time. The client should copy the value to an
/// integer-typed vreg and use this to pass a pointer as an input
/// to a safepoint instruction (such as a function call).
/// - At all safepoint instructions, all live vregs' locations
/// will be included in a list in the `Output` below, so that
/// pointer-inspecting/updating functionality (such as a moving
/// garbage collector) may observe and edit their values.
fn reftype_vregs(&self) -> &[VReg] {
&[]
}
/// Get the VRegs for which we should generate value-location
/// metadata for debugging purposes. This can be used to generate
/// e.g. DWARF with valid program-point ranges for each value
/// expression in a way that is more efficient than a post-hoc
/// analysis of the allocator's output.
///
/// Each tuple is (vreg, inclusive_start, exclusive_end,
/// label). In the `Output` there will be `(label, inclusive_start,
/// exclusive_end, alloc)` tuples. The ranges may not exactly
/// match -- specifically, the returned metadata may cover only a
/// subset of the requested ranges -- if the value is not live for
/// the entire requested ranges.
fn debug_value_labels(&self) -> &[(Inst, Inst, VReg, u32)] {
&[]
}
// -------------- // --------------
// Spills/reloads // Spills/reloads
// -------------- // --------------
@@ -736,6 +847,17 @@ pub struct Output {
/// Allocation offset in `allocs` for each instruction. /// Allocation offset in `allocs` for each instruction.
pub inst_alloc_offsets: Vec<u32>, pub inst_alloc_offsets: Vec<u32>,
/// Safepoint records: at a given program point, a reference-typed value lives in the given SpillSlot.
pub safepoint_slots: Vec<(ProgPoint, SpillSlot)>,
/// Debug info: a labeled value (as applied to vregs by
/// `Function::debug_value_labels()` on the input side) is located
/// in the given allocation from the first program point
/// (inclusive) to the second (exclusive). Guaranteed to be sorted
/// by label and program point, and the ranges are guaranteed to
/// be disjoint.
pub debug_locations: Vec<(u32, ProgPoint, ProgPoint, Allocation)>,
/// Internal stats from the allocator. /// Internal stats from the allocator.
pub stats: ion::Stats, pub stats: ion::Stats,
} }