diff --git a/src/checker.rs b/src/checker.rs
index 9a31ab9..952f612 100644
--- a/src/checker.rs
+++ b/src/checker.rs
@@ -590,18 +590,24 @@ impl CheckerState {
     fn update<'a, F: Function>(&mut self, checkinst: &CheckerInst, checker: &Checker<'a, F>) {
         self.become_defined();

-        let default_val = Default::default();
         match checkinst {
             &CheckerInst::Move { into, from } => {
-                let val = self.get_value(&from).unwrap_or(&default_val).clone();
-                trace!(
-                    "checker: checkinst {:?} updating: move {:?} -> {:?} val {:?}",
-                    checkinst,
-                    from,
-                    into,
-                    val
-                );
-                self.set_value(into, val);
+                // Value may not be present if this move is part of
+                // the parallel move resolver's fallback sequence that
+                // saves a victim register elsewhere. (In other words,
+                // that sequence saves an undefined value and restores
+                // it, so has no effect.) The checker needs to avoid
+                // putting Universe lattice values into the map.
+                if let Some(val) = self.get_value(&from).cloned() {
+                    trace!(
+                        "checker: checkinst {:?} updating: move {:?} -> {:?} val {:?}",
+                        checkinst,
+                        from,
+                        into,
+                        val
+                    );
+                    self.set_value(into, val);
+                }
             }
             &CheckerInst::ParallelMove { ref moves } => {
                 // First, build map of actions for each vreg in an
diff --git a/src/fuzzing/func.rs b/src/fuzzing/func.rs
index e924c3d..7956d0c 100644
--- a/src/fuzzing/func.rs
+++ b/src/fuzzing/func.rs
@@ -483,16 +483,37 @@ impl Func {
                         OperandPos::Early,
                     );
                 } else if opts.fixed_regs && bool::arbitrary(u)? {
-                    let mut fixed = vec![];
+                    let mut fixed_early = vec![];
+                    let mut fixed_late = vec![];
                     for _ in 0..u.int_in_range(0..=operands.len() - 1)? {
                         // Pick an operand and make it a fixed reg.
-                        let fixed_reg = PReg::new(u.int_in_range(0..=62)?, RegClass::Int);
-                        if fixed.contains(&fixed_reg) {
-                            break;
-                        }
-                        fixed.push(fixed_reg);
                         let i = u.int_in_range(0..=(operands.len() - 1))?;
                         let op = operands[i];
+                        let fixed_reg = PReg::new(u.int_in_range(0..=62)?, RegClass::Int);
+                        let fixed_list = match op.pos() {
+                            OperandPos::Early => &mut fixed_early,
+                            OperandPos::Late => &mut fixed_late,
+                        };
+                        if fixed_list.contains(&fixed_reg) {
+                            break;
+                        }
+                        if op.kind() != OperandKind::Def && op.pos() == OperandPos::Late {
+                            // Late-uses/mods with fixed constraints
+                            // can't be allowed if we're allowing
+                            // different constraints at Early and
+                            // Late, because we can't move something
+                            // into a location between Early and
+                            // Late. Differing constraints only make
+                            // sense if the instruction itself
+                            // produces the newly-constrained values.
+                            break;
+                        }
+                        if op.kind() != OperandKind::Use && op.pos() == OperandPos::Early {
+                            // Likewise, we can *only* allow uses for
+                            // fixed constraints at Early.
+                            break;
+                        }
+                        fixed_list.push(fixed_reg);
                         operands[i] = Operand::new(
                             op.vreg(),
                             OperandConstraint::FixedReg(fixed_reg),
diff --git a/src/ion/data_structures.rs b/src/ion/data_structures.rs
index 394c6f2..dd1013a 100644
--- a/src/ion/data_structures.rs
+++ b/src/ion/data_structures.rs
@@ -274,10 +274,20 @@ pub struct MultiFixedRegFixup {
     pub pos: ProgPoint,
     pub from_slot: u8,
     pub to_slot: u8,
+    pub level: FixedRegFixupLevel,
     pub to_preg: PRegIndex,
     pub vreg: VRegIndex,
 }

+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum FixedRegFixupLevel {
+    /// A fixup copy for the initial fixed reg; must come first.
+    Initial,
+    /// A fixup copy from the first fixed reg to other fixed regs for
+    /// the same vreg; must come second.
+    Secondary,
+}
+
 /// The field order is significant: these are sorted so that a
 /// scan over vregs, then blocks in each range, can scan in
 /// order through this (sorted) list and add allocs to the
@@ -564,7 +574,8 @@ pub enum InsertMovePrio {
     BlockParam,
     Regular,
     PostRegular,
-    MultiFixedReg,
+    MultiFixedRegInitial,
+    MultiFixedRegSecondary,
     ReusedInput,
     OutEdgeMoves,
 }
diff --git a/src/ion/liveranges.rs b/src/ion/liveranges.rs
index ae1b14e..350002d 100644
--- a/src/ion/liveranges.rs
+++ b/src/ion/liveranges.rs
@@ -18,12 +18,14 @@ use super::{
     SpillSetIndex, Use, VRegData, VRegIndex, SLOT_NONE,
 };
 use crate::indexset::IndexSet;
-use crate::ion::data_structures::{BlockparamIn, BlockparamOut, MultiFixedRegFixup};
+use crate::ion::data_structures::{
+    BlockparamIn, BlockparamOut, FixedRegFixupLevel, MultiFixedRegFixup,
+};
 use crate::{
     Allocation, Block, Function, Inst, InstPosition, Operand, OperandConstraint, OperandKind,
     OperandPos, PReg, ProgPoint, RegAllocError, VReg,
 };
-use fxhash::FxHashSet;
+use fxhash::{FxHashMap, FxHashSet};
 use slice_group_by::GroupByMut;
 use smallvec::{smallvec, SmallVec};
 use std::collections::{HashSet, VecDeque};
@@ -564,7 +566,7 @@ impl<'a, F: Function> Env<'a, F> {

                     self.insert_move(
                         ProgPoint::before(inst),
-                        InsertMovePrio::MultiFixedReg,
+                        InsertMovePrio::MultiFixedRegInitial,
                         Allocation::reg(src_preg),
                         Allocation::reg(dst_preg),
                         Some(dst.vreg()),
@@ -712,7 +714,7 @@ impl<'a, F: Function> Env<'a, F> {
                     );
                     self.insert_move(
                         ProgPoint::before(inst.next()),
-                        InsertMovePrio::MultiFixedReg,
+                        InsertMovePrio::MultiFixedRegInitial,
                         Allocation::reg(preg),
                         Allocation::reg(preg),
                         Some(src.vreg()),
@@ -917,11 +919,110 @@ impl<'a, F: Function> Env<'a, F> {
                     continue;
                 }

+                // Preprocess defs and uses. Specifically, if there
+                // are any fixed-reg-constrained defs at Late position
+                // and fixed-reg-constrained uses at Early position
+                // with the same preg, we need to (i) add a fixup move
+                // for the use, (ii) rewrite the use to have an Any
+                // constraint, and (iii) move the def to Early position
+                // to reserve the register for the whole instruction.
+                let mut operand_rewrites: FxHashMap<usize, Operand> = FxHashMap::default();
+                let mut late_def_fixed: SmallVec<[(PReg, Operand, usize); 2]> = smallvec![];
+                for (i, &operand) in self.func.inst_operands(inst).iter().enumerate() {
+                    if let OperandConstraint::FixedReg(preg) = operand.constraint() {
+                        match operand.pos() {
+                            OperandPos::Late => {
+                                // See note in fuzzing/func.rs: we
+                                // can't allow this, because there
+                                // would be no way to move a value
+                                // into place for a late use *after*
+                                // the early point (i.e. in the middle
+                                // of the instruction).
+                                assert!(
+                                    operand.kind() == OperandKind::Def,
+                                    "Invalid operand: fixed constraint on Use/Mod at Late point"
+                                );
+
+                                late_def_fixed.push((preg, operand, i));
+                            }
+                            _ => {}
+                        }
+                    }
+                }
+                for (i, &operand) in self.func.inst_operands(inst).iter().enumerate() {
+                    if let OperandConstraint::FixedReg(preg) = operand.constraint() {
+                        match operand.pos() {
+                            OperandPos::Early => {
+                                assert!(operand.kind() == OperandKind::Use,
+                                        "Invalid operand: fixed constraint on Def/Mod at Early position");
+
+                                // If we have a constraint at the
+                                // Early point for a fixed preg, and
+                                // this preg is also constrained with
+                                // a *separate* def at Late, and *if*
+                                // the vreg is live downward, we have
+                                // to use the multi-fixed-reg
+                                // mechanism for a fixup and rewrite
+                                // here without the constraint. See
+                                // #53.
+                                //
+                                // We adjust the def liverange and Use
+                                // to an "early" position to reserve
+                                // the register; it still must not be
+                                // used by some other vreg at the
+                                // use-site.
+                                //
+                                // Note that we handle multiple
+                                // conflicting constraints for the
+                                // same vreg in a separate pass (see
+                                // `fixup_multi_fixed_vregs` below).
+                                if let Some((_, def_op, def_slot)) = late_def_fixed
+                                    .iter()
+                                    .find(|(def_preg, _, _)| *def_preg == preg)
+                                {
+                                    let pos = ProgPoint::before(inst);
+                                    self.multi_fixed_reg_fixups.push(MultiFixedRegFixup {
+                                        pos,
+                                        from_slot: i as u8,
+                                        to_slot: i as u8,
+                                        to_preg: PRegIndex::new(preg.index()),
+                                        vreg: VRegIndex::new(operand.vreg().vreg()),
+                                        level: FixedRegFixupLevel::Initial,
+                                    });
+
+                                    operand_rewrites.insert(
+                                        i,
+                                        Operand::new(
+                                            operand.vreg(),
+                                            OperandConstraint::Any,
+                                            operand.kind(),
+                                            operand.pos(),
+                                        ),
+                                    );
+                                    operand_rewrites.insert(
+                                        *def_slot,
+                                        Operand::new(
+                                            def_op.vreg(),
+                                            def_op.constraint(),
+                                            def_op.kind(),
+                                            OperandPos::Early,
+                                        ),
+                                    );
+                                }
+                            }
+                            _ => {}
+                        }
+                    }
+                }
+
                 // Process defs and uses.
                 for &cur_pos in &[InstPosition::After, InstPosition::Before] {
                     for i in 0..self.func.inst_operands(inst).len() {
                         // don't borrow `self`
-                        let operand = self.func.inst_operands(inst)[i];
+                        let operand = operand_rewrites
+                            .get(&i)
+                            .cloned()
+                            .unwrap_or(self.func.inst_operands(inst)[i]);
                         let pos = match (operand.kind(), operand.pos()) {
                             (OperandKind::Mod, _) => ProgPoint::before(inst),
                             (OperandKind::Def, OperandPos::Early) => ProgPoint::before(inst),
@@ -1286,6 +1387,7 @@ impl<'a, F: Function> Env<'a, F> {
                         to_slot: u.slot,
                         to_preg: preg_idx,
                         vreg: vreg_idx,
+                        level: FixedRegFixupLevel::Secondary,
                     });
                     u.operand = Operand::new(
                         u.operand.vreg(),
diff --git a/src/ion/moves.rs b/src/ion/moves.rs
index 15ceab4..3da42fe 100644
--- a/src/ion/moves.rs
+++ b/src/ion/moves.rs
@@ -17,7 +17,7 @@ use super::{
     VRegIndex, SLOT_NONE,
 };
 use crate::ion::data_structures::{
-    BlockparamIn, BlockparamOut, CodeRange, LiveRangeKey, PosWithPrio,
+    BlockparamIn, BlockparamOut, CodeRange, FixedRegFixupLevel, LiveRangeKey, PosWithPrio,
 };
 use crate::ion::reg_traversal::RegTraversalIter;
 use crate::moves::{MoveAndScratchResolver, ParallelMoves};
@@ -749,9 +749,13 @@ impl<'a, F: Function> Env<'a, F> {
                 to_alloc,
                 fixup.vreg.index(),
             );
+            let prio = match fixup.level {
+                FixedRegFixupLevel::Initial => InsertMovePrio::MultiFixedRegInitial,
+                FixedRegFixupLevel::Secondary => InsertMovePrio::MultiFixedRegSecondary,
+            };
             self.insert_move(
                 fixup.pos,
-                InsertMovePrio::MultiFixedReg,
+                prio,
                 from_alloc,
                 to_alloc,
                 Some(self.vreg(fixup.vreg)),
diff --git a/src/moves.rs b/src/moves.rs
index 26d5d55..3bc222b 100644
--- a/src/moves.rs
+++ b/src/moves.rs
@@ -368,17 +368,17 @@ where

         // Now, find a scratch allocation in order to resolve cycles.
         let scratch = (self.find_free_reg)().unwrap_or_else(|| (self.get_stackslot)());
-        log::trace!("scratch resolver: scratch alloc {:?}", scratch);
+        trace!("scratch resolver: scratch alloc {:?}", scratch);
         let moves = moves.with_scratch(scratch);
         for &(src, dst, data) in &moves {
             // Do we have a stack-to-stack move? If so, resolve.
             if src.is_stack() && dst.is_stack() {
-                log::trace!("scratch resolver: stack to stack: {:?} -> {:?}", src, dst);
+                trace!("scratch resolver: stack to stack: {:?} -> {:?}", src, dst);
                 // Lazily allocate a stack-to-stack scratch.
                 if self.stack_stack_scratch_reg.is_none() {
                     if let Some(reg) = (self.find_free_reg)() {
-                        log::trace!(
+                        trace!(
                             "scratch resolver: have free stack-to-stack scratch preg: {:?}",
                             reg
                         );
@@ -386,7 +386,7 @@ where
                     } else {
                         self.stack_stack_scratch_reg = Some(Allocation::reg(self.victim));
                         self.stack_stack_scratch_reg_save = Some((self.get_stackslot)());
-                        log::trace!("scratch resolver: stack-to-stack using victim {:?} with save stackslot {:?}",
+                        trace!("scratch resolver: stack-to-stack using victim {:?} with save stackslot {:?}",
                             self.stack_stack_scratch_reg,
                             self.stack_stack_scratch_reg_save);
                     }
@@ -422,7 +422,7 @@ where
             }
         }

-        log::trace!("scratch resolver: got {:?}", result);
+        trace!("scratch resolver: got {:?}", result);
         result
     }
 }
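
Illustrative example (not part of the patch): a minimal sketch of the operand shape that the new preprocessing in src/ion/liveranges.rs supports, namely a fixed-reg use at the Early point paired with a fixed-reg def of the *same* preg at the Late point. The function name, vreg numbers, and preg index below are assumptions chosen for illustration; the sketch relies only on the public regalloc2 Operand/PReg/VReg API already used elsewhere in this diff.

```rust
use regalloc2::{Operand, OperandConstraint, OperandKind, OperandPos, PReg, RegClass, VReg};

/// Build the operand pair for a hypothetical div-like instruction whose
/// input must live in a specific preg at Early and whose result is
/// defined into that same preg at Late.
fn early_use_late_def_operands() -> [Operand; 2] {
    // Illustrative vregs and preg; real clients would use their own numbering.
    let input = VReg::new(0, RegClass::Int);
    let output = VReg::new(1, RegClass::Int);
    let fixed = PReg::new(0, RegClass::Int);

    [
        // Use of `input`, pinned to `fixed` at the Early point.
        Operand::new(
            input,
            OperandConstraint::FixedReg(fixed),
            OperandKind::Use,
            OperandPos::Early,
        ),
        // Def of `output`, pinned to the same preg at the Late point.
        // The preprocessing pass added above rewrites the use to an `Any`
        // constraint, records an `Initial`-level fixup move, and shifts the
        // def's constraint to Early so the register stays reserved across
        // the whole instruction (see the comment referencing #53).
        Operand::new(
            output,
            OperandConstraint::FixedReg(fixed),
            OperandKind::Def,
            OperandPos::Late,
        ),
    ]
}
```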