Replace assert! with debug_assert! in production code paths.
This allows the assertions to be disabled in release builds, making the code faster and smaller at the expense of skipping the checks. Assertions can be re-enabled in release builds via the debug-assertions flag in Cargo.toml, as the top-level Cargo.toml file in this repository already does.
This commit is contained in:
@@ -238,7 +238,7 @@ impl<'a> Context<'a> {
|
||||
// 1. It is defined in a dominating EBB and live-in to `ebb`.
|
||||
// 2. It is itself a parameter value for `ebb`. This case should already have been
|
||||
// eliminated by `isolate_conflicting_params()`.
|
||||
assert!(
|
||||
debug_assert!(
|
||||
lr.def() != ebb.into(),
|
||||
"{} parameter {} was missed by isolate_conflicting_params()",
|
||||
ebb,
|
||||
@@ -494,8 +494,8 @@ impl<'a> Context<'a> {
|
||||
// Second everything else in reverse layout order. Again, short forward branches get merged
|
||||
// first. There can also be backwards branches mixed in here, though, as long as they are
|
||||
// not loop backedges.
|
||||
assert!(self.predecessors.is_empty());
|
||||
assert!(self.backedges.is_empty());
|
||||
debug_assert!(self.predecessors.is_empty());
|
||||
debug_assert!(self.backedges.is_empty());
|
||||
for (pred_ebb, pred_inst) in self.cfg.pred_iter(ebb) {
|
||||
if self.preorder.dominates(ebb, pred_ebb) {
|
||||
self.backedges.push(pred_inst);
|
||||
@@ -957,7 +957,8 @@ impl VirtualCopies {
|
||||
|
||||
/// Indicate that `param` is now fully merged.
|
||||
pub fn merged_param(&mut self, param: Value, func: &Function) {
|
||||
assert_eq!(self.params.pop(), Some(param));
|
||||
let popped = self.params.pop();
|
||||
debug_assert_eq!(popped, Some(param));
|
||||
|
||||
// The domtree pre-order in `self.params` guarantees that all parameters defined at the
|
||||
// same EBB will be adjacent. This means we can see when all parameters at an EBB have been
|
||||
|
||||
@@ -246,7 +246,7 @@ impl<'a> Context<'a> {
|
||||
/// Return the set of remaining allocatable registers after filtering out the dead arguments.
|
||||
fn color_entry_params(&mut self, args: &[LiveValue]) -> AvailableRegs {
|
||||
let sig = &self.cur.func.signature;
|
||||
assert_eq!(sig.params.len(), args.len());
|
||||
debug_assert_eq!(sig.params.len(), args.len());
|
||||
|
||||
let mut regs = AvailableRegs::new(&self.usable_regs);
|
||||
|
||||
@@ -271,7 +271,7 @@ impl<'a> Context<'a> {
|
||||
|
||||
}
|
||||
// The spiller will have assigned an incoming stack slot already.
|
||||
Affinity::Stack => assert!(abi.location.is_stack()),
|
||||
Affinity::Stack => debug_assert!(abi.location.is_stack()),
|
||||
// This is a ghost value, unused in the function. Don't assign it to a location
|
||||
// either.
|
||||
Affinity::None => {}
|
||||
@@ -340,7 +340,7 @@ impl<'a> Context<'a> {
|
||||
} else {
|
||||
// This is a multi-way branch like `br_table`. We only support arguments on
|
||||
// single-destination branches.
|
||||
assert_eq!(
|
||||
debug_assert_eq!(
|
||||
self.cur.func.dfg.inst_variable_args(inst).len(),
|
||||
0,
|
||||
"Can't handle EBB arguments: {}",
|
||||
@@ -586,7 +586,7 @@ impl<'a> Context<'a> {
|
||||
// Now handle the EBB arguments.
|
||||
let br_args = self.cur.func.dfg.inst_variable_args(inst);
|
||||
let dest_args = self.cur.func.dfg.ebb_params(dest);
|
||||
assert_eq!(br_args.len(), dest_args.len());
|
||||
debug_assert_eq!(br_args.len(), dest_args.len());
|
||||
for (&dest_arg, &br_arg) in dest_args.iter().zip(br_args) {
|
||||
// The first time we encounter a branch to `dest`, we get to pick the location. The
|
||||
// following times we see a branch to `dest`, we must follow suit.
|
||||
@@ -631,7 +631,7 @@ impl<'a> Context<'a> {
|
||||
fn color_ebb_params(&mut self, inst: Inst, dest: Ebb) {
|
||||
let br_args = self.cur.func.dfg.inst_variable_args(inst);
|
||||
let dest_args = self.cur.func.dfg.ebb_params(dest);
|
||||
assert_eq!(br_args.len(), dest_args.len());
|
||||
debug_assert_eq!(br_args.len(), dest_args.len());
|
||||
for (&dest_arg, &br_arg) in dest_args.iter().zip(br_args) {
|
||||
match self.cur.func.locations[dest_arg] {
|
||||
ValueLoc::Unassigned => {
|
||||
@@ -741,7 +741,7 @@ impl<'a> Context<'a> {
|
||||
// It's technically possible for a call instruction to have fixed results before the
|
||||
// variable list of results, but we have no known instances of that.
|
||||
// Just assume all results are variable return values.
|
||||
assert_eq!(defs.len(), self.cur.func.dfg.signatures[sig].returns.len());
|
||||
debug_assert_eq!(defs.len(), self.cur.func.dfg.signatures[sig].returns.len());
|
||||
for (i, lv) in defs.iter().enumerate() {
|
||||
let abi = self.cur.func.dfg.signatures[sig].returns[i];
|
||||
if let ArgumentLoc::Reg(reg) = abi.location {
|
||||
@@ -787,7 +787,7 @@ impl<'a> Context<'a> {
|
||||
}
|
||||
|
||||
let ok = self.solver.add_fixed_output(rc, reg);
|
||||
assert!(ok, "Couldn't clear fixed output interference for {}", value);
|
||||
debug_assert!(ok, "Couldn't clear fixed output interference for {}", value);
|
||||
}
|
||||
self.cur.func.locations[value] = ValueLoc::Reg(reg);
|
||||
}
|
||||
@@ -858,11 +858,8 @@ impl<'a> Context<'a> {
|
||||
Ok(regs) => return regs,
|
||||
Err(SolverError::Divert(rc)) => {
|
||||
// Do we have any live-through `rc` registers that are not already variables?
|
||||
assert!(
|
||||
self.try_add_var(rc, throughs),
|
||||
"Ran out of registers in {}",
|
||||
rc
|
||||
);
|
||||
let added = self.try_add_var(rc, throughs);
|
||||
debug_assert!(added, "Ran out of registers in {}", rc);
|
||||
}
|
||||
Err(SolverError::Global(value)) => {
|
||||
dbg!("Not enough global registers for {}, trying as local", value);
|
||||
@@ -941,7 +938,7 @@ impl<'a> Context<'a> {
|
||||
// It is very unlikely (impossible?) that we would need more than one spill per top-level
|
||||
// register class, so avoid allocation by using a fixed array here.
|
||||
let mut slot = [PackedOption::default(); 8];
|
||||
assert!(spills <= slot.len(), "Too many spills ({})", spills);
|
||||
debug_assert!(spills <= slot.len(), "Too many spills ({})", spills);
|
||||
|
||||
for m in self.solver.moves() {
|
||||
match *m {
|
||||
|
||||
@@ -207,7 +207,7 @@ impl LiveValueTracker {
|
||||
let first_arg = self.live.values.len();
|
||||
for &value in dfg.ebb_params(ebb) {
|
||||
let lr = &liveness[value];
|
||||
assert_eq!(lr.def(), ebb.into());
|
||||
debug_assert_eq!(lr.def(), ebb.into());
|
||||
match lr.def_local_end().into() {
|
||||
ExpandedProgramPoint::Inst(endpoint) => {
|
||||
self.live.push(value, endpoint, lr);
|
||||
@@ -215,7 +215,7 @@ impl LiveValueTracker {
|
||||
ExpandedProgramPoint::Ebb(local_ebb) => {
|
||||
// This is a dead EBB parameter which is not even live into the first
|
||||
// instruction in the EBB.
|
||||
assert_eq!(
|
||||
debug_assert_eq!(
|
||||
local_ebb,
|
||||
ebb,
|
||||
"EBB parameter live range ends at wrong EBB header"
|
||||
@@ -273,7 +273,7 @@ impl LiveValueTracker {
|
||||
let first_def = self.live.values.len();
|
||||
for &value in dfg.inst_results(inst) {
|
||||
let lr = &liveness[value];
|
||||
assert_eq!(lr.def(), inst.into());
|
||||
debug_assert_eq!(lr.def(), inst.into());
|
||||
match lr.def_local_end().into() {
|
||||
ExpandedProgramPoint::Inst(endpoint) => {
|
||||
self.live.push(value, endpoint, lr);
|
||||
|
||||
@@ -251,7 +251,7 @@ fn extend_to_use(
|
||||
forest: &mut LiveRangeForest,
|
||||
) {
|
||||
// This is our scratch working space, and we'll leave it empty when we return.
|
||||
assert!(worklist.is_empty());
|
||||
debug_assert!(worklist.is_empty());
|
||||
|
||||
// Extend the range locally in `ebb`.
|
||||
// If there already was a live interval in that block, we're done.
|
||||
@@ -338,7 +338,7 @@ impl Liveness {
|
||||
let old = self.ranges.insert(
|
||||
LiveRange::new(value, def.into(), affinity),
|
||||
);
|
||||
assert!(old.is_none(), "{} already has a live range", value);
|
||||
debug_assert!(old.is_none(), "{} already has a live range", value);
|
||||
}
|
||||
|
||||
/// Move the definition of `value` to `def`.
|
||||
@@ -367,7 +367,7 @@ impl Liveness {
|
||||
debug_assert_eq!(Some(ebb), layout.inst_ebb(user));
|
||||
let lr = self.ranges.get_mut(value).expect("Value has no live range");
|
||||
let livein = lr.extend_in_ebb(ebb, user, layout, &mut self.forest);
|
||||
assert!(!livein, "{} should already be live in {}", value, ebb);
|
||||
debug_assert!(!livein, "{} should already be live in {}", value, ebb);
|
||||
&mut lr.affinity
|
||||
}
|
||||
|
||||
|
||||
@@ -253,7 +253,7 @@ impl<PO: ProgramOrder> GenLiveRange<PO> {
|
||||
order.cmp(to, self.def_begin) != Ordering::Less
|
||||
{
|
||||
let to_pp = to.into();
|
||||
assert_ne!(
|
||||
debug_assert_ne!(
|
||||
to_pp,
|
||||
self.def_begin,
|
||||
"Can't use value in the defining instruction."
|
||||
|
||||
@@ -145,7 +145,7 @@ impl<'a> Context<'a> {
|
||||
);
|
||||
|
||||
if self.cur.func.layout.entry_block() == Some(ebb) {
|
||||
assert_eq!(liveins.len(), 0);
|
||||
debug_assert_eq!(liveins.len(), 0);
|
||||
self.visit_entry_params(ebb, args);
|
||||
} else {
|
||||
self.visit_ebb_params(ebb, args);
|
||||
@@ -155,7 +155,7 @@ impl<'a> Context<'a> {
|
||||
/// Visit the parameters on the entry block.
|
||||
/// These values have ABI constraints from the function signature.
|
||||
fn visit_entry_params(&mut self, ebb: Ebb, args: &[LiveValue]) {
|
||||
assert_eq!(self.cur.func.signature.params.len(), args.len());
|
||||
debug_assert_eq!(self.cur.func.signature.params.len(), args.len());
|
||||
self.cur.goto_first_inst(ebb);
|
||||
|
||||
for (arg_idx, arg) in args.iter().enumerate() {
|
||||
@@ -175,7 +175,7 @@ impl<'a> Context<'a> {
|
||||
}
|
||||
}
|
||||
ArgumentLoc::Stack(_) => {
|
||||
assert!(arg.affinity.is_stack());
|
||||
debug_assert!(arg.affinity.is_stack());
|
||||
}
|
||||
ArgumentLoc::Unassigned => panic!("Unexpected ABI location"),
|
||||
}
|
||||
@@ -203,7 +203,7 @@ impl<'a> Context<'a> {
|
||||
);
|
||||
|
||||
// Identify reload candidates.
|
||||
assert!(self.candidates.is_empty());
|
||||
debug_assert!(self.candidates.is_empty());
|
||||
self.find_candidates(inst, constraints);
|
||||
|
||||
// Insert fill instructions before `inst` and replace `cand.value` with the filled value.
|
||||
@@ -375,7 +375,7 @@ fn handle_abi_args(
|
||||
isa: &TargetIsa,
|
||||
liveness: &Liveness,
|
||||
) {
|
||||
assert_eq!(abi_types.len(), var_args.len());
|
||||
debug_assert_eq!(abi_types.len(), var_args.len());
|
||||
for ((abi, &arg), argidx) in abi_types.iter().zip(var_args).zip(offset..) {
|
||||
if abi.location.is_reg() {
|
||||
let lv = liveness.get(arg).expect("Missing live range for ABI arg");
|
||||
|
||||
@@ -565,7 +565,7 @@ impl Solver {
|
||||
dbg!("-> converting variable {} to a fixed constraint", v);
|
||||
// The spiller is responsible for ensuring that all constraints on the uses of a
|
||||
// value are compatible.
|
||||
assert!(
|
||||
debug_assert!(
|
||||
v.constraint.contains(to),
|
||||
"Incompatible constraints for {}",
|
||||
value
|
||||
@@ -665,7 +665,7 @@ impl Solver {
|
||||
// No variable, then it must be a fixed reassignment.
|
||||
if let Some(a) = self.assignments.get(value) {
|
||||
dbg!("-> already fixed assignment {}", a);
|
||||
assert!(
|
||||
debug_assert!(
|
||||
constraint.contains(a.to),
|
||||
"Incompatible constraints for {}",
|
||||
value
|
||||
@@ -708,7 +708,7 @@ impl Solver {
|
||||
/// Call this method to indicate that there will be no more fixed input reassignments added
|
||||
/// and prepare for the output side constraints.
|
||||
pub fn inputs_done(&mut self) {
|
||||
assert!(!self.has_fixed_input_conflicts());
|
||||
debug_assert!(!self.has_fixed_input_conflicts());
|
||||
|
||||
// At this point, `regs_out` contains the `to` side of the input reassignments, and the
|
||||
// `from` side has already been marked as available in `regs_in`.
|
||||
@@ -746,7 +746,7 @@ impl Solver {
|
||||
// interference constraints on the output side.
|
||||
// Variables representing tied operands will get their `is_output` flag set again later.
|
||||
if let Some(v) = self.vars.iter_mut().find(|v| v.value == value) {
|
||||
assert!(v.is_input);
|
||||
debug_assert!(v.is_input);
|
||||
v.is_output = false;
|
||||
return;
|
||||
}
|
||||
@@ -782,7 +782,7 @@ impl Solver {
|
||||
|
||||
// Check if a variable was created.
|
||||
if let Some(v) = self.vars.iter_mut().find(|v| v.value == value) {
|
||||
assert!(v.is_input);
|
||||
debug_assert!(v.is_input);
|
||||
v.is_output = true;
|
||||
v.is_global = is_global;
|
||||
return None;
|
||||
@@ -1026,7 +1026,7 @@ impl Solver {
|
||||
/// Returns the number of spills that had to be emitted.
|
||||
pub fn schedule_moves(&mut self, regs: &AllocatableSet) -> usize {
|
||||
self.collect_moves();
|
||||
assert!(self.fills.is_empty());
|
||||
debug_assert!(self.fills.is_empty());
|
||||
|
||||
let mut num_spill_slots = 0;
|
||||
let mut avail = regs.clone();
|
||||
|
||||
@@ -242,7 +242,7 @@ impl<'a> Context<'a> {
|
||||
debug_assert_eq!(self.cur.current_ebb(), Some(ebb));
|
||||
|
||||
// We may need to resolve register constraints if there are any noteworthy uses.
|
||||
assert!(self.reg_uses.is_empty());
|
||||
debug_assert!(self.reg_uses.is_empty());
|
||||
self.collect_reg_uses(inst, ebb, constraints);
|
||||
|
||||
// Calls usually have fixed register uses.
|
||||
|
||||
@@ -140,7 +140,7 @@ impl VirtRegs {
|
||||
func: &Function,
|
||||
preorder: &DominatorTreePreorder,
|
||||
) -> VirtReg {
|
||||
assert_eq!(self.get(single), None, "Expected singleton {}", single);
|
||||
debug_assert_eq!(self.get(single), None, "Expected singleton {}", single);
|
||||
|
||||
// Make sure `big` has a vreg.
|
||||
let vreg = self.get(big).unwrap_or_else(|| {
|
||||
@@ -208,7 +208,7 @@ impl VirtRegs {
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(
|
||||
debug_assert_eq!(
|
||||
values.len(),
|
||||
singletons + cleared,
|
||||
"Can't unify partial virtual registers"
|
||||
|
||||
Reference in New Issue
Block a user