Merge pull request from GHSA-5fhj-g3p3-pq9g

* Improve cranelift disassembly of stack maps

Print out extra information about stack maps, such as their contents and
other related metadata. Additionally, print out addresses in hex so that
they line up with the disassembly that is otherwise printed.
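
A rough illustration of the idea, with a hypothetical stack-map shape
(Cranelift's actual types and printed format differ):

    // Purely illustrative: `StackMapEntry` and its fields are made up here.
    struct StackMapEntry {
        code_offset: u32,
        live_slots: Vec<u32>,
    }

    fn print_stack_map(entry: &StackMapEntry) {
        // `{:#x}` prints `0x...`-style values, matching the hex addresses
        // used elsewhere in the disassembly output.
        println!("stack map at offset {:#x}:", entry.code_offset);
        for slot in &entry.live_slots {
            println!("  live gc ref in stack slot {:#x}", slot);
        }
    }

    fn main() {
        print_stack_map(&StackMapEntry { code_offset: 0x40, live_slots: vec![0x0, 0x8] });
    }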

* Improve the `table_ops` fuzzer

* Generate more instructions by default
* Fix negative indices appearing in `table.{get,set}`
* Assert that any traps raised are the expected ones, so that other
  accidental errors cannot be reported as a fuzzing success (a rough
  sketch of this check follows this list).
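
A minimal sketch of that assertion, assuming a helper in the fuzzing
oracle that receives the result of running a generated `table_ops`
module; the real oracle's types and the exact trap it expects differ:

    // Hypothetical check: only the failure the fuzzer deliberately provokes
    // (here assumed to be running out of fuel) counts as a normal end of
    // execution; any other error must fail the run rather than be scored as
    // a fuzzing success.
    fn check_table_ops_result(result: Result<(), String>) {
        match result {
            Ok(()) => {}
            Err(msg) if msg.contains("out of fuel") => {
                // Expected: generated modules run with a limited amount of fuel.
            }
            Err(msg) => panic!("unexpected error from table_ops module: {msg}"),
        }
    }

    fn main() {
        // e.g. a run that ended by exhausting its fuel is fine:
        check_table_ops_result(Err("all fuel consumed: out of fuel".to_string()));
    }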

* Fix `reftype_vregs` reported to `regalloc2`

This fixes a mistake in the register allocation of Cranelift functions
where, when vreg aliases are in play, functions using reference-typed
arguments incorrectly reported which virtual registers hold
reference-typed values. The fix here is to apply the vreg aliases to the
final list of reftyped regs which is eventually passed to `regalloc2`.

The main consequence of this fix is that functions which previously and
accidentally lacked correct stack maps should now have the stack maps
they were missing.
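
In other words, the missing step was to resolve aliases on the reftyped
list before handing it off. A standalone sketch of the idea, using a
simplified alias map and plain integers in place of Cranelift's `VReg`
type:

    use std::collections::HashMap;

    // Simplified stand-ins for Cranelift's virtual registers and alias map.
    type VReg = u32;

    // Follow alias links until reaching a vreg that is not itself aliased,
    // mirroring what `resolve_vreg_alias_impl` does in the patch below.
    fn resolve_alias(aliases: &HashMap<VReg, VReg>, mut vreg: VReg) -> VReg {
        while let Some(&target) = aliases.get(&vreg) {
            vreg = target;
        }
        vreg
    }

    fn main() {
        // v5 is an alias of v2: instruction operands already refer to v2.
        let aliases: HashMap<VReg, VReg> = [(5, 2)].into_iter().collect();

        // Before the fix the reftyped list could still name v5, so regalloc2
        // never matched it against any instruction operand and emitted no
        // stack-map entry for the value.
        let mut reftyped_vregs: Vec<VReg> = vec![5];

        // The fix: resolve aliases on the list before handing it to regalloc2.
        for vreg in reftyped_vregs.iter_mut() {
            *vreg = resolve_alias(&aliases, *vreg);
        }
        assert_eq!(reftyped_vregs, vec![2]);
    }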

* Add a test that `table_ops` gc's eventually

* Add a comment about new alias resolution

* Update crates/fuzzing/src/oracles.rs

Co-authored-by: Nick Fitzgerald <fitzgen@gmail.com>

* Add some comments

Co-authored-by: Nick Fitzgerald <fitzgen@gmail.com>
Author: Alex Crichton
Date: 2022-07-20 11:52:23 -05:00
Committed by: GitHub
Parent: 839c4cce17
Commit: 2154c63de9
4 changed files with 143 additions and 26 deletions


@@ -598,6 +598,15 @@ impl<I: VCodeInst> VCodeBuilder<I> {
             self.reverse_and_finalize();
         }
         self.collect_operands();
+
+        // Apply register aliases to the `reftyped_vregs` list since this list
+        // will be returned directly to `regalloc2` eventually and all
+        // operands/results of instructions will use the alias-resolved vregs
+        // from `regalloc2`'s perspective.
+        for reg in self.vcode.reftyped_vregs.iter_mut() {
+            *reg = Self::resolve_vreg_alias_impl(&self.vcode.vreg_aliases, *reg);
+        }
+
         self.compute_preds_from_succs();
         self.vcode.debug_value_labels.sort_unstable();
         self.vcode
@@ -1120,6 +1129,29 @@ impl<I: VCodeInst> VCode<I> {
     pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
         self.block_order.lowered_order()[block.index()].orig_block()
     }
+
+    #[inline]
+    fn assert_no_vreg_aliases<'a>(&self, list: &'a [VReg]) -> &'a [VReg] {
+        for vreg in list {
+            self.assert_not_vreg_alias(*vreg);
+        }
+        list
+    }
+
+    #[inline]
+    fn assert_not_vreg_alias(&self, vreg: VReg) -> VReg {
+        debug_assert!(VCodeBuilder::<I>::resolve_vreg_alias_impl(&self.vreg_aliases, vreg) == vreg);
+        vreg
+    }
+
+    #[inline]
+    fn assert_operand_not_vreg_alias(&self, op: Operand) -> Operand {
+        // It should be true by construction that `Operand`s do not contain any
+        // aliased vregs since they're all collected and mapped when the VCode
+        // is itself constructed.
+        self.assert_not_vreg_alias(op.vreg());
+        op
+    }
 }
 
 impl<I: VCodeInst> RegallocFunction for VCode<I> {
@@ -1152,7 +1184,10 @@ impl<I: VCodeInst> RegallocFunction for VCode<I> {
     fn block_params(&self, block: BlockIndex) -> &[VReg] {
         let (start, end) = self.block_params_range[block.index()];
-        &self.block_params[start as usize..end as usize]
+        let ret = &self.block_params[start as usize..end as usize];
+        // Currently block params are never aliased to another vreg, but
+        // double-check just to be sure.
+        self.assert_no_vreg_aliases(ret)
     }
 
     fn branch_blockparams(&self, block: BlockIndex, _insn: InsnIndex, succ_idx: usize) -> &[VReg] {
@@ -1160,7 +1195,9 @@ impl<I: VCodeInst> RegallocFunction for VCode<I> {
         let succ_ranges =
             &self.branch_block_arg_range[succ_range_start as usize..succ_range_end as usize];
         let (branch_block_args_start, branch_block_args_end) = succ_ranges[succ_idx];
-        &self.branch_block_args[branch_block_args_start as usize..branch_block_args_end as usize]
+        let ret = &self.branch_block_args
+            [branch_block_args_start as usize..branch_block_args_end as usize];
+        self.assert_no_vreg_aliases(ret)
     }
 
     fn is_ret(&self, insn: InsnIndex) -> bool {
@@ -1182,12 +1219,20 @@ impl<I: VCodeInst> RegallocFunction for VCode<I> {
     }
 
     fn is_move(&self, insn: InsnIndex) -> Option<(Operand, Operand)> {
-        self.is_move.get(&insn).cloned()
+        let (a, b) = self.is_move.get(&insn)?;
+        Some((
+            self.assert_operand_not_vreg_alias(*a),
+            self.assert_operand_not_vreg_alias(*b),
+        ))
     }
 
     fn inst_operands(&self, insn: InsnIndex) -> &[Operand] {
         let (start, end) = self.operand_ranges[insn.index()];
-        &self.operands[start as usize..end as usize]
+        let ret = &self.operands[start as usize..end as usize];
+        for op in ret {
+            self.assert_operand_not_vreg_alias(*op);
+        }
+        ret
     }
 
     fn inst_clobbers(&self, insn: InsnIndex) -> PRegSet {
@@ -1199,10 +1244,16 @@ impl<I: VCodeInst> RegallocFunction for VCode<I> {
     }
 
     fn reftype_vregs(&self) -> &[VReg] {
-        &self.reftyped_vregs[..]
+        self.assert_no_vreg_aliases(&self.reftyped_vregs[..])
     }
 
     fn debug_value_labels(&self) -> &[(VReg, InsnIndex, InsnIndex, u32)] {
+        // VRegs here are inserted into `debug_value_labels` after code is
+        // generated and aliases are fully defined, so double-check that
+        // aliases are not lingering.
+        for (vreg, ..) in self.debug_value_labels.iter() {
+            self.assert_not_vreg_alias(*vreg);
+        }
         &self.debug_value_labels[..]
     }