diff --git a/cranelift/codegen/src/machinst/vcode.rs b/cranelift/codegen/src/machinst/vcode.rs
index a29945383a..f9ef6670f8 100644
--- a/cranelift/codegen/src/machinst/vcode.rs
+++ b/cranelift/codegen/src/machinst/vcode.rs
@@ -598,6 +598,15 @@ impl<I: VCodeInst> VCodeBuilder<I> {
             self.reverse_and_finalize();
         }
         self.collect_operands();
+
+        // Apply register aliases to the `reftyped_vregs` list since this list
+        // will be returned directly to `regalloc2` eventually and all
+        // operands/results of instructions will use the alias-resolved vregs
+        // from `regalloc2`'s perspective.
+        for reg in self.vcode.reftyped_vregs.iter_mut() {
+            *reg = Self::resolve_vreg_alias_impl(&self.vcode.vreg_aliases, *reg);
+        }
+
         self.compute_preds_from_succs();
         self.vcode.debug_value_labels.sort_unstable();
         self.vcode
@@ -1120,6 +1129,29 @@ impl<I: VCodeInst> VCode<I> {
     pub fn bindex_to_bb(&self, block: BlockIndex) -> Option<ir::Block> {
         self.block_order.lowered_order()[block.index()].orig_block()
     }
+
+    #[inline]
+    fn assert_no_vreg_aliases<'a>(&self, list: &'a [VReg]) -> &'a [VReg] {
+        for vreg in list {
+            self.assert_not_vreg_alias(*vreg);
+        }
+        list
+    }
+
+    #[inline]
+    fn assert_not_vreg_alias(&self, vreg: VReg) -> VReg {
+        debug_assert!(VCodeBuilder::<I>::resolve_vreg_alias_impl(&self.vreg_aliases, vreg) == vreg);
+        vreg
+    }
+
+    #[inline]
+    fn assert_operand_not_vreg_alias(&self, op: Operand) -> Operand {
+        // It should be true by construction that `Operand`s do not contain any
+        // aliased vregs since they're all collected and mapped when the VCode
+        // is itself constructed.
+        self.assert_not_vreg_alias(op.vreg());
+        op
+    }
 }
 
 impl<I: VCodeInst> RegallocFunction for VCode<I> {
@@ -1152,7 +1184,10 @@
 
     fn block_params(&self, block: BlockIndex) -> &[VReg] {
         let (start, end) = self.block_params_range[block.index()];
-        &self.block_params[start as usize..end as usize]
+        let ret = &self.block_params[start as usize..end as usize];
+        // Currently block params are never aliased to another vreg, but
+        // double-check just to be sure.
+        self.assert_no_vreg_aliases(ret)
     }
 
     fn branch_blockparams(&self, block: BlockIndex, _insn: InsnIndex, succ_idx: usize) -> &[VReg] {
@@ -1160,7 +1195,9 @@
         let succ_ranges =
            &self.branch_block_arg_range[succ_range_start as usize..succ_range_end as usize];
         let (branch_block_args_start, branch_block_args_end) = succ_ranges[succ_idx];
-        &self.branch_block_args[branch_block_args_start as usize..branch_block_args_end as usize]
+        let ret = &self.branch_block_args
+            [branch_block_args_start as usize..branch_block_args_end as usize];
+        self.assert_no_vreg_aliases(ret)
     }
 
     fn is_ret(&self, insn: InsnIndex) -> bool {
@@ -1182,12 +1219,20 @@
     }
 
     fn is_move(&self, insn: InsnIndex) -> Option<(Operand, Operand)> {
-        self.is_move.get(&insn).cloned()
+        let (a, b) = self.is_move.get(&insn)?;
+        Some((
+            self.assert_operand_not_vreg_alias(*a),
+            self.assert_operand_not_vreg_alias(*b),
+        ))
     }
 
     fn inst_operands(&self, insn: InsnIndex) -> &[Operand] {
         let (start, end) = self.operand_ranges[insn.index()];
-        &self.operands[start as usize..end as usize]
+        let ret = &self.operands[start as usize..end as usize];
+        for op in ret {
+            self.assert_operand_not_vreg_alias(*op);
+        }
+        ret
     }
 
     fn inst_clobbers(&self, insn: InsnIndex) -> PRegSet {
@@ -1199,10 +1244,16 @@
     }
 
     fn reftype_vregs(&self) -> &[VReg] {
-        &self.reftyped_vregs[..]
+        self.assert_no_vreg_aliases(&self.reftyped_vregs[..])
     }
 
     fn debug_value_labels(&self) -> &[(VReg, InsnIndex, InsnIndex, u32)] {
+        // VRegs here are inserted into `debug_value_labels` after code is
+        // generated and aliases are fully defined, so double-check that no
+        // aliases are lingering.
+        for (vreg, ..) in self.debug_value_labels.iter() {
+            self.assert_not_vreg_alias(*vreg);
+        }
         &self.debug_value_labels[..]
     }
 
diff --git a/cranelift/src/disasm.rs b/cranelift/src/disasm.rs
index 63f5da3b83..c372707b4a 100644
--- a/cranelift/src/disasm.rs
+++ b/cranelift/src/disasm.rs
@@ -26,20 +26,39 @@ pub fn print_relocs(relocs: &[MachReloc]) -> String {
 pub fn print_traps(traps: &[MachTrap]) -> String {
     let mut text = String::new();
     for &MachTrap { offset, code } in traps {
-        writeln!(text, "trap: {} at {}", code, offset).unwrap();
+        writeln!(text, "trap: {code} at {offset:#x}").unwrap();
     }
     text
 }
 
 pub fn print_stack_maps(traps: &[MachStackMap]) -> String {
     let mut text = String::new();
-    for &MachStackMap {
+    for MachStackMap {
         offset,
-        offset_end: _,
-        stack_map: _,
+        offset_end,
+        stack_map,
     } in traps
     {
-        writeln!(text, "add_stack_map at {}", offset).unwrap();
+        writeln!(
+            text,
+            "add_stack_map at {offset:#x}-{offset_end:#x} mapped_words={}",
+            stack_map.mapped_words()
+        )
+        .unwrap();
+
+        write!(text, " entries: ").unwrap();
+        let mut first = true;
+        for i in 0..stack_map.mapped_words() {
+            if !stack_map.get_bit(i as usize) {
+                continue;
+            }
+            if !first {
+                write!(text, ", ").unwrap();
+            } else {
+                first = false;
+            }
+            write!(text, "{i}").unwrap();
+        }
     }
     text
 }
diff --git a/crates/fuzzing/src/generators/table_ops.rs b/crates/fuzzing/src/generators/table_ops.rs
index 6ee6eb401c..012eeade89 100644
--- a/crates/fuzzing/src/generators/table_ops.rs
+++ b/crates/fuzzing/src/generators/table_ops.rs
@@ -147,12 +147,7 @@ impl<'a> Arbitrary<'a> for TableOps {
         let mut stack = 0;
         let mut ops = vec![];
         let mut choices = vec![];
-        loop {
-            let keep_going = ops.len() < MAX_OPS && u.arbitrary().unwrap_or(false);
-            if !keep_going {
-                break;
-            }
-
+        while ops.len() < MAX_OPS && !u.is_empty() {
             ops.push(TableOp::arbitrary(u, &mut stack, &mut choices)?);
         }
 
@@ -216,8 +211,8 @@
     MakeRefs : 0 => 3,
     TakeRefs : 3 => 0,
 
-    TableGet(i32) : 0 => 1,
-    TableSet(i32) : 1 => 0,
+    TableGet(u32) : 0 => 1,
+    TableSet(u32) : 1 => 0,
 
     GlobalGet(u32) : 0 => 1,
     GlobalSet(u32) : 1 => 0,
@@ -237,7 +232,7 @@ impl TableOp {
 
         // Add one to make sure that out of bounds table accesses are possible,
        // but still rare.
-        let table_mod = table_size as i32 + 1;
+        let table_mod = table_size + 1;
 
         let gc_func_idx = 0;
         let take_refs_func_idx = 1;
@@ -256,12 +251,12 @@
                 func.instruction(&Instruction::Call(take_refs_func_idx));
             }
             Self::TableGet(x) => {
-                func.instruction(&Instruction::I32Const(x % table_mod));
+                func.instruction(&Instruction::I32Const((x % table_mod) as i32));
                 func.instruction(&Instruction::TableGet { table: 0 });
             }
             Self::TableSet(x) => {
                 func.instruction(&Instruction::LocalSet(scratch_local));
-                func.instruction(&Instruction::I32Const(x % table_mod));
+                func.instruction(&Instruction::I32Const((x % table_mod) as i32));
                 func.instruction(&Instruction::LocalGet(scratch_local));
                 func.instruction(&Instruction::TableSet { table: 0 });
             }
diff --git a/crates/fuzzing/src/oracles.rs b/crates/fuzzing/src/oracles.rs
index c4b6edbd7d..ae15eac8dd 100644
--- a/crates/fuzzing/src/oracles.rs
+++ b/crates/fuzzing/src/oracles.rs
@@ -541,10 +541,17 @@ pub fn spectest(mut fuzz_config: generators::Config, test: generators::SpecTest)
 }
 
 /// Execute a series of `table.get` and `table.set` operations.
-pub fn table_ops(mut fuzz_config: generators::Config, ops: generators::table_ops::TableOps) {
+///
+/// Returns the number of `gc` operations which occurred throughout the test
+/// case -- used to test below that gc happens reasonably soon and eventually.
+pub fn table_ops(
+    mut fuzz_config: generators::Config,
+    ops: generators::table_ops::TableOps,
+) -> usize {
     let expected_drops = Arc::new(AtomicUsize::new(ops.num_params as usize));
     let num_dropped = Arc::new(AtomicUsize::new(0));
 
+    let num_gcs = Arc::new(AtomicUsize::new(0));
     {
         fuzz_config.wasmtime.consume_fuel = true;
         let mut store = fuzz_config.to_store();
@@ -554,7 +561,7 @@
         log_wasm(&wasm);
         let module = match compile_module(store.engine(), &wasm, false, &fuzz_config) {
             Some(m) => m,
-            None => return,
+            None => return 0,
         };
 
         let mut linker = Linker::new(store.engine());
@@ -563,7 +570,6 @@
         // test case.
         const MAX_GCS: usize = 5;
 
-        let num_gcs = AtomicUsize::new(0);
         linker
             .define(
                 "",
@@ -580,6 +586,7 @@
                     {
                         let num_dropped = num_dropped.clone();
                         let expected_drops = expected_drops.clone();
+                        let num_gcs = num_gcs.clone();
                         move |mut caller: Caller<'_, StoreLimits>, _params, results| {
                             log::info!("table_ops: GC");
                             if num_gcs.fetch_add(1, SeqCst) < MAX_GCS {
@@ -663,14 +670,33 @@
         let args: Vec<_> = (0..ops.num_params)
            .map(|_| Val::ExternRef(Some(ExternRef::new(CountDrops(num_dropped.clone())))))
            .collect();
-        let _ = run.call(&mut store, &args, &mut []);
+
+        // The generated function should always return a trap. The only two
+        // valid traps are the table-out-of-bounds trap, which happens through
+        // the generated `table.get` and `table.set` instructions, and an
+        // out-of-fuel trap. Any other error is unexpected and should fail fuzzing.
+        let trap = run
+            .call(&mut store, &args, &mut [])
+            .unwrap_err()
+            .downcast::<Trap>()
+            .unwrap();
+
+        match trap.trap_code() {
+            Some(TrapCode::TableOutOfBounds) => {}
+            None if trap
+                .to_string()
+                .contains("all fuel consumed by WebAssembly") => {}
+            _ => {
+                panic!("unexpected trap: {}", trap);
+            }
+        }
 
         // Do a final GC after running the Wasm.
         store.gc();
     }
 
     assert_eq!(num_dropped.load(SeqCst), expected_drops.load(SeqCst));
-    return;
+    return num_gcs.load(SeqCst);
 
     struct CountDrops(Arc<AtomicUsize>);
 
@@ -681,6 +707,32 @@
     }
 }
 
+// Test that the `table_ops` fuzzer eventually runs the gc function in the host.
+// We've historically had issues where this fuzzer accidentally wasn't fuzzing
+// anything for a long time so this is an attempt to prevent that from happening
+// again.
+#[test]
+fn table_ops_eventually_gcs() {
+    use arbitrary::Unstructured;
+    use rand::prelude::*;
+
+    let mut rng = SmallRng::seed_from_u64(0);
+    let mut buf = vec![0; 2048];
+    let n = 100;
+    for _ in 0..n {
+        rng.fill_bytes(&mut buf);
+        let u = Unstructured::new(&buf);
+
+        if let Ok((config, test)) = Arbitrary::arbitrary_take_rest(u) {
+            if table_ops(config, test) > 0 {
+                return;
+            }
+        }
+    }
+
+    panic!("after {n} runs nothing ever gc'd, something is probably wrong");
+}
+
 /// Perform differential execution between Cranelift and wasmi, diffing the
 /// resulting memory image when execution terminates. This relies on the
 /// module-under-test to be instrumented to bound the execution time. Invoke