wasmtime: Add lots of logging for externrefs and table_ops fuzz target (#4583)
I essentially add these same logs back in every time I'm debugging something related to this fuzz target or `externref`s in general; I've probably re-added roughly these logs five times by now. We should just make them available whenever we need them via `RUST_LOG=wasmtime_runtime=trace`. This also changes a couple `if let`s to `unwrap`s that are now infallible after
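These logs all go through the standard `log` facade, so they show up in whatever logger the embedding installs, filtered with the usual `RUST_LOG` syntax shown above. As a rough sketch of turning them on outside the fuzzer (this assumes a host program that uses the `env_logger` crate; the fuzz targets themselves may wire logging up differently):

    // Hypothetical host program; using `env_logger` is an assumption here.
    // Any `log`-compatible logger that honors RUST_LOG behaves the same way.
    fn main() {
        // Reads the RUST_LOG filter, e.g.:
        //   RUST_LOG=wasmtime_runtime=trace ./my-host
        env_logger::init();

        // ... create a wasmtime Engine/Store and run Wasm that uses externrefs ...
        // The externref and GC paths in the diff below then emit trace lines
        // such as "New externref data @ ..." and "begin GC trace".
    }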
@@ -1338,6 +1338,7 @@ impl<I: VCodeInst> MachBuffer<I> {
                 (start_offset, end_offset)
             }
         };
+        trace!("Adding stack map for offsets {start:#x}..{end:#x}");
         self.stack_maps.push(MachStackMap {
             offset: start,
             offset_end: end,

@@ -595,13 +595,16 @@ pub fn table_ops(
                     caller.gc();
                 }

+                let a = ExternRef::new(CountDrops(num_dropped.clone()));
+                let b = ExternRef::new(CountDrops(num_dropped.clone()));
+                let c = ExternRef::new(CountDrops(num_dropped.clone()));
+
+                log::info!("table_ops: make_refs() -> ({:p}, {:p}, {:p})", a, b, c);
+
                 expected_drops.fetch_add(3, SeqCst);
-                results[0] =
-                    Some(ExternRef::new(CountDrops(num_dropped.clone()))).into();
-                results[1] =
-                    Some(ExternRef::new(CountDrops(num_dropped.clone()))).into();
-                results[2] =
-                    Some(ExternRef::new(CountDrops(num_dropped.clone()))).into();
+                results[0] = Some(a).into();
+                results[1] = Some(b).into();
+                results[2] = Some(c).into();
                 Ok(())
             }
         },

@@ -613,7 +616,22 @@ pub fn table_ops(
         .func_wrap("", "take_refs", {
             let expected_drops = expected_drops.clone();
             move |a: Option<ExternRef>, b: Option<ExternRef>, c: Option<ExternRef>| {
-                log::info!("table_ops: take_refs");
+                log::info!(
+                    "table_ops: take_refs({}, {}, {})",
+                    a.as_ref().map_or_else(
+                        || format!("{:p}", std::ptr::null::<()>()),
+                        |r| format!("{:p}", *r)
+                    ),
+                    b.as_ref().map_or_else(
+                        || format!("{:p}", std::ptr::null::<()>()),
+                        |r| format!("{:p}", *r)
+                    ),
+                    c.as_ref().map_or_else(
+                        || format!("{:p}", std::ptr::null::<()>()),
+                        |r| format!("{:p}", *r)
+                    ),
+                );
+
                 // Do the assertion on each ref's inner data, even though it
                 // all points to the same atomic, so that if we happen to
                 // run into a use-after-free bug with one of these refs we

@@ -263,6 +263,8 @@ impl VMExternData {

     /// Drop the inner value and then free this `VMExternData` heap allocation.
     pub(crate) unsafe fn drop_and_dealloc(mut data: NonNull<VMExternData>) {
+        log::trace!("Dropping externref data @ {:p}", data);
+
         // Note: we introduce a block scope so that we drop the live
         // reference to the data before we free the heap allocation it
         // resides within after this block.

@@ -344,6 +346,7 @@ impl VMExternRef {
                 },
             );

+            log::trace!("New externref data @ {:p}", extern_data_ptr);
             VMExternRef(NonNull::new_unchecked(extern_data_ptr))
         }
     }

@@ -713,12 +716,15 @@ impl VMExternRefActivationsTable {
         root: NonNull<VMExternData>,
     ) {
         let root = unsafe { VMExternRef::clone_from_raw(root.as_ptr().cast()) };
+        log::trace!("Found externref on stack: {:p}", root);
         precise_stack_roots.insert(VMExternRefWithTraits(root));
     }

     /// Sweep the bump allocation table after we've discovered our precise stack
     /// roots.
     fn sweep(&mut self) {
+        log::trace!("begin GC sweep");
+
         // Sweep our bump chunk.
         let num_filled = self.num_filled_in_bump_chunk();
         unsafe {

@@ -767,6 +773,8 @@ impl VMExternRefActivationsTable {
         // destructors. Because of our `&mut` borrow above on this table,
         // though, we're guaranteed that nothing will touch this table.
         self.precise_stack_roots.clear();
+
+        log::trace!("end GC sweep");
     }

     /// Set whether it is okay to GC or not right now.

@@ -885,47 +893,73 @@ pub unsafe fn gc(
        });
    }

+   log::trace!("begin GC trace");
    Backtrace::trace(|frame| {
        let pc = frame.pc();
+       debug_assert!(pc != 0, "we should always get a valid PC for Wasm frames");
+
        let fp = frame.fp();
+       debug_assert!(
+           fp != 0,
+           "we should always get a valid frame pointer for Wasm frames"
+       );

-       if let Some(module_info) = module_info_lookup.lookup(pc) {
-           if let Some(stack_map) = module_info.lookup_stack_map(pc) {
-               debug_assert!(
-                   fp != 0,
-                   "we should always get a valid frame pointer for Wasm frames"
-               );
+       let module_info = module_info_lookup
+           .lookup(pc)
+           .expect("should have module info for Wasm frame");
+
+       let stack_map = match module_info.lookup_stack_map(pc) {
+           Some(sm) => sm,
+           None => {
+               log::trace!("No stack map for this Wasm frame");
+               return std::ops::ControlFlow::Continue(());
+           }
+       };
+       log::trace!(
+           "We have a stack map that maps {} words in this Wasm frame",
+           stack_map.mapped_words()
+       );

-               let sp = fp - stack_map.mapped_words() as usize * mem::size_of::<usize>();
+       let sp = fp - stack_map.mapped_words() as usize * mem::size_of::<usize>();

-               for i in 0..(stack_map.mapped_words() as usize) {
-                   if stack_map.get_bit(i) {
-                       // Stack maps have one bit per word in the frame, and the
-                       // zero^th bit is the *lowest* addressed word in the frame,
-                       // i.e. the closest to the SP. So to get the `i`^th word in
-                       // this frame, we add `i * sizeof(word)` to the SP.
-                       let ptr_to_ref = sp + i * mem::size_of::<usize>();
+       for i in 0..(stack_map.mapped_words() as usize) {
+           // Stack maps have one bit per word in the frame, and the
+           // zero^th bit is the *lowest* addressed word in the frame,
+           // i.e. the closest to the SP. So to get the `i`^th word in
+           // this frame, we add `i * sizeof(word)` to the SP.
+           let stack_slot = sp + i * mem::size_of::<usize>();

-                       let r = std::ptr::read(ptr_to_ref as *const *mut VMExternData);
-                       debug_assert!(
-                           r.is_null() || activations_table_set.contains(&r),
-                           "every on-stack externref inside a Wasm frame should \
-                            have an entry in the VMExternRefActivationsTable; \
-                            {:?} is not in the table",
-                           r
-                       );
-                       if let Some(r) = NonNull::new(r) {
-                           VMExternRefActivationsTable::insert_precise_stack_root(
-                               &mut externref_activations_table.precise_stack_roots,
-                               r,
-                           );
-                       }
-                   }
-               }
-           }
-       }
+           if !stack_map.get_bit(i) {
+               log::trace!(
+                   "Stack slot @ {:p} does not contain externrefs",
+                   stack_slot as *const (),
+               );
+               continue;
+           }
+
+           let stack_slot = stack_slot as *const *mut VMExternData;
+           let r = std::ptr::read(stack_slot);
+           log::trace!("Stack slot @ {:p} = {:p}", stack_slot, r);
+
+           debug_assert!(
+               r.is_null() || activations_table_set.contains(&r),
+               "every on-stack externref inside a Wasm frame should \
+                have an entry in the VMExternRefActivationsTable; \
+                {:?} is not in the table",
+               r
+           );
+           if let Some(r) = NonNull::new(r) {
+               VMExternRefActivationsTable::insert_precise_stack_root(
+                   &mut externref_activations_table.precise_stack_roots,
+                   r,
+               );
+           }
+       }

        std::ops::ControlFlow::Continue(())
    });
+   log::trace!("end GC trace");

    externref_activations_table.sweep();

@@ -198,7 +198,7 @@ impl Backtrace {
             // contiguous sequence of Wasm frames, and we have nothing to
             // walk through here.
             if first_wasm_sp == -1_isize as usize {
-                log::trace!("Empty sequence of Wasm frames");
+                log::trace!("=== Done tracing (empty sequence of Wasm frames) ===");
                 return ControlFlow::Continue(());
             }

@@ -224,8 +224,8 @@ impl Backtrace {
             arch::assert_fp_is_aligned(fp);

             log::trace!("--- Tracing through one Wasm frame ---");
-            log::trace!("pc = 0x{:016x}", pc);
-            log::trace!("fp = 0x{:016x}", fp);
+            log::trace!("pc = {:p}", pc as *const ());
+            log::trace!("fp = {:p}", fp as *const ());

             f(Frame { pc, fp })?;

@@ -234,6 +234,7 @@ impl Backtrace {
             // and have now reached a host frame. We're done iterating
             // through this contiguous sequence of Wasm frames.
             if arch::reached_entry_sp(fp, first_wasm_sp) {
+                log::trace!("=== Done tracing contiguous sequence of Wasm frames ===");
                 return ControlFlow::Continue(());
             }
