Remove the stack map registry.
This commit removes the stack map registry and instead uses the existing information from the store's module registry to look up stack maps. A trait is now used to pass the lookup context to the runtime; `Store` implements it to perform the lookup. With this change, module registration in `Store` is limited entirely to inserting the module into the module registry.
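The `StackMapLookup` trait itself is defined in the wasmtime-runtime crate and its definition is not part of the hunks below; a minimal sketch of its likely shape, inferred from the `impl wasmtime_runtime::StackMapLookup for StoreInner` later in this diff, is:

    // Sketch only: inferred from the `StoreInner` impl in this commit,
    // not the verbatim wasmtime-runtime definition.
    pub trait StackMapLookup {
        // Returns the stack map covering `pc`, if any. The returned pointer is
        // expected to stay valid for the lifetime of the lookup context (the
        // store), which is why a raw pointer can cross the crate boundary.
        fn lookup(&self, pc: usize) -> Option<*const wasmtime_environ::ir::StackMap>;
    }

With this in place, the runtime's GC entry points appear to take a `&dyn StackMapLookup` (or an `Option` of a raw pointer to one in `InstanceAllocationRequest`) where they previously took a `&StackMapRegistry`.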
@@ -207,7 +207,7 @@ unsafe impl WasmTy for Option<ExternRef> {
 unsafe {
 store
 .externref_activations_table()
-.insert_with_gc(x.inner, store.stack_map_registry());
+.insert_with_gc(x.inner, &*store.stack_map_lookup());
 }
 abi
 } else {
@@ -13,7 +13,7 @@ use wasmtime_environ::wasm::{
 };
 use wasmtime_environ::Initializer;
 use wasmtime_runtime::{
-Imports, InstanceAllocationRequest, InstantiationError, RuntimeInstance, StackMapRegistry,
+Imports, InstanceAllocationRequest, InstantiationError, RuntimeInstance, StackMapLookup,
 VMContext, VMExternRefActivationsTable, VMFunctionBody, VMFunctionImport, VMGlobalImport,
 VMMemoryImport, VMTableImport,
 };
@@ -525,8 +525,7 @@ impl<'a> Instantiator<'a> {
 externref_activations_table: self.store.externref_activations_table()
 as *const VMExternRefActivationsTable
 as *mut _,
-stack_map_registry: self.store.stack_map_registry() as *const StackMapRegistry
-as *mut _,
+stack_map_lookup: Some(self.store.stack_map_lookup() as *const dyn StackMapLookup),
 })?;

 // After we've created the `InstanceHandle` we still need to run
@@ -43,6 +43,11 @@ impl ModuleRegistry {
 self.module(pc)?.lookup_trap_info(pc)
 }

+/// Looks up a stack map from a program counter.
+pub fn lookup_stack_map<'a>(&'a self, pc: usize) -> Option<&'a ir::StackMap> {
+self.module(pc)?.lookup_stack_map(pc)
+}
+
 fn module(&self, pc: usize) -> Option<&RegisteredModule> {
 let (end, info) = self.0.range(pc..).next()?;
 if pc < info.start || *end < pc {
@@ -53,13 +58,13 @@ impl ModuleRegistry {
 }

 /// Registers a new module with the registry.
-pub fn register(&mut self, module: &Module) -> bool {
+pub fn register(&mut self, module: &Module) {
 let compiled_module = module.compiled_module();
 let (start, end) = compiled_module.code().range();

 // Ignore modules with no code, finished functions, or if the module is already registered
 if start == end || compiled_module.finished_functions().is_empty() {
-return false;
+return;
 }

 // The module code range is exclusive for end, so make it inclusive as it
@@ -67,7 +72,7 @@ impl ModuleRegistry {
 let end = end - 1;

 if self.0.get(&end).is_some() {
-return false;
+return;
 }

 // Assert that this module's code doesn't collide with any other registered modules
@@ -90,7 +95,6 @@ impl ModuleRegistry {
 assert!(prev.is_none());

 GLOBAL_MODULES.lock().unwrap().register(start, end, module);
-true
 }

 /// Looks up a trampoline from a shared signature index.
@@ -135,7 +139,7 @@ impl RegisteredModule {
 /// if no information can be found.
 pub fn lookup_frame_info(&self, pc: usize) -> Option<FrameInfo> {
 let (index, offset) = self.func(pc)?;
-let (addr_map, _) = self.module.func_info(index);
+let (addr_map, _, _) = self.module.func_info(index);
 let pos = Self::instr_pos(offset, addr_map);

 // In debug mode for now assert that we found a mapping for `pc` within
@@ -197,13 +201,77 @@ impl RegisteredModule {
 /// Fetches trap information about a program counter in a backtrace.
 pub fn lookup_trap_info(&self, pc: usize) -> Option<&TrapInformation> {
 let (index, offset) = self.func(pc)?;
-let (_, traps) = self.module.func_info(index);
+let (_, traps, _) = self.module.func_info(index);
 let idx = traps
 .binary_search_by_key(&offset, |info| info.code_offset)
 .ok()?;
 Some(&traps[idx])
 }

+/// Looks up a stack map from a program counter
+pub fn lookup_stack_map(&self, pc: usize) -> Option<&ir::StackMap> {
+let (index, offset) = self.func(pc)?;
+let (_, _, stack_maps) = self.module.func_info(index);
+
+// Do a binary search to find the stack map for the given offset.
+//
+// Because GC safepoints are technically only associated with a single
+// PC, we should ideally only care about `Ok(index)` values returned
+// from the binary search. However, safepoints are inserted right before
+// calls, and there are two things that can disturb the PC/offset
+// associated with the safepoint versus the PC we actually use to query
+// for the stack map:
+//
+// 1. The `backtrace` crate gives us the PC in a frame that will be
+// *returned to*, and where execution will continue from, rather than
+// the PC of the call we are currently at. So we would need to
+// disassemble one instruction backwards to query the actual PC for
+// the stack map.
+//
+// TODO: One thing we *could* do to make this a little less error
+// prone, would be to assert/check that the nearest GC safepoint
+// found is within `max_encoded_size(any kind of call instruction)` of
+// our queried PC for the target architecture.
+//
+// 2. Cranelift's stack maps only handle the stack, not
+// registers. However, some references that are arguments to a call
+// may need to be in registers. In these cases, what Cranelift will
+// do is:
+//
+// a. spill all the live references,
+// b. insert a GC safepoint for those references,
+// c. reload the references into registers, and finally
+// d. make the call.
+//
+// Step (c) adds drift between the GC safepoint and the location of
+// the call, which is where we actually walk the stack frame and
+// collect its live references.
+//
+// Luckily, the spill stack slots for the live references are still
+// up to date, so we can still find all the on-stack roots.
+// Furthermore, we do not have a moving GC, so we don't need to worry
+// whether the following code will reuse the references in registers
+// (which would not have been updated to point to the moved objects)
+// or reload from the stack slots (which would have been updated to
+// point to the moved objects).
+
+let index = match stack_maps.binary_search_by_key(&offset, |i| i.code_offset) {
+// Exact hit.
+Ok(i) => i,
+
+// `Err(0)` means that the associated stack map would have been the
+// first element in the array if this pc had an associated stack
+// map, but this pc does not have an associated stack map. This can
+// only happen inside a Wasm frame if there are no live refs at this
+// pc.
+Err(0) => return None,
+
+Err(i) => i - 1,
+};
+
+Some(&stack_maps[index].stack_map)
+}
+
 fn func(&self, pc: usize) -> Option<(DefinedFuncIndex, u32)> {
 let (index, start, _) = self.module.func_by_pc(pc)?;
 Some((index, (pc - start) as u32))
@@ -260,7 +328,7 @@ impl GlobalModuleRegistry {

 match info.module.func(pc) {
 Some((index, offset)) => {
-let (addr_map, _) = info.module.module.func_info(index);
+let (addr_map, _, _) = info.module.module.func_info(index);
 RegisteredModule::instr_pos(offset, addr_map).is_some()
 }
 None => false,
@@ -16,11 +16,11 @@ use std::rc::Rc;
 use std::sync::Arc;
 use std::task::{Context, Poll};
 use wasmtime_environ::wasm::WasmFuncType;
-use wasmtime_jit::{CompiledModule, ModuleCode};
+use wasmtime_jit::ModuleCode;
 use wasmtime_runtime::{
-InstanceAllocator, InstanceHandle, OnDemandInstanceAllocator, SignalHandler, StackMapRegistry,
-TrapInfo, VMContext, VMExternRef, VMExternRefActivationsTable, VMInterrupts,
-VMSharedSignatureIndex, VMTrampoline,
+InstanceAllocator, InstanceHandle, OnDemandInstanceAllocator, SignalHandler, TrapInfo,
+VMContext, VMExternRef, VMExternRefActivationsTable, VMInterrupts, VMSharedSignatureIndex,
+VMTrampoline,
 };

 /// Used to associate instances with the store.
@@ -75,7 +75,6 @@ pub(crate) struct StoreInner {
 instances: RefCell<Vec<StoreInstance>>,
 signal_handler: RefCell<Option<Box<SignalHandler<'static>>>>,
 externref_activations_table: VMExternRefActivationsTable,
-stack_map_registry: StackMapRegistry,
 modules: RefCell<ModuleRegistry>,
 trampolines: RefCell<TrampolineMap>,
 // Numbers of resources instantiated in this store.
@@ -139,7 +138,6 @@ impl Store {
 instances: RefCell::new(Vec::new()),
 signal_handler: RefCell::new(None),
 externref_activations_table: VMExternRefActivationsTable::new(),
-stack_map_registry: StackMapRegistry::default(),
 modules: RefCell::new(ModuleRegistry::default()),
 trampolines: RefCell::new(TrampolineMap::default()),
 instance_count: Default::default(),
@@ -207,26 +205,7 @@ impl Store {

 pub(crate) fn register_module(&self, module: &Module) {
 // Register the module with the registry
-if !self.inner.modules.borrow_mut().register(module) {
-return;
-}
-
-// We need to know about all the stack maps of all instantiated modules
-// so when performing a GC we know about all wasm frames that we find
-// on the stack.
-self.register_stack_maps(module.compiled_module());
-}
-
-fn register_stack_maps(&self, module: &CompiledModule) {
-self.stack_map_registry()
-.register_stack_maps(module.stack_maps().map(|(func, stack_maps)| unsafe {
-let ptr = (*func).as_ptr();
-let len = (*func).len();
-let start = ptr as usize;
-let end = ptr as usize + len;
-let range = start..end;
-(range, stack_maps)
-}));
+self.inner.modules.borrow_mut().register(module);
 }

 // This is used to register a Func with the store
@@ -448,10 +427,6 @@ impl Store {
 }

 #[inline]
-pub(crate) fn stack_map_registry(&self) -> &StackMapRegistry {
-&self.inner.stack_map_registry
-}
-
 pub(crate) fn modules(&self) -> &RefCell<ModuleRegistry> {
 &self.inner.modules
 }
@@ -471,20 +446,21 @@ impl Store {
 ///
 /// It is fine to call this several times: only the first call will have an effect.
 pub unsafe fn notify_switched_thread(&self) {
-wasmtime_runtime::init_traps(frame_info::GlobalFrameInfo::is_wasm_pc)
+wasmtime_runtime::init_traps(crate::module::GlobalModuleRegistry::is_wasm_pc)
 .expect("failed to initialize per-threads traps");
 }

+#[inline]
+pub(crate) fn stack_map_lookup(&self) -> *const dyn wasmtime_runtime::StackMapLookup {
+self.inner.as_ref()
+}
+
 /// Perform garbage collection of `ExternRef`s.
 pub fn gc(&self) {
 // For this crate's API, we ensure that `set_stack_canary` invariants
-// are upheld for all host-->Wasm calls, and we register every module
-// used with this store in `self.inner.stack_map_registry`.
+// are upheld for all host-->Wasm calls.
 unsafe {
-wasmtime_runtime::gc(
-&self.inner.stack_map_registry,
-&self.inner.externref_activations_table,
-);
+wasmtime_runtime::gc(self.inner.as_ref(), &self.inner.externref_activations_table);
 }
 }
@@ -951,6 +927,13 @@ impl Drop for StoreInner {
 }
 }

+impl wasmtime_runtime::StackMapLookup for StoreInner {
+fn lookup(&self, pc: usize) -> Option<*const wasmtime_environ::ir::StackMap> {
+// The address of the stack map is stable for the lifetime of the store
+self.modules.borrow().lookup_stack_map(pc).map(|m| m as _)
+}
+}
+
 /// A threadsafe handle used to interrupt instances executing within a
 /// particular `Store`.
 ///
@@ -19,8 +19,8 @@ use std::sync::Arc;
 use wasmtime_environ::{entity::PrimaryMap, wasm, Module};
 use wasmtime_runtime::{
 Imports, InstanceAllocationRequest, InstanceAllocator, InstanceHandle,
-OnDemandInstanceAllocator, StackMapRegistry, VMExternRefActivationsTable, VMFunctionBody,
-VMFunctionImport, VMSharedSignatureIndex,
+OnDemandInstanceAllocator, VMExternRefActivationsTable, VMFunctionBody, VMFunctionImport,
+VMSharedSignatureIndex,
 };

 /// A wrapper around `wasmtime_runtime::InstanceHandle` which pairs it with the
@@ -77,7 +77,7 @@ fn create_handle(
 externref_activations_table: store.externref_activations_table()
 as *const VMExternRefActivationsTable
 as *mut _,
-stack_map_registry: store.stack_map_registry() as *const StackMapRegistry as *mut _,
+stack_map_lookup: Some(store.stack_map_lookup()),
 },
 )?;
@@ -9,7 +9,7 @@ use wasmtime_environ::entity::PrimaryMap;
 use wasmtime_environ::wasm::DefinedFuncIndex;
 use wasmtime_environ::Module;
 use wasmtime_runtime::{
-Imports, InstanceAllocationRequest, InstanceAllocator, StackMapRegistry,
+Imports, InstanceAllocationRequest, InstanceAllocator, StackMapLookup,
 VMExternRefActivationsTable, VMFunctionBody, VMFunctionImport, VMSharedSignatureIndex,
 };
@@ -43,7 +43,7 @@ pub(crate) fn create_handle(
 externref_activations_table: store.externref_activations_table()
 as *const VMExternRefActivationsTable
 as *mut _,
-stack_map_registry: store.stack_map_registry() as *const StackMapRegistry as *mut _,
+stack_map_lookup: &store,
 })?;

 Ok(store.add_instance(handle, true))
@@ -283,7 +283,7 @@ pub fn create_function(
 host_state: Box::new(trampoline_state),
 interrupts: std::ptr::null(),
 externref_activations_table: std::ptr::null_mut(),
-stack_map_registry: std::ptr::null_mut(),
+stack_map_lookup: None,
 })?,
 trampoline,
 ))
@@ -315,7 +315,7 @@ pub unsafe fn create_raw_function(
 host_state,
 interrupts: std::ptr::null(),
 externref_activations_table: std::ptr::null_mut(),
-stack_map_registry: std::ptr::null_mut(),
+stack_map_lookup: None,
 })?,
 )
 }
@@ -98,7 +98,7 @@ impl Val {
 let externref_ptr = x.inner.as_raw();
 store
 .externref_activations_table()
-.insert_with_gc(x.inner, store.stack_map_registry());
+.insert_with_gc(x.inner, &*store.stack_map_lookup());
 ptr::write(p as *mut *mut u8, externref_ptr)
 }
 Val::FuncRef(f) => ptr::write(