Merge pull request #2842 from peterhuene/engine-sig-registry

Additional performance improvements for module instantiation.
This commit is contained in:
Peter Huene
2021-04-16 13:59:01 -07:00
committed by GitHub
28 changed files with 954 additions and 920 deletions

View File

@@ -6,7 +6,7 @@
// struct VMContext { // struct VMContext {
// interrupts: *const VMInterrupts, // interrupts: *const VMInterrupts,
// externref_activations_table: *mut VMExternRefActivationsTable, // externref_activations_table: *mut VMExternRefActivationsTable,
// stack_map_registry: *mut StackMapRegistry, // module_info_lookup: *const dyn ModuleInfoLookup,
// signature_ids: [VMSharedSignatureIndex; module.num_signature_ids], // signature_ids: [VMSharedSignatureIndex; module.num_signature_ids],
// imported_functions: [VMFunctionImport; module.num_imported_functions], // imported_functions: [VMFunctionImport; module.num_imported_functions],
// imported_tables: [VMTableImport; module.num_imported_tables], // imported_tables: [VMTableImport; module.num_imported_tables],
@@ -77,7 +77,7 @@ pub struct VMOffsets {
// precalculated offsets of various member fields // precalculated offsets of various member fields
interrupts: u32, interrupts: u32,
externref_activations_table: u32, externref_activations_table: u32,
stack_map_registry: u32, module_info_lookup: u32,
signature_ids: u32, signature_ids: u32,
imported_functions: u32, imported_functions: u32,
imported_tables: u32, imported_tables: u32,
@@ -149,7 +149,7 @@ impl From<VMOffsetsFields> for VMOffsets {
num_defined_globals: fields.num_defined_globals, num_defined_globals: fields.num_defined_globals,
interrupts: 0, interrupts: 0,
externref_activations_table: 0, externref_activations_table: 0,
stack_map_registry: 0, module_info_lookup: 0,
signature_ids: 0, signature_ids: 0,
imported_functions: 0, imported_functions: 0,
imported_tables: 0, imported_tables: 0,
@@ -168,13 +168,13 @@ impl From<VMOffsetsFields> for VMOffsets {
.interrupts .interrupts
.checked_add(u32::from(fields.pointer_size)) .checked_add(u32::from(fields.pointer_size))
.unwrap(); .unwrap();
ret.stack_map_registry = ret ret.module_info_lookup = ret
.externref_activations_table .externref_activations_table
.checked_add(u32::from(fields.pointer_size)) .checked_add(u32::from(fields.pointer_size))
.unwrap(); .unwrap();
ret.signature_ids = ret ret.signature_ids = ret
.stack_map_registry .module_info_lookup
.checked_add(u32::from(fields.pointer_size)) .checked_add(u32::from(fields.pointer_size * 2))
.unwrap(); .unwrap();
ret.imported_functions = ret ret.imported_functions = ret
.signature_ids .signature_ids
@@ -507,10 +507,10 @@ impl VMOffsets {
self.externref_activations_table self.externref_activations_table
} }
/// The offset of the `*mut StackMapRegistry` member. /// The offset of the `*const dyn ModuleInfoLookup` member.
#[inline] #[inline]
pub fn vmctx_stack_map_registry(&self) -> u32 { pub fn vmctx_module_info_lookup(&self) -> u32 {
self.stack_map_registry self.module_info_lookup
} }
/// The offset of the `signature_ids` array. /// The offset of the `signature_ids` array.

View File

@@ -176,11 +176,13 @@ struct FinishedFunctions(PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>);
unsafe impl Send for FinishedFunctions {} unsafe impl Send for FinishedFunctions {}
unsafe impl Sync for FinishedFunctions {} unsafe impl Sync for FinishedFunctions {}
/// Information about a function, such as trap information, address map,
/// and stack maps.
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
struct FunctionInfo { pub struct FunctionInfo {
traps: Vec<TrapInformation>, pub traps: Vec<TrapInformation>,
address_map: FunctionAddressMap, pub address_map: FunctionAddressMap,
stack_maps: Vec<StackMapInformation>, pub stack_maps: Vec<StackMapInformation>,
} }
/// This is intended to mirror the type tables in `wasmtime_environ`, except that /// This is intended to mirror the type tables in `wasmtime_environ`, except that
@@ -362,11 +364,10 @@ impl CompiledModule {
} }
/// Gets the function information for a given function index. /// Gets the function information for a given function index.
pub fn func_info(&self, index: DefinedFuncIndex) -> (&FunctionAddressMap, &[TrapInformation]) { pub fn func_info(&self, index: DefinedFuncIndex) -> &FunctionInfo {
self.artifacts self.artifacts
.funcs .funcs
.get(index) .get(index)
.map(|f| (&f.address_map, f.traps.as_ref()))
.expect("defined function should be present") .expect("defined function should be present")
} }

View File

@@ -99,18 +99,16 @@
//! Examination of Deferred Reference Counting and Cycle Detection* by Quinane: //! Examination of Deferred Reference Counting and Cycle Detection* by Quinane:
//! <https://openresearch-repository.anu.edu.au/bitstream/1885/42030/2/hon-thesis.pdf> //! <https://openresearch-repository.anu.edu.au/bitstream/1885/42030/2/hon-thesis.pdf>
use std::alloc::Layout;
use std::any::Any; use std::any::Any;
use std::cell::{Cell, RefCell, UnsafeCell}; use std::cell::{Cell, RefCell, UnsafeCell};
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::mem; use std::mem;
use std::ops::Deref; use std::ops::Deref;
use std::ptr::{self, NonNull}; use std::ptr::{self, NonNull};
use std::rc::Rc; use std::{alloc::Layout, sync::Arc};
use wasmtime_environ::{ir::StackMap, StackMapInformation}; use wasmtime_environ::ir::StackMap;
/// An external reference to some opaque data. /// An external reference to some opaque data.
/// ///
@@ -596,10 +594,10 @@ impl VMExternRefActivationsTable {
pub unsafe fn insert_with_gc( pub unsafe fn insert_with_gc(
&self, &self,
externref: VMExternRef, externref: VMExternRef,
stack_maps_registry: &StackMapRegistry, module_info_lookup: &dyn ModuleInfoLookup,
) { ) {
if let Err(externref) = self.try_insert(externref) { if let Err(externref) = self.try_insert(externref) {
self.gc_and_insert_slow(externref, stack_maps_registry); self.gc_and_insert_slow(externref, module_info_lookup);
} }
} }
@@ -607,9 +605,9 @@ impl VMExternRefActivationsTable {
unsafe fn gc_and_insert_slow( unsafe fn gc_and_insert_slow(
&self, &self,
externref: VMExternRef, externref: VMExternRef,
stack_maps_registry: &StackMapRegistry, module_info_lookup: &dyn ModuleInfoLookup,
) { ) {
gc(stack_maps_registry, self); gc(module_info_lookup, self);
// Might as well insert right into the hash set, rather than the bump // Might as well insert right into the hash set, rather than the bump
// chunk, since we are already on a slow path and we get de-duplication // chunk, since we are already on a slow path and we get de-duplication
@@ -743,182 +741,28 @@ impl VMExternRefActivationsTable {
} }
} }
/// A registry of stack maps for currently active Wasm modules. /// Used by the runtime to lookup information about a module given a
#[derive(Default)] /// program counter value.
pub struct StackMapRegistry { pub trait ModuleInfoLookup: 'static {
inner: RefCell<StackMapRegistryInner>, /// Lookup the module information from a program counter value.
fn lookup(&self, pc: usize) -> Option<Arc<dyn ModuleInfo>>;
} }
#[derive(Default)] /// Used by the runtime to query module information.
struct StackMapRegistryInner { pub trait ModuleInfo {
/// A map from the highest pc in a module, to its stack maps. /// Lookup the stack map at a program counter value.
/// fn lookup_stack_map(&self, pc: usize) -> Option<&StackMap>;
/// For details, see the comment above `GlobalFrameInfo::ranges`.
ranges: BTreeMap<usize, ModuleStackMaps>,
} }
#[derive(Debug)] pub(crate) struct EmptyModuleInfoLookup;
struct ModuleStackMaps {
/// The range of PCs that this module covers. Different modules must always
/// have distinct ranges.
range: std::ops::Range<usize>,
/// A map from a PC in this module (that is a GC safepoint) to its impl ModuleInfoLookup for EmptyModuleInfoLookup {
/// associated stack map. If `None` then it means that the PC is the start fn lookup(&self, _pc: usize) -> Option<Arc<dyn ModuleInfo>> {
/// of a range which has no stack map. None
pc_to_stack_map: Vec<(usize, Option<Rc<StackMap>>)>,
}
impl StackMapRegistry {
/// Register the stack maps for a given module.
///
/// The stack maps should be given as an iterator over a function's PC range
/// in memory (that is, where the JIT actually allocated and emitted the
/// function's code at), and the stack maps and code offsets within that
/// range for each of its GC safepoints.
pub fn register_stack_maps<'a>(
&self,
stack_maps: impl IntoIterator<Item = (std::ops::Range<usize>, &'a [StackMapInformation])>,
) {
let mut min = usize::max_value();
let mut max = 0;
let mut pc_to_stack_map = vec![];
let mut last_is_none_marker = true;
for (range, infos) in stack_maps {
let len = range.end - range.start;
min = std::cmp::min(min, range.start);
max = std::cmp::max(max, range.end);
// Add a marker between functions indicating that this function's pc
// starts with no stack map so when our binary search later on finds
// a pc between the start of the function and the function's first
// stack map it doesn't think the previous stack map is our stack
// map.
//
// We skip this if the previous entry pushed was also a `None`
// marker, in which case the starting pc already has no stack map.
// This is also skipped if the first `code_offset` is zero since
// what we'll push applies for the first pc anyway.
if !last_is_none_marker && (infos.is_empty() || infos[0].code_offset > 0) {
pc_to_stack_map.push((range.start, None));
last_is_none_marker = true;
}
for info in infos {
assert!((info.code_offset as usize) < len);
pc_to_stack_map.push((
range.start + (info.code_offset as usize),
Some(Rc::new(info.stack_map.clone())),
));
last_is_none_marker = false;
}
}
if pc_to_stack_map.is_empty() {
// Nothing to register.
return;
}
let module_stack_maps = ModuleStackMaps {
range: min..max,
pc_to_stack_map,
};
let mut inner = self.inner.borrow_mut();
// Assert that this chunk of ranges doesn't collide with any other known
// chunks.
if let Some((_, prev)) = inner.ranges.range(max..).next() {
assert!(prev.range.start > max);
}
if let Some((prev_end, _)) = inner.ranges.range(..=min).next_back() {
assert!(*prev_end < min);
}
let old = inner.ranges.insert(max, module_stack_maps);
assert!(old.is_none());
}
/// Lookup the stack map for the given PC, if any.
pub fn lookup_stack_map(&self, pc: usize) -> Option<Rc<StackMap>> {
let inner = self.inner.borrow();
let stack_maps = inner.module_stack_maps(pc)?;
// Do a binary search to find the stack map for the given PC.
//
// Because GC safepoints are technically only associated with a single
// PC, we should ideally only care about `Ok(index)` values returned
// from the binary search. However, safepoints are inserted right before
// calls, and there are two things that can disturb the PC/offset
// associated with the safepoint versus the PC we actually use to query
// for the stack map:
//
// 1. The `backtrace` crate gives us the PC in a frame that will be
// *returned to*, and where execution will continue from, rather than
// the PC of the call we are currently at. So we would need to
// disassemble one instruction backwards to query the actual PC for
// the stack map.
//
// TODO: One thing we *could* do to make this a little less error
// prone, would be to assert/check that the nearest GC safepoint
// found is within `max_encoded_size(any kind of call instruction)`
// our queried PC for the target architecture.
//
// 2. Cranelift's stack maps only handle the stack, not
// registers. However, some references that are arguments to a call
// may need to be in registers. In these cases, what Cranelift will
// do is:
//
// a. spill all the live references,
// b. insert a GC safepoint for those references,
// c. reload the references into registers, and finally
// d. make the call.
//
// Step (c) adds drift between the GC safepoint and the location of
// the call, which is where we actually walk the stack frame and
// collect its live references.
//
// Luckily, the spill stack slots for the live references are still
// up to date, so we can still find all the on-stack roots.
// Furthermore, we do not have a moving GC, so we don't need to worry
// whether the following code will reuse the references in registers
// (which would not have been updated to point to the moved objects)
// or reload from the stack slots (which would have been updated to
// point to the moved objects).
let index = match stack_maps
.pc_to_stack_map
.binary_search_by_key(&pc, |(pc, _stack_map)| *pc)
{
// Exact hit.
Ok(i) => i,
// `Err(0)` means that the associated stack map would have been the
// first element in the array if this pc had an associated stack
// map, but this pc does not have an associated stack map. This can
// only happen inside a Wasm frame if there are no live refs at this
// pc.
Err(0) => return None,
Err(n) => n - 1,
};
let stack_map = stack_maps.pc_to_stack_map[index].1.as_ref()?.clone();
Some(stack_map)
} }
} }
impl StackMapRegistryInner { pub(crate) const EMPTY_MODULE_LOOKUP: EmptyModuleInfoLookup = EmptyModuleInfoLookup;
fn module_stack_maps(&self, pc: usize) -> Option<&ModuleStackMaps> {
let (end, stack_maps) = self.ranges.range(pc..).next()?;
if pc < stack_maps.range.start || *end < pc {
None
} else {
Some(stack_maps)
}
}
}
#[derive(Debug, Default)] #[derive(Debug, Default)]
struct DebugOnly<T> { struct DebugOnly<T> {
@@ -965,7 +809,7 @@ impl<T> std::ops::DerefMut for DebugOnly<T> {
/// Additionally, you must have registered the stack maps for every Wasm module /// Additionally, you must have registered the stack maps for every Wasm module
/// that has frames on the stack with the given `stack_maps_registry`. /// that has frames on the stack with the given `stack_maps_registry`.
pub unsafe fn gc( pub unsafe fn gc(
stack_maps_registry: &StackMapRegistry, module_info_lookup: &dyn ModuleInfoLookup,
externref_activations_table: &VMExternRefActivationsTable, externref_activations_table: &VMExternRefActivationsTable,
) { ) {
// We borrow the precise stack roots `RefCell` for the whole duration of // We borrow the precise stack roots `RefCell` for the whole duration of
@@ -1003,8 +847,7 @@ pub unsafe fn gc(
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
// Assert that there aren't any Wasm frames on the stack. // Assert that there aren't any Wasm frames on the stack.
backtrace::trace(|frame| { backtrace::trace(|frame| {
let stack_map = stack_maps_registry.lookup_stack_map(frame.ip() as usize); assert!(module_info_lookup.lookup(frame.ip() as usize).is_none());
assert!(stack_map.is_none());
true true
}); });
} }
@@ -1048,28 +891,30 @@ pub unsafe fn gc(
let pc = frame.ip() as usize; let pc = frame.ip() as usize;
let sp = frame.sp() as usize; let sp = frame.sp() as usize;
if let Some(stack_map) = stack_maps_registry.lookup_stack_map(pc) { if let Some(module_info) = module_info_lookup.lookup(pc) {
debug_assert!(sp != 0, "we should always get a valid SP for Wasm frames"); if let Some(stack_map) = module_info.lookup_stack_map(pc) {
debug_assert!(sp != 0, "we should always get a valid SP for Wasm frames");
for i in 0..(stack_map.mapped_words() as usize) { for i in 0..(stack_map.mapped_words() as usize) {
if stack_map.get_bit(i) { if stack_map.get_bit(i) {
// Stack maps have one bit per word in the frame, and the // Stack maps have one bit per word in the frame, and the
// zero^th bit is the *lowest* addressed word in the frame, // zero^th bit is the *lowest* addressed word in the frame,
// i.e. the closest to the SP. So to get the `i`^th word in // i.e. the closest to the SP. So to get the `i`^th word in
// this frame, we add `i * sizeof(word)` to the SP. // this frame, we add `i * sizeof(word)` to the SP.
let ptr_to_ref = sp + i * mem::size_of::<usize>(); let ptr_to_ref = sp + i * mem::size_of::<usize>();
let r = std::ptr::read(ptr_to_ref as *const *mut VMExternData); let r = std::ptr::read(ptr_to_ref as *const *mut VMExternData);
debug_assert!( debug_assert!(
r.is_null() || activations_table_set.contains(&r), r.is_null() || activations_table_set.contains(&r),
"every on-stack externref inside a Wasm frame should \ "every on-stack externref inside a Wasm frame should \
have an entry in the VMExternRefActivationsTable" have an entry in the VMExternRefActivationsTable"
);
if let Some(r) = NonNull::new(r) {
VMExternRefActivationsTable::insert_precise_stack_root(
&mut precise_stack_roots,
r,
); );
if let Some(r) = NonNull::new(r) {
VMExternRefActivationsTable::insert_precise_stack_root(
&mut precise_stack_roots,
r,
);
}
} }
} }
} }

View File

@@ -3,7 +3,7 @@
//! `InstanceHandle` is a reference-counting handle for an `Instance`. //! `InstanceHandle` is a reference-counting handle for an `Instance`.
use crate::export::Export; use crate::export::Export;
use crate::externref::{StackMapRegistry, VMExternRefActivationsTable}; use crate::externref::{ModuleInfoLookup, VMExternRefActivationsTable};
use crate::memory::{Memory, RuntimeMemoryCreator}; use crate::memory::{Memory, RuntimeMemoryCreator};
use crate::table::{Table, TableElement}; use crate::table::{Table, TableElement};
use crate::traphandlers::Trap; use crate::traphandlers::Trap;
@@ -249,9 +249,9 @@ impl Instance {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_externref_activations_table()) } unsafe { self.vmctx_plus_offset(self.offsets.vmctx_externref_activations_table()) }
} }
/// Return a pointer to the `StackMapRegistry`. /// Return a pointer to the `ModuleInfoLookup`.
pub fn stack_map_registry(&self) -> *mut *mut StackMapRegistry { pub fn module_info_lookup(&self) -> *mut *const dyn ModuleInfoLookup {
unsafe { self.vmctx_plus_offset(self.offsets.vmctx_stack_map_registry()) } unsafe { self.vmctx_plus_offset(self.offsets.vmctx_module_info_lookup()) }
} }
/// Return a reference to the vmctx used by compiled wasm code. /// Return a reference to the vmctx used by compiled wasm code.

View File

@@ -1,4 +1,4 @@
use crate::externref::{StackMapRegistry, VMExternRefActivationsTable}; use crate::externref::{ModuleInfoLookup, VMExternRefActivationsTable, EMPTY_MODULE_LOOKUP};
use crate::imports::Imports; use crate::imports::Imports;
use crate::instance::{Instance, InstanceHandle, RuntimeMemoryCreator}; use crate::instance::{Instance, InstanceHandle, RuntimeMemoryCreator};
use crate::memory::{DefaultMemoryCreator, Memory}; use crate::memory::{DefaultMemoryCreator, Memory};
@@ -57,8 +57,8 @@ pub struct InstanceAllocationRequest<'a> {
/// The pointer to the reference activations table to use for the instance. /// The pointer to the reference activations table to use for the instance.
pub externref_activations_table: *mut VMExternRefActivationsTable, pub externref_activations_table: *mut VMExternRefActivationsTable,
/// The pointer to the stack map registry to use for the instance. /// The pointer to the module info lookup to use for the instance.
pub stack_map_registry: *mut StackMapRegistry, pub module_info_lookup: Option<*const dyn ModuleInfoLookup>,
} }
/// A link error while instantiating a module. /// A link error while instantiating a module.
@@ -447,7 +447,7 @@ unsafe fn initialize_vmcontext(instance: &Instance, req: InstanceAllocationReque
*instance.interrupts() = req.interrupts; *instance.interrupts() = req.interrupts;
*instance.externref_activations_table() = req.externref_activations_table; *instance.externref_activations_table() = req.externref_activations_table;
*instance.stack_map_registry() = req.stack_map_registry; *instance.module_info_lookup() = req.module_info_lookup.unwrap_or(&EMPTY_MODULE_LOOKUP);
// Initialize shared signatures // Initialize shared signatures
let mut ptr = instance.signature_ids_ptr(); let mut ptr = instance.signature_ids_ptr();

View File

@@ -1370,7 +1370,7 @@ mod test {
host_state: Box::new(()), host_state: Box::new(()),
interrupts: std::ptr::null(), interrupts: std::ptr::null(),
externref_activations_table: std::ptr::null_mut(), externref_activations_table: std::ptr::null_mut(),
stack_map_registry: std::ptr::null_mut(), module_info_lookup: None,
}, },
) )
.expect("allocation should succeed"), .expect("allocation should succeed"),
@@ -1394,7 +1394,7 @@ mod test {
host_state: Box::new(()), host_state: Box::new(()),
interrupts: std::ptr::null(), interrupts: std::ptr::null(),
externref_activations_table: std::ptr::null_mut(), externref_activations_table: std::ptr::null_mut(),
stack_map_registry: std::ptr::null_mut(), module_info_lookup: None,
}, },
) { ) {
Err(InstantiationError::Limit(3)) => {} Err(InstantiationError::Limit(3)) => {}

View File

@@ -523,7 +523,7 @@ mod test {
host_state: Box::new(()), host_state: Box::new(()),
interrupts: ptr::null(), interrupts: ptr::null(),
externref_activations_table: ptr::null_mut(), externref_activations_table: ptr::null_mut(),
stack_map_registry: ptr::null_mut(), module_info_lookup: None,
}, },
) )
.expect("instance should allocate"), .expect("instance should allocate"),

View File

@@ -449,8 +449,8 @@ pub unsafe extern "C" fn wasmtime_activations_table_insert_with_gc(
let externref = VMExternRef::clone_from_raw(externref); let externref = VMExternRef::clone_from_raw(externref);
let instance = (&mut *vmctx).instance(); let instance = (&mut *vmctx).instance();
let activations_table = &**instance.externref_activations_table(); let activations_table = &**instance.externref_activations_table();
let registry = &**instance.stack_map_registry(); let module_info_lookup = &**instance.module_info_lookup();
activations_table.insert_with_gc(externref, registry); activations_table.insert_with_gc(externref, module_info_lookup);
} }
/// Perform a Wasm `global.get` for `externref` globals. /// Perform a Wasm `global.get` for `externref` globals.
@@ -466,8 +466,8 @@ pub unsafe extern "C" fn wasmtime_externref_global_get(
Some(externref) => { Some(externref) => {
let raw = externref.as_raw(); let raw = externref.as_raw();
let activations_table = &**instance.externref_activations_table(); let activations_table = &**instance.externref_activations_table();
let registry = &**instance.stack_map_registry(); let module_info_lookup = &**instance.module_info_lookup();
activations_table.insert_with_gc(externref, registry); activations_table.insert_with_gc(externref, module_info_lookup);
raw raw
} }
} }

View File

@@ -25,7 +25,7 @@
//! use a thread-local to store information about how to unwind. Additionally //! use a thread-local to store information about how to unwind. Additionally
//! this requires that the check of whether a pc is a wasm trap or not is a //! this requires that the check of whether a pc is a wasm trap or not is a
//! global check rather than a per-thread check. This necessitates the existence //! global check rather than a per-thread check. This necessitates the existence
//! of `GlobalFrameInfo` in the `wasmtime` crate. //! of `GlobalModuleRegistry` in the `wasmtime` crate.
//! //!
//! Otherwise this file heavily uses the `mach` Rust crate for type and //! Otherwise this file heavily uses the `mach` Rust crate for type and
//! function declarations. Many bits and pieces are copied or translated from //! function declarations. Many bits and pieces are copied or translated from

View File

@@ -319,6 +319,10 @@ impl HostFuncMap {
fn async_required(&self) -> bool { fn async_required(&self) -> bool {
self.funcs.values().any(|f| f.1) self.funcs.values().any(|f| f.1)
} }
fn iter(&self) -> impl Iterator<Item = &HostFunc> {
self.funcs.values().map(|v| &*v.0)
}
} }
macro_rules! generate_wrap_async_host_func { macro_rules! generate_wrap_async_host_func {
@@ -1318,6 +1322,10 @@ impl Config {
for_each_function_signature!(generate_wrap_async_host_func); for_each_function_signature!(generate_wrap_async_host_func);
pub(crate) fn host_funcs(&self) -> impl Iterator<Item = &HostFunc> {
self.host_funcs.iter()
}
pub(crate) fn get_host_func(&self, module: &str, name: &str) -> Option<&HostFunc> { pub(crate) fn get_host_func(&self, module: &str, name: &str) -> Option<&HostFunc> {
self.host_funcs.get(module, name) self.host_funcs.get(module, name)
} }

View File

@@ -1,10 +1,41 @@
use crate::signatures::{SignatureCollection, SignatureRegistry};
use crate::Config; use crate::Config;
use anyhow::Result; use anyhow::Result;
use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
#[cfg(feature = "cache")] #[cfg(feature = "cache")]
use wasmtime_cache::CacheConfig; use wasmtime_cache::CacheConfig;
use wasmtime_jit::Compiler; use wasmtime_jit::Compiler;
use wasmtime_runtime::{debug_builtins, InstanceAllocator}; use wasmtime_runtime::{debug_builtins, InstanceAllocator, InstanceHandle, VMCallerCheckedAnyfunc};
/// This is used as a Send+Sync wrapper around two data structures relating to
/// host functions defined on `Config`:
///
/// * `anyfuncs` - this stores a mapping between the host function instance and
/// a `VMCallerCheckedAnyfunc` that can be used as the function's value in Wasmtime's ABI.
/// The address of the anyfunc needs to be stable, thus the boxed value.
///
/// * `signatures` - this stores the collection of shared signatures registered for every
/// usable host functions with this engine.
struct EngineHostFuncs {
anyfuncs: HashMap<InstanceHandle, Box<VMCallerCheckedAnyfunc>>,
signatures: SignatureCollection,
}
impl EngineHostFuncs {
fn new(registry: &SignatureRegistry) -> Self {
Self {
anyfuncs: HashMap::new(),
signatures: SignatureCollection::new(registry),
}
}
}
// This is safe for send and sync as it is read-only once the
// engine is constructed and the host functions live with the config,
// which the engine keeps a strong reference to.
unsafe impl Send for EngineHostFuncs {}
unsafe impl Sync for EngineHostFuncs {}
/// An `Engine` which is a global context for compilation and management of wasm /// An `Engine` which is a global context for compilation and management of wasm
/// modules. /// modules.
@@ -37,6 +68,8 @@ struct EngineInner {
config: Config, config: Config,
compiler: Compiler, compiler: Compiler,
allocator: Box<dyn InstanceAllocator>, allocator: Box<dyn InstanceAllocator>,
signatures: SignatureRegistry,
host_funcs: EngineHostFuncs,
} }
impl Engine { impl Engine {
@@ -46,11 +79,29 @@ impl Engine {
debug_builtins::ensure_exported(); debug_builtins::ensure_exported();
config.validate()?; config.validate()?;
let allocator = config.build_allocator()?; let allocator = config.build_allocator()?;
let registry = SignatureRegistry::new();
let mut host_funcs = EngineHostFuncs::new(&registry);
// Register all the host function signatures with the collection
for func in config.host_funcs() {
let sig = host_funcs
.signatures
.register(func.ty.as_wasm_func_type(), func.trampoline);
// Cloning the instance handle is safe as host functions outlive the engine
host_funcs.anyfuncs.insert(
unsafe { func.instance.clone() },
Box::new(func.anyfunc(sig)),
);
}
Ok(Engine { Ok(Engine {
inner: Arc::new(EngineInner { inner: Arc::new(EngineInner {
config: config.clone(), config: config.clone(),
compiler: config.build_compiler(allocator.as_ref()), compiler: config.build_compiler(allocator.as_ref()),
allocator, allocator,
signatures: registry,
host_funcs,
}), }),
}) })
} }
@@ -79,6 +130,25 @@ impl Engine {
Arc::ptr_eq(&a.inner, &b.inner) Arc::ptr_eq(&a.inner, &b.inner)
} }
pub(crate) fn signatures(&self) -> &SignatureRegistry {
&self.inner.signatures
}
pub(crate) fn host_func_signatures(&self) -> &SignatureCollection {
&self.inner.host_funcs.signatures
}
pub(crate) fn host_func_anyfunc(
&self,
instance: &InstanceHandle,
) -> Option<&VMCallerCheckedAnyfunc> {
self.inner
.host_funcs
.anyfuncs
.get(instance)
.map(AsRef::as_ref)
}
/// Ahead-of-time (AOT) compiles a WebAssembly module. /// Ahead-of-time (AOT) compiles a WebAssembly module.
/// ///
/// The `bytes` provided must be in one of two formats: /// The `bytes` provided must be in one of two formats:

View File

@@ -1,4 +1,4 @@
use crate::{sig_registry::SignatureRegistry, trampoline::StoreInstanceHandle}; use crate::trampoline::StoreInstanceHandle;
use crate::{Config, Extern, FuncType, Store, Trap, Val, ValType}; use crate::{Config, Extern, FuncType, Store, Trap, Val, ValType};
use anyhow::{bail, Context as _, Result}; use anyhow::{bail, Context as _, Result};
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
@@ -22,9 +22,9 @@ use wasmtime_runtime::{
/// This differs from `Func` in that it is not associated with a `Store`. /// This differs from `Func` in that it is not associated with a `Store`.
/// Host functions are associated with a `Config`. /// Host functions are associated with a `Config`.
pub(crate) struct HostFunc { pub(crate) struct HostFunc {
ty: FuncType, pub ty: FuncType,
instance: InstanceHandle, pub instance: InstanceHandle,
trampoline: VMTrampoline, pub trampoline: VMTrampoline,
} }
impl HostFunc { impl HostFunc {
@@ -73,6 +73,23 @@ impl HostFunc {
} }
} }
/// Gets a caller-checked anyfunc for this host function given a shared signature index.
///
/// The shared signature index must have been registered for the signature of
/// this host function.
pub fn anyfunc(&self, sig: VMSharedSignatureIndex) -> VMCallerCheckedAnyfunc {
let mut anyfunc = match self
.instance
.lookup_by_declaration(&EntityIndex::Function(FuncIndex::from_u32(0)))
{
wasmtime_runtime::Export::Function(f) => unsafe { f.anyfunc.as_ref() }.clone(),
_ => unreachable!(),
};
anyfunc.type_index = sig;
anyfunc
}
/// Converts a `HostFunc` to a `Func`. /// Converts a `HostFunc` to a `Func`.
/// ///
/// # Safety /// # Safety
@@ -88,11 +105,11 @@ impl HostFunc {
}; };
let export = ExportFunction { let export = ExportFunction {
anyfunc: std::ptr::NonNull::new_unchecked(store.get_host_anyfunc( anyfunc: store
&self.instance, .engine()
&self.ty, .host_func_anyfunc(&self.instance)
self.trampoline, .unwrap()
)), .into(),
}; };
Func { Func {
@@ -408,13 +425,9 @@ impl Func {
Func::invoke(&store, &ty_clone, caller_vmctx, values_vec, &func) Func::invoke(&store, &ty_clone, caller_vmctx, values_vec, &func)
}); });
let (instance, trampoline) = crate::trampoline::create_function( let (instance, trampoline) =
&ty, crate::trampoline::create_function(&ty, func, store.engine().config(), Some(store))
func, .expect("failed to create function");
store.engine().config(),
Some(&mut store.signatures().borrow_mut()),
)
.expect("failed to create function");
let idx = EntityIndex::Function(FuncIndex::from_u32(0)); let idx = EntityIndex::Function(FuncIndex::from_u32(0));
let (instance, export) = match instance.lookup_by_declaration(&idx) { let (instance, export) = match instance.lookup_by_declaration(&idx) {
@@ -734,7 +747,7 @@ impl Func {
/// # } /// # }
/// ``` /// ```
pub fn wrap<Params, Results>(store: &Store, func: impl IntoFunc<Params, Results>) -> Func { pub fn wrap<Params, Results>(store: &Store, func: impl IntoFunc<Params, Results>) -> Func {
let (_, instance, trampoline) = func.into_func(Some(&mut store.signatures().borrow_mut())); let (_, instance, trampoline) = func.into_func(Some(store));
let (instance, export) = unsafe { let (instance, export) = unsafe {
let idx = EntityIndex::Function(FuncIndex::from_u32(0)); let idx = EntityIndex::Function(FuncIndex::from_u32(0));
@@ -759,35 +772,26 @@ impl Func {
/// Returns the underlying wasm type that this `Func` has. /// Returns the underlying wasm type that this `Func` has.
pub fn ty(&self) -> FuncType { pub fn ty(&self) -> FuncType {
// Signatures should always be registered in the store's registry of // Signatures should always be registered in the engine's registry of
// shared signatures, so we should be able to unwrap safely here. // shared signatures, so we should be able to unwrap safely here.
let signatures = self.instance.store.signatures().borrow(); FuncType::from_wasm_func_type(
let (wft, _) = signatures self.instance
.lookup_shared(self.sig_index()) .store
.expect("signature should be registered"); .engine()
.signatures()
// This is only called with `Export::Function`, and since it's coming .lookup_type(self.sig_index())
// from wasmtime_runtime itself we should support all the types coming .expect("signature should be registered"),
// out of it, so assert such here. )
FuncType::from_wasm_func_type(&wft)
} }
/// Returns the number of parameters that this function takes. /// Returns the number of parameters that this function takes.
pub fn param_arity(&self) -> usize { pub fn param_arity(&self) -> usize {
let signatures = self.instance.store.signatures().borrow(); self.ty().params().len()
let (sig, _) = signatures
.lookup_shared(self.sig_index())
.expect("signature should be registered");
sig.params.len()
} }
/// Returns the number of results this function produces. /// Returns the number of results this function produces.
pub fn result_arity(&self) -> usize { pub fn result_arity(&self) -> usize {
let signatures = self.instance.store.signatures().borrow(); self.ty().results().len()
let (sig, _) = signatures
.lookup_shared(self.sig_index())
.expect("signature should be registered");
sig.returns.len()
} }
/// Invokes this function with the `params` given, returning the results and /// Invokes this function with the `params` given, returning the results and
@@ -907,21 +911,12 @@ impl Func {
} }
pub(crate) unsafe fn from_wasmtime_function(export: &ExportFunction, store: &Store) -> Self { pub(crate) unsafe fn from_wasmtime_function(export: &ExportFunction, store: &Store) -> Self {
// Each function signature in a module should have a trampoline stored
// on that module as well, so unwrap the result here since otherwise
// it's a bug in wasmtime.
let anyfunc = export.anyfunc.as_ref(); let anyfunc = export.anyfunc.as_ref();
let trampoline = store
.signatures()
.borrow()
.lookup_shared(anyfunc.type_index)
.expect("failed to retrieve trampoline from module")
.1;
Func { Func {
instance: store.existing_vmctx(anyfunc.vmctx), instance: store.existing_vmctx(anyfunc.vmctx),
export: export.clone(), export: export.clone(),
trampoline, trampoline: store.lookup_trampoline(&*anyfunc),
} }
} }
@@ -1542,10 +1537,7 @@ for_each_function_signature!(impl_host_abi);
/// as an implementation detail of this crate. /// as an implementation detail of this crate.
pub trait IntoFunc<Params, Results> { pub trait IntoFunc<Params, Results> {
#[doc(hidden)] #[doc(hidden)]
fn into_func( fn into_func(self, store: Option<&Store>) -> (FuncType, InstanceHandle, VMTrampoline);
self,
registry: Option<&mut SignatureRegistry>,
) -> (FuncType, InstanceHandle, VMTrampoline);
} }
/// A structure representing the *caller's* context when creating a function /// A structure representing the *caller's* context when creating a function
@@ -1658,12 +1650,12 @@ macro_rules! impl_into_func {
$($args: WasmTy,)* $($args: WasmTy,)*
R: WasmRet, R: WasmRet,
{ {
fn into_func(self, registry: Option<&mut SignatureRegistry>) -> (FuncType, InstanceHandle, VMTrampoline) { fn into_func(self, store: Option<&Store>) -> (FuncType, InstanceHandle, VMTrampoline) {
let f = move |_: Caller<'_>, $($args:$args),*| { let f = move |_: Caller<'_>, $($args:$args),*| {
self($($args),*) self($($args),*)
}; };
f.into_func(registry) f.into_func(store)
} }
} }
@@ -1674,7 +1666,7 @@ macro_rules! impl_into_func {
$($args: WasmTy,)* $($args: WasmTy,)*
R: WasmRet, R: WasmRet,
{ {
fn into_func(self, registry: Option<&mut SignatureRegistry>) -> (FuncType, InstanceHandle, VMTrampoline) { fn into_func(self, store: Option<&Store>) -> (FuncType, InstanceHandle, VMTrampoline) {
/// This shim is called by Wasm code, constructs a `Caller`, /// This shim is called by Wasm code, constructs a `Caller`,
/// calls the wrapped host function, and returns the translated /// calls the wrapped host function, and returns the translated
/// result back to Wasm. /// result back to Wasm.
@@ -1807,10 +1799,10 @@ macro_rules! impl_into_func {
let trampoline = host_trampoline::<$($args,)* R>; let trampoline = host_trampoline::<$($args,)* R>;
// If not given a registry, use a default signature index that is guaranteed to trap // If not given a store, use a default signature index that is guaranteed to trap.
// if the function is called indirectly without first being associated with a store (a bug condition). // If the function is called indirectly without first being associated with a store (a bug condition).
let shared_signature_id = registry let shared_signature_id = store
.map(|r| r.register(ty.as_wasm_func_type(), trampoline)) .map(|s| s.signatures().borrow_mut().register(ty.as_wasm_func_type(), trampoline))
.unwrap_or(VMSharedSignatureIndex::default()); .unwrap_or(VMSharedSignatureIndex::default());
let instance = unsafe { let instance = unsafe {

View File

@@ -207,7 +207,7 @@ unsafe impl WasmTy for Option<ExternRef> {
unsafe { unsafe {
store store
.externref_activations_table() .externref_activations_table()
.insert_with_gc(x.inner, store.stack_map_registry()); .insert_with_gc(x.inner, store.module_info_lookup());
} }
abi abi
} else { } else {

View File

@@ -13,9 +13,9 @@ use wasmtime_environ::wasm::{
}; };
use wasmtime_environ::Initializer; use wasmtime_environ::Initializer;
use wasmtime_runtime::{ use wasmtime_runtime::{
Imports, InstanceAllocationRequest, InstantiationError, RuntimeInstance, StackMapRegistry, Imports, InstanceAllocationRequest, InstantiationError, RuntimeInstance, VMContext,
VMContext, VMExternRefActivationsTable, VMFunctionBody, VMFunctionImport, VMGlobalImport, VMExternRefActivationsTable, VMFunctionBody, VMFunctionImport, VMGlobalImport, VMMemoryImport,
VMMemoryImport, VMTableImport, VMTableImport,
}; };
/// An instantiated WebAssembly module. /// An instantiated WebAssembly module.
@@ -362,6 +362,7 @@ impl<'a> Instantiator<'a> {
let expected_ty = let expected_ty =
self.cur.module.compiled_module().module().type_of(*index); self.cur.module.compiled_module().module().type_of(*index);
matching::MatchCx { matching::MatchCx {
signatures: self.cur.module.signatures(),
types: self.cur.module.types(), types: self.cur.module.types(),
store: self.store, store: self.store,
} }
@@ -505,29 +506,25 @@ impl<'a> Instantiator<'a> {
fn instantiate_raw(&self) -> Result<StoreInstanceHandle> { fn instantiate_raw(&self) -> Result<StoreInstanceHandle> {
let compiled_module = self.cur.module.compiled_module(); let compiled_module = self.cur.module.compiled_module();
// Register the module just before instantiation to ensure we have a // Register the module just before instantiation to ensure we keep the module
// trampoline registered for every signature and to preserve the module's // properly referenced while in use by the store.
// compiled JIT code within the `Store`. self.store.modules().borrow_mut().register(&self.cur.module);
self.store.register_module(&self.cur.module);
unsafe { unsafe {
let engine = self.store.engine(); let engine = self.store.engine();
let allocator = engine.allocator(); let allocator = engine.allocator();
let signatures = self.store.signatures().borrow();
let signatures = signatures.lookup_table(&self.cur.module);
let instance = allocator.allocate(InstanceAllocationRequest { let instance = allocator.allocate(InstanceAllocationRequest {
module: compiled_module.module().clone(), module: compiled_module.module().clone(),
finished_functions: compiled_module.finished_functions(), finished_functions: compiled_module.finished_functions(),
imports: self.cur.build(), imports: self.cur.build(),
shared_signatures: (&signatures).into(), shared_signatures: self.cur.module.signatures().as_module_map().into(),
host_state: Box::new(()), host_state: Box::new(()),
interrupts: self.store.interrupts(), interrupts: self.store.interrupts(),
externref_activations_table: self.store.externref_activations_table() externref_activations_table: self.store.externref_activations_table()
as *const VMExternRefActivationsTable as *const VMExternRefActivationsTable
as *mut _, as *mut _,
stack_map_registry: self.store.stack_map_registry() as *const StackMapRegistry module_info_lookup: Some(self.store.module_info_lookup()),
as *mut _,
})?; })?;
// After we've created the `InstanceHandle` we still need to run // After we've created the `InstanceHandle` we still need to run

View File

@@ -282,13 +282,12 @@ mod func;
mod config; mod config;
mod engine; mod engine;
mod externals; mod externals;
mod frame_info;
mod instance; mod instance;
mod linker; mod linker;
mod memory; mod memory;
mod module; mod module;
mod r#ref; mod r#ref;
mod sig_registry; mod signatures;
mod store; mod store;
mod trampoline; mod trampoline;
mod trap; mod trap;
@@ -298,12 +297,11 @@ mod values;
pub use crate::config::*; pub use crate::config::*;
pub use crate::engine::*; pub use crate::engine::*;
pub use crate::externals::*; pub use crate::externals::*;
pub use crate::frame_info::{FrameInfo, FrameSymbol};
pub use crate::func::*; pub use crate::func::*;
pub use crate::instance::Instance; pub use crate::instance::Instance;
pub use crate::linker::*; pub use crate::linker::*;
pub use crate::memory::*; pub use crate::memory::*;
pub use crate::module::Module; pub use crate::module::{FrameInfo, FrameSymbol, Module};
pub use crate::r#ref::ExternRef; pub use crate::r#ref::ExternRef;
pub use crate::store::*; pub use crate::store::*;
pub use crate::trap::*; pub use crate::trap::*;

View File

@@ -1,4 +1,7 @@
use crate::types::{ExportType, ExternType, ImportType}; use crate::{
signatures::SignatureCollection,
types::{ExportType, ExternType, ImportType},
};
use crate::{Engine, ModuleType}; use crate::{Engine, ModuleType};
use anyhow::{bail, Context, Result}; use anyhow::{bail, Context, Result};
use std::fs; use std::fs;
@@ -11,8 +14,10 @@ use wasmtime_environ::entity::PrimaryMap;
use wasmtime_environ::wasm::ModuleIndex; use wasmtime_environ::wasm::ModuleIndex;
use wasmtime_jit::{CompilationArtifacts, CompiledModule, TypeTables}; use wasmtime_jit::{CompilationArtifacts, CompiledModule, TypeTables};
mod registry;
mod serialization; mod serialization;
pub use registry::{FrameInfo, FrameSymbol, GlobalModuleRegistry, ModuleRegistry};
pub use serialization::SerializedModule; pub use serialization::SerializedModule;
/// A compiled WebAssembly module, ready to be instantiated. /// A compiled WebAssembly module, ready to be instantiated.
@@ -102,6 +107,8 @@ struct ModuleInner {
/// Type information of this module and all `artifact_upvars` compiled /// Type information of this module and all `artifact_upvars` compiled
/// modules. /// modules.
types: Arc<TypeTables>, types: Arc<TypeTables>,
/// Registered shared signature for the module.
signatures: Arc<SignatureCollection>,
} }
impl Module { impl Module {
@@ -313,25 +320,95 @@ impl Module {
} }
}; };
let mut modules = CompiledModule::from_artifacts_list( let modules = CompiledModule::from_artifacts_list(
artifacts, artifacts,
engine.compiler().isa(), engine.compiler().isa(),
&*engine.config().profiler, &*engine.config().profiler,
)?; )?;
Self::from_parts(engine, modules, main_module, Arc::new(types), &[])
}
fn from_parts(
engine: &Engine,
mut modules: Vec<Arc<CompiledModule>>,
main_module: usize,
types: Arc<TypeTables>,
module_upvars: &[serialization::SerializedModuleUpvar],
) -> Result<Self> {
// Validate the module can be used with the current allocator
engine.allocator().validate(modules[main_module].module())?;
let signatures = Arc::new(SignatureCollection::new_for_module(
engine.signatures(),
&types.wasm_signatures,
modules.iter().flat_map(|m| m.trampolines().iter().cloned()),
));
let module = modules.remove(main_module); let module = modules.remove(main_module);
// Validate the module can be used with the current allocator let module_upvars = module_upvars
engine.allocator().validate(module.module())?; .iter()
.map(|m| {
mk(
engine,
&modules,
&types,
m.index,
&m.artifact_upvars,
&m.module_upvars,
&signatures,
)
})
.collect::<Result<Vec<_>>>()?;
Ok(Module { return Ok(Self {
inner: Arc::new(ModuleInner { inner: Arc::new(ModuleInner {
engine: engine.clone(), engine: engine.clone(),
types,
module, module,
types: Arc::new(types),
artifact_upvars: modules, artifact_upvars: modules,
module_upvars: Vec::new(), module_upvars,
signatures,
}), }),
}) });
fn mk(
engine: &Engine,
artifacts: &[Arc<CompiledModule>],
types: &Arc<TypeTables>,
module_index: usize,
artifact_upvars: &[usize],
module_upvars: &[serialization::SerializedModuleUpvar],
signatures: &Arc<SignatureCollection>,
) -> Result<Module> {
Ok(Module {
inner: Arc::new(ModuleInner {
engine: engine.clone(),
types: types.clone(),
module: artifacts[module_index].clone(),
artifact_upvars: artifact_upvars
.iter()
.map(|i| artifacts[*i].clone())
.collect(),
module_upvars: module_upvars
.into_iter()
.map(|m| {
mk(
engine,
artifacts,
types,
m.index,
&m.artifact_upvars,
&m.module_upvars,
signatures,
)
})
.collect::<Result<Vec<_>>>()?,
signatures: signatures.clone(),
}),
})
}
} }
/// Validates `binary` input data as a WebAssembly binary given the /// Validates `binary` input data as a WebAssembly binary given the
@@ -416,8 +493,8 @@ impl Module {
) -> Module { ) -> Module {
Module { Module {
inner: Arc::new(ModuleInner { inner: Arc::new(ModuleInner {
types: self.types().clone(), types: self.inner.types.clone(),
engine: self.engine().clone(), engine: self.inner.engine.clone(),
module: self.inner.artifact_upvars[artifact_index].clone(), module: self.inner.artifact_upvars[artifact_index].clone(),
artifact_upvars: artifact_upvars artifact_upvars: artifact_upvars
.iter() .iter()
@@ -432,6 +509,7 @@ impl Module {
wasmtime_environ::ModuleUpvar::Local(i) => modules[i].clone(), wasmtime_environ::ModuleUpvar::Local(i) => modules[i].clone(),
}) })
.collect(), .collect(),
signatures: self.inner.signatures.clone(),
}), }),
} }
} }
@@ -448,6 +526,10 @@ impl Module {
&self.inner.types &self.inner.types
} }
pub(crate) fn signatures(&self) -> &Arc<SignatureCollection> {
&self.inner.signatures
}
/// Looks up the module upvar value at the `index` specified. /// Looks up the module upvar value at the `index` specified.
/// ///
/// Note that this panics if `index` is out of bounds since this should /// Note that this panics if `index` is out of bounds since this should

View File

@@ -1,35 +1,38 @@
use std::collections::BTreeMap; //! Implements a registry of modules for a store.
use std::sync::Arc;
use std::sync::Mutex;
use wasmtime_environ::entity::EntityRef;
use wasmtime_environ::ir;
use wasmtime_environ::wasm::DefinedFuncIndex;
use wasmtime_environ::{FunctionAddressMap, TrapInformation};
use wasmtime_jit::CompiledModule;
/// This is a structure that lives within a `Store` and retains information use crate::{signatures::SignatureCollection, Module};
/// about all modules registered with the `Store` via instantiation. use std::{
/// collections::BTreeMap,
/// "frame information" here refers to things like determining whether a sync::{Arc, Mutex},
/// program counter is a wasm program counter, and additionally mapping program };
/// counters to wasm filenames, modules, line numbers, etc. This store of use wasmtime_environ::{
/// information lives as long as a `Store` lives since modules are never entity::EntityRef,
/// unloaded today. ir::{self, StackMap},
#[derive(Default)] wasm::DefinedFuncIndex,
pub struct StoreFrameInfo { FunctionAddressMap, TrapInformation,
/// An internal map that keeps track of backtrace frame information for };
/// each module. use wasmtime_jit::CompiledModule;
/// use wasmtime_runtime::{ModuleInfo, VMCallerCheckedAnyfunc, VMTrampoline};
/// This map is morally a map of ranges to a map of information for that
/// module. Each module is expected to reside in a disjoint section of lazy_static::lazy_static! {
/// contiguous memory. No modules can overlap. static ref GLOBAL_MODULES: Mutex<GlobalModuleRegistry> = Default::default();
///
/// The key of this map is the highest address in the module and the value
/// is the module's information, which also contains the start address.
ranges: BTreeMap<usize, ModuleFrameInfo>,
} }
impl StoreFrameInfo { fn func_by_pc(module: &CompiledModule, pc: usize) -> Option<(DefinedFuncIndex, u32)> {
let (index, start, _) = module.func_by_pc(pc)?;
Some((index, (pc - start) as u32))
}
/// Used for registering modules with a store.
///
/// The map is from the ending (exclusive) address for the module code to
/// the registered module.
///
/// The `BTreeMap` is used to quickly locate a module based on a program counter value.
#[derive(Default)]
pub struct ModuleRegistry(BTreeMap<usize, Arc<RegisteredModule>>);
impl ModuleRegistry {
/// Fetches frame information about a program counter in a backtrace. /// Fetches frame information about a program counter in a backtrace.
/// ///
/// Returns an object if this `pc` is known to some previously registered /// Returns an object if this `pc` is known to some previously registered
@@ -48,8 +51,14 @@ impl StoreFrameInfo {
self.module(pc)?.lookup_trap_info(pc) self.module(pc)?.lookup_trap_info(pc)
} }
fn module(&self, pc: usize) -> Option<&ModuleFrameInfo> { /// Fetches information about a registered module given a program counter value.
let (end, info) = self.ranges.range(pc..).next()?; pub fn lookup_module(&self, pc: usize) -> Option<Arc<dyn ModuleInfo>> {
self.module(pc)
.map(|m| -> Arc<dyn ModuleInfo> { m.clone() })
}
fn module(&self, pc: usize) -> Option<&Arc<RegisteredModule>> {
let (end, info) = self.0.range(pc..).next()?;
if pc < info.start || *end < pc { if pc < info.start || *end < pc {
return None; return None;
} }
@@ -57,12 +66,13 @@ impl StoreFrameInfo {
Some(info) Some(info)
} }
/// Registers a new compiled module's frame information. /// Registers a new module with the registry.
pub fn register(&mut self, module: &Arc<CompiledModule>) { pub fn register(&mut self, module: &Module) {
let (start, end) = module.code().range(); let compiled_module = module.compiled_module();
let (start, end) = compiled_module.code().range();
// Ignore modules with no code or finished functions // Ignore modules with no code or finished functions
if start == end || module.finished_functions().is_empty() { if start == end || compiled_module.finished_functions().is_empty() {
return; return;
} }
@@ -70,44 +80,58 @@ impl StoreFrameInfo {
// may be a valid PC value // may be a valid PC value
let end = end - 1; let end = end - 1;
// Ensure the module isn't already present in the registry
// This is expected when a module is instantiated multiple times in the same store
if let Some(m) = self.0.get(&end) {
assert_eq!(m.start, start);
return;
}
// Assert that this module's code doesn't collide with any other registered modules // Assert that this module's code doesn't collide with any other registered modules
if let Some((_, prev)) = self.ranges.range(end..).next() { if let Some((_, prev)) = self.0.range(end..).next() {
assert!(prev.start > end); assert!(prev.start > end);
} }
if let Some((prev_end, _)) = self.ranges.range(..=start).next_back() {
if let Some((prev_end, _)) = self.0.range(..=start).next_back() {
assert!(*prev_end < start); assert!(*prev_end < start);
} }
let prev = self.ranges.insert( let prev = self.0.insert(
end, end,
ModuleFrameInfo { Arc::new(RegisteredModule {
start, start,
module: module.clone(), module: compiled_module.clone(),
}, signatures: module.signatures().clone(),
}),
); );
assert!(prev.is_none()); assert!(prev.is_none());
GLOBAL_INFO.lock().unwrap().register(start, end, module); GLOBAL_MODULES.lock().unwrap().register(start, end, module);
}
/// Looks up a trampoline from an anyfunc.
pub fn lookup_trampoline(&self, anyfunc: &VMCallerCheckedAnyfunc) -> Option<VMTrampoline> {
let module = self.module(anyfunc.func_ptr.as_ptr() as usize)?;
module.signatures.trampoline(anyfunc.type_index)
} }
} }
impl Drop for StoreFrameInfo { impl Drop for ModuleRegistry {
fn drop(&mut self) { fn drop(&mut self) {
let mut info = GLOBAL_INFO.lock().unwrap(); let mut info = GLOBAL_MODULES.lock().unwrap();
for end in self.ranges.keys() { for end in self.0.keys() {
info.unregister(*end); info.unregister(*end);
} }
} }
} }
/// Represents a module's frame information. struct RegisteredModule {
#[derive(Clone)]
pub struct ModuleFrameInfo {
start: usize, start: usize,
module: Arc<CompiledModule>, module: Arc<CompiledModule>,
signatures: Arc<SignatureCollection>,
} }
impl ModuleFrameInfo { impl RegisteredModule {
/// Determines if the related module has unparsed debug information. /// Determines if the related module has unparsed debug information.
pub fn has_unparsed_debuginfo(&self) -> bool { pub fn has_unparsed_debuginfo(&self) -> bool {
self.module.has_unparsed_debuginfo() self.module.has_unparsed_debuginfo()
@@ -118,9 +142,9 @@ impl ModuleFrameInfo {
/// Returns an object if this `pc` is known to this module, or returns `None` /// Returns an object if this `pc` is known to this module, or returns `None`
/// if no information can be found. /// if no information can be found.
pub fn lookup_frame_info(&self, pc: usize) -> Option<FrameInfo> { pub fn lookup_frame_info(&self, pc: usize) -> Option<FrameInfo> {
let (index, offset) = self.func(pc)?; let (index, offset) = func_by_pc(&self.module, pc)?;
let (addr_map, _) = self.module.func_info(index); let info = self.module.func_info(index);
let pos = Self::instr_pos(offset, addr_map); let pos = Self::instr_pos(offset, &info.address_map);
// In debug mode for now assert that we found a mapping for `pc` within // In debug mode for now assert that we found a mapping for `pc` within
// the function, because otherwise something is buggy along the way and // the function, because otherwise something is buggy along the way and
@@ -129,8 +153,8 @@ impl ModuleFrameInfo {
debug_assert!(pos.is_some(), "failed to find instruction for {:x}", pc); debug_assert!(pos.is_some(), "failed to find instruction for {:x}", pc);
let instr = match pos { let instr = match pos {
Some(pos) => addr_map.instructions[pos].srcloc, Some(pos) => info.address_map.instructions[pos].srcloc,
None => addr_map.start_srcloc, None => info.address_map.start_srcloc,
}; };
// Use our wasm-relative pc to symbolize this frame. If there's a // Use our wasm-relative pc to symbolize this frame. If there's a
@@ -173,24 +197,20 @@ impl ModuleFrameInfo {
func_index: index.index() as u32, func_index: index.index() as u32,
func_name: module.func_names.get(&index).cloned(), func_name: module.func_names.get(&index).cloned(),
instr, instr,
func_start: addr_map.start_srcloc, func_start: info.address_map.start_srcloc,
symbols, symbols,
}) })
} }
/// Fetches trap information about a program counter in a backtrace. /// Fetches trap information about a program counter in a backtrace.
pub fn lookup_trap_info(&self, pc: usize) -> Option<&TrapInformation> { pub fn lookup_trap_info(&self, pc: usize) -> Option<&TrapInformation> {
let (index, offset) = self.func(pc)?; let (index, offset) = func_by_pc(&self.module, pc)?;
let (_, traps) = self.module.func_info(index); let info = self.module.func_info(index);
let idx = traps let idx = info
.traps
.binary_search_by_key(&offset, |info| info.code_offset) .binary_search_by_key(&offset, |info| info.code_offset)
.ok()?; .ok()?;
Some(&traps[idx]) Some(&info.traps[idx])
}
fn func(&self, pc: usize) -> Option<(DefinedFuncIndex, u32)> {
let (index, start, _) = self.module.func_by_pc(pc)?;
Some((index, (pc - start) as u32))
} }
fn instr_pos(offset: u32, addr_map: &FunctionAddressMap) -> Option<usize> { fn instr_pos(offset: u32, addr_map: &FunctionAddressMap) -> Option<usize> {
@@ -214,56 +234,117 @@ impl ModuleFrameInfo {
} }
} }
/// This is the dual of `StoreFrameInfo` and is stored globally (as the name impl ModuleInfo for RegisteredModule {
/// implies) rather than simply in one `Store`. fn lookup_stack_map(&self, pc: usize) -> Option<&StackMap> {
let (index, offset) = func_by_pc(&self.module, pc)?;
let info = self.module.func_info(index);
// Do a binary search to find the stack map for the given offset.
//
// Because GC safepoints are technically only associated with a single
// PC, we should ideally only care about `Ok(index)` values returned
// from the binary search. However, safepoints are inserted right before
// calls, and there are two things that can disturb the PC/offset
// associated with the safepoint versus the PC we actually use to query
// for the stack map:
//
// 1. The `backtrace` crate gives us the PC in a frame that will be
// *returned to*, and where execution will continue from, rather than
// the PC of the call we are currently at. So we would need to
// disassemble one instruction backwards to query the actual PC for
// the stack map.
//
// TODO: One thing we *could* do to make this a little less error
// prone, would be to assert/check that the nearest GC safepoint
// found is within `max_encoded_size(any kind of call instruction)`
// our queried PC for the target architecture.
//
// 2. Cranelift's stack maps only handle the stack, not
// registers. However, some references that are arguments to a call
// may need to be in registers. In these cases, what Cranelift will
// do is:
//
// a. spill all the live references,
// b. insert a GC safepoint for those references,
// c. reload the references into registers, and finally
// d. make the call.
//
// Step (c) adds drift between the GC safepoint and the location of
// the call, which is where we actually walk the stack frame and
// collect its live references.
//
// Luckily, the spill stack slots for the live references are still
// up to date, so we can still find all the on-stack roots.
// Furthermore, we do not have a moving GC, so we don't need to worry
// whether the following code will reuse the references in registers
// (which would not have been updated to point to the moved objects)
// or reload from the stack slots (which would have been updated to
// point to the moved objects).
let index = match info
.stack_maps
.binary_search_by_key(&offset, |i| i.code_offset)
{
// Exact hit.
Ok(i) => i,
// `Err(0)` means that the associated stack map would have been the
// first element in the array if this pc had an associated stack
// map, but this pc does not have an associated stack map. This can
// only happen inside a Wasm frame if there are no live refs at this
// pc.
Err(0) => return None,
Err(i) => i - 1,
};
Some(&info.stack_maps[index].stack_map)
}
}
// Counterpart to `RegisteredModule`, but stored in the global registry.
struct GlobalRegisteredModule {
start: usize,
module: Arc<CompiledModule>,
/// Note that modules can be instantiated in many stores, so the purpose of
/// this field is to keep track of how many stores have registered a
/// module. Information is only removed from the global registry when this
/// reference count reaches 0.
references: usize,
}
/// This is the global module registry that stores information for all modules
/// that are currently in use by any `Store`.
/// ///
/// The purpose of this map is to be called from signal handlers to determine /// The purpose of this map is to be called from signal handlers to determine
/// whether a program counter is a wasm trap or not. Specifically macOS has /// whether a program counter is a wasm trap or not. Specifically macOS has
/// no contextual information about the thread available, hence the necessity /// no contextual information about the thread available, hence the necessity
/// for global state rather than using thread local state. /// for global state rather than using thread local state.
/// ///
/// This is similar to `StoreFrameInfo` except that it has less information and /// This is similar to `ModuleRegistry` except that it has less information and
/// supports removal. Any time anything is registered with a `StoreFrameInfo` /// supports removal. Any time anything is registered with a `ModuleRegistry`
/// it is also automatically registered with the singleton global frame /// it is also automatically registered with the singleton global module
/// information. When a `StoreFrameInfo` is destroyed then all of its entries /// registry. When a `ModuleRegistry` is destroyed then all of its entries
/// are removed from the global frame information. /// are removed from the global module registry.
#[derive(Default)] #[derive(Default)]
pub struct GlobalFrameInfo { pub struct GlobalModuleRegistry(BTreeMap<usize, GlobalRegisteredModule>);
// The map here behaves the same way as `StoreFrameInfo`.
ranges: BTreeMap<usize, GlobalModuleFrameInfo>,
}
/// This is the equivalent of `ModuleFrameInfo` except it keeps a reference count. impl GlobalModuleRegistry {
struct GlobalModuleFrameInfo {
module: ModuleFrameInfo,
/// Note that modules can be instantiated in many stores, so the purpose of
/// this field is to keep track of how many stores have registered a
/// module. Information is only removed from the global store when this
/// reference count reaches 0.
references: usize,
}
lazy_static::lazy_static! {
static ref GLOBAL_INFO: Mutex<GlobalFrameInfo> = Default::default();
}
impl GlobalFrameInfo {
/// Returns whether the `pc`, according to globally registered information, /// Returns whether the `pc`, according to globally registered information,
/// is a wasm trap or not. /// is a wasm trap or not.
pub(crate) fn is_wasm_pc(pc: usize) -> bool { pub(crate) fn is_wasm_pc(pc: usize) -> bool {
let info = GLOBAL_INFO.lock().unwrap(); let modules = GLOBAL_MODULES.lock().unwrap();
match info.ranges.range(pc..).next() { match modules.0.range(pc..).next() {
Some((end, info)) => { Some((end, entry)) => {
if pc < info.module.start || *end < pc { if pc < entry.start || *end < pc {
return false; return false;
} }
match info.module.func(pc) { match func_by_pc(&entry.module, pc) {
Some((index, offset)) => { Some((index, offset)) => {
let (addr_map, _) = info.module.module.func_info(index); let info = entry.module.func_info(index);
ModuleFrameInfo::instr_pos(offset, addr_map).is_some() RegisteredModule::instr_pos(offset, &info.address_map).is_some()
} }
None => false, None => false,
} }
@@ -274,32 +355,27 @@ impl GlobalFrameInfo {
/// Registers a new region of code, described by `(start, end)` and with /// Registers a new region of code, described by `(start, end)` and with
/// the given function information, with the global information. /// the given function information, with the global information.
fn register(&mut self, start: usize, end: usize, module: &Arc<CompiledModule>) { fn register(&mut self, start: usize, end: usize, module: &Module) {
let info = self let info = self.0.entry(end).or_insert_with(|| GlobalRegisteredModule {
.ranges start,
.entry(end) module: module.compiled_module().clone(),
.or_insert_with(|| GlobalModuleFrameInfo { references: 0,
module: ModuleFrameInfo { });
start,
module: module.clone(),
},
references: 0,
});
// Note that ideally we'd debug_assert that the information previously // Note that ideally we'd debug_assert that the information previously
// stored, if any, matches the `functions` we were given, but for now we // stored, if any, matches the `functions` we were given, but for now we
// just do some simple checks to hope it's the same. // just do some simple checks to hope it's the same.
assert_eq!(info.module.start, start); assert_eq!(info.start, start);
info.references += 1; info.references += 1;
} }
/// Unregisters a region of code (keyed by the `end` address) from this /// Unregisters a region of code (keyed by the `end` address) from the
/// global information. /// global information.
fn unregister(&mut self, end: usize) { fn unregister(&mut self, end: usize) {
let info = self.ranges.get_mut(&end).unwrap(); let info = self.0.get_mut(&end).unwrap();
info.references -= 1; info.references -= 1;
if info.references == 0 { if info.references == 0 {
self.ranges.remove(&end); self.0.remove(&end);
} }
} }
} }
@@ -321,19 +397,6 @@ pub struct FrameInfo {
symbols: Vec<FrameSymbol>, symbols: Vec<FrameSymbol>,
} }
/// Debug information for a symbol that is attached to a [`FrameInfo`].
///
/// When DWARF debug information is present in a wasm file then this structure
/// can be found on a [`FrameInfo`] and can be used to learn about filenames,
/// line numbers, etc, which are the origin of a function in a stack trace.
#[derive(Debug)]
pub struct FrameSymbol {
name: Option<String>,
file: Option<String>,
line: Option<u32>,
column: Option<u32>,
}
impl FrameInfo { impl FrameInfo {
/// Returns the WebAssembly function index for this frame. /// Returns the WebAssembly function index for this frame.
/// ///
@@ -405,6 +468,19 @@ impl FrameInfo {
} }
} }
/// Debug information for a symbol that is attached to a [`FrameInfo`].
///
/// When DWARF debug information is present in a wasm file then this structure
/// can be found on a [`FrameInfo`] and can be used to learn about filenames,
/// line numbers, etc, which are the origin of a function in a stack trace.
#[derive(Debug)]
pub struct FrameSymbol {
name: Option<String>,
file: Option<String>,
line: Option<u32>,
column: Option<u32>,
}
impl FrameSymbol { impl FrameSymbol {
/// Returns the function name associated with this symbol. /// Returns the function name associated with this symbol.
/// ///
@@ -463,7 +539,7 @@ fn test_frame_info() -> Result<(), anyhow::Error> {
)?; )?;
// Create an instance to ensure the frame information is registered. // Create an instance to ensure the frame information is registered.
Instance::new(&store, &module, &[])?; Instance::new(&store, &module, &[])?;
let info = store.frame_info().borrow(); let modules = store.modules().borrow();
for (i, alloc) in module.compiled_module().finished_functions() { for (i, alloc) in module.compiled_module().finished_functions() {
let (start, end) = unsafe { let (start, end) = unsafe {
let ptr = (**alloc).as_ptr(); let ptr = (**alloc).as_ptr();
@@ -471,7 +547,7 @@ fn test_frame_info() -> Result<(), anyhow::Error> {
(ptr as usize, ptr as usize + len) (ptr as usize, ptr as usize + len)
}; };
for pc in start..end { for pc in start..end {
let (frame, _) = info.lookup_frame_info(pc).unwrap(); let (frame, _) = modules.lookup_frame_info(pc).unwrap();
assert!(frame.func_index() == i.as_u32()); assert!(frame.func_index() == i.as_u32());
} }
} }

View File

@@ -1,6 +1,5 @@
//! Implements module serialization. //! Implements module serialization.
use super::ModuleInner;
use crate::{Engine, Module, OptLevel}; use crate::{Engine, Module, OptLevel};
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use bincode::Options; use bincode::Options;
@@ -10,8 +9,7 @@ use std::fmt;
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use std::{collections::HashMap, fmt::Display}; use std::{collections::HashMap, fmt::Display};
use wasmtime_environ::Tunables; use wasmtime_environ::{isa::TargetIsa, settings, Tunables};
use wasmtime_environ::{isa::TargetIsa, settings};
use wasmtime_jit::{ use wasmtime_jit::{
CompilationArtifacts, CompilationStrategy, CompiledModule, Compiler, TypeTables, CompilationArtifacts, CompilationStrategy, CompiledModule, Compiler, TypeTables,
}; };
@@ -123,55 +121,44 @@ impl From<settings::OptLevel> for OptLevel {
} }
} }
/// A small helper struct which defines modules are serialized. /// A small helper struct for serialized module upvars.
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
struct SerializedModuleData<'a> { pub struct SerializedModuleUpvar {
/// All compiled artifacts needed by this module, where the last entry in /// The module's index into the compilation artifact.
/// this list is the artifacts for the module itself. pub index: usize,
artifacts: Vec<MyCow<'a, CompilationArtifacts>>, /// Indexes into the list of all compilation artifacts for this module.
pub artifact_upvars: Vec<usize>,
/// Closed-over module values that are also needed for this module. /// Closed-over module values that are also needed for this module.
modules: Vec<SerializedModuleData<'a>>, pub module_upvars: Vec<SerializedModuleUpvar>,
/// The index into the list of type tables that are used for this module's
/// type tables.
type_tables: usize,
} }
impl<'a> SerializedModuleData<'a> { impl SerializedModuleUpvar {
pub fn new(module: &'a Module) -> (Self, Vec<MyCow<'a, TypeTables>>) { pub fn new(module: &Module, artifacts: &[Arc<CompiledModule>]) -> Self {
let mut pushed = HashMap::new(); // TODO: improve upon the linear searches in the artifact list
let mut tables = Vec::new(); let index = artifacts
return (module_data(module, &mut pushed, &mut tables), tables); .iter()
.position(|a| Arc::as_ptr(a) == Arc::as_ptr(&module.inner.module))
.expect("module should be in artifacts list");
fn module_data<'a>( SerializedModuleUpvar {
module: &'a Module, index,
type_tables_pushed: &mut HashMap<usize, usize>, artifact_upvars: module
type_tables: &mut Vec<MyCow<'a, TypeTables>>, .inner
) -> SerializedModuleData<'a> { .artifact_upvars
// Deduplicate `Arc<TypeTables>` using our two parameters to ensure we .iter()
// serialize type tables as little as possible. .map(|m| {
let ptr = Arc::as_ptr(module.types()); artifacts
let type_tables_idx = *type_tables_pushed.entry(ptr as usize).or_insert_with(|| { .iter()
type_tables.push(MyCow::Borrowed(module.types())); .position(|a| Arc::as_ptr(a) == Arc::as_ptr(m))
type_tables.len() - 1 .expect("artifact should be in artifacts list")
}); })
SerializedModuleData { .collect(),
artifacts: module module_upvars: module
.inner .inner
.artifact_upvars .module_upvars
.iter() .iter()
.map(|i| MyCow::Borrowed(i.compilation_artifacts())) .map(|m| SerializedModuleUpvar::new(m, artifacts))
.chain(Some(MyCow::Borrowed( .collect(),
module.compiled_module().compilation_artifacts(),
)))
.collect(),
modules: module
.inner
.module_upvars
.iter()
.map(|i| module_data(i, type_tables_pushed, type_tables))
.collect(),
type_tables: type_tables_idx,
}
} }
} }
} }
@@ -212,14 +199,36 @@ pub struct SerializedModule<'a> {
strategy: CompilationStrategy, strategy: CompilationStrategy,
tunables: Tunables, tunables: Tunables,
features: WasmFeatures, features: WasmFeatures,
data: SerializedModuleData<'a>, artifacts: Vec<MyCow<'a, CompilationArtifacts>>,
tables: Vec<MyCow<'a, TypeTables>>, module_upvars: Vec<SerializedModuleUpvar>,
types: MyCow<'a, TypeTables>,
} }
impl<'a> SerializedModule<'a> { impl<'a> SerializedModule<'a> {
pub fn new(module: &'a Module) -> Self { pub fn new(module: &'a Module) -> Self {
let (data, tables) = SerializedModuleData::new(module); let compiler = module.engine().compiler();
Self::with_data(module.engine().compiler(), data, tables) let artifacts = module
.inner
.artifact_upvars
.iter()
.map(|m| MyCow::Borrowed(m.compilation_artifacts()))
.chain(Some(MyCow::Borrowed(
module.inner.module.compilation_artifacts(),
)))
.collect::<Vec<_>>();
let module_upvars = module
.inner
.module_upvars
.iter()
.map(|m| SerializedModuleUpvar::new(m, &module.inner.artifact_upvars))
.collect::<Vec<_>>();
Self::with_data(
compiler,
artifacts,
module_upvars,
MyCow::Borrowed(module.types()),
)
} }
pub fn from_artifacts( pub fn from_artifacts(
@@ -229,19 +238,17 @@ impl<'a> SerializedModule<'a> {
) -> Self { ) -> Self {
Self::with_data( Self::with_data(
compiler, compiler,
SerializedModuleData { artifacts.iter().map(MyCow::Borrowed).collect(),
artifacts: artifacts.iter().map(MyCow::Borrowed).collect(), Vec::new(),
modules: Vec::new(), MyCow::Borrowed(types),
type_tables: 0,
},
vec![MyCow::Borrowed(types)],
) )
} }
fn with_data( fn with_data(
compiler: &Compiler, compiler: &Compiler,
data: SerializedModuleData<'a>, artifacts: Vec<MyCow<'a, CompilationArtifacts>>,
tables: Vec<MyCow<'a, TypeTables>>, module_upvars: Vec<SerializedModuleUpvar>,
types: MyCow<'a, TypeTables>,
) -> Self { ) -> Self {
let isa = compiler.isa(); let isa = compiler.isa();
@@ -260,8 +267,9 @@ impl<'a> SerializedModule<'a> {
strategy: compiler.strategy(), strategy: compiler.strategy(),
tunables: compiler.tunables().clone(), tunables: compiler.tunables().clone(),
features: compiler.features().into(), features: compiler.features().into(),
data, artifacts,
tables, module_upvars,
types,
} }
} }
@@ -276,47 +284,26 @@ impl<'a> SerializedModule<'a> {
self.check_tunables(compiler)?; self.check_tunables(compiler)?;
self.check_features(compiler)?; self.check_features(compiler)?;
let types = self let modules = CompiledModule::from_artifacts_list(
.tables self.artifacts
.into_iter() .into_iter()
.map(|t| Arc::new(t.unwrap_owned())) .map(|i| i.unwrap_owned())
.collect::<Vec<_>>(); .collect(),
let module = mk(engine, &types, self.data)?; engine.compiler().isa(),
&*engine.config().profiler,
)?;
// Validate the module can be used with the current allocator assert!(!modules.is_empty());
engine.allocator().validate(module.inner.module.module())?;
return Ok(module); let main_module = modules.len() - 1;
fn mk( Module::from_parts(
engine: &Engine, engine,
types: &Vec<Arc<TypeTables>>, modules,
data: SerializedModuleData<'_>, main_module,
) -> Result<Module> { Arc::new(self.types.unwrap_owned()),
let mut artifacts = CompiledModule::from_artifacts_list( &self.module_upvars,
data.artifacts )
.into_iter()
.map(|i| i.unwrap_owned())
.collect(),
engine.compiler().isa(),
&*engine.config().profiler,
)?;
let inner = ModuleInner {
engine: engine.clone(),
types: types[data.type_tables].clone(),
module: artifacts.pop().unwrap(),
artifact_upvars: artifacts,
module_upvars: data
.modules
.into_iter()
.map(|m| mk(engine, types, m))
.collect::<Result<Vec<_>>>()?,
};
Ok(Module {
inner: Arc::new(inner),
})
}
} }
pub fn to_bytes(&self) -> Result<Vec<u8>> { pub fn to_bytes(&self) -> Result<Vec<u8>> {

View File

@@ -1,155 +0,0 @@
//! Implement a registry of function signatures, for fast indirect call
//! signature checking.
use crate::Module;
use std::collections::{hash_map, HashMap};
use std::convert::TryFrom;
use wasmtime_environ::entity::PrimaryMap;
use wasmtime_environ::wasm::{SignatureIndex, WasmFuncType};
use wasmtime_runtime::{VMSharedSignatureIndex, VMTrampoline};
/// WebAssembly requires that the caller and callee signatures in an indirect
/// call must match. To implement this efficiently, keep a registry of all
/// signatures, shared by all instances, so that call sites can just do an
/// index comparison.
#[derive(Debug, Default)]
pub struct SignatureRegistry {
// Map from a wasm actual function type to the index that it is assigned,
// shared amongst all wasm modules.
wasm2index: HashMap<WasmFuncType, VMSharedSignatureIndex>,
// Map of all known wasm function signatures in this registry. This is
// keyed by `VMSharedSignatureIndex` above.
index_map: Vec<Entry>,
}
#[derive(Debug)]
struct Entry {
// The WebAssembly type signature, using wasm types.
wasm: WasmFuncType,
// The native trampoline used to invoke this type signature from `Func`.
// Note that the code memory for this trampoline is not owned by this
// type, but instead it's expected to be owned by the store that this
// registry lives within.
trampoline: Option<VMTrampoline>,
}
impl SignatureRegistry {
/// Registers all signatures within a module into this registry all at once.
///
/// This will also internally register trampolines compiled in the module.
pub fn register_module(&mut self, module: &Module) {
// Register a unique index for all types in this module, even if they
// don't have a trampoline.
let signatures = &module.types().wasm_signatures;
for ty in module.compiled_module().module().types.values() {
if let wasmtime_environ::ModuleType::Function(index) = ty {
self.register_one(&signatures[*index], None);
}
}
// Once we've got a shared index for all types used then also fill in
// any trampolines that the module has compiled as well.
for (index, trampoline) in module.compiled_module().trampolines() {
let shared = self.wasm2index[&signatures[*index]];
let entry = &mut self.index_map[shared.bits() as usize];
if entry.trampoline.is_none() {
entry.trampoline = Some(*trampoline);
}
}
}
/// Register a signature and return its unique index.
pub fn register(
&mut self,
wasm: &WasmFuncType,
trampoline: VMTrampoline,
) -> VMSharedSignatureIndex {
self.register_one(wasm, Some(trampoline))
}
fn register_one(
&mut self,
wasm: &WasmFuncType,
trampoline: Option<VMTrampoline>,
) -> VMSharedSignatureIndex {
let len = self.wasm2index.len();
match self.wasm2index.entry(wasm.clone()) {
hash_map::Entry::Occupied(entry) => {
let ret = *entry.get();
let entry = &mut self.index_map[ret.bits() as usize];
// If the entry does not previously have a trampoline, then
// overwrite it with whatever was specified by this function.
if entry.trampoline.is_none() {
entry.trampoline = trampoline;
}
ret
}
hash_map::Entry::Vacant(entry) => {
// Keep `signature_hash` len under 2**32 -- VMSharedSignatureIndex::new(std::u32::MAX)
// is reserved for VMSharedSignatureIndex::default().
assert!(
len < std::u32::MAX as usize,
"Invariant check: signature_hash.len() < std::u32::MAX"
);
debug_assert_eq!(len, self.index_map.len());
let index = VMSharedSignatureIndex::new(u32::try_from(len).unwrap());
self.index_map.push(Entry {
wasm: wasm.clone(),
trampoline,
});
entry.insert(index);
index
}
}
}
/// Looks up a shared index from the wasm signature itself.
pub fn lookup(&self, wasm: &WasmFuncType) -> Option<VMSharedSignatureIndex> {
self.wasm2index.get(wasm).cloned()
}
/// Builds a lookup table for a module from the possible module's signature
/// indices to the shared signature index within this registry.
pub fn lookup_table(
&self,
module: &Module,
) -> PrimaryMap<SignatureIndex, VMSharedSignatureIndex> {
// For module-linking using modules this builds up a map that is
// too large. This builds up a map for everything in `TypeTables` but
// that's all the types for all modules in a whole module linking graph,
// which our `module` may not be using.
//
// For all non-module-linking-using modules, though, this is not an
// issue. This is optimizing for the non-module-linking case right now
// and it seems like module linking will likely change to the point that
// this will no longer be an issue in the future.
let signatures = &module.types().wasm_signatures;
let mut map = PrimaryMap::with_capacity(signatures.len());
for wasm in signatures.values() {
map.push(
self.wasm2index
.get(wasm)
.cloned()
.unwrap_or(VMSharedSignatureIndex::new(u32::MAX)),
);
}
map
}
/// Looks up information known about a shared signature index.
///
/// Note that for this operation to be semantically correct the `idx` must
/// have previously come from a call to `register` of this same object.
pub fn lookup_shared(
&self,
idx: VMSharedSignatureIndex,
) -> Option<(&WasmFuncType, VMTrampoline)> {
let (wasm, trampoline) = self
.index_map
.get(idx.bits() as usize)
.map(|e| (&e.wasm, e.trampoline))?;
Some((wasm, trampoline?))
}
}

View File

@@ -0,0 +1,262 @@
//! Implement a registry of function signatures, for fast indirect call
//! signature checking.
use std::{
collections::{hash_map::Entry, HashMap},
sync::RwLock,
};
use std::{convert::TryFrom, sync::Arc};
use wasmtime_environ::entity::PrimaryMap;
use wasmtime_environ::wasm::{SignatureIndex, WasmFuncType};
use wasmtime_runtime::{VMSharedSignatureIndex, VMTrampoline};
/// Represents a collection of shared signatures.
///
/// This is used to register shared signatures with a shared signature registry.
///
/// The collection will unregister any contained signatures with the registry
/// when dropped.
#[derive(Debug)]
pub struct SignatureCollection {
registry: Arc<RwLock<SignatureRegistryInner>>,
signatures: PrimaryMap<SignatureIndex, VMSharedSignatureIndex>,
trampolines: HashMap<VMSharedSignatureIndex, (usize, VMTrampoline)>,
}
impl SignatureCollection {
/// Creates a new, empty signature collection given a signature registry.
pub fn new(registry: &SignatureRegistry) -> Self {
Self {
registry: registry.0.clone(),
signatures: PrimaryMap::new(),
trampolines: HashMap::new(),
}
}
/// Creates a signature collection for a module given the module's signatures
/// and trampolines.
pub fn new_for_module(
registry: &SignatureRegistry,
signatures: &PrimaryMap<SignatureIndex, WasmFuncType>,
trampolines: impl Iterator<Item = (SignatureIndex, VMTrampoline)>,
) -> Self {
let (signatures, trampolines) = registry
.0
.write()
.unwrap()
.register_for_module(signatures, trampolines);
Self {
registry: registry.0.clone(),
signatures,
trampolines,
}
}
/// Treats the signature collection as a map from a module signature index to
/// registered shared signature indexes.
///
/// This is used for looking up module shared signature indexes during module
/// instantiation.
pub fn as_module_map(&self) -> &PrimaryMap<SignatureIndex, VMSharedSignatureIndex> {
&self.signatures
}
/// Gets the shared signature index given a module signature index.
pub fn shared_signature(&self, index: SignatureIndex) -> Option<VMSharedSignatureIndex> {
self.signatures.get(index).copied()
}
/// Gets a trampoline for a registered signature.
pub fn trampoline(&self, index: VMSharedSignatureIndex) -> Option<VMTrampoline> {
self.trampolines
.get(&index)
.map(|(_, trampoline)| *trampoline)
}
/// Registers a single function with the collection.
///
/// Returns the shared signature index for the function.
pub fn register(
&mut self,
ty: &WasmFuncType,
trampoline: VMTrampoline,
) -> VMSharedSignatureIndex {
let index = self.registry.write().unwrap().register(ty);
let entry = match self.trampolines.entry(index) {
Entry::Occupied(e) => e.into_mut(),
Entry::Vacant(e) => e.insert((0, trampoline)),
};
// Increment the ref count
entry.0 += 1;
index
}
}
impl Drop for SignatureCollection {
fn drop(&mut self) {
if !self.signatures.is_empty() || !self.trampolines.is_empty() {
self.registry.write().unwrap().unregister_signatures(self);
}
}
}
#[derive(Debug)]
struct RegistryEntry {
references: usize,
ty: WasmFuncType,
}
#[derive(Debug, Default)]
struct SignatureRegistryInner {
map: HashMap<WasmFuncType, VMSharedSignatureIndex>,
entries: Vec<Option<RegistryEntry>>,
free: Vec<VMSharedSignatureIndex>,
}
impl SignatureRegistryInner {
fn register_for_module(
&mut self,
signatures: &PrimaryMap<SignatureIndex, WasmFuncType>,
trampolines: impl Iterator<Item = (SignatureIndex, VMTrampoline)>,
) -> (
PrimaryMap<SignatureIndex, VMSharedSignatureIndex>,
HashMap<VMSharedSignatureIndex, (usize, VMTrampoline)>,
) {
let mut sigs = PrimaryMap::default();
let mut map = HashMap::default();
for (_, ty) in signatures.iter() {
sigs.push(self.register(ty));
}
for (index, trampoline) in trampolines {
map.insert(sigs[index], (1, trampoline));
}
(sigs, map)
}
fn register(&mut self, ty: &WasmFuncType) -> VMSharedSignatureIndex {
let len = self.map.len();
let index = match self.map.entry(ty.clone()) {
Entry::Occupied(e) => *e.get(),
Entry::Vacant(e) => {
let (index, entry) = match self.free.pop() {
Some(index) => (index, &mut self.entries[index.bits() as usize]),
None => {
// Keep `index_map` len under 2**32 -- VMSharedSignatureIndex::new(std::u32::MAX)
// is reserved for VMSharedSignatureIndex::default().
assert!(
len < std::u32::MAX as usize,
"Invariant check: index_map.len() < std::u32::MAX"
);
debug_assert_eq!(len, self.entries.len());
let index = VMSharedSignatureIndex::new(u32::try_from(len).unwrap());
self.entries.push(None);
(index, self.entries.last_mut().unwrap())
}
};
// The entry should be missing for one just allocated or
// taken from the free list
assert!(entry.is_none());
*entry = Some(RegistryEntry {
references: 0,
ty: ty.clone(),
});
*e.insert(index)
}
};
self.entries[index.bits() as usize]
.as_mut()
.unwrap()
.references += 1;
index
}
fn unregister_signatures(&mut self, collection: &SignatureCollection) {
// If the collection has a populated signatures map, use it to deregister
// This is always 1:1 from entry to registration
if !collection.signatures.is_empty() {
for (_, index) in collection.signatures.iter() {
self.unregister_entry(*index, 1);
}
} else {
// Otherwise, use the trampolines map, which has reference counts related
// to the stored index
for (index, (count, _)) in collection.trampolines.iter() {
self.unregister_entry(*index, *count);
}
}
}
fn unregister_entry(&mut self, index: VMSharedSignatureIndex, count: usize) {
let removed = {
let entry = self.entries[index.bits() as usize].as_mut().unwrap();
debug_assert!(entry.references >= count);
entry.references -= count;
if entry.references == 0 {
self.map.remove(&entry.ty);
self.free.push(index);
true
} else {
false
}
};
if removed {
self.entries[index.bits() as usize] = None;
}
}
}
// `SignatureRegistryInner` implements `Drop` in debug builds to assert that
// all signatures have been unregistered for the registry.
#[cfg(debug_assertions)]
impl Drop for SignatureRegistryInner {
fn drop(&mut self) {
assert!(
self.map.is_empty() && self.free.len() == self.entries.len(),
"signature registry not empty"
);
}
}
/// Implements a shared signature registry.
///
/// WebAssembly requires that the caller and callee signatures in an indirect
/// call must match. To implement this efficiently, keep a registry of all
/// signatures, shared by all instances, so that call sites can just do an
/// index comparison.
#[derive(Debug)]
pub struct SignatureRegistry(Arc<RwLock<SignatureRegistryInner>>);
impl SignatureRegistry {
/// Creates a new shared signature registry.
pub fn new() -> Self {
Self(Arc::new(RwLock::new(SignatureRegistryInner::default())))
}
/// Looks up a function type from a shared signature index.
pub fn lookup_type(&self, index: VMSharedSignatureIndex) -> Option<WasmFuncType> {
self.0
.read()
.unwrap()
.entries
.get(index.bits() as usize)
.and_then(|e| e.as_ref().map(|e| &e.ty).cloned())
}
}

View File

@@ -1,12 +1,11 @@
use crate::frame_info; use crate::{
use crate::frame_info::StoreFrameInfo; module::ModuleRegistry, signatures::SignatureCollection, trampoline::StoreInstanceHandle,
use crate::sig_registry::SignatureRegistry; Engine, Func, Module, Trap,
use crate::trampoline::StoreInstanceHandle; };
use crate::{Engine, Func, FuncType, Module, Trap};
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use std::any::{Any, TypeId}; use std::any::{Any, TypeId};
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use std::collections::{hash_map::Entry, HashMap, HashSet}; use std::collections::{hash_map::Entry, HashMap};
use std::convert::TryFrom; use std::convert::TryFrom;
use std::fmt; use std::fmt;
use std::future::Future; use std::future::Future;
@@ -16,12 +15,10 @@ use std::ptr;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use std::task::{Context, Poll}; use std::task::{Context, Poll};
use wasmtime_environ::wasm;
use wasmtime_jit::{CompiledModule, ModuleCode};
use wasmtime_runtime::{ use wasmtime_runtime::{
Export, InstanceAllocator, InstanceHandle, OnDemandInstanceAllocator, SignalHandler, InstanceAllocator, InstanceHandle, ModuleInfo, OnDemandInstanceAllocator, SignalHandler,
StackMapRegistry, TrapInfo, VMCallerCheckedAnyfunc, VMContext, VMExternRef, TrapInfo, VMCallerCheckedAnyfunc, VMContext, VMExternRef, VMExternRefActivationsTable,
VMExternRefActivationsTable, VMInterrupts, VMTrampoline, VMInterrupts, VMTrampoline,
}; };
/// Used to associate instances with the store. /// Used to associate instances with the store.
@@ -72,20 +69,13 @@ pub struct Store {
pub(crate) struct StoreInner { pub(crate) struct StoreInner {
engine: Engine, engine: Engine,
/// The map of all host functions registered with this store's signature registry
host_funcs: RefCell<HashMap<InstanceHandle, Box<VMCallerCheckedAnyfunc>>>,
interrupts: Arc<VMInterrupts>, interrupts: Arc<VMInterrupts>,
signatures: RefCell<SignatureRegistry>,
instances: RefCell<Vec<StoreInstance>>, instances: RefCell<Vec<StoreInstance>>,
signal_handler: RefCell<Option<Box<SignalHandler<'static>>>>, signal_handler: RefCell<Option<Box<SignalHandler<'static>>>>,
externref_activations_table: VMExternRefActivationsTable, externref_activations_table: VMExternRefActivationsTable,
stack_map_registry: StackMapRegistry, modules: RefCell<ModuleRegistry>,
/// Information about JIT code which allows us to test if a program counter // The signatures and trampolines for `Func` objects
/// is in JIT code, lookup trap information, etc. signatures: RefCell<SignatureCollection>,
frame_info: RefCell<StoreFrameInfo>,
/// Set of all compiled modules that we're holding a strong reference to
/// the module's code for. This includes JIT functions, trampolines, etc.
modules: RefCell<HashSet<ArcModuleCode>>,
// Numbers of resources instantiated in this store. // Numbers of resources instantiated in this store.
instance_count: Cell<usize>, instance_count: Cell<usize>,
memory_count: Cell<usize>, memory_count: Cell<usize>,
@@ -137,21 +127,18 @@ impl Store {
// once-per-thread. Platforms like Unix, however, only require this // once-per-thread. Platforms like Unix, however, only require this
// once-per-program. In any case this is safe to call many times and // once-per-program. In any case this is safe to call many times and
// each one that's not relevant just won't do anything. // each one that's not relevant just won't do anything.
wasmtime_runtime::init_traps(frame_info::GlobalFrameInfo::is_wasm_pc) wasmtime_runtime::init_traps(crate::module::GlobalModuleRegistry::is_wasm_pc)
.expect("failed to initialize trap handling"); .expect("failed to initialize trap handling");
Store { Store {
inner: Rc::new(StoreInner { inner: Rc::new(StoreInner {
engine: engine.clone(), engine: engine.clone(),
host_funcs: RefCell::new(HashMap::new()),
interrupts: Arc::new(Default::default()), interrupts: Arc::new(Default::default()),
signatures: RefCell::new(Default::default()),
instances: RefCell::new(Vec::new()), instances: RefCell::new(Vec::new()),
signal_handler: RefCell::new(None), signal_handler: RefCell::new(None),
externref_activations_table: VMExternRefActivationsTable::new(), externref_activations_table: VMExternRefActivationsTable::new(),
stack_map_registry: StackMapRegistry::default(), modules: RefCell::new(ModuleRegistry::default()),
frame_info: Default::default(), signatures: RefCell::new(SignatureCollection::new(engine.signatures())),
modules: Default::default(),
instance_count: Default::default(), instance_count: Default::default(),
memory_count: Default::default(), memory_count: Default::default(),
table_count: Default::default(), table_count: Default::default(),
@@ -181,35 +168,6 @@ impl Store {
}) })
} }
pub(crate) fn get_host_anyfunc(
&self,
instance: &InstanceHandle,
ty: &FuncType,
trampoline: VMTrampoline,
) -> *mut VMCallerCheckedAnyfunc {
let mut funcs = self.inner.host_funcs.borrow_mut();
let anyfunc = funcs.entry(unsafe { instance.clone() }).or_insert_with(|| {
let mut anyfunc = match instance
.lookup_by_declaration(&wasm::EntityIndex::Function(wasm::FuncIndex::from_u32(0)))
{
Export::Function(f) => unsafe { f.anyfunc.as_ref() }.clone(),
_ => unreachable!(),
};
// Register the function with this store's signature registry
anyfunc.type_index = self
.inner
.signatures
.borrow_mut()
.register(ty.as_wasm_func_type(), trampoline);
Box::new(anyfunc)
});
&mut **anyfunc
}
/// Returns the [`Engine`] that this store is associated with. /// Returns the [`Engine`] that this store is associated with.
#[inline] #[inline]
pub fn engine(&self) -> &Engine { pub fn engine(&self) -> &Engine {
@@ -244,64 +202,32 @@ impl Store {
} }
} }
pub(crate) fn signatures(&self) -> &RefCell<SignatureRegistry> { pub(crate) fn signatures(&self) -> &RefCell<SignatureCollection> {
&self.inner.signatures &self.inner.signatures
} }
pub(crate) fn register_module(&self, module: &Module) { pub(crate) fn lookup_trampoline(&self, anyfunc: &VMCallerCheckedAnyfunc) -> VMTrampoline {
// With a module being instantiated into this `Store` we need to // Look up the trampoline with the store's trampolines (from `Func`).
// preserve its jit-code. References to this module's code and if let Some(trampoline) = self
// trampolines are not owning-references so it's our responsibility to
// keep it all alive within the `Store`.
//
// If this module is already present in the store then we skip all
// further registration steps.
let first = self
.inner .inner
.modules .signatures
.borrow_mut() .borrow()
.insert(ArcModuleCode(module.compiled_module().code().clone())); .trampoline(anyfunc.type_index)
if !first { {
return; return trampoline;
} }
// All modules register their JIT code in a store for two reasons // Look up the trampoline with the registered modules
// currently: if let Some(trampoline) = self.inner.modules.borrow().lookup_trampoline(anyfunc) {
// return trampoline;
// * First we only catch signals/traps if the program counter falls }
// within the jit code of an instantiated wasm module. This ensures
// we don't catch accidental Rust/host segfaults. // Lastly, check with the engine (for `HostFunc`)
//
// * Second when generating a backtrace we'll use this mapping to
// only generate wasm frames for instruction pointers that fall
// within jit code.
self.inner self.inner
.frame_info .engine
.borrow_mut() .host_func_signatures()
.register(module.compiled_module()); .trampoline(anyfunc.type_index)
.expect("trampoline missing")
// We need to know about all the stack maps of all instantiated modules
// so when performing a GC we know about all wasm frames that we find
// on the stack.
self.register_stack_maps(module.compiled_module());
// Signatures are loaded into our `SignatureRegistry` here
// once-per-module (and once-per-signature). This allows us to create
// a `Func` wrapper for any function in the module, which requires that
// we know about the signature and trampoline for all instances.
self.signatures().borrow_mut().register_module(module);
}
fn register_stack_maps(&self, module: &CompiledModule) {
self.stack_map_registry()
.register_stack_maps(module.stack_maps().map(|(func, stack_maps)| unsafe {
let ptr = (*func).as_ptr();
let len = (*func).len();
let start = ptr as usize;
let end = ptr as usize + len;
let range = start..end;
(range, stack_maps)
}));
} }
pub(crate) fn bump_resource_counts(&self, module: &Module) -> Result<()> { pub(crate) fn bump_resource_counts(&self, module: &Module) -> Result<()> {
@@ -363,7 +289,7 @@ impl Store {
.borrow() .borrow()
.iter() .iter()
.any(|i| i.handle.vmctx_ptr() == handle.vmctx_ptr()) .any(|i| i.handle.vmctx_ptr() == handle.vmctx_ptr())
|| self.inner.host_funcs.borrow().get(&handle).is_some() || self.inner.engine.host_func_anyfunc(&handle).is_some()
); );
StoreInstanceHandle { StoreInstanceHandle {
store: self.clone(), store: self.clone(),
@@ -490,12 +416,8 @@ impl Store {
} }
#[inline] #[inline]
pub(crate) fn stack_map_registry(&self) -> &StackMapRegistry { pub(crate) fn modules(&self) -> &RefCell<ModuleRegistry> {
&self.inner.stack_map_registry &self.inner.modules
}
pub(crate) fn frame_info(&self) -> &RefCell<StoreFrameInfo> {
&self.inner.frame_info
} }
/// Notifies that the current Store (and all referenced entities) has been moved over to a /// Notifies that the current Store (and all referenced entities) has been moved over to a
@@ -513,20 +435,21 @@ impl Store {
/// ///
/// It is fine to call this several times: only the first call will have an effect. /// It is fine to call this several times: only the first call will have an effect.
pub unsafe fn notify_switched_thread(&self) { pub unsafe fn notify_switched_thread(&self) {
wasmtime_runtime::init_traps(frame_info::GlobalFrameInfo::is_wasm_pc) wasmtime_runtime::init_traps(crate::module::GlobalModuleRegistry::is_wasm_pc)
.expect("failed to initialize per-threads traps"); .expect("failed to initialize per-threads traps");
} }
#[inline]
pub(crate) fn module_info_lookup(&self) -> &dyn wasmtime_runtime::ModuleInfoLookup {
self.inner.as_ref()
}
/// Perform garbage collection of `ExternRef`s. /// Perform garbage collection of `ExternRef`s.
pub fn gc(&self) { pub fn gc(&self) {
// For this crate's API, we ensure that `set_stack_canary` invariants // For this crate's API, we ensure that `set_stack_canary` invariants
// are upheld for all host-->Wasm calls, and we register every module // are upheld for all host-->Wasm calls.
// used with this store in `self.inner.stack_map_registry`.
unsafe { unsafe {
wasmtime_runtime::gc( wasmtime_runtime::gc(self.inner.as_ref(), &self.inner.externref_activations_table);
&self.inner.stack_map_registry,
&self.inner.externref_activations_table,
);
} }
} }
@@ -987,6 +910,12 @@ impl Drop for StoreInner {
} }
} }
impl wasmtime_runtime::ModuleInfoLookup for StoreInner {
fn lookup(&self, pc: usize) -> Option<Arc<dyn ModuleInfo>> {
self.modules.borrow().lookup_module(pc)
}
}
/// A threadsafe handle used to interrupt instances executing within a /// A threadsafe handle used to interrupt instances executing within a
/// particular `Store`. /// particular `Store`.
/// ///
@@ -1015,24 +944,6 @@ impl InterruptHandle {
} }
} }
// Wrapper struct to implement hash/equality based on the pointer value of the
// `Arc` in question.
struct ArcModuleCode(Arc<ModuleCode>);
impl PartialEq for ArcModuleCode {
fn eq(&self, other: &ArcModuleCode) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
}
impl Eq for ArcModuleCode {}
impl Hash for ArcModuleCode {
fn hash<H: Hasher>(&self, hasher: &mut H) {
Arc::as_ptr(&self.0).hash(hasher)
}
}
struct Reset<'a, T: Copy>(&'a Cell<T>, T); struct Reset<'a, T: Copy>(&'a Cell<T>, T);
impl<T: Copy> Drop for Reset<'_, T> { impl<T: Copy> Drop for Reset<'_, T> {

View File

@@ -19,8 +19,8 @@ use std::sync::Arc;
use wasmtime_environ::{entity::PrimaryMap, wasm, Module}; use wasmtime_environ::{entity::PrimaryMap, wasm, Module};
use wasmtime_runtime::{ use wasmtime_runtime::{
Imports, InstanceAllocationRequest, InstanceAllocator, InstanceHandle, Imports, InstanceAllocationRequest, InstanceAllocator, InstanceHandle,
OnDemandInstanceAllocator, StackMapRegistry, VMExternRefActivationsTable, VMFunctionBody, OnDemandInstanceAllocator, VMExternRefActivationsTable, VMFunctionBody, VMFunctionImport,
VMFunctionImport, VMSharedSignatureIndex, VMSharedSignatureIndex,
}; };
/// A wrapper around `wasmtime_runtime::InstanceHandle` which pairs it with the /// A wrapper around `wasmtime_runtime::InstanceHandle` which pairs it with the
@@ -77,7 +77,7 @@ fn create_handle(
externref_activations_table: store.externref_activations_table() externref_activations_table: store.externref_activations_table()
as *const VMExternRefActivationsTable as *const VMExternRefActivationsTable
as *mut _, as *mut _,
stack_map_registry: store.stack_map_registry() as *const StackMapRegistry as *mut _, module_info_lookup: Some(store.module_info_lookup()),
}, },
)?; )?;

View File

@@ -1,51 +0,0 @@
//! Support for a calling of an imported function.
use crate::trampoline::StoreInstanceHandle;
use crate::Store;
use anyhow::Result;
use std::any::Any;
use std::sync::Arc;
use wasmtime_environ::entity::PrimaryMap;
use wasmtime_environ::wasm::DefinedFuncIndex;
use wasmtime_environ::Module;
use wasmtime_runtime::{
Imports, InstanceAllocationRequest, InstanceAllocator, StackMapRegistry,
VMExternRefActivationsTable, VMFunctionBody, VMFunctionImport, VMSharedSignatureIndex,
};
pub(crate) fn create_handle(
module: Module,
store: &Store,
finished_functions: PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
host_state: Box<dyn Any>,
func_imports: &[VMFunctionImport],
shared_signature_id: Option<VMSharedSignatureIndex>,
) -> Result<StoreInstanceHandle> {
let mut imports = Imports::default();
imports.functions = func_imports;
let module = Arc::new(module);
unsafe {
// Use the default allocator when creating handles associated with host objects
// The configured instance allocator should only be used when creating module instances
// as we don't want host objects to count towards instance limits.
let handle = store
.engine()
.config()
.default_instance_allocator
.allocate(InstanceAllocationRequest {
module: module.clone(),
finished_functions: &finished_functions,
imports,
shared_signatures: shared_signature_id.into(),
host_state,
interrupts: store.interrupts(),
externref_activations_table: store.externref_activations_table()
as *const VMExternRefActivationsTable
as *mut _,
stack_map_registry: store.stack_map_registry() as *const StackMapRegistry as *mut _,
})?;
Ok(store.add_instance(handle, true))
}
}

View File

@@ -1,6 +1,6 @@
//! Support for a calling of an imported function. //! Support for a calling of an imported function.
use crate::{sig_registry::SignatureRegistry, Config, FuncType, Trap}; use crate::{Config, FuncType, Store, Trap};
use anyhow::Result; use anyhow::Result;
use std::any::Any; use std::any::Any;
use std::cmp; use std::cmp;
@@ -262,15 +262,19 @@ pub fn create_function(
ft: &FuncType, ft: &FuncType,
func: Box<dyn Fn(*mut VMContext, *mut u128) -> Result<(), Trap>>, func: Box<dyn Fn(*mut VMContext, *mut u128) -> Result<(), Trap>>,
config: &Config, config: &Config,
registry: Option<&mut SignatureRegistry>, store: Option<&Store>,
) -> Result<(InstanceHandle, VMTrampoline)> { ) -> Result<(InstanceHandle, VMTrampoline)> {
let (module, finished_functions, trampoline, trampoline_state) = let (module, finished_functions, trampoline, trampoline_state) =
create_function_trampoline(config, ft, func)?; create_function_trampoline(config, ft, func)?;
// If there is no signature registry, use the default signature index which is // If there is no store, use the default signature index which is
// guaranteed to trap if there is ever an indirect call on the function (should not happen) // guaranteed to trap if there is ever an indirect call on the function (should not happen)
let shared_signature_id = registry let shared_signature_id = store
.map(|r| r.register(ft.as_wasm_func_type(), trampoline)) .map(|s| {
s.signatures()
.borrow_mut()
.register(ft.as_wasm_func_type(), trampoline)
})
.unwrap_or(VMSharedSignatureIndex::default()); .unwrap_or(VMSharedSignatureIndex::default());
unsafe { unsafe {
@@ -283,7 +287,7 @@ pub fn create_function(
host_state: Box::new(trampoline_state), host_state: Box::new(trampoline_state),
interrupts: std::ptr::null(), interrupts: std::ptr::null(),
externref_activations_table: std::ptr::null_mut(), externref_activations_table: std::ptr::null_mut(),
stack_map_registry: std::ptr::null_mut(), module_info_lookup: None,
})?, })?,
trampoline, trampoline,
)) ))
@@ -315,7 +319,7 @@ pub unsafe fn create_raw_function(
host_state, host_state,
interrupts: std::ptr::null(), interrupts: std::ptr::null(),
externref_activations_table: std::ptr::null_mut(), externref_activations_table: std::ptr::null_mut(),
stack_map_registry: std::ptr::null_mut(), module_info_lookup: None,
})?, })?,
) )
} }

View File

@@ -161,7 +161,7 @@ impl Trap {
maybe_interrupted, maybe_interrupted,
} => { } => {
let mut code = store let mut code = store
.frame_info() .modules()
.borrow() .borrow()
.lookup_trap_info(pc) .lookup_trap_info(pc)
.map(|info| info.trap_code) .map(|info| info.trap_code)
@@ -239,7 +239,7 @@ impl Trap {
// (the call instruction) so we subtract one as the lookup. // (the call instruction) so we subtract one as the lookup.
let pc_to_lookup = if Some(pc) == trap_pc { pc } else { pc - 1 }; let pc_to_lookup = if Some(pc) == trap_pc { pc } else { pc - 1 };
if let Some((info, has_unparsed_debuginfo)) = if let Some((info, has_unparsed_debuginfo)) =
store.frame_info().borrow().lookup_frame_info(pc_to_lookup) store.modules().borrow().lookup_frame_info(pc_to_lookup)
{ {
wasm_trace.push(info); wasm_trace.push(info);

View File

@@ -204,8 +204,7 @@ impl ExternType {
) -> ExternType { ) -> ExternType {
match ty { match ty {
EntityType::Function(idx) => { EntityType::Function(idx) => {
let sig = &types.wasm_signatures[*idx]; FuncType::from_wasm_func_type(types.wasm_signatures[*idx].clone()).into()
FuncType::from_wasm_func_type(sig).into()
} }
EntityType::Global(ty) => GlobalType::from_wasmtime_global(ty).into(), EntityType::Global(ty) => GlobalType::from_wasmtime_global(ty).into(),
EntityType::Memory(ty) => MemoryType::from_wasmtime_memory(ty).into(), EntityType::Memory(ty) => MemoryType::from_wasmtime_memory(ty).into(),
@@ -298,8 +297,8 @@ impl FuncType {
&self.sig &self.sig
} }
pub(crate) fn from_wasm_func_type(sig: &wasm::WasmFuncType) -> FuncType { pub(crate) fn from_wasm_func_type(sig: wasm::WasmFuncType) -> FuncType {
FuncType { sig: sig.clone() } Self { sig }
} }
} }

View File

@@ -1,4 +1,4 @@
use crate::{Extern, Store}; use crate::{signatures::SignatureCollection, Extern, Store};
use anyhow::{bail, Context, Result}; use anyhow::{bail, Context, Result};
use wasmtime_environ::wasm::{ use wasmtime_environ::wasm::{
EntityType, Global, InstanceTypeIndex, Memory, ModuleTypeIndex, SignatureIndex, Table, EntityType, Global, InstanceTypeIndex, Memory, ModuleTypeIndex, SignatureIndex, Table,
@@ -6,6 +6,7 @@ use wasmtime_environ::wasm::{
use wasmtime_jit::TypeTables; use wasmtime_jit::TypeTables;
pub struct MatchCx<'a> { pub struct MatchCx<'a> {
pub signatures: &'a SignatureCollection,
pub types: &'a TypeTables, pub types: &'a TypeTables,
pub store: &'a Store, pub store: &'a Store,
} }
@@ -70,12 +71,7 @@ impl MatchCx<'_> {
} }
pub fn func(&self, expected: SignatureIndex, actual: &crate::Func) -> Result<()> { pub fn func(&self, expected: SignatureIndex, actual: &crate::Func) -> Result<()> {
let matches = match self let matches = match self.signatures.shared_signature(expected) {
.store
.signatures()
.borrow()
.lookup(&self.types.wasm_signatures[expected])
{
Some(idx) => actual.sig_index() == idx, Some(idx) => actual.sig_index() == idx,
// If our expected signature isn't registered, then there's no way // If our expected signature isn't registered, then there's no way
// that `actual` can match it. // that `actual` can match it.
@@ -114,15 +110,19 @@ impl MatchCx<'_> {
let module = actual.compiled_module().module(); let module = actual.compiled_module().module();
self.imports_match( self.imports_match(
expected, expected,
actual.signatures(),
actual.types(), actual.types(),
module.imports().map(|(name, field, ty)| { module.imports().map(|(name, field, ty)| {
assert!(field.is_none()); // should be true if module linking is enabled assert!(field.is_none()); // should be true if module linking is enabled
(name, ty) (name, ty)
}), }),
)?; )?;
self.exports_match(expected_sig.exports, actual.types(), |name| { self.exports_match(
module.exports.get(name).map(|idx| module.type_of(*idx)) expected_sig.exports,
})?; actual.signatures(),
actual.types(),
|name| module.exports.get(name).map(|idx| module.type_of(*idx)),
)?;
Ok(()) Ok(())
} }
@@ -133,6 +133,7 @@ impl MatchCx<'_> {
fn imports_match<'a>( fn imports_match<'a>(
&self, &self,
expected: ModuleTypeIndex, expected: ModuleTypeIndex,
actual_signatures: &SignatureCollection,
actual_types: &TypeTables, actual_types: &TypeTables,
actual_imports: impl Iterator<Item = (&'a str, EntityType)>, actual_imports: impl Iterator<Item = (&'a str, EntityType)>,
) -> Result<()> { ) -> Result<()> {
@@ -146,10 +147,11 @@ impl MatchCx<'_> {
None => bail!("expected type doesn't import {:?}", name), None => bail!("expected type doesn't import {:?}", name),
}; };
MatchCx { MatchCx {
signatures: actual_signatures,
types: actual_types, types: actual_types,
store: self.store, store: self.store,
} }
.extern_ty_matches(&actual_ty, expected_ty, self.types) .extern_ty_matches(&actual_ty, expected_ty, self.signatures, self.types)
.with_context(|| format!("module import {:?} incompatible", name))?; .with_context(|| format!("module import {:?} incompatible", name))?;
} }
Ok(()) Ok(())
@@ -160,6 +162,7 @@ impl MatchCx<'_> {
fn exports_match( fn exports_match(
&self, &self,
expected: InstanceTypeIndex, expected: InstanceTypeIndex,
actual_signatures: &SignatureCollection,
actual_types: &TypeTables, actual_types: &TypeTables,
lookup: impl Fn(&str) -> Option<EntityType>, lookup: impl Fn(&str) -> Option<EntityType>,
) -> Result<()> { ) -> Result<()> {
@@ -169,7 +172,7 @@ impl MatchCx<'_> {
for (name, expected) in self.types.instance_signatures[expected].exports.iter() { for (name, expected) in self.types.instance_signatures[expected].exports.iter() {
match lookup(name) { match lookup(name) {
Some(ty) => self Some(ty) => self
.extern_ty_matches(expected, &ty, actual_types) .extern_ty_matches(expected, &ty, actual_signatures, actual_types)
.with_context(|| format!("export {:?} incompatible", name))?, .with_context(|| format!("export {:?} incompatible", name))?,
None => bail!("failed to find export {:?}", name), None => bail!("failed to find export {:?}", name),
} }
@@ -183,6 +186,7 @@ impl MatchCx<'_> {
&self, &self,
expected: &EntityType, expected: &EntityType,
actual_ty: &EntityType, actual_ty: &EntityType,
actual_signatures: &SignatureCollection,
actual_types: &TypeTables, actual_types: &TypeTables,
) -> Result<()> { ) -> Result<()> {
let actual_desc = match actual_ty { let actual_desc = match actual_ty {
@@ -221,7 +225,7 @@ impl MatchCx<'_> {
EntityType::Instance(expected) => match actual_ty { EntityType::Instance(expected) => match actual_ty {
EntityType::Instance(actual) => { EntityType::Instance(actual) => {
let sig = &actual_types.instance_signatures[*actual]; let sig = &actual_types.instance_signatures[*actual];
self.exports_match(*expected, actual_types, |name| { self.exports_match(*expected, actual_signatures, actual_types, |name| {
sig.exports.get(name).cloned() sig.exports.get(name).cloned()
})?; })?;
Ok(()) Ok(())
@@ -237,15 +241,19 @@ impl MatchCx<'_> {
self.imports_match( self.imports_match(
*expected, *expected,
actual_signatures,
actual_types, actual_types,
actual_module_sig actual_module_sig
.imports .imports
.iter() .iter()
.map(|(module, ty)| (module.as_str(), ty.clone())), .map(|(module, ty)| (module.as_str(), ty.clone())),
)?; )?;
self.exports_match(expected_module_sig.exports, actual_types, |name| { self.exports_match(
actual_instance_sig.exports.get(name).cloned() expected_module_sig.exports,
})?; actual_signatures,
actual_types,
|name| actual_instance_sig.exports.get(name).cloned(),
)?;
Ok(()) Ok(())
} }
_ => bail!("expected module, but found {}", actual_desc), _ => bail!("expected module, but found {}", actual_desc),

View File

@@ -98,7 +98,7 @@ impl Val {
let externref_ptr = x.inner.as_raw(); let externref_ptr = x.inner.as_raw();
store store
.externref_activations_table() .externref_activations_table()
.insert_with_gc(x.inner, store.stack_map_registry()); .insert_with_gc(x.inner, store.module_info_lookup());
ptr::write(p as *mut *mut u8, externref_ptr) ptr::write(p as *mut *mut u8, externref_ptr)
} }
Val::FuncRef(f) => ptr::write( Val::FuncRef(f) => ptr::write(