wasmtime: Add support for `ref.func` and `table.grow` with `funcref`s

`funcref`s are implemented as `NonNull<VMCallerCheckedAnyfunc>`.

This should be more efficient than using a `VMExternRef` that points at a
`VMCallerCheckedAnyfunc` because it gets rid of an indirection, dynamic
allocation, and some reference counting.

Note that the null function reference is *NOT* a null pointer; it is a
`VMCallerCheckedAnyfunc` that has a null `func_ptr` member.
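
For orientation, here is a minimal sketch of the representation described above. It is not the actual definition from the runtime crate; the field types are simplified stand-ins:

```rust
// Sketch only: the real `VMCallerCheckedAnyfunc` lives in the runtime's
// vmcontext module; the field types here are simplified placeholders.
use std::ptr::NonNull;

#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    pub func_ptr: *const u8, // simplified; a null `func_ptr` marks the null funcref
    pub type_index: u32,     // simplified shared-signature index
    pub vmctx: *mut u8,      // simplified pointer to the owning instance's VMContext
}

// A `funcref` value is a single pointer to an anyfunc that the instance keeps
// at a stable address, so there is no `VMExternRef` wrapper and therefore no
// extra indirection, heap allocation, or reference counting.
pub type FuncRef = NonNull<VMCallerCheckedAnyfunc>;
```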

Part of #929
Author: Nick Fitzgerald
Date:   2020-06-18 11:04:40 -07:00
Parent: ddc2ce8080
Commit: 58bb5dd953

37 changed files with 603 additions and 305 deletions


@@ -21,13 +21,15 @@ use std::any::Any;
 use std::cell::RefCell;
 use std::collections::HashMap;
 use std::convert::TryFrom;
+use std::ptr::NonNull;
 use std::sync::Arc;
 use std::{mem, ptr, slice};
 use thiserror::Error;
 use wasmtime_environ::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
 use wasmtime_environ::wasm::{
     DataIndex, DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex,
-    ElemIndex, FuncIndex, GlobalIndex, GlobalInit, MemoryIndex, SignatureIndex, TableIndex,
+    ElemIndex, FuncIndex, GlobalIndex, GlobalInit, MemoryIndex, SignatureIndex, TableElementType,
+    TableIndex,
 };
 use wasmtime_environ::{ir, DataInitializer, EntityIndex, Module, TableElements, VMOffsets};
@@ -54,7 +56,7 @@ pub(crate) struct Instance {
     /// Passive elements in this instantiation. As `elem.drop`s happen, these
     /// entries get removed. A missing entry is considered equivalent to an
     /// empty slice.
-    passive_elements: RefCell<HashMap<ElemIndex, Box<[VMCallerCheckedAnyfunc]>>>,
+    passive_elements: RefCell<HashMap<ElemIndex, Box<[*mut VMCallerCheckedAnyfunc]>>>,

     /// Passive data segments from our module. As `data.drop`s happen, entries
     /// get removed. A missing entry is considered equivalent to an empty slice.
@@ -273,23 +275,10 @@ impl Instance {
     pub fn lookup_by_declaration(&self, export: &EntityIndex) -> Export {
         match export {
             EntityIndex::Function(index) => {
-                let signature = self.signature_id(self.module.local.functions[*index]);
-                let (address, vmctx) =
-                    if let Some(def_index) = self.module.local.defined_func_index(*index) {
-                        (
-                            self.finished_functions[def_index] as *const _,
-                            self.vmctx_ptr(),
-                        )
-                    } else {
-                        let import = self.imported_function(*index);
-                        (import.body, import.vmctx)
-                    };
-                ExportFunction {
-                    address,
-                    signature,
-                    vmctx,
-                }
-                .into()
+                let anyfunc = self.get_caller_checked_anyfunc(*index).unwrap();
+                let anyfunc =
+                    NonNull::new(anyfunc as *const VMCallerCheckedAnyfunc as *mut _).unwrap();
+                ExportFunction { anyfunc }.into()
             }
             EntityIndex::Table(index) => {
                 let (definition, vmctx) =
@@ -448,6 +437,11 @@ impl Instance {
         foreign_instance.memory_size(foreign_index)
     }

+    pub(crate) fn table_element_type(&self, table_index: TableIndex) -> TableElementType {
+        let table = self.get_table(table_index);
+        table.element_type()
+    }
+
     /// Grow table by the specified amount of elements, filling them with
     /// `init_value`.
     ///
@@ -513,30 +507,25 @@ impl Instance {
         Layout::from_size_align(size, align).unwrap()
     }

-    /// Get a `VMCallerCheckedAnyfunc` for the given `FuncIndex`.
-    fn get_caller_checked_anyfunc(&self, index: FuncIndex) -> VMCallerCheckedAnyfunc {
+    /// Get a `&VMCallerCheckedAnyfunc` for the given `FuncIndex`.
+    ///
+    /// Returns `None` if the index is the reserved index value.
+    ///
+    /// The returned reference is a stable reference that won't be moved and can
+    /// be passed into JIT code.
+    pub(crate) fn get_caller_checked_anyfunc(
+        &self,
+        index: FuncIndex,
+    ) -> Option<&VMCallerCheckedAnyfunc> {
         if index == FuncIndex::reserved_value() {
-            return VMCallerCheckedAnyfunc::default();
+            return None;
         }

-        let sig = self.module.local.functions[index];
-        let type_index = self.signature_id(sig);
-        let (func_ptr, vmctx) = if let Some(def_index) = self.module.local.defined_func_index(index)
-        {
-            (
-                self.finished_functions[def_index] as *const _,
-                self.vmctx_ptr(),
-            )
-        } else {
-            let import = self.imported_function(index);
-            (import.body, import.vmctx)
-        };
-        VMCallerCheckedAnyfunc {
-            func_ptr,
-            type_index,
-            vmctx,
-        }
+        Some(unsafe { &*self.anyfunc_ptr(index) })
+    }
+
+    unsafe fn anyfunc_ptr(&self, index: FuncIndex) -> *mut VMCallerCheckedAnyfunc {
+        self.vmctx_plus_offset(self.offsets.vmctx_anyfunc(index))
     }

     /// The `table.init` operation: initializes a portion of a table with a
@@ -574,7 +563,7 @@ impl Instance {
         // TODO(#983): investigate replacing this get/set loop with a `memcpy`.
         for (dst, src) in (dst..dst + len).zip(src..src + len) {
             table
-                .set(dst, TableElement::FuncRef(elem[src as usize].clone()))
+                .set(dst, TableElement::FuncRef(elem[src as usize]))
                 .expect("should never panic because we already did the bounds check above");
         }
@@ -932,6 +921,30 @@ impl InstanceHandle {
         *instance.externref_activations_table() = externref_activations_table;
         *instance.stack_map_registry() = stack_map_registry;

+        for (index, sig) in instance.module.local.functions.iter() {
+            let type_index = instance.signature_id(*sig);
+            let (func_ptr, vmctx) =
+                if let Some(def_index) = instance.module.local.defined_func_index(index) {
+                    (
+                        NonNull::new(instance.finished_functions[def_index] as *mut _).unwrap(),
+                        instance.vmctx_ptr(),
+                    )
+                } else {
+                    let import = instance.imported_function(index);
+                    (import.body, import.vmctx)
+                };
+            ptr::write(
+                instance.anyfunc_ptr(index),
+                VMCallerCheckedAnyfunc {
+                    func_ptr,
+                    type_index,
+                    vmctx,
+                },
+            );
+        }
+
         // Perform infallible initialization in this constructor, while fallible
         // initialization is deferred to the `initialize` method.
         initialize_passive_elements(instance);
@@ -1246,12 +1259,14 @@ fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
         }

         for (i, func_idx) in init.elements.iter().enumerate() {
-            let anyfunc = instance.get_caller_checked_anyfunc(*func_idx);
+            let anyfunc = instance.get_caller_checked_anyfunc(*func_idx).map_or(
+                ptr::null_mut(),
+                |f: &VMCallerCheckedAnyfunc| {
+                    f as *const VMCallerCheckedAnyfunc as *mut VMCallerCheckedAnyfunc
+                },
+            );
             table
-                .set(
-                    u32::try_from(start + i).unwrap(),
-                    TableElement::FuncRef(anyfunc),
-                )
+                .set(u32::try_from(start + i).unwrap(), anyfunc.into())
                 .unwrap();
         }
     }
@@ -1280,7 +1295,14 @@ fn initialize_passive_elements(instance: &Instance) {
                     *idx,
                     segments
                         .iter()
-                        .map(|s| instance.get_caller_checked_anyfunc(*s))
+                        .map(|s| {
+                            instance.get_caller_checked_anyfunc(*s).map_or(
+                                ptr::null_mut(),
+                                |f: &VMCallerCheckedAnyfunc| {
+                                    f as *const VMCallerCheckedAnyfunc as *mut _
+                                },
+                            )
+                        })
                         .collect(),
                 )
             }),
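
The two initialization hunks above repeat the same lowering from `Option<&VMCallerCheckedAnyfunc>` to a nullable raw pointer. As a standalone sketch (the helper name is hypothetical, not part of the change), the pattern is:

```rust
use std::ptr;

pub struct VMCallerCheckedAnyfunc { /* func_ptr, type_index, vmctx */ }

// Hypothetical helper mirroring the `map_or(ptr::null_mut(), ...)` calls in
// `initialize_tables` and `initialize_passive_elements`: the reserved
// `FuncIndex` (no anyfunc) becomes a null table element, while a real anyfunc
// is stored as a raw pointer to its stable slot in the instance's VMContext.
pub fn anyfunc_to_raw(anyfunc: Option<&VMCallerCheckedAnyfunc>) -> *mut VMCallerCheckedAnyfunc {
    anyfunc.map_or(ptr::null_mut(), |f| {
        f as *const VMCallerCheckedAnyfunc as *mut VMCallerCheckedAnyfunc
    })
}
```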