Rename VMCallerCheckedAnyfunc to VMCallerCheckedFuncRef (#5738)

At some point what is now `funcref` was called `anyfunc` and the spec changed,
but we didn't update our internal names. This does that.

Co-authored-by: Jamey Sharp <jsharp@fastly.com>
This commit is contained in:
Nick Fitzgerald
2023-02-07 14:09:02 -08:00
committed by GitHub
parent edfa10d607
commit 317cc51337
23 changed files with 126 additions and 126 deletions

View File

@@ -95,7 +95,7 @@ impl ComponentCompiler for Compiler {
None => builder.ins().iconst(pointer_type, 0),
});
// realloc: *mut VMCallerCheckedAnyfunc
// realloc: *mut VMCallerCheckedFuncRef
host_sig.params.push(ir::AbiParam::new(pointer_type));
callee_args.push(match realloc {
Some(idx) => builder.ins().load(

View File

@@ -1586,21 +1586,21 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
) -> WasmResult<ir::Inst> {
let pointer_type = self.pointer_type();
// Get the anyfunc pointer (the funcref) from the table.
let anyfunc_ptr = self.get_or_init_funcref_table_elem(builder, table_index, table, callee);
// Get the funcref pointer from the table.
let funcref_ptr = self.get_or_init_funcref_table_elem(builder, table_index, table, callee);
// Check for whether the table element is null, and trap if so.
builder
.ins()
.trapz(anyfunc_ptr, ir::TrapCode::IndirectCallToNull);
.trapz(funcref_ptr, ir::TrapCode::IndirectCallToNull);
// Dereference anyfunc pointer to get the function address.
// Dereference the funcref pointer to get the function address.
let mem_flags = ir::MemFlags::trusted();
let func_addr = builder.ins().load(
pointer_type,
mem_flags,
anyfunc_ptr,
i32::from(self.offsets.ptr.vmcaller_checked_anyfunc_func_ptr()),
funcref_ptr,
i32::from(self.offsets.ptr.vmcaller_checked_func_ref_func_ptr()),
);
// If necessary, check the signature.
@@ -1612,7 +1612,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
let base = builder.ins().global_value(pointer_type, vmctx);
// Load the caller ID. This requires loading the
// `*mut VMCallerCheckedAnyfunc` base pointer from `VMContext`
// `*mut VMCallerCheckedFuncRef` base pointer from `VMContext`
// and then loading, based on `SignatureIndex`, the
// corresponding entry.
let mem_flags = ir::MemFlags::trusted().with_readonly();
@@ -1635,8 +1635,8 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
let callee_sig_id = builder.ins().load(
sig_id_type,
mem_flags,
anyfunc_ptr,
i32::from(self.offsets.ptr.vmcaller_checked_anyfunc_type_index()),
funcref_ptr,
i32::from(self.offsets.ptr.vmcaller_checked_func_ref_type_index()),
);
// Check that they match.
@@ -1657,8 +1657,8 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
let vmctx = builder.ins().load(
pointer_type,
mem_flags,
anyfunc_ptr,
i32::from(self.offsets.ptr.vmcaller_checked_anyfunc_vmctx()),
funcref_ptr,
i32::from(self.offsets.ptr.vmcaller_checked_func_ref_vmctx()),
);
real_call_args.push(vmctx);
real_call_args.push(caller_vmctx);

View File

@@ -184,7 +184,7 @@ pub enum GlobalInitializer {
/// A core wasm function was "generated" via `canon lower` of a function
/// that was `canon lift`'d in the same component, meaning that the function
/// always traps. This is recorded within the `VMComponentContext` as a new
/// `VMCallerCheckedAnyfunc` that's available for use.
/// `VMCallerCheckedFuncRef` that's available for use.
AlwaysTrap(AlwaysTrap),
/// A core wasm linear memory is going to be saved into the
@@ -213,7 +213,7 @@ pub enum GlobalInitializer {
SaveModuleImport(RuntimeImportIndex),
/// Similar to `ExtractMemory` and friends and indicates that a
/// `VMCallerCheckedAnyfunc` needs to be initialized for a transcoder
/// `VMCallerCheckedFuncRef` needs to be initialized for a transcoder
/// function and this will later be used to instantiate an adapter module.
Transcoder(Transcoder),
}
@@ -469,7 +469,7 @@ pub enum StringEncoding {
pub struct Transcoder {
/// The index of the transcoder being defined and initialized.
///
/// This indicates which `VMCallerCheckedAnyfunc` slot is written to in a
/// This indicates which `VMCallerCheckedFuncRef` slot is written to in a
/// `VMComponentContext`.
pub index: RuntimeTranscoderIndex,
/// The transcoding operation being performed.

View File

@@ -181,7 +181,7 @@ indices! {
/// Index into the list of transcoders identified during compilation.
///
/// This is used to index the `VMCallerCheckedAnyfunc` slots reserved for
/// This is used to index the `VMCallerCheckedFuncRef` slots reserved for
/// string encoders which reference linear memories defined within a
/// component.
pub struct RuntimeTranscoderIndex(u32);

View File

@@ -6,13 +6,13 @@
// store: *mut dyn Store,
// limits: *const VMRuntimeLimits,
// flags: [VMGlobalDefinition; component.num_runtime_component_instances],
// lowering_anyfuncs: [VMCallerCheckedAnyfunc; component.num_lowerings],
// always_trap_anyfuncs: [VMCallerCheckedAnyfunc; component.num_always_trap],
// transcoder_anyfuncs: [VMCallerCheckedAnyfunc; component.num_transcoders],
// lowering_anyfuncs: [VMCallerCheckedFuncRef; component.num_lowerings],
// always_trap_anyfuncs: [VMCallerCheckedFuncRef; component.num_always_trap],
// transcoder_anyfuncs: [VMCallerCheckedFuncRef; component.num_transcoders],
// lowerings: [VMLowering; component.num_lowerings],
// memories: [*mut VMMemoryDefinition; component.num_memories],
// reallocs: [*mut VMCallerCheckedAnyfunc; component.num_reallocs],
// post_returns: [*mut VMCallerCheckedAnyfunc; component.num_post_returns],
// reallocs: [*mut VMCallerCheckedFuncRef; component.num_reallocs],
// post_returns: [*mut VMCallerCheckedFuncRef; component.num_post_returns],
// }
use crate::component::{
@@ -57,7 +57,7 @@ pub struct VMComponentOffsets<P> {
/// least 1).
pub num_runtime_component_instances: u32,
/// Number of "always trap" functions which have their
/// `VMCallerCheckedAnyfunc` stored inline in the `VMComponentContext`.
/// `VMCallerCheckedFuncRef` stored inline in the `VMComponentContext`.
pub num_always_trap: u32,
/// Number of transcoders needed for string conversion.
pub num_transcoders: u32,
@@ -148,9 +148,9 @@ impl<P: PtrSize> VMComponentOffsets<P> {
align(16),
size(flags) = cmul(ret.num_runtime_component_instances, ret.ptr.size_of_vmglobal_definition()),
align(u32::from(ret.ptr.size())),
size(lowering_anyfuncs) = cmul(ret.num_lowerings, ret.ptr.size_of_vmcaller_checked_anyfunc()),
size(always_trap_anyfuncs) = cmul(ret.num_always_trap, ret.ptr.size_of_vmcaller_checked_anyfunc()),
size(transcoder_anyfuncs) = cmul(ret.num_transcoders, ret.ptr.size_of_vmcaller_checked_anyfunc()),
size(lowering_anyfuncs) = cmul(ret.num_lowerings, ret.ptr.size_of_vmcaller_checked_func_ref()),
size(always_trap_anyfuncs) = cmul(ret.num_always_trap, ret.ptr.size_of_vmcaller_checked_func_ref()),
size(transcoder_anyfuncs) = cmul(ret.num_transcoders, ret.ptr.size_of_vmcaller_checked_func_ref()),
size(lowerings) = cmul(ret.num_lowerings, ret.ptr.size() * 2),
size(memories) = cmul(ret.num_runtime_memories, ret.ptr.size()),
size(reallocs) = cmul(ret.num_runtime_reallocs, ret.ptr.size()),
@@ -210,12 +210,12 @@ impl<P: PtrSize> VMComponentOffsets<P> {
self.lowering_anyfuncs
}
/// The offset of `VMCallerCheckedAnyfunc` for the `index` specified.
/// The offset of `VMCallerCheckedFuncRef` for the `index` specified.
#[inline]
pub fn lowering_anyfunc(&self, index: LoweredIndex) -> u32 {
assert!(index.as_u32() < self.num_lowerings);
self.lowering_anyfuncs()
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_anyfunc())
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_func_ref())
}
/// The offset of the `always_trap_anyfuncs` field.
@@ -224,12 +224,12 @@ impl<P: PtrSize> VMComponentOffsets<P> {
self.always_trap_anyfuncs
}
/// The offset of `VMCallerCheckedAnyfunc` for the `index` specified.
/// The offset of `VMCallerCheckedFuncRef` for the `index` specified.
#[inline]
pub fn always_trap_anyfunc(&self, index: RuntimeAlwaysTrapIndex) -> u32 {
assert!(index.as_u32() < self.num_always_trap);
self.always_trap_anyfuncs()
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_anyfunc())
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_func_ref())
}
/// The offset of the `transcoder_anyfuncs` field.
@@ -238,12 +238,12 @@ impl<P: PtrSize> VMComponentOffsets<P> {
self.transcoder_anyfuncs
}
/// The offset of `VMCallerCheckedAnyfunc` for the `index` specified.
/// The offset of `VMCallerCheckedFuncRef` for the `index` specified.
#[inline]
pub fn transcoder_anyfunc(&self, index: RuntimeTranscoderIndex) -> u32 {
assert!(index.as_u32() < self.num_transcoders);
self.transcoder_anyfuncs()
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_anyfunc())
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_func_ref())
}
/// The offset of the `lowerings` field.
@@ -309,7 +309,7 @@ impl<P: PtrSize> VMComponentOffsets<P> {
self.reallocs
}
/// The offset of the `*mut VMCallerCheckedAnyfunc` for the runtime index
/// The offset of the `*mut VMCallerCheckedFuncRef` for the runtime index
/// provided.
#[inline]
pub fn runtime_realloc(&self, index: RuntimeReallocIndex) -> u32 {
@@ -323,7 +323,7 @@ impl<P: PtrSize> VMComponentOffsets<P> {
self.post_returns
}
/// The offset of the `*mut VMCallerCheckedAnyfunc` for the runtime index
/// The offset of the `*mut VMCallerCheckedFuncRef` for the runtime index
/// provided.
#[inline]
pub fn runtime_post_return(&self, index: RuntimePostReturnIndex) -> u32 {

View File

@@ -809,7 +809,7 @@ pub struct Module {
pub num_imported_globals: usize,
/// Number of functions that "escape" from this module may need to have a
/// `VMCallerCheckedAnyfunc` constructed for them.
/// `VMCallerCheckedFuncRef` constructed for them.
///
/// This is also the number of functions in the `functions` array below with
/// an `anyfunc` index (and is the maximum anyfunc index).

View File

@@ -20,7 +20,7 @@
// memories: [*mut VMMemoryDefinition; module.num_defined_memories],
// owned_memories: [VMMemoryDefinition; module.num_owned_memories],
// globals: [VMGlobalDefinition; module.num_defined_globals],
// anyfuncs: [VMCallerCheckedAnyfunc; module.num_escaped_funcs],
// anyfuncs: [VMCallerCheckedFuncRef; module.num_escaped_funcs],
// }
use crate::{
@@ -101,26 +101,26 @@ pub trait PtrSize {
/// The offset of the `func_ptr` field.
#[allow(clippy::erasing_op)]
#[inline]
fn vmcaller_checked_anyfunc_func_ptr(&self) -> u8 {
fn vmcaller_checked_func_ref_func_ptr(&self) -> u8 {
0 * self.size()
}
/// The offset of the `type_index` field.
#[allow(clippy::identity_op)]
#[inline]
fn vmcaller_checked_anyfunc_type_index(&self) -> u8 {
fn vmcaller_checked_func_ref_type_index(&self) -> u8 {
1 * self.size()
}
/// The offset of the `vmctx` field.
#[inline]
fn vmcaller_checked_anyfunc_vmctx(&self) -> u8 {
fn vmcaller_checked_func_ref_vmctx(&self) -> u8 {
2 * self.size()
}
/// Return the size of `VMCallerCheckedAnyfunc`.
/// Return the size of `VMCallerCheckedFuncRef`.
#[inline]
fn size_of_vmcaller_checked_anyfunc(&self) -> u8 {
fn size_of_vmcaller_checked_func_ref(&self) -> u8 {
3 * self.size()
}
@@ -233,7 +233,7 @@ pub struct VMOffsetsFields<P> {
pub num_owned_memories: u32,
/// The number of defined globals in the module.
pub num_defined_globals: u32,
/// The number of escaped functions in the module, the size of the anyfunc
/// The number of escaped functions in the module, the size of the funcref
/// array.
pub num_escaped_funcs: u32,
}
@@ -428,7 +428,7 @@ impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
= cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
size(defined_anyfuncs) = cmul(
ret.num_escaped_funcs,
ret.ptr.size_of_vmcaller_checked_anyfunc(),
ret.ptr.size_of_vmcaller_checked_func_ref(),
),
}
@@ -749,14 +749,14 @@ impl<P: PtrSize> VMOffsets<P> {
+ index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
}
/// Return the offset to the `VMCallerCheckedAnyfunc` for the given function
/// Return the offset to the `VMCallerCheckedFuncRef` for the given function
/// index (either imported or defined).
#[inline]
pub fn vmctx_anyfunc(&self, index: AnyfuncIndex) -> u32 {
assert!(!index.is_reserved_value());
assert!(index.as_u32() < self.num_escaped_funcs);
self.vmctx_anyfuncs_begin()
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_anyfunc())
+ index.as_u32() * u32::from(self.ptr.size_of_vmcaller_checked_func_ref())
}
/// Return the offset to the `body` field in `*const VMFunctionBody` index `index`.

View File

@@ -7,7 +7,7 @@
//! cranelift-compiled adapters, will use this `VMComponentContext` as well.
use crate::{
Store, VMCallerCheckedAnyfunc, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition,
Store, VMCallerCheckedFuncRef, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition,
VMOpaqueContext, VMSharedSignatureIndex, ValRaw,
};
use memoffset::offset_of;
@@ -79,7 +79,7 @@ pub type VMLoweringCallee = extern "C" fn(
data: *mut u8,
flags: InstanceFlags,
opt_memory: *mut VMMemoryDefinition,
opt_realloc: *mut VMCallerCheckedAnyfunc,
opt_realloc: *mut VMCallerCheckedFuncRef,
string_encoding: StringEncoding,
args_and_results: *mut ValRaw,
nargs_and_results: usize,
@@ -201,7 +201,7 @@ impl ComponentInstance {
///
/// This can only be called after `idx` has been initialized at runtime
/// during the instantiation process of a component.
pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMCallerCheckedAnyfunc> {
pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMCallerCheckedFuncRef> {
unsafe {
let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_realloc(idx));
debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
@@ -216,7 +216,7 @@ impl ComponentInstance {
pub fn runtime_post_return(
&self,
idx: RuntimePostReturnIndex,
) -> NonNull<VMCallerCheckedAnyfunc> {
) -> NonNull<VMCallerCheckedFuncRef> {
unsafe {
let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_post_return(idx));
debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
@@ -246,7 +246,7 @@ impl ComponentInstance {
///
/// This can only be called after `idx` has been initialized at runtime
/// during the instantiation process of a component.
pub fn lowering_anyfunc(&self, idx: LoweredIndex) -> NonNull<VMCallerCheckedAnyfunc> {
pub fn lowering_anyfunc(&self, idx: LoweredIndex) -> NonNull<VMCallerCheckedFuncRef> {
unsafe { self.anyfunc(self.offsets.lowering_anyfunc(idx)) }
}
@@ -254,7 +254,7 @@ impl ComponentInstance {
pub fn always_trap_anyfunc(
&self,
idx: RuntimeAlwaysTrapIndex,
) -> NonNull<VMCallerCheckedAnyfunc> {
) -> NonNull<VMCallerCheckedFuncRef> {
unsafe { self.anyfunc(self.offsets.always_trap_anyfunc(idx)) }
}
@@ -262,12 +262,12 @@ impl ComponentInstance {
pub fn transcoder_anyfunc(
&self,
idx: RuntimeTranscoderIndex,
) -> NonNull<VMCallerCheckedAnyfunc> {
) -> NonNull<VMCallerCheckedFuncRef> {
unsafe { self.anyfunc(self.offsets.transcoder_anyfunc(idx)) }
}
unsafe fn anyfunc(&self, offset: u32) -> NonNull<VMCallerCheckedAnyfunc> {
let ret = self.vmctx_plus_offset::<VMCallerCheckedAnyfunc>(offset);
unsafe fn anyfunc(&self, offset: u32) -> NonNull<VMCallerCheckedFuncRef> {
let ret = self.vmctx_plus_offset::<VMCallerCheckedFuncRef>(offset);
debug_assert!((*ret).func_ptr.as_ptr() as usize != INVALID_PTR);
debug_assert!((*ret).vmctx as usize != INVALID_PTR);
NonNull::new(ret).unwrap()
@@ -294,7 +294,7 @@ impl ComponentInstance {
pub fn set_runtime_realloc(
&mut self,
idx: RuntimeReallocIndex,
ptr: NonNull<VMCallerCheckedAnyfunc>,
ptr: NonNull<VMCallerCheckedFuncRef>,
) {
unsafe {
let storage = self.vmctx_plus_offset(self.offsets.runtime_realloc(idx));
@@ -307,7 +307,7 @@ impl ComponentInstance {
pub fn set_runtime_post_return(
&mut self,
idx: RuntimePostReturnIndex,
ptr: NonNull<VMCallerCheckedAnyfunc>,
ptr: NonNull<VMCallerCheckedFuncRef>,
) {
unsafe {
let storage = self.vmctx_plus_offset(self.offsets.runtime_post_return(idx));
@@ -378,7 +378,7 @@ impl ComponentInstance {
) {
debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
let vmctx = self.vmctx();
*self.vmctx_plus_offset(offset) = VMCallerCheckedAnyfunc {
*self.vmctx_plus_offset(offset) = VMCallerCheckedFuncRef {
func_ptr,
type_index,
vmctx: VMOpaqueContext::from_vmcomponent(vmctx),
@@ -510,7 +510,7 @@ impl OwnedComponentInstance {
pub fn set_runtime_realloc(
&mut self,
idx: RuntimeReallocIndex,
ptr: NonNull<VMCallerCheckedAnyfunc>,
ptr: NonNull<VMCallerCheckedFuncRef>,
) {
unsafe { self.instance_mut().set_runtime_realloc(idx, ptr) }
}
@@ -519,7 +519,7 @@ impl OwnedComponentInstance {
pub fn set_runtime_post_return(
&mut self,
idx: RuntimePostReturnIndex,
ptr: NonNull<VMCallerCheckedAnyfunc>,
ptr: NonNull<VMCallerCheckedFuncRef>,
) {
unsafe { self.instance_mut().set_runtime_post_return(idx, ptr) }
}

View File

@@ -1,5 +1,5 @@
use crate::vmcontext::{
VMCallerCheckedAnyfunc, VMContext, VMGlobalDefinition, VMMemoryDefinition, VMTableDefinition,
VMCallerCheckedFuncRef, VMContext, VMGlobalDefinition, VMMemoryDefinition, VMTableDefinition,
};
use std::ptr::NonNull;
use wasmtime_environ::{DefinedMemoryIndex, Global, MemoryPlan, TablePlan};
@@ -22,11 +22,11 @@ pub enum Export {
/// A function export value.
#[derive(Debug, Clone, Copy)]
pub struct ExportFunction {
/// The `VMCallerCheckedAnyfunc` for this exported function.
/// The `VMCallerCheckedFuncRef` for this exported function.
///
/// Note that exported functions cannot be a null funcref, so this is a
/// non-null pointer.
pub anyfunc: NonNull<VMCallerCheckedAnyfunc>,
pub anyfunc: NonNull<VMCallerCheckedFuncRef>,
}
// It's part of the contract of using `ExportFunction` that synchronization

View File

@@ -7,7 +7,7 @@ use crate::externref::VMExternRefActivationsTable;
use crate::memory::{Memory, RuntimeMemoryCreator};
use crate::table::{Table, TableElement, TableElementType};
use crate::vmcontext::{
VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionImport,
VMBuiltinFunctionsArray, VMCallerCheckedFuncRef, VMContext, VMFunctionImport,
VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMOpaqueContext,
VMRuntimeLimits, VMTableDefinition, VMTableImport, VMCONTEXT_MAGIC,
};
@@ -334,7 +334,7 @@ impl Instance {
fn get_exported_func(&mut self, index: FuncIndex) -> ExportFunction {
let anyfunc = self.get_caller_checked_anyfunc(index).unwrap();
let anyfunc = NonNull::new(anyfunc as *const VMCallerCheckedAnyfunc as *mut _).unwrap();
let anyfunc = NonNull::new(anyfunc as *const VMCallerCheckedFuncRef as *mut _).unwrap();
ExportFunction { anyfunc }
}
@@ -498,7 +498,7 @@ impl Instance {
Layout::from_size_align(size, align).unwrap()
}
/// Construct a new VMCallerCheckedAnyfunc for the given function
/// Construct a new VMCallerCheckedFuncRef for the given function
/// (imported or defined in this module) and store into the given
/// location. Used during lazy initialization.
///
@@ -511,7 +511,7 @@ impl Instance {
&mut self,
index: FuncIndex,
sig: SignatureIndex,
into: *mut VMCallerCheckedAnyfunc,
into: *mut VMCallerCheckedFuncRef,
) {
let type_index = unsafe {
let base: *const VMSharedSignatureIndex =
@@ -532,7 +532,7 @@ impl Instance {
// Safety: we have a `&mut self`, so we have exclusive access
// to this Instance.
unsafe {
*into = VMCallerCheckedAnyfunc {
*into = VMCallerCheckedFuncRef {
vmctx,
type_index,
func_ptr: NonNull::new(func_ptr).expect("Non-null function pointer"),
@@ -540,7 +540,7 @@ impl Instance {
}
}
/// Get a `&VMCallerCheckedAnyfunc` for the given `FuncIndex`.
/// Get a `&VMCallerCheckedFuncRef` for the given `FuncIndex`.
///
/// Returns `None` if the index is the reserved index value.
///
@@ -549,7 +549,7 @@ impl Instance {
pub(crate) fn get_caller_checked_anyfunc(
&mut self,
index: FuncIndex,
) -> Option<*mut VMCallerCheckedAnyfunc> {
) -> Option<*mut VMCallerCheckedFuncRef> {
if index == FuncIndex::reserved_value() {
return None;
}
@@ -583,8 +583,8 @@ impl Instance {
// all!
let func = &self.module().functions[index];
let sig = func.signature;
let anyfunc: *mut VMCallerCheckedAnyfunc = self
.vmctx_plus_offset::<VMCallerCheckedAnyfunc>(
let anyfunc: *mut VMCallerCheckedFuncRef = self
.vmctx_plus_offset::<VMCallerCheckedFuncRef>(
self.offsets().vmctx_anyfunc(func.anyfunc),
);
self.construct_anyfunc(index, sig, anyfunc);
@@ -1034,7 +1034,7 @@ impl Instance {
}
GlobalInit::RefFunc(f) => {
*(*to).as_anyfunc_mut() = self.get_caller_checked_anyfunc(f).unwrap()
as *const VMCallerCheckedAnyfunc;
as *const VMCallerCheckedFuncRef;
}
GlobalInit::RefNullConst => match global.wasm_ty {
// `VMGlobalDefinition::new()` already zeroed out the bits

View File

@@ -70,7 +70,7 @@ pub use crate::traphandlers::{
Backtrace, SignalHandler, TlsRestore, Trap, TrapReason,
};
pub use crate::vmcontext::{
VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport, VMGlobalDefinition,
VMCallerCheckedFuncRef, VMContext, VMFunctionBody, VMFunctionImport, VMGlobalDefinition,
VMGlobalImport, VMHostFuncContext, VMInvokeArgument, VMMemoryDefinition, VMMemoryImport,
VMOpaqueContext, VMRuntimeLimits, VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
VMTrampoline, ValRaw,

View File

@@ -56,7 +56,7 @@
use crate::externref::VMExternRef;
use crate::table::{Table, TableElementType};
use crate::vmcontext::{VMCallerCheckedAnyfunc, VMContext};
use crate::vmcontext::{VMCallerCheckedFuncRef, VMContext};
use crate::TrapReason;
use anyhow::Result;
use std::mem;
@@ -198,14 +198,14 @@ unsafe fn table_grow(
vmctx: *mut VMContext,
table_index: u32,
delta: u32,
// NB: we don't know whether this is a pointer to a `VMCallerCheckedAnyfunc`
// NB: we don't know whether this is a pointer to a `VMCallerCheckedFuncRef`
// or is a `VMExternRef` until we look at the table type.
init_value: *mut u8,
) -> Result<u32> {
let instance = (*vmctx).instance_mut();
let table_index = TableIndex::from_u32(table_index);
let element = match instance.table_element_type(table_index) {
TableElementType::Func => (init_value as *mut VMCallerCheckedAnyfunc).into(),
TableElementType::Func => (init_value as *mut VMCallerCheckedFuncRef).into(),
TableElementType::Extern => {
let init_value = if init_value.is_null() {
None
@@ -230,7 +230,7 @@ unsafe fn table_fill(
table_index: u32,
dst: u32,
// NB: we don't know whether this is a `VMExternRef` or a pointer to a
// `VMCallerCheckedAnyfunc` until we look at the table's element type.
// `VMCallerCheckedFuncRef` until we look at the table's element type.
val: *mut u8,
len: u32,
) -> Result<(), Trap> {
@@ -239,7 +239,7 @@ unsafe fn table_fill(
let table = &mut *instance.get_table(table_index);
match table.element_type() {
TableElementType::Func => {
let val = val as *mut VMCallerCheckedAnyfunc;
let val = val as *mut VMCallerCheckedFuncRef;
table.fill(dst, val.into(), len)
}
TableElementType::Extern => {

View File

@@ -2,7 +2,7 @@
//!
//! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories.
use crate::vmcontext::{VMCallerCheckedAnyfunc, VMTableDefinition};
use crate::vmcontext::{VMCallerCheckedFuncRef, VMTableDefinition};
use crate::{Store, VMExternRef};
use anyhow::{bail, format_err, Error, Result};
use std::convert::{TryFrom, TryInto};
@@ -16,7 +16,7 @@ use wasmtime_environ::{TablePlan, Trap, WasmType, FUNCREF_INIT_BIT, FUNCREF_MASK
#[derive(Clone)]
pub enum TableElement {
/// A `funcref`.
FuncRef(*mut VMCallerCheckedAnyfunc),
FuncRef(*mut VMCallerCheckedFuncRef),
/// An `externref`.
ExternRef(Option<VMExternRef>),
/// An uninitialized funcref value. This should never be exposed
@@ -32,7 +32,7 @@ pub enum TableElementType {
Extern,
}
// The usage of `*mut VMCallerCheckedAnyfunc` is safe w.r.t. thread safety, this
// The usage of `*mut VMCallerCheckedFuncRef` is safe w.r.t. thread safety, this
// just relies on thread-safety of `VMExternRef` itself.
unsafe impl Send for TableElement where VMExternRef: Send {}
unsafe impl Sync for TableElement where VMExternRef: Sync {}
@@ -119,8 +119,8 @@ impl TableElement {
}
}
impl From<*mut VMCallerCheckedAnyfunc> for TableElement {
fn from(f: *mut VMCallerCheckedAnyfunc) -> TableElement {
impl From<*mut VMCallerCheckedFuncRef> for TableElement {
fn from(f: *mut VMCallerCheckedFuncRef) -> TableElement {
TableElement::FuncRef(f)
}
}
@@ -266,7 +266,7 @@ impl Table {
pub fn init_funcs(
&mut self,
dst: u32,
items: impl ExactSizeIterator<Item = *mut VMCallerCheckedAnyfunc>,
items: impl ExactSizeIterator<Item = *mut VMCallerCheckedFuncRef>,
) -> Result<(), Trap> {
assert!(self.element_type() == TableElementType::Func);

View File

@@ -514,22 +514,22 @@ impl VMGlobalDefinition {
/// Return a reference to the value as an anyfunc.
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_anyfunc(&self) -> *const VMCallerCheckedAnyfunc {
pub unsafe fn as_anyfunc(&self) -> *const VMCallerCheckedFuncRef {
*(self
.storage
.as_ref()
.as_ptr()
.cast::<*const VMCallerCheckedAnyfunc>())
.cast::<*const VMCallerCheckedFuncRef>())
}
/// Return a mutable reference to the value as an anyfunc.
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_anyfunc_mut(&mut self) -> &mut *const VMCallerCheckedAnyfunc {
pub unsafe fn as_anyfunc_mut(&mut self) -> &mut *const VMCallerCheckedFuncRef {
&mut *(self
.storage
.as_mut()
.as_mut_ptr()
.cast::<*const VMCallerCheckedAnyfunc>())
.cast::<*const VMCallerCheckedFuncRef>())
}
}
@@ -582,7 +582,7 @@ impl Default for VMSharedSignatureIndex {
/// by the caller.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
pub struct VMCallerCheckedFuncRef {
/// Function body.
pub func_ptr: NonNull<VMFunctionBody>,
/// Function signature id.
@@ -597,12 +597,12 @@ pub struct VMCallerCheckedAnyfunc {
// If more elements are added here, remember to add offset_of tests below!
}
unsafe impl Send for VMCallerCheckedAnyfunc {}
unsafe impl Sync for VMCallerCheckedAnyfunc {}
unsafe impl Send for VMCallerCheckedFuncRef {}
unsafe impl Sync for VMCallerCheckedFuncRef {}
#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
use super::VMCallerCheckedAnyfunc;
use super::VMCallerCheckedFuncRef;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, PtrSize, VMOffsets};
@@ -612,20 +612,20 @@ mod test_vmcaller_checked_anyfunc {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMCallerCheckedAnyfunc>(),
usize::from(offsets.ptr.size_of_vmcaller_checked_anyfunc())
size_of::<VMCallerCheckedFuncRef>(),
usize::from(offsets.ptr.size_of_vmcaller_checked_func_ref())
);
assert_eq!(
offset_of!(VMCallerCheckedAnyfunc, func_ptr),
usize::from(offsets.ptr.vmcaller_checked_anyfunc_func_ptr())
offset_of!(VMCallerCheckedFuncRef, func_ptr),
usize::from(offsets.ptr.vmcaller_checked_func_ref_func_ptr())
);
assert_eq!(
offset_of!(VMCallerCheckedAnyfunc, type_index),
usize::from(offsets.ptr.vmcaller_checked_anyfunc_type_index())
offset_of!(VMCallerCheckedFuncRef, type_index),
usize::from(offsets.ptr.vmcaller_checked_func_ref_type_index())
);
assert_eq!(
offset_of!(VMCallerCheckedAnyfunc, vmctx),
usize::from(offsets.ptr.vmcaller_checked_anyfunc_vmctx())
offset_of!(VMCallerCheckedFuncRef, vmctx),
usize::from(offsets.ptr.vmcaller_checked_func_ref_vmctx())
);
}
}
@@ -1144,11 +1144,11 @@ pub type VMTrampoline =
/// target context.
///
/// This context is used to represent that contexts specified in
/// `VMCallerCheckedAnyfunc` can have any type and don't have an implicit
/// `VMCallerCheckedFuncRef` can have any type and don't have an implicit
/// structure. Neither wasmtime nor cranelift-generated code can rely on the
/// structure of an opaque context in general and only the code which configured
/// the context is able to rely on a particular structure. This is because the
/// context pointer configured for `VMCallerCheckedAnyfunc` is guaranteed to be
/// context pointer configured for `VMCallerCheckedFuncRef` is guaranteed to be
/// the first parameter passed.
///
/// Note that Wasmtime currently has a layout where all contexts that are casted

View File

@@ -4,7 +4,7 @@
use wasmtime_environ::VM_HOST_FUNC_MAGIC;
use super::{VMCallerCheckedAnyfunc, VMFunctionBody, VMOpaqueContext, VMSharedSignatureIndex};
use super::{VMCallerCheckedFuncRef, VMFunctionBody, VMOpaqueContext, VMSharedSignatureIndex};
use std::{
any::Any,
ptr::{self, NonNull},
@@ -20,7 +20,7 @@ pub struct VMHostFuncContext {
magic: u32,
// _padding: u32, // (on 64-bit systems)
pub(crate) host_func: NonNull<VMFunctionBody>,
wasm_to_host_trampoline: VMCallerCheckedAnyfunc,
wasm_to_host_trampoline: VMCallerCheckedFuncRef,
host_state: Box<dyn Any + Send + Sync>,
}
@@ -41,7 +41,7 @@ impl VMHostFuncContext {
signature: VMSharedSignatureIndex,
host_state: Box<dyn Any + Send + Sync>,
) -> Box<VMHostFuncContext> {
let wasm_to_host_trampoline = VMCallerCheckedAnyfunc {
let wasm_to_host_trampoline = VMCallerCheckedFuncRef {
func_ptr: NonNull::new(crate::trampolines::wasm_to_host_trampoline as _).unwrap(),
type_index: signature,
vmctx: ptr::null_mut(),
@@ -58,7 +58,7 @@ impl VMHostFuncContext {
}
/// Get the Wasm-to-host trampoline for this host function context.
pub fn wasm_to_host_trampoline(&self) -> NonNull<VMCallerCheckedAnyfunc> {
pub fn wasm_to_host_trampoline(&self) -> NonNull<VMCallerCheckedFuncRef> {
NonNull::from(&self.wasm_to_host_trampoline)
}

View File

@@ -50,7 +50,7 @@ struct CompiledComponentInfo {
/// section of `code_memory`.
///
/// These trampolines are the function pointer within the
/// `VMCallerCheckedAnyfunc` and will delegate indirectly to a host function
/// `VMCallerCheckedFuncRef` and will delegate indirectly to a host function
/// pointer when called.
lowerings: PrimaryMap<LoweredIndex, FunctionLoc>,

View File

@@ -15,7 +15,7 @@ use wasmtime_environ::component::{
use wasmtime_runtime::component::{
InstanceFlags, VMComponentContext, VMLowering, VMLoweringCallee,
};
use wasmtime_runtime::{VMCallerCheckedAnyfunc, VMMemoryDefinition, VMOpaqueContext};
use wasmtime_runtime::{VMCallerCheckedFuncRef, VMMemoryDefinition, VMOpaqueContext};
pub struct HostFunc {
entrypoint: VMLoweringCallee,
@@ -43,7 +43,7 @@ impl HostFunc {
data: *mut u8,
flags: InstanceFlags,
memory: *mut VMMemoryDefinition,
realloc: *mut VMCallerCheckedAnyfunc,
realloc: *mut VMCallerCheckedFuncRef,
string_encoding: StringEncoding,
storage: *mut ValRaw,
storage_len: usize,
@@ -150,7 +150,7 @@ unsafe fn call_host<T, Params, Return, F>(
cx: *mut VMOpaqueContext,
mut flags: InstanceFlags,
memory: *mut VMMemoryDefinition,
realloc: *mut VMCallerCheckedAnyfunc,
realloc: *mut VMCallerCheckedFuncRef,
string_encoding: StringEncoding,
storage: &mut [ValRaw],
closure: F,
@@ -280,7 +280,7 @@ unsafe fn call_host_dynamic<T, F>(
cx: *mut VMOpaqueContext,
mut flags: InstanceFlags,
memory: *mut VMMemoryDefinition,
realloc: *mut VMCallerCheckedAnyfunc,
realloc: *mut VMCallerCheckedFuncRef,
string_encoding: StringEncoding,
storage: &mut [ValRaw],
closure: F,
@@ -398,7 +398,7 @@ extern "C" fn dynamic_entrypoint<T, F>(
data: *mut u8,
flags: InstanceFlags,
memory: *mut VMMemoryDefinition,
realloc: *mut VMCallerCheckedAnyfunc,
realloc: *mut VMCallerCheckedFuncRef,
string_encoding: StringEncoding,
storage: *mut ValRaw,
storage_len: usize,

View File

@@ -3,7 +3,7 @@ use crate::StoreContextMut;
use anyhow::{bail, Result};
use std::ptr::NonNull;
use wasmtime_environ::component::StringEncoding;
use wasmtime_runtime::{VMCallerCheckedAnyfunc, VMMemoryDefinition};
use wasmtime_runtime::{VMCallerCheckedFuncRef, VMMemoryDefinition};
/// Runtime representation of canonical ABI options in the component model.
///
@@ -30,7 +30,7 @@ pub struct Options {
/// function.
///
/// Safely using this pointer has the same restrictions as `memory` above.
realloc: Option<NonNull<VMCallerCheckedAnyfunc>>,
realloc: Option<NonNull<VMCallerCheckedFuncRef>>,
/// The encoding used for strings, if found.
///
@@ -57,7 +57,7 @@ impl Options {
pub unsafe fn new(
store_id: StoreId,
memory: Option<NonNull<VMMemoryDefinition>>,
realloc: Option<NonNull<VMCallerCheckedAnyfunc>>,
realloc: Option<NonNull<VMCallerCheckedFuncRef>>,
string_encoding: StringEncoding,
) -> Options {
Options {

View File

@@ -11,7 +11,7 @@ use std::pin::Pin;
use std::ptr::NonNull;
use std::sync::Arc;
use wasmtime_runtime::{
ExportFunction, InstanceHandle, VMCallerCheckedAnyfunc, VMContext, VMFunctionBody,
ExportFunction, InstanceHandle, VMCallerCheckedFuncRef, VMContext, VMFunctionBody,
VMFunctionImport, VMHostFuncContext, VMOpaqueContext, VMSharedSignatureIndex, VMTrampoline,
};
@@ -497,7 +497,7 @@ impl Func {
pub(crate) unsafe fn from_caller_checked_anyfunc(
store: &mut StoreOpaque,
raw: *mut VMCallerCheckedAnyfunc,
raw: *mut VMCallerCheckedFuncRef,
) -> Option<Func> {
let anyfunc = NonNull::new(raw)?;
debug_assert!(anyfunc.as_ref().type_index != VMSharedSignatureIndex::default());
@@ -891,7 +891,7 @@ impl Func {
pub(crate) unsafe fn call_unchecked_raw<T>(
store: &mut StoreContextMut<'_, T>,
anyfunc: NonNull<VMCallerCheckedAnyfunc>,
anyfunc: NonNull<VMCallerCheckedFuncRef>,
trampoline: VMTrampoline,
params_and_returns: *mut ValRaw,
) -> Result<()> {
@@ -1066,7 +1066,7 @@ impl Func {
pub(crate) fn caller_checked_anyfunc(
&self,
store: &StoreOpaque,
) -> NonNull<VMCallerCheckedAnyfunc> {
) -> NonNull<VMCallerCheckedFuncRef> {
store.store_data()[self.0].export().anyfunc
}

View File

@@ -6,7 +6,7 @@ use std::marker;
use std::mem::{self, MaybeUninit};
use std::ptr;
use wasmtime_runtime::{
VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMOpaqueContext, VMSharedSignatureIndex,
VMCallerCheckedFuncRef, VMContext, VMFunctionBody, VMOpaqueContext, VMSharedSignatureIndex,
};
/// A statically typed WebAssembly function.
@@ -130,7 +130,7 @@ where
pub(crate) unsafe fn call_raw<T>(
store: &mut StoreContextMut<'_, T>,
func: ptr::NonNull<VMCallerCheckedAnyfunc>,
func: ptr::NonNull<VMCallerCheckedFuncRef>,
params: Params,
) -> Result<Results> {
// double-check that params/results match for this function's type in
@@ -409,7 +409,7 @@ unsafe impl WasmTy for Option<ExternRef> {
}
unsafe impl WasmTy for Option<Func> {
type Abi = *mut wasmtime_runtime::VMCallerCheckedAnyfunc;
type Abi = *mut wasmtime_runtime::VMCallerCheckedFuncRef;
#[inline]
fn valtype() -> ValType {

View File

@@ -11,7 +11,7 @@ use std::{
sync::{Arc, RwLock},
};
use wasmtime_jit::CodeMemory;
use wasmtime_runtime::{ModuleInfo, VMCallerCheckedAnyfunc, VMTrampoline};
use wasmtime_runtime::{ModuleInfo, VMCallerCheckedFuncRef, VMTrampoline};
/// Used for registering modules with a store.
///
@@ -125,7 +125,7 @@ impl ModuleRegistry {
}
/// Looks up a trampoline from an anyfunc.
pub fn lookup_trampoline(&self, anyfunc: &VMCallerCheckedAnyfunc) -> Option<VMTrampoline> {
pub fn lookup_trampoline(&self, anyfunc: &VMCallerCheckedFuncRef) -> Option<VMTrampoline> {
let (code, _offset) = self.code(anyfunc.func_ptr.as_ptr() as usize)?;
code.code.signatures().trampoline(anyfunc.type_index)
}

View File

@@ -95,7 +95,7 @@ use std::sync::Arc;
use std::task::{Context, Poll};
use wasmtime_runtime::{
InstanceAllocationRequest, InstanceAllocator, InstanceHandle, ModuleInfo,
OnDemandInstanceAllocator, SignalHandler, StorePtr, VMCallerCheckedAnyfunc, VMContext,
OnDemandInstanceAllocator, SignalHandler, StorePtr, VMCallerCheckedFuncRef, VMContext,
VMExternRef, VMExternRefActivationsTable, VMRuntimeLimits, VMSharedSignatureIndex,
VMTrampoline,
};
@@ -1263,7 +1263,7 @@ impl StoreOpaque {
/// `self.host_trampolines` we lazily populate `self.host_trampolines` by
/// iterating over `self.store_data().funcs`, inserting trampolines as we
/// go. If we find the right trampoline then it's returned.
pub fn lookup_trampoline(&mut self, anyfunc: &VMCallerCheckedAnyfunc) -> VMTrampoline {
pub fn lookup_trampoline(&mut self, anyfunc: &VMCallerCheckedFuncRef) -> VMTrampoline {
// First try to see if the `anyfunc` belongs to any module. Each module
// has its own map of trampolines-per-type-index and the code pointer in
// the `anyfunc` will enable us to quickly find a module.

View File

@@ -409,7 +409,7 @@ next.
WebAssembly tables contain reference types, currently either `funcref` or
`externref`. A `funcref` in Wasmtime is represented as `*mut
VMCallerCheckedAnyfunc` and an `externref` is represented as `VMExternRef`
VMCallerCheckedFuncRef` and an `externref` is represented as `VMExternRef`
(which is internally `*mut VMExternData`). Tables are consequently represented
as vectors of pointers. Table storage memory management by default goes through
Rust's `Vec` which uses `malloc` and friends for memory. With the pooling