Rename VMCallerCheckedAnyfunc to VMCallerCheckedFuncRef (#5738)

At some point, what is now `funcref` was called `anyfunc`; the spec changed, but we never updated our internal names. This commit does that.

Co-authored-by: Jamey Sharp <jsharp@fastly.com>
Author: Nick Fitzgerald
Date: 2023-02-07 14:09:02 -08:00
Committed by: GitHub
Parent: edfa10d607
Commit: 317cc51337

23 changed files with 126 additions and 126 deletions
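For readers who haven't worked in `wasmtime-runtime`: the type being renamed is the runtime's representation of a `funcref` — a code pointer, the signature index that callers check before an indirect call, and the callee's `vmctx`. Below is a minimal, self-contained sketch of its post-rename shape; the field names mirror the `vmcontext.rs` hunks further down, while the `VMFunctionBody`, `VMSharedSignatureIndex`, and `VMOpaqueContext` stubs are placeholders standing in for the real wasmtime-runtime types.

```rust
use std::ptr::NonNull;

// Placeholder stubs for illustration only; the real definitions live in
// wasmtime-runtime and are considerably more involved.
#[repr(C)]
pub struct VMFunctionBody(u8); // opaque start of a compiled function's body
#[repr(C)]
#[derive(Clone, Copy)]
pub struct VMSharedSignatureIndex(u32); // engine-wide signature id
#[repr(C)]
pub struct VMOpaqueContext(u8); // opaque callee context

/// Formerly `VMCallerCheckedAnyfunc`: the VM's view of a `funcref`.
#[repr(C)]
pub struct VMCallerCheckedFuncRef {
    /// Pointer to the function's compiled body.
    pub func_ptr: NonNull<VMFunctionBody>,
    /// Signature id the caller checks before an indirect call.
    pub type_index: VMSharedSignatureIndex,
    /// Context passed as the callee's first argument.
    pub vmctx: *mut VMOpaqueContext,
}

fn main() {
    // The commit is a mechanical rename: every `VMCallerCheckedAnyfunc` in
    // the hunks below simply becomes `VMCallerCheckedFuncRef`.
    let null: *mut VMCallerCheckedFuncRef = std::ptr::null_mut();
    assert!(null.is_null());
}
```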

View File

@@ -7,7 +7,7 @@
 //! cranelift-compiled adapters, will use this `VMComponentContext` as well.
 use crate::{
-    Store, VMCallerCheckedAnyfunc, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition,
+    Store, VMCallerCheckedFuncRef, VMFunctionBody, VMGlobalDefinition, VMMemoryDefinition,
     VMOpaqueContext, VMSharedSignatureIndex, ValRaw,
 };
 use memoffset::offset_of;
@@ -79,7 +79,7 @@ pub type VMLoweringCallee = extern "C" fn(
     data: *mut u8,
     flags: InstanceFlags,
     opt_memory: *mut VMMemoryDefinition,
-    opt_realloc: *mut VMCallerCheckedAnyfunc,
+    opt_realloc: *mut VMCallerCheckedFuncRef,
     string_encoding: StringEncoding,
     args_and_results: *mut ValRaw,
     nargs_and_results: usize,
@@ -201,7 +201,7 @@ impl ComponentInstance {
     ///
     /// This can only be called after `idx` has been initialized at runtime
     /// during the instantiation process of a component.
-    pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMCallerCheckedAnyfunc> {
+    pub fn runtime_realloc(&self, idx: RuntimeReallocIndex) -> NonNull<VMCallerCheckedFuncRef> {
         unsafe {
             let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_realloc(idx));
             debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
@@ -216,7 +216,7 @@ impl ComponentInstance {
     pub fn runtime_post_return(
         &self,
         idx: RuntimePostReturnIndex,
-    ) -> NonNull<VMCallerCheckedAnyfunc> {
+    ) -> NonNull<VMCallerCheckedFuncRef> {
         unsafe {
             let ret = *self.vmctx_plus_offset::<NonNull<_>>(self.offsets.runtime_post_return(idx));
             debug_assert!(ret.as_ptr() as usize != INVALID_PTR);
@@ -246,7 +246,7 @@ impl ComponentInstance {
     ///
     /// This can only be called after `idx` has been initialized at runtime
     /// during the instantiation process of a component.
-    pub fn lowering_anyfunc(&self, idx: LoweredIndex) -> NonNull<VMCallerCheckedAnyfunc> {
+    pub fn lowering_anyfunc(&self, idx: LoweredIndex) -> NonNull<VMCallerCheckedFuncRef> {
         unsafe { self.anyfunc(self.offsets.lowering_anyfunc(idx)) }
     }
@@ -254,7 +254,7 @@ impl ComponentInstance {
     pub fn always_trap_anyfunc(
         &self,
         idx: RuntimeAlwaysTrapIndex,
-    ) -> NonNull<VMCallerCheckedAnyfunc> {
+    ) -> NonNull<VMCallerCheckedFuncRef> {
         unsafe { self.anyfunc(self.offsets.always_trap_anyfunc(idx)) }
     }
@@ -262,12 +262,12 @@ impl ComponentInstance {
     pub fn transcoder_anyfunc(
         &self,
         idx: RuntimeTranscoderIndex,
-    ) -> NonNull<VMCallerCheckedAnyfunc> {
+    ) -> NonNull<VMCallerCheckedFuncRef> {
         unsafe { self.anyfunc(self.offsets.transcoder_anyfunc(idx)) }
     }
-    unsafe fn anyfunc(&self, offset: u32) -> NonNull<VMCallerCheckedAnyfunc> {
-        let ret = self.vmctx_plus_offset::<VMCallerCheckedAnyfunc>(offset);
+    unsafe fn anyfunc(&self, offset: u32) -> NonNull<VMCallerCheckedFuncRef> {
+        let ret = self.vmctx_plus_offset::<VMCallerCheckedFuncRef>(offset);
         debug_assert!((*ret).func_ptr.as_ptr() as usize != INVALID_PTR);
         debug_assert!((*ret).vmctx as usize != INVALID_PTR);
         NonNull::new(ret).unwrap()
@@ -294,7 +294,7 @@ impl ComponentInstance {
     pub fn set_runtime_realloc(
         &mut self,
         idx: RuntimeReallocIndex,
-        ptr: NonNull<VMCallerCheckedAnyfunc>,
+        ptr: NonNull<VMCallerCheckedFuncRef>,
     ) {
         unsafe {
             let storage = self.vmctx_plus_offset(self.offsets.runtime_realloc(idx));
@@ -307,7 +307,7 @@ impl ComponentInstance {
     pub fn set_runtime_post_return(
         &mut self,
         idx: RuntimePostReturnIndex,
-        ptr: NonNull<VMCallerCheckedAnyfunc>,
+        ptr: NonNull<VMCallerCheckedFuncRef>,
     ) {
         unsafe {
             let storage = self.vmctx_plus_offset(self.offsets.runtime_post_return(idx));
@@ -378,7 +378,7 @@ impl ComponentInstance {
     ) {
         debug_assert!(*self.vmctx_plus_offset::<usize>(offset) == INVALID_PTR);
         let vmctx = self.vmctx();
-        *self.vmctx_plus_offset(offset) = VMCallerCheckedAnyfunc {
+        *self.vmctx_plus_offset(offset) = VMCallerCheckedFuncRef {
             func_ptr,
             type_index,
             vmctx: VMOpaqueContext::from_vmcomponent(vmctx),
@@ -510,7 +510,7 @@ impl OwnedComponentInstance {
     pub fn set_runtime_realloc(
         &mut self,
         idx: RuntimeReallocIndex,
-        ptr: NonNull<VMCallerCheckedAnyfunc>,
+        ptr: NonNull<VMCallerCheckedFuncRef>,
     ) {
         unsafe { self.instance_mut().set_runtime_realloc(idx, ptr) }
     }
@@ -519,7 +519,7 @@ impl OwnedComponentInstance {
     pub fn set_runtime_post_return(
         &mut self,
         idx: RuntimePostReturnIndex,
-        ptr: NonNull<VMCallerCheckedAnyfunc>,
+        ptr: NonNull<VMCallerCheckedFuncRef>,
     ) {
         unsafe { self.instance_mut().set_runtime_post_return(idx, ptr) }
     }

View File

@@ -1,5 +1,5 @@
 use crate::vmcontext::{
-    VMCallerCheckedAnyfunc, VMContext, VMGlobalDefinition, VMMemoryDefinition, VMTableDefinition,
+    VMCallerCheckedFuncRef, VMContext, VMGlobalDefinition, VMMemoryDefinition, VMTableDefinition,
 };
 use std::ptr::NonNull;
 use wasmtime_environ::{DefinedMemoryIndex, Global, MemoryPlan, TablePlan};
@@ -22,11 +22,11 @@ pub enum Export {
 /// A function export value.
 #[derive(Debug, Clone, Copy)]
 pub struct ExportFunction {
-    /// The `VMCallerCheckedAnyfunc` for this exported function.
+    /// The `VMCallerCheckedFuncRef` for this exported function.
     ///
     /// Note that exported functions cannot be a null funcref, so this is a
    /// non-null pointer.
-    pub anyfunc: NonNull<VMCallerCheckedAnyfunc>,
+    pub anyfunc: NonNull<VMCallerCheckedFuncRef>,
 }
 // It's part of the contract of using `ExportFunction` that synchronization

View File

@@ -7,7 +7,7 @@ use crate::externref::VMExternRefActivationsTable;
 use crate::memory::{Memory, RuntimeMemoryCreator};
 use crate::table::{Table, TableElement, TableElementType};
 use crate::vmcontext::{
-    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionImport,
+    VMBuiltinFunctionsArray, VMCallerCheckedFuncRef, VMContext, VMFunctionImport,
     VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMOpaqueContext,
     VMRuntimeLimits, VMTableDefinition, VMTableImport, VMCONTEXT_MAGIC,
 };
@@ -334,7 +334,7 @@ impl Instance {
     fn get_exported_func(&mut self, index: FuncIndex) -> ExportFunction {
         let anyfunc = self.get_caller_checked_anyfunc(index).unwrap();
-        let anyfunc = NonNull::new(anyfunc as *const VMCallerCheckedAnyfunc as *mut _).unwrap();
+        let anyfunc = NonNull::new(anyfunc as *const VMCallerCheckedFuncRef as *mut _).unwrap();
         ExportFunction { anyfunc }
     }
@@ -498,7 +498,7 @@ impl Instance {
         Layout::from_size_align(size, align).unwrap()
     }
-    /// Construct a new VMCallerCheckedAnyfunc for the given function
+    /// Construct a new VMCallerCheckedFuncRef for the given function
     /// (imported or defined in this module) and store into the given
     /// location. Used during lazy initialization.
     ///
@@ -511,7 +511,7 @@ impl Instance {
         &mut self,
         index: FuncIndex,
         sig: SignatureIndex,
-        into: *mut VMCallerCheckedAnyfunc,
+        into: *mut VMCallerCheckedFuncRef,
     ) {
         let type_index = unsafe {
             let base: *const VMSharedSignatureIndex =
@@ -532,7 +532,7 @@ impl Instance {
         // Safety: we have a `&mut self`, so we have exclusive access
         // to this Instance.
         unsafe {
-            *into = VMCallerCheckedAnyfunc {
+            *into = VMCallerCheckedFuncRef {
                 vmctx,
                 type_index,
                 func_ptr: NonNull::new(func_ptr).expect("Non-null function pointer"),
@@ -540,7 +540,7 @@
         }
     }
-    /// Get a `&VMCallerCheckedAnyfunc` for the given `FuncIndex`.
+    /// Get a `&VMCallerCheckedFuncRef` for the given `FuncIndex`.
     ///
     /// Returns `None` if the index is the reserved index value.
     ///
@@ -549,7 +549,7 @@ impl Instance {
     pub(crate) fn get_caller_checked_anyfunc(
         &mut self,
         index: FuncIndex,
-    ) -> Option<*mut VMCallerCheckedAnyfunc> {
+    ) -> Option<*mut VMCallerCheckedFuncRef> {
         if index == FuncIndex::reserved_value() {
             return None;
         }
@@ -583,8 +583,8 @@ impl Instance {
         // all!
         let func = &self.module().functions[index];
         let sig = func.signature;
-        let anyfunc: *mut VMCallerCheckedAnyfunc = self
-            .vmctx_plus_offset::<VMCallerCheckedAnyfunc>(
+        let anyfunc: *mut VMCallerCheckedFuncRef = self
+            .vmctx_plus_offset::<VMCallerCheckedFuncRef>(
                 self.offsets().vmctx_anyfunc(func.anyfunc),
             );
         self.construct_anyfunc(index, sig, anyfunc);
@@ -1034,7 +1034,7 @@ impl Instance {
                 }
                 GlobalInit::RefFunc(f) => {
                     *(*to).as_anyfunc_mut() = self.get_caller_checked_anyfunc(f).unwrap()
-                        as *const VMCallerCheckedAnyfunc;
+                        as *const VMCallerCheckedFuncRef;
                 }
                 GlobalInit::RefNullConst => match global.wasm_ty {
                     // `VMGlobalDefinition::new()` already zeroed out the bits

View File

@@ -70,7 +70,7 @@ pub use crate::traphandlers::{
     Backtrace, SignalHandler, TlsRestore, Trap, TrapReason,
 };
 pub use crate::vmcontext::{
-    VMCallerCheckedAnyfunc, VMContext, VMFunctionBody, VMFunctionImport, VMGlobalDefinition,
+    VMCallerCheckedFuncRef, VMContext, VMFunctionBody, VMFunctionImport, VMGlobalDefinition,
     VMGlobalImport, VMHostFuncContext, VMInvokeArgument, VMMemoryDefinition, VMMemoryImport,
     VMOpaqueContext, VMRuntimeLimits, VMSharedSignatureIndex, VMTableDefinition, VMTableImport,
     VMTrampoline, ValRaw,

View File

@@ -56,7 +56,7 @@
 use crate::externref::VMExternRef;
 use crate::table::{Table, TableElementType};
-use crate::vmcontext::{VMCallerCheckedAnyfunc, VMContext};
+use crate::vmcontext::{VMCallerCheckedFuncRef, VMContext};
 use crate::TrapReason;
 use anyhow::Result;
 use std::mem;
@@ -198,14 +198,14 @@ unsafe fn table_grow(
     vmctx: *mut VMContext,
     table_index: u32,
     delta: u32,
-    // NB: we don't know whether this is a pointer to a `VMCallerCheckedAnyfunc`
+    // NB: we don't know whether this is a pointer to a `VMCallerCheckedFuncRef`
     // or is a `VMExternRef` until we look at the table type.
     init_value: *mut u8,
 ) -> Result<u32> {
     let instance = (*vmctx).instance_mut();
     let table_index = TableIndex::from_u32(table_index);
     let element = match instance.table_element_type(table_index) {
-        TableElementType::Func => (init_value as *mut VMCallerCheckedAnyfunc).into(),
+        TableElementType::Func => (init_value as *mut VMCallerCheckedFuncRef).into(),
         TableElementType::Extern => {
             let init_value = if init_value.is_null() {
                 None
@@ -230,7 +230,7 @@ unsafe fn table_fill(
     table_index: u32,
     dst: u32,
     // NB: we don't know whether this is a `VMExternRef` or a pointer to a
-    // `VMCallerCheckedAnyfunc` until we look at the table's element type.
+    // `VMCallerCheckedFuncRef` until we look at the table's element type.
     val: *mut u8,
     len: u32,
 ) -> Result<(), Trap> {
@@ -239,7 +239,7 @@ unsafe fn table_fill(
     let table = &mut *instance.get_table(table_index);
     match table.element_type() {
         TableElementType::Func => {
-            let val = val as *mut VMCallerCheckedAnyfunc;
+            let val = val as *mut VMCallerCheckedFuncRef;
             table.fill(dst, val.into(), len)
         }
         TableElementType::Extern => {

View File

@@ -2,7 +2,7 @@
 //!
 //! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories.
-use crate::vmcontext::{VMCallerCheckedAnyfunc, VMTableDefinition};
+use crate::vmcontext::{VMCallerCheckedFuncRef, VMTableDefinition};
 use crate::{Store, VMExternRef};
 use anyhow::{bail, format_err, Error, Result};
 use std::convert::{TryFrom, TryInto};
@@ -16,7 +16,7 @@ use wasmtime_environ::{TablePlan, Trap, WasmType, FUNCREF_INIT_BIT, FUNCREF_MASK
 #[derive(Clone)]
 pub enum TableElement {
     /// A `funcref`.
-    FuncRef(*mut VMCallerCheckedAnyfunc),
+    FuncRef(*mut VMCallerCheckedFuncRef),
     /// An `exrernref`.
     ExternRef(Option<VMExternRef>),
     /// An uninitialized funcref value. This should never be exposed
@@ -32,7 +32,7 @@ pub enum TableElementType {
     Extern,
 }
-// The usage of `*mut VMCallerCheckedAnyfunc` is safe w.r.t. thread safety, this
+// The usage of `*mut VMCallerCheckedFuncRef` is safe w.r.t. thread safety, this
 // just relies on thread-safety of `VMExternRef` itself.
 unsafe impl Send for TableElement where VMExternRef: Send {}
 unsafe impl Sync for TableElement where VMExternRef: Sync {}
@@ -119,8 +119,8 @@ impl TableElement {
     }
 }
-impl From<*mut VMCallerCheckedAnyfunc> for TableElement {
-    fn from(f: *mut VMCallerCheckedAnyfunc) -> TableElement {
+impl From<*mut VMCallerCheckedFuncRef> for TableElement {
+    fn from(f: *mut VMCallerCheckedFuncRef) -> TableElement {
         TableElement::FuncRef(f)
     }
 }
@@ -266,7 +266,7 @@ impl Table {
     pub fn init_funcs(
         &mut self,
         dst: u32,
-        items: impl ExactSizeIterator<Item = *mut VMCallerCheckedAnyfunc>,
+        items: impl ExactSizeIterator<Item = *mut VMCallerCheckedFuncRef>,
     ) -> Result<(), Trap> {
         assert!(self.element_type() == TableElementType::Func);
View File

@@ -514,22 +514,22 @@ impl VMGlobalDefinition {
     /// Return a reference to the value as an anyfunc.
     #[allow(clippy::cast_ptr_alignment)]
-    pub unsafe fn as_anyfunc(&self) -> *const VMCallerCheckedAnyfunc {
+    pub unsafe fn as_anyfunc(&self) -> *const VMCallerCheckedFuncRef {
         *(self
             .storage
             .as_ref()
             .as_ptr()
-            .cast::<*const VMCallerCheckedAnyfunc>())
+            .cast::<*const VMCallerCheckedFuncRef>())
     }
     /// Return a mutable reference to the value as an anyfunc.
     #[allow(clippy::cast_ptr_alignment)]
-    pub unsafe fn as_anyfunc_mut(&mut self) -> &mut *const VMCallerCheckedAnyfunc {
+    pub unsafe fn as_anyfunc_mut(&mut self) -> &mut *const VMCallerCheckedFuncRef {
         &mut *(self
             .storage
             .as_mut()
             .as_mut_ptr()
-            .cast::<*const VMCallerCheckedAnyfunc>())
+            .cast::<*const VMCallerCheckedFuncRef>())
     }
 }
@@ -582,7 +582,7 @@ impl Default for VMSharedSignatureIndex {
 /// by the caller.
 #[derive(Debug, Clone)]
 #[repr(C)]
-pub struct VMCallerCheckedAnyfunc {
+pub struct VMCallerCheckedFuncRef {
     /// Function body.
     pub func_ptr: NonNull<VMFunctionBody>,
     /// Function signature id.
@@ -597,12 +597,12 @@ pub struct VMCallerCheckedAnyfunc {
     // If more elements are added here, remember to add offset_of tests below!
 }
-unsafe impl Send for VMCallerCheckedAnyfunc {}
-unsafe impl Sync for VMCallerCheckedAnyfunc {}
+unsafe impl Send for VMCallerCheckedFuncRef {}
+unsafe impl Sync for VMCallerCheckedFuncRef {}
 #[cfg(test)]
 mod test_vmcaller_checked_anyfunc {
-    use super::VMCallerCheckedAnyfunc;
+    use super::VMCallerCheckedFuncRef;
     use memoffset::offset_of;
     use std::mem::size_of;
     use wasmtime_environ::{Module, PtrSize, VMOffsets};
@@ -612,20 +612,20 @@ mod test_vmcaller_checked_anyfunc {
         let module = Module::new();
         let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
         assert_eq!(
-            size_of::<VMCallerCheckedAnyfunc>(),
-            usize::from(offsets.ptr.size_of_vmcaller_checked_anyfunc())
+            size_of::<VMCallerCheckedFuncRef>(),
+            usize::from(offsets.ptr.size_of_vmcaller_checked_func_ref())
         );
         assert_eq!(
-            offset_of!(VMCallerCheckedAnyfunc, func_ptr),
-            usize::from(offsets.ptr.vmcaller_checked_anyfunc_func_ptr())
+            offset_of!(VMCallerCheckedFuncRef, func_ptr),
+            usize::from(offsets.ptr.vmcaller_checked_func_ref_func_ptr())
        );
         assert_eq!(
-            offset_of!(VMCallerCheckedAnyfunc, type_index),
-            usize::from(offsets.ptr.vmcaller_checked_anyfunc_type_index())
+            offset_of!(VMCallerCheckedFuncRef, type_index),
+            usize::from(offsets.ptr.vmcaller_checked_func_ref_type_index())
        );
         assert_eq!(
-            offset_of!(VMCallerCheckedAnyfunc, vmctx),
-            usize::from(offsets.ptr.vmcaller_checked_anyfunc_vmctx())
+            offset_of!(VMCallerCheckedFuncRef, vmctx),
+            usize::from(offsets.ptr.vmcaller_checked_func_ref_vmctx())
        );
     }
 }
@@ -1144,11 +1144,11 @@ pub type VMTrampoline =
 /// target context.
 ///
 /// This context is used to represent that contexts specified in
-/// `VMCallerCheckedAnyfunc` can have any type and don't have an implicit
+/// `VMCallerCheckedFuncRef` can have any type and don't have an implicit
 /// structure. Neither wasmtime nor cranelift-generated code can rely on the
 /// structure of an opaque context in general and only the code which configured
 /// the context is able to rely on a particular structure. This is because the
-/// context pointer configured for `VMCallerCheckedAnyfunc` is guaranteed to be
+/// context pointer configured for `VMCallerCheckedFuncRef` is guaranteed to be
 /// the first parameter passed.
 ///
 /// Note that Wasmtime currently has a layout where all contexts that are casted

View File

@@ -4,7 +4,7 @@
 use wasmtime_environ::VM_HOST_FUNC_MAGIC;
-use super::{VMCallerCheckedAnyfunc, VMFunctionBody, VMOpaqueContext, VMSharedSignatureIndex};
+use super::{VMCallerCheckedFuncRef, VMFunctionBody, VMOpaqueContext, VMSharedSignatureIndex};
 use std::{
     any::Any,
     ptr::{self, NonNull},
@@ -20,7 +20,7 @@ pub struct VMHostFuncContext {
     magic: u32,
     // _padding: u32, // (on 64-bit systems)
     pub(crate) host_func: NonNull<VMFunctionBody>,
-    wasm_to_host_trampoline: VMCallerCheckedAnyfunc,
+    wasm_to_host_trampoline: VMCallerCheckedFuncRef,
     host_state: Box<dyn Any + Send + Sync>,
 }
@@ -41,7 +41,7 @@ impl VMHostFuncContext {
         signature: VMSharedSignatureIndex,
         host_state: Box<dyn Any + Send + Sync>,
     ) -> Box<VMHostFuncContext> {
-        let wasm_to_host_trampoline = VMCallerCheckedAnyfunc {
+        let wasm_to_host_trampoline = VMCallerCheckedFuncRef {
             func_ptr: NonNull::new(crate::trampolines::wasm_to_host_trampoline as _).unwrap(),
             type_index: signature,
             vmctx: ptr::null_mut(),
@@ -58,7 +58,7 @@ impl VMHostFuncContext {
     }
     /// Get the Wasm-to-host trampoline for this host function context.
-    pub fn wasm_to_host_trampoline(&self) -> NonNull<VMCallerCheckedAnyfunc> {
+    pub fn wasm_to_host_trampoline(&self) -> NonNull<VMCallerCheckedFuncRef> {
         NonNull::from(&self.wasm_to_host_trampoline)
     }