Eliminate the intermediate "base" global variables.

Dan Gohman
2019-01-03 11:19:38 -08:00
parent 3431122440
commit 3ce2127bfa
3 changed files with 287 additions and 457 deletions
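To make the shape of the change concrete, here is a minimal sketch (not code from this commit; the helper name and parameters are hypothetical) of the before/after pattern in Cranelift IR terms. Previously, each vmctx-relative region got its own cached `IAddImm` "base" global value and loads were expressed relative to it; now the region start is folded into the offset and loads are expressed directly against the `vmctx` global.

use cranelift_codegen::ir::{self, immediates::{Imm64, Offset32}};

// Hypothetical helper for illustration only.
fn imported_table_from_ptr(
    func: &mut ir::Function,
    vmctx: ir::GlobalValue,       // created elsewhere from ir::GlobalValueData::VMContext
    pointer_type: ir::Type,
    imported_tables_begin: i64,   // old-style region offset, e.g. offsets.vmctx_imported_tables()
    from_offset_in_region: i32,   // old-style offset within that region
    from_offset_from_vmctx: i32,  // new-style offset, already relative to vmctx
) -> (ir::GlobalValue, ir::GlobalValue) {
    // Before: materialize an intermediate "imported_tables_base" global value...
    let old_base = func.create_global_value(ir::GlobalValueData::IAddImm {
        base: vmctx,
        offset: Imm64::new(imported_tables_begin),
        global_type: pointer_type,
    });
    // ...and load the `from` pointer relative to that base.
    let old_way = func.create_global_value(ir::GlobalValueData::Load {
        base: old_base,
        offset: Offset32::new(from_offset_in_region),
        global_type: pointer_type,
        readonly: true,
    });
    // After: one load straight off vmctx at a precomputed offset; no extra global value.
    let new_way = func.create_global_value(ir::GlobalValueData::Load {
        base: vmctx,
        offset: Offset32::new(from_offset_from_vmctx),
        global_type: pointer_type,
        readonly: true,
    });
    (old_way, new_way)
}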

View File

@@ -2,7 +2,7 @@ use cast;
use cranelift_codegen::cursor::FuncCursor;
use cranelift_codegen::ir;
use cranelift_codegen::ir::condcodes::*;
use cranelift_codegen::ir::immediates::{Imm64, Offset32, Uimm64};
use cranelift_codegen::ir::immediates::{Offset32, Uimm64};
use cranelift_codegen::ir::types::*;
use cranelift_codegen::ir::{
AbiParam, ArgumentPurpose, ExtFuncData, FuncRef, Function, InstBuilder, Signature,
@@ -10,8 +10,8 @@ use cranelift_codegen::ir::{
use cranelift_codegen::isa::TargetFrontendConfig;
use cranelift_entity::EntityRef;
use cranelift_wasm::{
self, DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, GlobalIndex,
GlobalVariable, MemoryIndex, SignatureIndex, TableIndex, WasmResult,
self, FuncIndex, GlobalIndex, GlobalVariable, MemoryIndex, SignatureIndex, TableIndex,
WasmResult,
};
use module::{MemoryPlan, MemoryStyle, Module, TableStyle};
use std::clone::Clone;
@@ -59,32 +59,6 @@ pub struct FuncEnvironment<'module_environment> {
/// The Cranelift global holding the vmctx address.
vmctx: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the signature IDs vector.
/// TODO: Now that the bases are just offsets from vmctx rather than loads, we
/// can eliminate these base variables.
signature_ids_base: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the imported functions table.
imported_functions_base: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the imported tables table.
imported_tables_base: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the imported memories table.
imported_memories_base: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the imported globals table.
imported_globals_base: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the tables vector.
tables_base: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the memories vector.
memories_base: Option<ir::GlobalValue>,
/// The Cranelift global holding the base address of the globals vector.
globals_base: Option<ir::GlobalValue>,
/// The external function declaration for implementing wasm's `memory.size`
/// for locally-defined 32-bit memories.
memory32_size_extfunc: Option<FuncRef>,
@@ -111,14 +85,6 @@ impl<'module_environment> FuncEnvironment<'module_environment> {
target_config,
module,
vmctx: None,
signature_ids_base: None,
imported_functions_base: None,
imported_tables_base: None,
imported_memories_base: None,
imported_globals_base: None,
tables_base: None,
memories_base: None,
globals_base: None,
memory32_size_extfunc: None,
imported_memory32_size_extfunc: None,
memory_grow_extfunc: None,
@@ -139,118 +105,6 @@ impl<'module_environment> FuncEnvironment<'module_environment> {
})
}
fn get_imported_functions_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.imported_functions_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_imported_functions()),
global_type: pointer_type,
});
self.imported_functions_base = Some(new_base);
new_base
})
}
fn get_imported_tables_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.imported_tables_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_imported_tables()),
global_type: pointer_type,
});
self.imported_tables_base = Some(new_base);
new_base
})
}
fn get_imported_memories_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.imported_memories_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_imported_memories()),
global_type: pointer_type,
});
self.imported_memories_base = Some(new_base);
new_base
})
}
fn get_imported_globals_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.imported_globals_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_imported_globals()),
global_type: pointer_type,
});
self.imported_globals_base = Some(new_base);
new_base
})
}
fn get_tables_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.tables_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_tables()),
global_type: pointer_type,
});
self.tables_base = Some(new_base);
new_base
})
}
fn get_memories_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.memories_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_memories()),
global_type: pointer_type,
});
self.memories_base = Some(new_base);
new_base
})
}
fn get_globals_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.globals_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_globals()),
global_type: pointer_type,
});
self.globals_base = Some(new_base);
new_base
})
}
fn get_signature_ids_base(&mut self, func: &mut Function) -> ir::GlobalValue {
self.signature_ids_base.unwrap_or_else(|| {
let pointer_type = self.pointer_type();
let vmctx = self.vmctx(func);
let new_base = func.create_global_value(ir::GlobalValueData::IAddImm {
base: vmctx,
offset: Imm64::new(self.offsets.vmctx_signature_ids()),
global_type: pointer_type,
});
self.signature_ids_base = Some(new_base);
new_base
})
}
fn get_memory_grow_sig(&self, func: &mut Function) -> ir::SigRef {
func.import_signature(Signature {
params: vec![
@@ -361,33 +215,40 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
fn make_table(&mut self, func: &mut ir::Function, index: TableIndex) -> ir::Table {
let pointer_type = self.pointer_type();
let (table, def_index) = if let Some(def_index) = self.module.defined_table_index(index) {
let table = self.get_tables_base(func);
(table, def_index)
} else {
let imported_tables_base = self.get_imported_tables_base(func);
let from_offset = self.offsets.index_vmtable_import_from(index);
let table = func.create_global_value(ir::GlobalValueData::Load {
base: imported_tables_base,
offset: Offset32::new(from_offset),
global_type: pointer_type,
readonly: true,
});
(table, DefinedTableIndex::new(0))
let (ptr, base_offset, current_elements_offset) = {
let vmctx = self.vmctx(func);
if let Some(def_index) = self.module.defined_table_index(index) {
let base_offset =
cast::i32(self.offsets.vmctx_vmtable_definition_base(def_index)).unwrap();
let current_elements_offset = cast::i32(
self.offsets
.vmctx_vmtable_definition_current_elements(def_index),
)
.unwrap();
(vmctx, base_offset, current_elements_offset)
} else {
let from_offset = self.offsets.vmctx_vmtable_import_from(index);
let table = func.create_global_value(ir::GlobalValueData::Load {
base: vmctx,
offset: Offset32::new(cast::i32(from_offset).unwrap()),
global_type: pointer_type,
readonly: true,
});
let base_offset = i32::from(self.offsets.vmtable_definition_base());
let current_elements_offset =
i32::from(self.offsets.vmtable_definition_current_elements());
(table, base_offset, current_elements_offset)
}
};
let base_offset = self.offsets.index_vmtable_definition_base(def_index);
let current_elements_offset = self
.offsets
.index_vmtable_definition_current_elements(def_index);
let base_gv = func.create_global_value(ir::GlobalValueData::Load {
base: table,
base: ptr,
offset: Offset32::new(base_offset),
global_type: pointer_type,
readonly: false,
});
let bound_gv = func.create_global_value(ir::GlobalValueData::Load {
base: table,
base: ptr,
offset: Offset32::new(current_elements_offset),
global_type: self.offsets.type_of_vmtable_definition_current_elements(),
readonly: false,
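
For a locally-defined table, the two offsets used above are now full offsets from the start of the `VMContext`. A sketch of the arithmetic behind one of them (not code from this commit; the real accessor, added later in this diff, also asserts the index is in range and uses checked arithmetic, and the `VMOffsets` import path is assumed):

use cranelift_wasm::DefinedTableIndex;
use crate::vmoffsets::VMOffsets; // module path assumed

fn table_definition_base_offset(offsets: &VMOffsets, def_index: DefinedTableIndex) -> u64 {
    offsets.vmctx_tables_begin()                               // start of the `tables` array
        + u64::from(def_index.as_u32())
            * u64::from(offsets.size_of_vmtable_definition())  // start of this VMTableDefinition
        + u64::from(offsets.vmtable_definition_base())         // the `base` field within it
    // vmctx_vmtable_definition_current_elements(def_index) is the same sum with
    // vmtable_definition_current_elements() as the final term.
}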
@@ -411,24 +272,31 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
fn make_heap(&mut self, func: &mut ir::Function, index: MemoryIndex) -> ir::Heap {
let pointer_type = self.pointer_type();
let (memory, def_index) = if let Some(def_index) = self.module.defined_memory_index(index) {
let memory = self.get_memories_base(func);
(memory, def_index)
} else {
let imported_memories_base = self.get_imported_memories_base(func);
let from_offset = self.offsets.index_vmmemory_import_from(index);
let memory = func.create_global_value(ir::GlobalValueData::Load {
base: imported_memories_base,
offset: Offset32::new(from_offset),
global_type: pointer_type,
readonly: true,
});
(memory, DefinedMemoryIndex::new(0))
let (ptr, base_offset, current_length_offset) = {
let vmctx = self.vmctx(func);
if let Some(def_index) = self.module.defined_memory_index(index) {
let base_offset =
cast::i32(self.offsets.vmctx_vmmemory_definition_base(def_index)).unwrap();
let current_length_offset = cast::i32(
self.offsets
.vmctx_vmmemory_definition_current_length(def_index),
)
.unwrap();
(vmctx, base_offset, current_length_offset)
} else {
let from_offset = self.offsets.vmctx_vmmemory_import_from(index);
let memory = func.create_global_value(ir::GlobalValueData::Load {
base: vmctx,
offset: Offset32::new(cast::i32(from_offset).unwrap()),
global_type: pointer_type,
readonly: true,
});
let base_offset = i32::from(self.offsets.vmmemory_definition_base());
let current_length_offset =
i32::from(self.offsets.vmmemory_definition_current_length());
(memory, base_offset, current_length_offset)
}
};
let base_offset = self.offsets.index_vmmemory_definition_base(def_index);
let current_length_offset = self
.offsets
.index_vmmemory_definition_current_length(def_index);
// If we have a declared maximum, we can make this a "static" heap, which is
// allocated up front and never moved.
@@ -439,7 +307,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
offset_guard_size,
} => {
let heap_bound = func.create_global_value(ir::GlobalValueData::Load {
base: memory,
base: ptr,
offset: Offset32::new(current_length_offset),
global_type: self.offsets.type_of_vmmemory_definition_current_length(),
readonly: false,
@@ -466,7 +334,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
};
let heap_base = func.create_global_value(ir::GlobalValueData::Load {
base: memory,
base: ptr,
offset: Offset32::new(base_offset),
global_type: pointer_type,
readonly: readonly_base,
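
One type-level detail worth noting: the new `VMOffsets` accessors return `u64` offsets from the start of the `VMContext`, while Cranelift's global-value loads take an `Offset32`, so the call sites above narrow with the `cast` crate and panic if an offset ever exceeded `i32::MAX`. A minimal sketch of that conversion (hypothetical helper, assuming the `cast` crate as used in this file):

use cranelift_codegen::ir::immediates::Offset32;

fn to_offset32(raw_vmctx_offset: u64) -> Offset32 {
    // cast::i32 returns an error if the value doesn't fit in an i32; unwrap turns
    // an impossibly large VMContext layout into a panic at translation time.
    Offset32::new(cast::i32(raw_vmctx_offset).unwrap())
}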
@@ -483,24 +351,25 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
fn make_global(&mut self, func: &mut ir::Function, index: GlobalIndex) -> GlobalVariable {
let pointer_type = self.pointer_type();
let (global, def_index) = if let Some(def_index) = self.module.defined_global_index(index) {
let global = self.get_globals_base(func);
(global, def_index)
} else {
let imported_globals_base = self.get_imported_globals_base(func);
let from_offset = self.offsets.index_vmglobal_import_from(index);
let global = func.create_global_value(ir::GlobalValueData::Load {
base: imported_globals_base,
offset: Offset32::new(from_offset),
global_type: pointer_type,
readonly: true,
});
(global, DefinedGlobalIndex::new(0))
let (ptr, offset) = {
let vmctx = self.vmctx(func);
if let Some(def_index) = self.module.defined_global_index(index) {
let offset = cast::i32(self.offsets.vmctx_vmglobal_definition(def_index)).unwrap();
(vmctx, offset)
} else {
let from_offset = self.offsets.vmctx_vmglobal_import_from(index);
let global = func.create_global_value(ir::GlobalValueData::Load {
base: vmctx,
offset: Offset32::new(cast::i32(from_offset).unwrap()),
global_type: pointer_type,
readonly: true,
});
(global, 0)
}
};
let offset = self.offsets.index_vmglobal_definition(def_index);
GlobalVariable::Memory {
gv: global,
gv: ptr,
offset: offset.into(),
ty: self.module.globals[index].ty,
}
@@ -542,24 +411,15 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
TableStyle::CallerChecksSignature => {
let sig_id_size = self.offsets.size_of_vmshared_signature_index();
let sig_id_type = Type::int(u16::from(sig_id_size) * 8).unwrap();
let signature_ids_base = self.get_signature_ids_base(pos.func);
let sig_ids = pos.ins().global_value(pointer_type, signature_ids_base);
let vmctx = self.vmctx(pos.func);
let base = pos.ins().global_value(pointer_type, vmctx);
let offset =
cast::i32(self.offsets.vmctx_vmshared_signature_id(sig_index)).unwrap();
// Load the caller ID.
let mut mem_flags = ir::MemFlags::trusted();
mem_flags.set_readonly();
let caller_sig_id = pos.ins().load(
sig_id_type,
mem_flags,
sig_ids,
cast::i32(
sig_index
.as_u32()
.checked_mul(u32::from(sig_id_size))
.unwrap(),
)
.unwrap(),
);
let caller_sig_id = pos.ins().load(sig_id_type, mem_flags, base, offset);
// Load the callee ID.
let mem_flags = ir::MemFlags::trusted();
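
The caller-side check now needs only a single load at `vmctx + vmctx_vmshared_signature_id(sig_index)` rather than first materializing a `signature_ids` base pointer. The width of that load is derived from `VMOffsets`; for example (hypothetical size, mirroring the computation above):

use cranelift_codegen::ir::Type;

fn shared_signature_id_type(sig_id_size: u8) -> Type {
    // With a 4-byte VMSharedSignatureIndex this yields ir::types::I32.
    Type::int(u16::from(sig_id_size) * 8).unwrap()
}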
@@ -620,19 +480,19 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
// so that we don't have to patch the code at runtime.
let pointer_type = self.pointer_type();
let sig_ref = pos.func.dfg.ext_funcs[callee].signature;
let imported_functions_base = self.get_imported_functions_base(&mut pos.func);
let base = pos
.ins()
.global_value(pointer_type, imported_functions_base);
let vmctx = self.vmctx(&mut pos.func);
let base = pos.ins().global_value(pointer_type, vmctx);
let mem_flags = ir::MemFlags::trusted();
// Load the callee address.
let body_offset = self.offsets.index_vmfunction_import_body(callee_index);
let body_offset =
cast::i32(self.offsets.vmctx_vmfunction_import_body(callee_index)).unwrap();
let func_addr = pos.ins().load(pointer_type, mem_flags, base, body_offset);
// Append the callee vmctx address.
let vmctx_offset = self.offsets.index_vmfunction_import_vmctx(callee_index);
let vmctx_offset =
cast::i32(self.offsets.vmctx_vmfunction_import_vmctx(callee_index)).unwrap();
let vmctx = pos.ins().load(pointer_type, mem_flags, base, vmctx_offset);
real_call_args.push(vmctx);

View File

@@ -2,7 +2,6 @@
//! module.
use cranelift_codegen::ir;
use cranelift_entity::EntityRef;
use cranelift_wasm::{
DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, GlobalIndex, MemoryIndex,
SignatureIndex, TableIndex,
@@ -236,236 +235,225 @@ impl VMOffsets {
/// Offsets for `VMContext`.
impl VMOffsets {
/// The offset of the `signature_ids` field.
pub fn vmctx_signature_ids(&self) -> i64 {
/// The offset of the `signature_ids` array.
pub fn vmctx_signature_ids_begin(&self) -> u64 {
0
}
/// The offset of the `imported_functions` field.
/// The offset of the `imported_functions` array.
#[allow(clippy::erasing_op)]
pub fn vmctx_imported_functions(&self) -> i64 {
self.vmctx_signature_ids()
+ self.num_signature_ids as i64 * i64::from(self.size_of_vmshared_signature_index())
pub fn vmctx_imported_functions_begin(&self) -> u64 {
self.vmctx_signature_ids_begin()
+ self.num_signature_ids * u64::from(self.size_of_vmshared_signature_index())
}
/// The offset of the `imported_tables` field.
/// The offset of the `imported_tables` array.
#[allow(clippy::identity_op)]
pub fn vmctx_imported_tables(&self) -> i64 {
self.vmctx_imported_functions()
+ self.num_imported_functions as i64 * i64::from(self.size_of_vmfunction_import())
pub fn vmctx_imported_tables_begin(&self) -> u64 {
self.vmctx_imported_functions_begin()
+ self.num_imported_functions * u64::from(self.size_of_vmfunction_import())
}
/// The offset of the `imported_memories` field.
pub fn vmctx_imported_memories(&self) -> i64 {
self.vmctx_imported_tables()
+ self.num_imported_tables as i64 * i64::from(self.size_of_vmtable_import())
/// The offset of the `imported_memories` array.
pub fn vmctx_imported_memories_begin(&self) -> u64 {
self.vmctx_imported_tables_begin()
+ self.num_imported_tables * u64::from(self.size_of_vmtable_import())
}
/// The offset of the `imported_globals` field.
pub fn vmctx_imported_globals(&self) -> i64 {
self.vmctx_imported_memories()
+ self.num_imported_memories as i64 * i64::from(self.size_of_vmmemory_import())
/// The offset of the `imported_globals` array.
pub fn vmctx_imported_globals_begin(&self) -> u64 {
self.vmctx_imported_memories_begin()
+ self.num_imported_memories * u64::from(self.size_of_vmmemory_import())
}
/// The offset of the `tables` field.
pub fn vmctx_tables(&self) -> i64 {
self.vmctx_imported_globals()
+ self.num_imported_globals as i64 * i64::from(self.size_of_vmglobal_import())
/// The offset of the `tables` array.
pub fn vmctx_tables_begin(&self) -> u64 {
self.vmctx_imported_globals_begin()
+ self.num_imported_globals * u64::from(self.size_of_vmglobal_import())
}
/// The offset of the `memories` field.
pub fn vmctx_memories(&self) -> i64 {
self.vmctx_tables()
+ self.num_defined_tables as i64 * i64::from(self.size_of_vmtable_definition())
/// The offset of the `memories` array.
pub fn vmctx_memories_begin(&self) -> u64 {
self.vmctx_tables_begin()
+ self.num_defined_tables * u64::from(self.size_of_vmtable_definition())
}
/// The offset of the `globals` field.
pub fn vmctx_globals(&self) -> i64 {
self.vmctx_memories()
+ self.num_defined_memories as i64 * i64::from(self.size_of_vmmemory_definition())
/// The offset of the `globals` array.
pub fn vmctx_globals_begin(&self) -> u64 {
self.vmctx_memories_begin()
+ self.num_defined_memories * u64::from(self.size_of_vmmemory_definition())
}
/// Return the size of the `VMContext` allocation.
#[allow(dead_code)]
pub fn size_of_vmctx(&self) -> i64 {
self.vmctx_globals()
+ self.num_defined_globals as i64 * i64::from(self.size_of_vmglobal_definition())
pub fn size_of_vmctx(&self) -> u64 {
self.vmctx_globals_begin()
+ self.num_defined_globals * u64::from(self.size_of_vmglobal_definition())
}
/// Return the offset from the `signature_ids` pointer to `VMSharedSignatureIndex` index `index`.
pub fn index_vmshared_signature_id(&self, index: SignatureIndex) -> i32 {
assert!((index.index() as u64) < self.num_signature_ids);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmshared_signature_index()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_functions` pointer to `VMFunctionImport` index `index`.
pub fn index_vmfunction_import(&self, index: FuncIndex) -> i32 {
assert!((index.index() as u64) < self.num_imported_functions);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmfunction_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_tables` pointer to `VMTableImport` index `index`.
pub fn index_vmtable_import(&self, index: TableIndex) -> i32 {
assert!((index.index() as u64) < self.num_imported_tables);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmtable_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_memories` pointer to `VMMemoryImport` index `index`.
pub fn index_vmmemory_import(&self, index: MemoryIndex) -> i32 {
assert!((index.index() as u64) < self.num_imported_memories);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmmemory_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_globals` pointer to `VMGlobalImport` index `index`.
pub fn index_vmglobal_import(&self, index: GlobalIndex) -> i32 {
assert!((index.index() as u64) < self.num_imported_globals);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmglobal_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `tables` pointer to `VMTableDefinition` index `index`.
pub fn index_vmtable_definition(&self, index: DefinedTableIndex) -> i32 {
assert!((index.index() as u64) < self.num_defined_tables);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmtable_definition()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `memories` pointer to `VMMemoryDefinition` index `index`.
pub fn index_vmmemory_definition(&self, index: DefinedMemoryIndex) -> i32 {
assert!((index.index() as u64) < self.num_defined_memories);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmmemory_definition()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `globals` pointer to the `VMGlobalDefinition`
/// index `index`.
pub fn index_vmglobal_definition(&self, index: DefinedGlobalIndex) -> i32 {
assert!((index.index() as u64) < self.num_defined_globals);
cast::i32(
index
.as_u32()
.checked_mul(u32::from(self.size_of_vmglobal_definition()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_functions` pointer to the
/// `body` field in `VMFunctionImport` index `index`.
pub fn index_vmfunction_import_body(&self, index: FuncIndex) -> i32 {
self.index_vmfunction_import(index)
.checked_add(i32::from(self.vmfunction_import_body()))
/// Return the offset to `VMSharedSignatureIndex` index `index`.
pub fn vmctx_vmshared_signature_id(&self, index: SignatureIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_signature_ids);
self.vmctx_signature_ids_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmshared_signature_index()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_functions` pointer to the
/// `vmctx` field in `VMFunctionImport` index `index`.
pub fn index_vmfunction_import_vmctx(&self, index: FuncIndex) -> i32 {
self.index_vmfunction_import(index)
.checked_add(i32::from(self.vmfunction_import_vmctx()))
/// Return the offset to `VMFunctionImport` index `index`.
pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_imported_functions);
self.vmctx_imported_functions_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmfunction_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_tables` pointer to the `from` field in
/// `VMTableImport` index `index`.
pub fn index_vmtable_import_from(&self, index: TableIndex) -> i32 {
self.index_vmtable_import(index)
.checked_add(i32::from(self.vmtable_import_from()))
/// Return the offset to `VMTableImport` index `index`.
pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_imported_tables);
self.vmctx_imported_tables_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmtable_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `tables` pointer to the `base` field in
/// `VMTableDefinition` index `index`.
pub fn index_vmtable_definition_base(&self, index: DefinedTableIndex) -> i32 {
self.index_vmtable_definition(index)
.checked_add(i32::from(self.vmtable_definition_base()))
/// Return the offset to `VMMemoryImport` index `index`.
pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_imported_memories);
self.vmctx_imported_memories_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmmemory_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `tables` pointer to the `current_elements` field in
/// `VMTableDefinition` index `index`.
pub fn index_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> i32 {
self.index_vmtable_definition(index)
.checked_add(i32::from(self.vmtable_definition_current_elements()))
/// Return the offset to `VMGlobalImport` index `index`.
pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_imported_globals);
self.vmctx_imported_globals_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmglobal_import()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_memories` pointer to the `from` field in
/// `VMMemoryImport` index `index`.
pub fn index_vmmemory_import_from(&self, index: MemoryIndex) -> i32 {
self.index_vmmemory_import(index)
.checked_add(i32::from(self.vmmemory_import_from()))
/// Return the offset to `VMTableDefinition` index `index`.
pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_defined_tables);
self.vmctx_tables_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmtable_definition()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `imported_memories` pointer to the `vmctx` field in
/// `VMMemoryImport` index `index`.
pub fn index_vmmemory_import_vmctx(&self, index: MemoryIndex) -> i32 {
self.index_vmmemory_import(index)
.checked_add(i32::from(self.vmmemory_import_vmctx()))
/// Return the offset to `VMMemoryDefinition` index `index`.
pub fn vmctx_vmmemory_definition(&self, index: DefinedMemoryIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_defined_memories);
self.vmctx_memories_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmmemory_definition()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `memories` pointer to the `base` field in
/// `VMMemoryDefinition` index `index`.
pub fn index_vmmemory_definition_base(&self, index: DefinedMemoryIndex) -> i32 {
self.index_vmmemory_definition(index)
.checked_add(i32::from(self.vmmemory_definition_base()))
/// Return the offset to the `VMGlobalDefinition` index `index`.
pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u64 {
assert!(u64::from(index.as_u32()) < self.num_defined_globals);
self.vmctx_globals_begin()
.checked_add(
u64::from(index.as_u32())
.checked_mul(u64::from(self.size_of_vmglobal_definition()))
.unwrap(),
)
.unwrap()
}
/// Return the offset from the `memories` pointer to the `current_length` field in
/// `VMMemoryDefinition` index `index`.
pub fn index_vmmemory_definition_current_length(&self, index: DefinedMemoryIndex) -> i32 {
self.index_vmmemory_definition(index)
.checked_add(i32::from(self.vmmemory_definition_current_length()))
/// Return the offset to the `body` field in `VMFunctionImport` index `index`.
pub fn vmctx_vmfunction_import_body(&self, index: FuncIndex) -> u64 {
self.vmctx_vmfunction_import(index)
.checked_add(u64::from(self.vmfunction_import_body()))
.unwrap()
}
/// Return the offset from the `imported_globals` pointer to the `from` field in
/// `VMGlobalImport` index `index`.
pub fn index_vmglobal_import_from(&self, index: GlobalIndex) -> i32 {
self.index_vmglobal_import(index)
.checked_add(i32::from(self.vmglobal_import_from()))
/// Return the offset to the `vmctx` field in `VMFunctionImport` index `index`.
pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u64 {
self.vmctx_vmfunction_import(index)
.checked_add(u64::from(self.vmfunction_import_vmctx()))
.unwrap()
}
/// Return the offset to the `from` field in `VMTableImport` index `index`.
pub fn vmctx_vmtable_import_from(&self, index: TableIndex) -> u64 {
self.vmctx_vmtable_import(index)
.checked_add(u64::from(self.vmtable_import_from()))
.unwrap()
}
/// Return the offset to the `base` field in `VMTableDefinition` index `index`.
pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u64 {
self.vmctx_vmtable_definition(index)
.checked_add(u64::from(self.vmtable_definition_base()))
.unwrap()
}
/// Return the offset to the `current_elements` field in `VMTableDefinition` index `index`.
pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u64 {
self.vmctx_vmtable_definition(index)
.checked_add(u64::from(self.vmtable_definition_current_elements()))
.unwrap()
}
/// Return the offset to the `from` field in `VMMemoryImport` index `index`.
pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u64 {
self.vmctx_vmmemory_import(index)
.checked_add(u64::from(self.vmmemory_import_from()))
.unwrap()
}
/// Return the offset to the `vmctx` field in `VMMemoryImport` index `index`.
pub fn vmctx_vmmemory_import_vmctx(&self, index: MemoryIndex) -> u64 {
self.vmctx_vmmemory_import(index)
.checked_add(u64::from(self.vmmemory_import_vmctx()))
.unwrap()
}
/// Return the offset to the `base` field in `VMMemoryDefinition` index `index`.
pub fn vmctx_vmmemory_definition_base(&self, index: DefinedMemoryIndex) -> u64 {
self.vmctx_vmmemory_definition(index)
.checked_add(u64::from(self.vmmemory_definition_base()))
.unwrap()
}
/// Return the offset to the `current_length` field in `VMMemoryDefinition` index `index`.
pub fn vmctx_vmmemory_definition_current_length(&self, index: DefinedMemoryIndex) -> u64 {
self.vmctx_vmmemory_definition(index)
.checked_add(u64::from(self.vmmemory_definition_current_length()))
.unwrap()
}
/// Return the offset to the `from` field in `VMGlobalImport` index `index`.
pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u64 {
self.vmctx_vmglobal_import(index)
.checked_add(u64::from(self.vmglobal_import_from()))
.unwrap()
}
}
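
Taken together, these accessors describe a single flat allocation that trails the `VMContext`, laid out region by region. A sketch of the order they assume (an illustrative comment, not a real declaration, since the array lengths vary per module):

// signature_ids:      [VMSharedSignatureIndex; num_signature_ids]
// imported_functions: [VMFunctionImport;       num_imported_functions]
// imported_tables:    [VMTableImport;          num_imported_tables]
// imported_memories:  [VMMemoryImport;         num_imported_memories]
// imported_globals:   [VMGlobalImport;         num_imported_globals]
// tables:             [VMTableDefinition;      num_defined_tables]
// memories:           [VMMemoryDefinition;     num_defined_memories]
// globals:            [VMGlobalDefinition;     num_defined_globals]
//
// Each vmctx_*_begin() returns where one of these regions starts, each
// vmctx_vm*(index) accessor adds index * element size, the per-field accessors
// (e.g. vmctx_vmtable_definition_base) add the field offset within that
// element, and size_of_vmctx() is the end of the last region.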

View File

@@ -32,10 +32,8 @@ fn signature_id(
) -> VMSharedSignatureIndex {
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8).add(
offsets.vmctx_signature_ids() as usize
+ offsets.index_vmshared_signature_id(index) as usize,
);
let ptr = (vmctx as *const VMContext as *const u8)
.add(offsets.vmctx_vmshared_signature_id(index) as usize);
*(ptr as *const VMSharedSignatureIndex)
}
}
@@ -47,10 +45,8 @@ fn imported_function<'vmctx>(
) -> &'vmctx VMFunctionImport {
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8).add(
offsets.vmctx_imported_functions() as usize
+ offsets.index_vmfunction_import(index) as usize,
);
let ptr = (vmctx as *const VMContext as *const u8)
.add(offsets.vmctx_vmfunction_import(index) as usize);
&*(ptr as *const VMFunctionImport)
}
}
@@ -91,7 +87,7 @@ impl InstanceContents {
fn signature_ids_ptr(&mut self) -> *mut VMSharedSignatureIndex {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(self.offsets.vmctx_signature_ids() as usize)
.add(self.offsets.vmctx_signature_ids_begin() as usize)
as *mut VMSharedSignatureIndex
}
}
@@ -105,7 +101,7 @@ impl InstanceContents {
fn imported_functions_ptr(&mut self) -> *mut VMFunctionImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(self.offsets.vmctx_imported_functions() as usize)
.add(self.offsets.vmctx_imported_functions_begin() as usize)
as *mut VMFunctionImport
}
}
@@ -113,10 +109,8 @@ impl InstanceContents {
/// Return the indexed `VMTableImport`.
fn imported_table(&self, index: TableIndex) -> &VMTableImport {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *const u8).add(
self.offsets.vmctx_imported_tables() as usize
+ self.offsets.index_vmtable_import(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *const u8)
.add(self.offsets.vmctx_vmtable_import(index) as usize);
&*(ptr as *const VMTableImport)
}
}
@@ -125,7 +119,7 @@ impl InstanceContents {
fn imported_tables_ptr(&mut self) -> *mut VMTableImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(self.offsets.vmctx_imported_tables() as usize)
.add(self.offsets.vmctx_imported_tables_begin() as usize)
as *mut VMTableImport
}
}
@@ -133,10 +127,8 @@ impl InstanceContents {
/// Return the indexed `VMMemoryImport`.
fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *const u8).add(
self.offsets.vmctx_imported_memories() as usize
+ self.offsets.index_vmmemory_import(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *const u8)
.add(self.offsets.vmctx_vmmemory_import(index) as usize);
&*(ptr as *const VMMemoryImport)
}
}
@@ -145,7 +137,7 @@ impl InstanceContents {
fn imported_memories_ptr(&mut self) -> *mut VMMemoryImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(self.offsets.vmctx_imported_memories() as usize)
.add(self.offsets.vmctx_imported_memories_begin() as usize)
as *mut VMMemoryImport
}
}
@@ -153,10 +145,8 @@ impl InstanceContents {
/// Return the indexed `VMGlobalImport`.
fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *const u8).add(
self.offsets.vmctx_imported_globals() as usize
+ self.offsets.index_vmglobal_import(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *const u8)
.add(self.offsets.vmctx_vmglobal_import(index) as usize);
&*(ptr as *const VMGlobalImport)
}
}
@@ -165,7 +155,7 @@ impl InstanceContents {
fn imported_globals_ptr(&mut self) -> *mut VMGlobalImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(self.offsets.vmctx_imported_globals() as usize)
.add(self.offsets.vmctx_imported_globals_begin() as usize)
as *mut VMGlobalImport
}
}
@@ -174,10 +164,8 @@ impl InstanceContents {
#[allow(dead_code)]
fn table(&self, index: DefinedTableIndex) -> &VMTableDefinition {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *const u8).add(
self.offsets.vmctx_tables() as usize
+ self.offsets.index_vmtable_definition(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *const u8)
.add(self.offsets.vmctx_vmtable_definition(index) as usize);
&*(ptr as *const VMTableDefinition)
}
}
@@ -185,10 +173,8 @@ impl InstanceContents {
/// Return the indexed `VMTableDefinition`.
fn table_mut(&mut self, index: DefinedTableIndex) -> &mut VMTableDefinition {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *mut u8).add(
self.offsets.vmctx_tables() as usize
+ self.offsets.index_vmtable_definition(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *mut u8)
.add(self.offsets.vmctx_vmtable_definition(index) as usize);
&mut *(ptr as *mut VMTableDefinition)
}
}
@@ -196,7 +182,8 @@ impl InstanceContents {
/// Return a pointer to the `VMTableDefinition`s.
fn tables_ptr(&mut self) -> *mut VMTableDefinition {
unsafe {
(&self.vmctx as *const VMContext as *mut u8).add(self.offsets.vmctx_tables() as usize)
(&self.vmctx as *const VMContext as *mut u8)
.add(self.offsets.vmctx_tables_begin() as usize)
as *mut VMTableDefinition
}
}
@@ -204,10 +191,8 @@ impl InstanceContents {
/// Return the indexed `VMMemoryDefinition`.
fn memory(&self, index: DefinedMemoryIndex) -> &VMMemoryDefinition {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *const u8).add(
self.offsets.vmctx_memories() as usize
+ self.offsets.index_vmmemory_definition(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *const u8)
.add(self.offsets.vmctx_vmmemory_definition(index) as usize);
&*(ptr as *const VMMemoryDefinition)
}
}
@@ -215,10 +200,8 @@ impl InstanceContents {
/// Return the indexed `VMMemoryDefinition`.
fn memory_mut(&mut self, index: DefinedMemoryIndex) -> &mut VMMemoryDefinition {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *mut u8).add(
self.offsets.vmctx_memories() as usize
+ self.offsets.index_vmmemory_definition(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *mut u8)
.add(self.offsets.vmctx_vmmemory_definition(index) as usize);
&mut *(ptr as *mut VMMemoryDefinition)
}
}
@@ -226,7 +209,8 @@ impl InstanceContents {
/// Return a pointer to the `VMMemoryDefinition`s.
fn memories_ptr(&mut self) -> *mut VMMemoryDefinition {
unsafe {
(&self.vmctx as *const VMContext as *mut u8).add(self.offsets.vmctx_memories() as usize)
(&self.vmctx as *const VMContext as *mut u8)
.add(self.offsets.vmctx_memories_begin() as usize)
as *mut VMMemoryDefinition
}
}
@@ -235,10 +219,8 @@ impl InstanceContents {
#[allow(dead_code)]
fn global(&self, index: DefinedGlobalIndex) -> &VMGlobalDefinition {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *const u8).add(
self.offsets.vmctx_globals() as usize
+ self.offsets.index_vmglobal_definition(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *const u8)
.add(self.offsets.vmctx_vmglobal_definition(index) as usize);
&*(ptr as *const VMGlobalDefinition)
}
}
@@ -246,10 +228,8 @@ impl InstanceContents {
/// Return the indexed `VMGlobalDefinition`.
fn global_mut(&mut self, index: DefinedGlobalIndex) -> &mut VMGlobalDefinition {
unsafe {
let ptr = (&self.vmctx as *const VMContext as *mut u8).add(
self.offsets.vmctx_globals() as usize
+ self.offsets.index_vmglobal_definition(index) as usize,
);
let ptr = (&self.vmctx as *const VMContext as *mut u8)
.add(self.offsets.vmctx_vmglobal_definition(index) as usize);
&mut *(ptr as *mut VMGlobalDefinition)
}
}
@@ -258,7 +238,8 @@ impl InstanceContents {
fn globals_ptr(&mut self) -> *mut VMGlobalDefinition {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(self.offsets.vmctx_globals() as usize) as *mut VMGlobalDefinition
.add(self.offsets.vmctx_globals_begin() as usize)
as *mut VMGlobalDefinition
}
}
@@ -317,7 +298,7 @@ impl InstanceContents {
pub(crate) fn table_index(&self, table: &mut VMTableDefinition) -> DefinedTableIndex {
let offsets = &self.offsets;
let begin = unsafe {
(&self.vmctx as *const VMContext as *mut u8).add(offsets.vmctx_tables() as usize)
(&self.vmctx as *const VMContext as *mut u8).add(offsets.vmctx_tables_begin() as usize)
} as *mut VMTableDefinition;
let end: *mut VMTableDefinition = table;
// TODO: Use `offset_from` once it stabilizes.
@@ -332,7 +313,8 @@ impl InstanceContents {
pub(crate) fn memory_index(&self, memory: &mut VMMemoryDefinition) -> DefinedMemoryIndex {
let offsets = &self.offsets;
let begin = unsafe {
(&self.vmctx as *const VMContext as *mut u8).add(offsets.vmctx_memories() as usize)
(&self.vmctx as *const VMContext as *mut u8)
.add(offsets.vmctx_memories_begin() as usize)
} as *mut VMMemoryDefinition;
let end: *mut VMMemoryDefinition = memory;
// TODO: Use `offset_from` once it stabilizes.
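
Every accessor in this file follows the same pattern: take the address of the instance's `VMContext`, advance by a byte offset computed by `VMOffsets`, and cast to the concrete record type. A hypothetical helper capturing that pattern (not part of this commit; it assumes the offset comes from the `VMOffsets` this `VMContext` was laid out with, so the result stays inside the single vmctx allocation and is suitably aligned):

unsafe fn vmctx_field<T>(vmctx: &VMContext, offset: u64) -> *const T {
    // Byte-wise pointer arithmetic from the start of the VMContext, then a cast
    // to the record type, exactly as the hand-written accessors above do.
    (vmctx as *const VMContext as *const u8).add(offset as usize) as *const T
}

For example, `imported_memory(index)` above is equivalent to dereferencing `vmctx_field::<VMMemoryImport>(&self.vmctx, self.offsets.vmctx_vmmemory_import(index))`.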