Address review feedback
@@ -14,8 +14,7 @@ use crate::vmcontext::{
     VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex,
     VMTableDefinition, VMTableImport,
 };
-use crate::{TrapDescription, TrapRegistration};
-use backtrace::Backtrace;
+use crate::TrapRegistration;
 use memoffset::offset_of;
 use more_asserts::assert_lt;
 use std::alloc::{self, Layout};
@@ -90,7 +89,8 @@ pub(crate) struct Instance {
     tables: BoxedSlice<DefinedTableIndex, Table>,

     /// Passive elements in this instantiation. As `elem.drop`s happen, these
-    /// entries get replaced into empty slices.
+    /// entries get removed. A missing entry is considered equivalent to an
+    /// empty slice.
     passive_elements: RefCell<HashMap<PassiveElemIndex, Box<[VMCallerCheckedAnyfunc]>>>,

     /// Pointers to functions in executable memory.
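The updated comment leans on the convention that a missing `passive_elements` entry reads as an empty segment. A minimal sketch of that convention using a plain `HashMap` and hypothetical stand-ins (`ElemIndex`, `Segment`, and `segment` are illustrative only, not the crate's API):

    use std::collections::HashMap;

    // Hypothetical stand-ins for PassiveElemIndex and the element payload.
    type ElemIndex = u32;
    type Segment = Box<[u32]>;

    /// Look up a passive segment, treating a missing entry as an empty slice.
    fn segment(passive: &HashMap<ElemIndex, Segment>, idx: ElemIndex) -> &[u32] {
        passive.get(&idx).map(|s| &**s).unwrap_or(&[])
    }

    fn main() {
        let mut passive = HashMap::new();
        passive.insert(7, vec![1, 2, 3].into_boxed_slice());
        assert_eq!(segment(&passive, 7), &[1, 2, 3]);
        // After an `elem.drop`, the entry is simply gone...
        passive.remove(&7);
        // ...and the missing entry reads back as empty.
        assert!(segment(&passive, 7).is_empty());
    }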
@@ -598,15 +598,10 @@ impl Instance {
             .map_or(true, |n| n as usize > elem.len())
             || dst.checked_add(len).map_or(true, |m| m > table.size())
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::TableOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::TableOutOfBounds));
         }

         // TODO(#983): investigate replacing this get/set loop with a `memcpy`.
         for (dst, src) in (dst..dst + len).zip(src..src + len) {
             table
                 .set(dst, elem[src as usize].clone())
@@ -621,10 +616,9 @@ impl Instance {
         // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop

         let mut passive_elements = self.passive_elements.borrow_mut();
-        // Note that dropping a non-passive element is a no-op (not a trap).
-        if let Some(elem) = passive_elements.get_mut(&elem_index) {
-            mem::replace(elem, vec![].into_boxed_slice());
-        }
+        passive_elements.remove(&elem_index);
+        // Note that we don't check that we actually removed an element because
+        // dropping a non-passive element is a no-op (not a trap).
     }

     /// Do a `memory.copy` for a locally defined memory.
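Given the missing-entry-means-empty convention above, swapping the replace-with-an-empty-slice approach for a plain `remove` is observationally equivalent, and removing an absent key is already a no-op. A small self-contained illustration (plain `HashMap`, not the crate's types):

    use std::collections::HashMap;

    fn main() {
        let mut passive: HashMap<u32, Box<[u32]>> = HashMap::new();
        passive.insert(0, vec![10, 20].into_boxed_slice());

        // `elem.drop` on a live passive segment: the entry just goes away.
        assert!(passive.remove(&0).is_some());

        // Dropping a segment that was never passive (or was already dropped):
        // `remove` returns `None` and nothing else happens, so no check is needed.
        assert!(passive.remove(&0).is_none());
    }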
@@ -652,23 +646,17 @@ impl Instance {
             .checked_add(len)
             .map_or(true, |m| m as usize > memory.current_length)
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::HeapOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::HeapOutOfBounds));
         }

-        let dst = isize::try_from(dst).unwrap();
-        let src = isize::try_from(src).unwrap();
+        let dst = usize::try_from(dst).unwrap();
+        let src = usize::try_from(src).unwrap();

         // Bounds and casts are checked above, by this point we know that
         // everything is safe.
         unsafe {
-            let dst = memory.base.offset(dst);
-            let src = memory.base.offset(src);
+            let dst = memory.base.add(dst);
+            let src = memory.base.add(src);
             ptr::copy(src, dst, len as usize);
         }
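Switching from `isize`/`offset` to `usize`/`add` drops a signed conversion that the preceding bounds check already makes unnecessary. A hedged, self-contained sketch of the same bounds-check-then-copy shape on a plain byte buffer (`copy_within_buf` is illustrative only, not the crate's `defined_memory_copy`):

    use std::convert::TryFrom;
    use std::ptr;

    /// Copy `len` bytes inside `buf` from `src` to `dst`. `ptr::copy` is a
    /// memmove, so overlapping ranges are fine.
    fn copy_within_buf(buf: &mut [u8], dst: u32, src: u32, len: u32) -> Result<(), ()> {
        let cap = buf.len();
        let in_bounds =
            |start: u32| start.checked_add(len).map_or(false, |end| end as usize <= cap);
        if !in_bounds(dst) || !in_bounds(src) {
            return Err(()); // the real code returns a HeapOutOfBounds trap here
        }

        // `u32` fits in `usize` on the targets this code runs on, so these
        // casts cannot fail; `add` then takes the unsigned offsets directly.
        let dst = usize::try_from(dst).unwrap();
        let src = usize::try_from(src).unwrap();
        unsafe {
            let base = buf.as_mut_ptr();
            ptr::copy(base.add(src), base.add(dst), len as usize);
        }
        Ok(())
    }

    fn main() {
        let mut buf = vec![1u8, 2, 3, 4, 5];
        copy_within_buf(&mut buf, 0, 2, 3).unwrap();
        assert_eq!(buf, [3, 4, 5, 4, 5]);
        assert!(copy_within_buf(&mut buf, 4, 0, 2).is_err());
    }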
@@ -712,13 +700,7 @@ impl Instance {
             .checked_add(len)
             .map_or(true, |m| m as usize > memory.current_length)
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::HeapOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::HeapOutOfBounds));
         }

         let dst = isize::try_from(dst).unwrap();
@@ -760,7 +742,7 @@ impl Instance {
     /// imported, foreign table.
     pub(crate) fn get_table(&self, table_index: TableIndex) -> &Table {
         if let Some(defined_table_index) = self.module.local.defined_table_index(table_index) {
-            &self.tables[defined_table_index]
+            self.get_defined_table(defined_table_index)
         } else {
             self.get_foreign_table(table_index)
         }
@@ -1083,9 +1065,9 @@ fn check_table_init_bounds(instance: &Instance) -> Result<(), InstantiationError

         let size = usize::try_from(table.size()).unwrap();
         if size < start + init.elements.len() {
-            return Err(InstantiationError::TableOutOfBounds(
-                "elements segment does not fit".to_owned(),
-            ));
+            return Err(InstantiationError::Link(LinkError(
+                "table out of bounds: elements segment does not fit".to_owned(),
+            )));
         }
     }

@@ -1140,9 +1122,9 @@ fn check_memory_init_bounds(
         unsafe {
             let mem_slice = get_memory_slice(init, instance);
             if mem_slice.get_mut(start..start + init.data.len()).is_none() {
-                return Err(InstantiationError::MemoryOutOfBounds(
-                    "data segment does not fit".into(),
-                ));
+                return Err(InstantiationError::Link(LinkError(
+                    "memory out of bounds: data segment does not fit".into(),
+                )));
             }
         }
     }
@@ -1190,9 +1172,10 @@ fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
         // bounds, then we report an error. This is required by the bulk memory
         // proposal and the way that it uses `table.init` during instantiation.
         if start > table.size() as usize {
-            return Err(InstantiationError::TableOutOfBounds(
-                "active element segment does not fit in table".into(),
-            ));
+            return Err(InstantiationError::Trap(Trap::wasm(
+                ir::SourceLoc::default(),
+                ir::TrapCode::HeapOutOfBounds,
+            )));
         }

         for (i, func_idx) in init.elements.iter().enumerate() {
@@ -1205,9 +1188,10 @@ fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
                 // enabled, these become runtime traps and the intermediate
                 // table slot writes are visible.
                 .map_err(|()| {
-                    InstantiationError::TableOutOfBounds(
-                        "active element segment does not fit in table".into(),
-                    )
+                    InstantiationError::Trap(Trap::wasm(
+                        ir::SourceLoc::default(),
+                        ir::TrapCode::HeapOutOfBounds,
+                    ))
                 })?;
         }
     }
@@ -1230,6 +1214,7 @@ fn initialize_passive_elements(instance: &Instance) {
         .module
         .passive_elements
         .iter()
+        .filter(|(_, segments)| !segments.is_empty())
         .map(|(idx, segments)| {
             (
                 *idx,
@@ -1279,9 +1264,10 @@ fn initialize_memories(
         // since this is dictated by the updated spec for the bulk memory
        // proposal.
         if num_uncopied > 0 {
-            return Err(InstantiationError::MemoryOutOfBounds(
-                "active data segment does not fit in memory".into(),
-            ));
+            return Err(InstantiationError::Trap(Trap::wasm(
+                ir::SourceLoc::default(),
+                ir::TrapCode::HeapOutOfBounds,
+            )));
         }
     }
 }
@@ -1342,24 +1328,14 @@ pub enum InstantiationError {
     #[error("Insufficient resources: {0}")]
     Resource(String),

-    /// A table out of bounds error.
-    ///
-    /// Depending on whether bulk memory is enabled or not, this is either a
-    /// link error or a trap raised during instantiation.
-    #[error("Table out of bounds error: {0}")]
-    TableOutOfBounds(String),
-
-    /// A memory out of bounds error.
-    ///
-    /// Depending on whether bulk memory is enabled or not, this is either a
-    /// link error or a trap raised during instantiation.
-    #[error("Memory out of bounds error: {0}")]
-    MemoryOutOfBounds(String),
-
     /// A wasm link error occured.
     #[error("Failed to link module")]
     Link(#[from] LinkError),

+    /// A trap ocurred during instantiation, after linking.
+    #[error("Trap occurred during instantiation")]
+    Trap(#[source] Trap),
+
     /// A compilation error occured.
     #[error("Trap occurred while invoking start function")]
     StartTrap(#[source] Trap),
@@ -1,14 +1,42 @@
-//! Runtime library calls. Note that wasm compilers may sometimes perform these
-//! inline rather than calling them, particularly when CPUs have special
-//! instructions which compute them directly.
+//! Runtime library calls.
+//!
+//! Note that Wasm compilers may sometimes perform these inline rather than
+//! calling them, particularly when CPUs have special instructions which compute
+//! them directly.
+//!
+//! These functions are called by compiled Wasm code, and therefore must take
+//! certain care about some things:
+//!
+//! * They must always be `pub extern "C"` and should only contain basic, raw
+//!   i32/i64/f32/f64/pointer parameters that are safe to pass across the system
+//!   ABI!
+//!
+//! * If any nested function propagates an `Err(trap)` out to the library
+//!   function frame, we need to raise it. This involves some nasty and quite
+//!   unsafe code under the covers! Notable, after raising the trap, drops
+//!   **will not** be run for local variables! This can lead to things like
+//!   leaking `InstanceHandle`s which leads to never deallocating JIT code,
+//!   instances, and modules! Therefore, always use nested blocks to ensure
+//!   drops run before raising a trap:
+//!
+//! ```
+//! pub extern "C" fn my_lib_function(...) {
+//!     let result = {
+//!         // Do everything in here so drops run at the end of the block.
+//!         ...
+//!     };
+//!     if let Err(trap) = result {
+//!         // Now we can safely raise the trap without leaking!
+//!         raise_lib_trap(trap);
+//!     }
+//! }
+//! ```

 use crate::table::Table;
 use crate::traphandlers::raise_lib_trap;
 use crate::vmcontext::VMContext;
 use wasmtime_environ::ir;
-use wasmtime_environ::wasm::{
-    DefinedMemoryIndex, DefinedTableIndex, MemoryIndex, PassiveElemIndex, TableIndex,
-};
+use wasmtime_environ::wasm::{DefinedMemoryIndex, MemoryIndex, PassiveElemIndex, TableIndex};

 /// Implementation of f32.ceil
 pub extern "C" fn wasmtime_f32_ceil(x: f32) -> f32 {
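The example in the new module doc is deliberately schematic (`...` placeholders). A self-contained, compilable sketch of the same drop-before-raise discipline, with the crate's `raise_lib_trap` and instance handle replaced by hypothetical stand-ins (`pretend_raise_lib_trap`, `InstanceHandle`, and `do_fallible_work` are illustrative only):

    /// Hypothetical stand-ins for the crate's real types; only the shape of
    /// the control flow matters here.
    struct Trap;

    struct InstanceHandle; // pretend this owns JIT code and must not be leaked

    impl Drop for InstanceHandle {
        fn drop(&mut self) {
            println!("instance handle dropped");
        }
    }

    fn do_fallible_work(_handle: &InstanceHandle) -> Result<(), Trap> {
        Err(Trap)
    }

    /// Stand-in for `raise_lib_trap`; the real one unwinds to the wasm caller
    /// without running Rust drops, which is why the block pattern matters.
    fn pretend_raise_lib_trap(_trap: Trap) {
        println!("trap raised");
    }

    pub extern "C" fn my_lib_function() {
        // Keep everything that owns resources inside this block so its drops
        // run when the block ends, *before* the trap is raised below.
        let result = {
            let handle = InstanceHandle;
            do_fallible_work(&handle)
            // `handle` is dropped here, at the end of the block.
        };
        if let Err(trap) = result {
            // Nothing live is left to leak, so raising is now safe.
            pretend_raise_lib_trap(trap);
        }
    }

    fn main() {
        // Prints "instance handle dropped" before "trap raised".
        my_lib_function();
    }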
@@ -93,7 +121,6 @@ pub extern "C" fn wasmtime_f64_nearest(x: f64) -> f64 {
 }

 /// Implementation of memory.grow for locally-defined 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_memory32_grow(
     vmctx: *mut VMContext,
     delta: u32,
@@ -108,7 +135,6 @@ pub unsafe extern "C" fn wasmtime_memory32_grow(
 }

 /// Implementation of memory.grow for imported 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory32_grow(
     vmctx: *mut VMContext,
     delta: u32,
@@ -123,7 +149,6 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_grow(
 }

 /// Implementation of memory.size for locally-defined 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_memory32_size(vmctx: *mut VMContext, memory_index: u32) -> u32 {
     let instance = (&mut *vmctx).instance();
     let memory_index = DefinedMemoryIndex::from_u32(memory_index);
@@ -132,7 +157,6 @@ pub unsafe extern "C" fn wasmtime_memory32_size(vmctx: *mut VMContext, memory_in
 }

 /// Implementation of memory.size for imported 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory32_size(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -143,9 +167,8 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_size(
     instance.imported_memory_size(memory_index)
 }

-/// Implementation of `table.copy` when both tables are locally defined.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_defined_defined(
+/// Implementation of `table.copy`.
+pub unsafe extern "C" fn wasmtime_table_copy(
     vmctx: *mut VMContext,
     dst_table_index: u32,
     src_table_index: u32,
@@ -154,87 +177,21 @@ pub unsafe extern "C" fn wasmtime_table_copy_defined_defined(
     len: u32,
     source_loc: u32,
 ) {
-    let dst_table_index = DefinedTableIndex::from_u32(dst_table_index);
-    let src_table_index = DefinedTableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_defined_table(dst_table_index);
-    let src_table = instance.get_defined_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
-        raise_lib_trap(trap);
-    }
-}
-
-/// Implementation of `table.copy` when the destination table is locally defined
-/// and the source table is imported.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_defined_imported(
-    vmctx: *mut VMContext,
-    dst_table_index: u32,
-    src_table_index: u32,
-    dst: u32,
-    src: u32,
-    len: u32,
-    source_loc: u32,
-) {
-    let dst_table_index = DefinedTableIndex::from_u32(dst_table_index);
-    let src_table_index = TableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_defined_table(dst_table_index);
-    let src_table = instance.get_foreign_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
-        raise_lib_trap(trap);
-    }
-}
-
-/// Implementation of `table.copy` when the destination table is imported
-/// and the source table is locally defined.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_imported_defined(
-    vmctx: *mut VMContext,
-    dst_table_index: u32,
-    src_table_index: u32,
-    dst: u32,
-    src: u32,
-    len: u32,
-    source_loc: u32,
-) {
-    let dst_table_index = TableIndex::from_u32(dst_table_index);
-    let src_table_index = DefinedTableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_foreign_table(dst_table_index);
-    let src_table = instance.get_defined_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
-        raise_lib_trap(trap);
-    }
-}
-
-/// Implementation of `table.copy` when both tables are imported.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_imported_imported(
-    vmctx: *mut VMContext,
-    dst_table_index: u32,
-    src_table_index: u32,
-    dst: u32,
-    src: u32,
-    len: u32,
-    source_loc: u32,
-) {
-    let dst_table_index = TableIndex::from_u32(dst_table_index);
-    let src_table_index = TableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_foreign_table(dst_table_index);
-    let src_table = instance.get_foreign_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
+    let result = {
+        let dst_table_index = TableIndex::from_u32(dst_table_index);
+        let src_table_index = TableIndex::from_u32(src_table_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        let dst_table = instance.get_table(dst_table_index);
+        let src_table = instance.get_table(src_table_index);
+        Table::copy(dst_table, src_table, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }

 /// Implementation of `table.init`.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_table_init(
     vmctx: *mut VMContext,
     table_index: u32,
@@ -244,19 +201,19 @@ pub unsafe extern "C" fn wasmtime_table_init(
     len: u32,
     source_loc: u32,
 ) {
-    let table_index = TableIndex::from_u32(table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let elem_index = PassiveElemIndex::from_u32(elem_index);
-
-    let instance = (&mut *vmctx).instance();
-
-    if let Err(trap) = instance.table_init(table_index, elem_index, dst, src, len, source_loc) {
+    let result = {
+        let table_index = TableIndex::from_u32(table_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let elem_index = PassiveElemIndex::from_u32(elem_index);
+        let instance = (&mut *vmctx).instance();
+        instance.table_init(table_index, elem_index, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }

 /// Implementation of `elem.drop`.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_elem_drop(vmctx: *mut VMContext, elem_index: u32) {
     let elem_index = PassiveElemIndex::from_u32(elem_index);
     let instance = (&mut *vmctx).instance();
@@ -264,8 +221,7 @@ pub unsafe extern "C" fn wasmtime_elem_drop(vmctx: *mut VMContext, elem_index: u
 }

 /// Implementation of `memory.copy` for locally defined memories.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_memory_copy(
+pub unsafe extern "C" fn wasmtime_defined_memory_copy(
     vmctx: *mut VMContext,
     memory_index: u32,
     dst: u32,
@@ -273,16 +229,18 @@ pub unsafe extern "C" fn wasmtime_memory_copy(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.defined_memory_copy(memory_index, dst, src, len, source_loc) {
+    let result = {
+        let memory_index = DefinedMemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.defined_memory_copy(memory_index, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }

 /// Implementation of `memory.copy` for imported memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory_copy(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -291,16 +249,18 @@ pub unsafe extern "C" fn wasmtime_imported_memory_copy(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = MemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.imported_memory_copy(memory_index, dst, src, len, source_loc) {
+    let result = {
+        let memory_index = MemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.imported_memory_copy(memory_index, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }

 /// Implementation of `memory.fill` for locally defined memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_memory_fill(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -309,16 +269,18 @@ pub unsafe extern "C" fn wasmtime_memory_fill(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.defined_memory_fill(memory_index, dst, val, len, source_loc) {
+    let result = {
+        let memory_index = DefinedMemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.defined_memory_fill(memory_index, dst, val, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }

 /// Implementation of `memory.fill` for imported memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory_fill(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -327,10 +289,13 @@ pub unsafe extern "C" fn wasmtime_imported_memory_fill(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = MemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.imported_memory_fill(memory_index, dst, val, len, source_loc) {
+    let result = {
+        let memory_index = MemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.imported_memory_fill(memory_index, dst, val, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }
@@ -3,8 +3,7 @@
 //! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories.

 use crate::vmcontext::{VMCallerCheckedAnyfunc, VMTableDefinition};
-use crate::{Trap, TrapDescription};
-use backtrace::Backtrace;
+use crate::Trap;
 use std::cell::RefCell;
 use std::convert::{TryFrom, TryInto};
 use wasmtime_environ::wasm::TableElementType;
@@ -112,13 +111,7 @@ impl Table {
             .checked_add(len)
             .map_or(true, |m| m > dst_table.size())
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::TableOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::TableOutOfBounds));
         }

         let srcs = src_index..src_index + len;
@@ -126,6 +119,8 @@ impl Table {

         // Note on the unwraps: the bounds check above means that these will
         // never panic.
+        //
+        // TODO(#983): investigate replacing this get/set loop with a `memcpy`.
         if dst_index <= src_index {
             for (s, d) in (srcs).zip(dsts) {
                 dst_table.set(d, src_table.get(s).unwrap()).unwrap();
@@ -140,6 +140,20 @@ impl fmt::Display for Trap {

 impl std::error::Error for Trap {}

+impl Trap {
+    /// Construct a new Wasm trap with the given source location and trap code.
+    ///
+    /// Internally saves a backtrace when constructed.
+    pub fn wasm(source_loc: ir::SourceLoc, trap_code: ir::TrapCode) -> Self {
+        let desc = TrapDescription {
+            source_loc,
+            trap_code,
+        };
+        let backtrace = Backtrace::new();
+        Trap::Wasm { desc, backtrace }
+    }
+}
+
 /// Call the wasm function pointed to by `callee`.
 ///
 /// * `vmctx` - the callee vmctx argument
@@ -549,22 +549,16 @@ impl VMBuiltinFunctionsArray {
         ptrs[BuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
             wasmtime_imported_memory32_size as usize;

-        ptrs[BuiltinFunctionIndex::get_table_copy_defined_defined_index().index() as usize] =
-            wasmtime_table_copy_defined_defined as usize;
-        ptrs[BuiltinFunctionIndex::get_table_copy_defined_imported_index().index() as usize] =
-            wasmtime_table_copy_defined_imported as usize;
-        ptrs[BuiltinFunctionIndex::get_table_copy_imported_defined_index().index() as usize] =
-            wasmtime_table_copy_imported_defined as usize;
-        ptrs[BuiltinFunctionIndex::get_table_copy_imported_imported_index().index() as usize] =
-            wasmtime_table_copy_imported_imported as usize;
+        ptrs[BuiltinFunctionIndex::get_table_copy_index().index() as usize] =
+            wasmtime_table_copy as usize;

         ptrs[BuiltinFunctionIndex::get_table_init_index().index() as usize] =
             wasmtime_table_init as usize;
         ptrs[BuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
             wasmtime_elem_drop as usize;

-        ptrs[BuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
-            wasmtime_memory_copy as usize;
+        ptrs[BuiltinFunctionIndex::get_defined_memory_copy_index().index() as usize] =
+            wasmtime_defined_memory_copy as usize;
         ptrs[BuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
             wasmtime_imported_memory_copy as usize;
         ptrs[BuiltinFunctionIndex::get_memory_fill_index().index() as usize] =