Address review feedback

Nick Fitzgerald
2020-02-25 12:59:03 -08:00
parent 39307b2b36
commit ef0cabf8b4
15 changed files with 219 additions and 346 deletions

View File

@@ -26,40 +26,38 @@ fn main() -> anyhow::Result<()> {
         writeln!(out, "#[allow(non_snake_case)]")?;
         writeln!(out, "mod {} {{", strategy)?;
 
-        test_directory(&mut out, "tests/misc_testsuite", strategy)?;
-        let spec_tests = test_directory(&mut out, "tests/spec_testsuite", strategy)?;
-        // Skip running spec_testsuite tests if the submodule isn't checked
-        // out.
-        if spec_tests > 0 {
-            test_directory(&mut out, "tests/spec_testsuite/proposals/simd", strategy)
-                .expect("generating tests");
-
-            test_directory(
-                &mut out,
-                "tests/spec_testsuite/proposals/multi-value",
-                strategy,
-            )
-            .expect("generating tests");
-
-            test_directory(
-                &mut out,
-                "tests/spec_testsuite/proposals/reference-types",
-                strategy,
-            )
-            .expect("generating tests");
-
-            test_directory(
-                &mut out,
-                "tests/spec_testsuite/proposals/bulk-memory-operations",
-                strategy,
-            )
-            .expect("generating tests");
-        } else {
-            println!(
-                "cargo:warning=The spec testsuite is disabled. To enable, run `git submodule \
-                 update --remote`."
-            );
-        }
+        with_test_module(&mut out, "misc", |out| {
+            test_directory(out, "tests/misc_testsuite", strategy)?;
+            test_directory_module(out, "tests/misc_testsuite/bulk-memory-operations", strategy)?;
+            test_directory_module(out, "tests/misc_testsuite/reference-types", strategy)?;
+            Ok(())
+        })?;
+
+        with_test_module(&mut out, "spec", |out| {
+            let spec_tests = test_directory(out, "tests/spec_testsuite", strategy)?;
+            // Skip running spec_testsuite tests if the submodule isn't checked
+            // out.
+            if spec_tests > 0 {
+                test_directory_module(out, "tests/spec_testsuite/proposals/simd", strategy)?;
+                test_directory_module(out, "tests/spec_testsuite/proposals/multi-value", strategy)?;
+                test_directory_module(
+                    out,
+                    "tests/spec_testsuite/proposals/reference-types",
+                    strategy,
+                )?;
+                test_directory_module(
+                    out,
+                    "tests/spec_testsuite/proposals/bulk-memory-operations",
+                    strategy,
+                )?;
+            } else {
+                println!(
+                    "cargo:warning=The spec testsuite is disabled. To enable, run `git submodule \
+                     update --remote`."
+                );
+            }
+            Ok(())
+        })?;
 
         writeln!(out, "}}")?;
     }
@@ -72,6 +70,16 @@ fn main() -> anyhow::Result<()> {
     Ok(())
 }
 
+fn test_directory_module(
+    out: &mut String,
+    path: impl AsRef<Path>,
+    strategy: &str,
+) -> anyhow::Result<usize> {
+    let path = path.as_ref();
+    let testsuite = &extract_name(path);
+    with_test_module(out, testsuite, |out| test_directory(out, path, strategy))
+}
+
 fn test_directory(
     out: &mut String,
     path: impl AsRef<Path>,
@@ -100,11 +108,10 @@ fn test_directory(
     dir_entries.sort();
 
     let testsuite = &extract_name(path);
-    start_test_module(out, testsuite)?;
     for entry in dir_entries.iter() {
         write_testsuite_tests(out, entry, testsuite, strategy)?;
     }
-    finish_test_module(out)?;
 
     Ok(dir_entries.len())
 }
@@ -119,14 +126,19 @@ fn extract_name(path: impl AsRef<Path>) -> String {
         .replace("/", "_")
 }
 
-fn start_test_module(out: &mut String, testsuite: &str) -> anyhow::Result<()> {
-    writeln!(out, "mod {} {{", testsuite)?;
-    Ok(())
-}
-
-fn finish_test_module(out: &mut String) -> anyhow::Result<()> {
+fn with_test_module<T>(
+    out: &mut String,
+    testsuite: &str,
+    f: impl FnOnce(&mut String) -> anyhow::Result<T>,
+) -> anyhow::Result<T> {
+    out.push_str("mod ");
+    out.push_str(testsuite);
+    out.push_str(" {\n");
+    let result = f(out)?;
     out.push_str("}\n");
-    Ok(())
+    Ok(result)
 }
 
 fn write_testsuite_tests(
@@ -180,6 +192,8 @@ fn ignore(testsuite: &str, testname: &str, strategy: &str) -> bool {
             ("simd", "simd_load_splat") => return true, // FIXME Unsupported feature: proposed SIMD operator V8x16LoadSplat { memarg: MemoryImmediate { flags: 0, offset: 0 } }
             ("simd", "simd_splat") => return true, // FIXME Unsupported feature: proposed SIMD operator I8x16ShrS
 
+            // Still working on implementing these. See #929.
+            ("reference_types", "table_copy_on_imported_tables") => return false,
             ("reference_types", _) => return true,
 
             // Still working on implementing these. See #928
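For orientation, this build script writes Rust source that the test crate includes, and with the helpers above each proposal's tests now land in their own nested module. A rough sketch of the generated shape, assuming a "Cranelift" strategy module and purely illustrative test names (the real script emits one #[test] per .wast file it finds):

#[cfg(test)]
#[allow(non_snake_case)]
mod Cranelift {
    mod misc {
        // One #[test] per .wast file under tests/misc_testsuite.
        #[test]
        fn some_misc_test() { /* run the corresponding .wast file */ }

        mod bulk_memory_operations {
            // Emitted by test_directory_module for
            // tests/misc_testsuite/bulk-memory-operations.
            #[test]
            fn some_bulk_memory_test() {}
        }
    }

    mod spec {
        mod reference_types {
            // Emitted for tests/spec_testsuite/proposals/reference-types.
            #[test]
            fn some_reference_types_test() {}
        }
    }
}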

View File

@@ -4,7 +4,7 @@ use crate::runtime::{Config, Store};
 use crate::trap::Trap;
 use anyhow::{Error, Result};
 use wasmtime_jit::{CompiledModule, Resolver};
-use wasmtime_runtime::{Export, InstanceHandle, InstantiationError, LinkError};
+use wasmtime_runtime::{Export, InstanceHandle, InstantiationError};
 
 struct SimpleResolver<'a> {
     imports: &'a [Extern],
@@ -32,15 +32,8 @@ fn instantiate(
     )
     .map_err(|e| -> Error {
         match e {
-            InstantiationError::StartTrap(trap) => Trap::from_jit(trap).into(),
-            e @ InstantiationError::TableOutOfBounds(_)
-            | e @ InstantiationError::MemoryOutOfBounds(_) => {
-                let msg = e.to_string();
-                if config.validating_config.operator_config.enable_bulk_memory {
-                    Trap::new(msg).into()
-                } else {
-                    InstantiationError::Link(LinkError(msg)).into()
-                }
-            }
+            InstantiationError::StartTrap(trap) | InstantiationError::Trap(trap) => {
+                Trap::from_jit(trap).into()
+            }
             other => other.into(),
         }

View File

@@ -45,50 +45,36 @@ impl BuiltinFunctionIndex {
     }
     /// Returns an index for wasm's `table.copy` when both tables are locally
    /// defined.
-    pub const fn get_table_copy_defined_defined_index() -> Self {
+    pub const fn get_table_copy_index() -> Self {
         Self(4)
     }
-    /// Returns an index for wasm's `table.copy` when the destination table is
-    /// locally defined and the source table is imported.
-    pub const fn get_table_copy_defined_imported_index() -> Self {
-        Self(5)
-    }
-    /// Returns an index for wasm's `table.copy` when the destination table is
-    /// imported and the source table is locally defined.
-    pub const fn get_table_copy_imported_defined_index() -> Self {
-        Self(6)
-    }
-    /// Returns an index for wasm's `table.copy` when both tables are imported.
-    pub const fn get_table_copy_imported_imported_index() -> Self {
-        Self(7)
-    }
     /// Returns an index for wasm's `table.init`.
     pub const fn get_table_init_index() -> Self {
-        Self(8)
+        Self(5)
     }
     /// Returns an index for wasm's `elem.drop`.
     pub const fn get_elem_drop_index() -> Self {
-        Self(9)
+        Self(6)
     }
     /// Returns an index for wasm's `memory.copy` for locally defined memories.
-    pub const fn get_memory_copy_index() -> Self {
-        Self(10)
+    pub const fn get_defined_memory_copy_index() -> Self {
+        Self(7)
     }
     /// Returns an index for wasm's `memory.copy` for imported memories.
     pub const fn get_imported_memory_copy_index() -> Self {
-        Self(11)
+        Self(8)
     }
     /// Returns an index for wasm's `memory.fill` for locally defined memories.
     pub const fn get_memory_fill_index() -> Self {
-        Self(12)
+        Self(9)
     }
     /// Returns an index for wasm's `memory.fill` for imported memories.
     pub const fn get_imported_memory_fill_index() -> Self {
-        Self(13)
+        Self(10)
     }
     /// Returns the total number of builtin functions.
     pub const fn builtin_functions_total_number() -> u32 {
-        14
+        11
     }
 
     /// Return the index as an u32 number.
@@ -245,7 +231,6 @@ impl<'module_environment> FuncEnvironment<'module_environment> {
         }
     }
 
-    // NB: All `table_copy` libcall variants have the same signature.
     fn get_table_copy_sig(&mut self, func: &mut Function) -> ir::SigRef {
         let sig = self.table_copy_sig.unwrap_or_else(|| {
             func.import_signature(Signature {
@@ -279,61 +264,12 @@ impl<'module_environment> FuncEnvironment<'module_environment> {
         src_table_index: TableIndex,
     ) -> (ir::SigRef, usize, usize, BuiltinFunctionIndex) {
         let sig = self.get_table_copy_sig(func);
-        match (
-            self.module.is_imported_table(dst_table_index),
-            self.module.is_imported_table(src_table_index),
-        ) {
-            (false, false) => {
-                let dst_table_index = self
-                    .module
-                    .defined_table_index(dst_table_index)
-                    .unwrap()
-                    .index();
-                let src_table_index = self
-                    .module
-                    .defined_table_index(src_table_index)
-                    .unwrap()
-                    .index();
-                (
-                    sig,
-                    dst_table_index,
-                    src_table_index,
-                    BuiltinFunctionIndex::get_table_copy_defined_defined_index(),
-                )
-            }
-            (false, true) => {
-                let dst_table_index = self
-                    .module
-                    .defined_table_index(dst_table_index)
-                    .unwrap()
-                    .index();
-                (
-                    sig,
-                    dst_table_index,
-                    src_table_index.as_u32() as usize,
-                    BuiltinFunctionIndex::get_table_copy_defined_imported_index(),
-                )
-            }
-            (true, false) => {
-                let src_table_index = self
-                    .module
-                    .defined_table_index(src_table_index)
-                    .unwrap()
-                    .index();
-                (
-                    sig,
-                    dst_table_index.as_u32() as usize,
-                    src_table_index,
-                    BuiltinFunctionIndex::get_table_copy_imported_defined_index(),
-                )
-            }
-            (true, true) => (
-                sig,
-                dst_table_index.as_u32() as usize,
-                src_table_index.as_u32() as usize,
-                BuiltinFunctionIndex::get_table_copy_imported_imported_index(),
-            ),
-        }
+        (
+            sig,
+            dst_table_index.as_u32() as usize,
+            src_table_index.as_u32() as usize,
+            BuiltinFunctionIndex::get_table_copy_index(),
+        )
     }
 
     fn get_table_init_sig(&mut self, func: &mut Function) -> ir::SigRef {
@@ -431,7 +367,7 @@ impl<'module_environment> FuncEnvironment<'module_environment> {
             (
                 sig,
                 defined_memory_index.index(),
-                BuiltinFunctionIndex::get_memory_copy_index(),
+                BuiltinFunctionIndex::get_defined_memory_copy_index(),
             )
         } else {
             (

View File

@@ -14,8 +14,7 @@ use crate::vmcontext::{
     VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMSharedSignatureIndex,
     VMTableDefinition, VMTableImport,
 };
-use crate::{TrapDescription, TrapRegistration};
-use backtrace::Backtrace;
+use crate::TrapRegistration;
 use memoffset::offset_of;
 use more_asserts::assert_lt;
 use std::alloc::{self, Layout};
@@ -90,7 +89,8 @@ pub(crate) struct Instance {
     tables: BoxedSlice<DefinedTableIndex, Table>,
 
     /// Passive elements in this instantiation. As `elem.drop`s happen, these
-    /// entries get replaced into empty slices.
+    /// entries get removed. A missing entry is considered equivalent to an
+    /// empty slice.
     passive_elements: RefCell<HashMap<PassiveElemIndex, Box<[VMCallerCheckedAnyfunc]>>>,
 
     /// Pointers to functions in executable memory.
@@ -598,15 +598,10 @@ impl Instance {
             .map_or(true, |n| n as usize > elem.len())
             || dst.checked_add(len).map_or(true, |m| m > table.size())
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::TableOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::TableOutOfBounds));
         }
 
+        // TODO(#983): investigate replacing this get/set loop with a `memcpy`.
         for (dst, src) in (dst..dst + len).zip(src..src + len) {
             table
                 .set(dst, elem[src as usize].clone())
@@ -621,10 +616,9 @@ impl Instance {
         // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
 
         let mut passive_elements = self.passive_elements.borrow_mut();
-        // Note that dropping a non-passive element is a no-op (not a trap).
-        if let Some(elem) = passive_elements.get_mut(&elem_index) {
-            mem::replace(elem, vec![].into_boxed_slice());
-        }
+        passive_elements.remove(&elem_index);
+        // Note that we don't check that we actually removed an element because
+        // dropping a non-passive element is a no-op (not a trap).
     }
 
     /// Do a `memory.copy` for a locally defined memory.
@@ -652,23 +646,17 @@ impl Instance {
             .checked_add(len)
             .map_or(true, |m| m as usize > memory.current_length)
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::HeapOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::HeapOutOfBounds));
         }
 
-        let dst = isize::try_from(dst).unwrap();
-        let src = isize::try_from(src).unwrap();
+        let dst = usize::try_from(dst).unwrap();
+        let src = usize::try_from(src).unwrap();
 
         // Bounds and casts are checked above, by this point we know that
         // everything is safe.
        unsafe {
-            let dst = memory.base.offset(dst);
-            let src = memory.base.offset(src);
+            let dst = memory.base.add(dst);
+            let src = memory.base.add(src);
             ptr::copy(src, dst, len as usize);
         }
@@ -712,13 +700,7 @@ impl Instance {
             .checked_add(len)
             .map_or(true, |m| m as usize > memory.current_length)
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::HeapOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::HeapOutOfBounds));
         }
 
         let dst = isize::try_from(dst).unwrap();
@@ -760,7 +742,7 @@ impl Instance {
     /// imported, foreign table.
     pub(crate) fn get_table(&self, table_index: TableIndex) -> &Table {
         if let Some(defined_table_index) = self.module.local.defined_table_index(table_index) {
-            &self.tables[defined_table_index]
+            self.get_defined_table(defined_table_index)
         } else {
             self.get_foreign_table(table_index)
         }
@@ -1083,9 +1065,9 @@ fn check_table_init_bounds(instance: &Instance) -> Result<(), InstantiationError
         let size = usize::try_from(table.size()).unwrap();
         if size < start + init.elements.len() {
-            return Err(InstantiationError::TableOutOfBounds(
-                "elements segment does not fit".to_owned(),
-            ));
+            return Err(InstantiationError::Link(LinkError(
+                "table out of bounds: elements segment does not fit".to_owned(),
+            )));
         }
     }
@@ -1140,9 +1122,9 @@ fn check_memory_init_bounds(
         unsafe {
             let mem_slice = get_memory_slice(init, instance);
             if mem_slice.get_mut(start..start + init.data.len()).is_none() {
-                return Err(InstantiationError::MemoryOutOfBounds(
-                    "data segment does not fit".into(),
-                ));
+                return Err(InstantiationError::Link(LinkError(
+                    "memory out of bounds: data segment does not fit".into(),
+                )));
             }
         }
     }
@@ -1190,9 +1172,10 @@ fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
         // bounds, then we report an error. This is required by the bulk memory
         // proposal and the way that it uses `table.init` during instantiation.
         if start > table.size() as usize {
-            return Err(InstantiationError::TableOutOfBounds(
-                "active element segment does not fit in table".into(),
-            ));
+            return Err(InstantiationError::Trap(Trap::wasm(
+                ir::SourceLoc::default(),
+                ir::TrapCode::HeapOutOfBounds,
+            )));
         }
 
         for (i, func_idx) in init.elements.iter().enumerate() {
@@ -1205,9 +1188,10 @@ fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
                 // enabled, these become runtime traps and the intermediate
                 // table slot writes are visible.
                 .map_err(|()| {
-                    InstantiationError::TableOutOfBounds(
-                        "active element segment does not fit in table".into(),
-                    )
+                    InstantiationError::Trap(Trap::wasm(
+                        ir::SourceLoc::default(),
+                        ir::TrapCode::HeapOutOfBounds,
+                    ))
                 })?;
         }
     }
@@ -1230,6 +1214,7 @@ fn initialize_passive_elements(instance: &Instance) {
             .module
             .passive_elements
             .iter()
+            .filter(|(_, segments)| !segments.is_empty())
             .map(|(idx, segments)| {
                 (
                     *idx,
@@ -1279,9 +1264,10 @@ fn initialize_memories(
         // since this is dictated by the updated spec for the bulk memory
         // proposal.
         if num_uncopied > 0 {
-            return Err(InstantiationError::MemoryOutOfBounds(
-                "active data segment does not fit in memory".into(),
-            ));
+            return Err(InstantiationError::Trap(Trap::wasm(
+                ir::SourceLoc::default(),
+                ir::TrapCode::HeapOutOfBounds,
+            )));
         }
     }
 }
@@ -1342,24 +1328,14 @@ pub enum InstantiationError {
     #[error("Insufficient resources: {0}")]
     Resource(String),
 
-    /// A table out of bounds error.
-    ///
-    /// Depending on whether bulk memory is enabled or not, this is either a
-    /// link error or a trap raised during instantiation.
-    #[error("Table out of bounds error: {0}")]
-    TableOutOfBounds(String),
-
-    /// A memory out of bounds error.
-    ///
-    /// Depending on whether bulk memory is enabled or not, this is either a
-    /// link error or a trap raised during instantiation.
-    #[error("Memory out of bounds error: {0}")]
-    MemoryOutOfBounds(String),
-
     /// A wasm link error occured.
     #[error("Failed to link module")]
     Link(#[from] LinkError),
 
+    /// A trap ocurred during instantiation, after linking.
+    #[error("Trap occurred during instantiation")]
+    Trap(#[source] Trap),
+
     /// A compilation error occured.
     #[error("Trap occurred while invoking start function")]
     StartTrap(#[source] Trap),

View File

@@ -1,14 +1,42 @@
-//! Runtime library calls. Note that wasm compilers may sometimes perform these
-//! inline rather than calling them, particularly when CPUs have special
-//! instructions which compute them directly.
+//! Runtime library calls.
+//!
+//! Note that Wasm compilers may sometimes perform these inline rather than
+//! calling them, particularly when CPUs have special instructions which compute
+//! them directly.
+//!
+//! These functions are called by compiled Wasm code, and therefore must take
+//! certain care about some things:
+//!
+//! * They must always be `pub extern "C"` and should only contain basic, raw
+//!   i32/i64/f32/f64/pointer parameters that are safe to pass across the system
+//!   ABI!
+//!
+//! * If any nested function propagates an `Err(trap)` out to the library
+//!   function frame, we need to raise it. This involves some nasty and quite
+//!   unsafe code under the covers! Notable, after raising the trap, drops
+//!   **will not** be run for local variables! This can lead to things like
+//!   leaking `InstanceHandle`s which leads to never deallocating JIT code,
+//!   instances, and modules! Therefore, always use nested blocks to ensure
+//!   drops run before raising a trap:
+//!
+//! ```
+//! pub extern "C" fn my_lib_function(...) {
+//!     let result = {
+//!         // Do everything in here so drops run at the end of the block.
+//!         ...
+//!     };
+//!     if let Err(trap) = result {
+//!         // Now we can safely raise the trap without leaking!
+//!         raise_lib_trap(trap);
+//!     }
+//! }
+//! ```
 
 use crate::table::Table;
 use crate::traphandlers::raise_lib_trap;
 use crate::vmcontext::VMContext;
 use wasmtime_environ::ir;
-use wasmtime_environ::wasm::{
-    DefinedMemoryIndex, DefinedTableIndex, MemoryIndex, PassiveElemIndex, TableIndex,
-};
+use wasmtime_environ::wasm::{DefinedMemoryIndex, MemoryIndex, PassiveElemIndex, TableIndex};
 
 /// Implementation of f32.ceil
 pub extern "C" fn wasmtime_f32_ceil(x: f32) -> f32 {
@@ -93,7 +121,6 @@ pub extern "C" fn wasmtime_f64_nearest(x: f64) -> f64 {
 }
 
 /// Implementation of memory.grow for locally-defined 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_memory32_grow(
     vmctx: *mut VMContext,
     delta: u32,
@@ -108,7 +135,6 @@ pub unsafe extern "C" fn wasmtime_memory32_grow(
 }
 
 /// Implementation of memory.grow for imported 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory32_grow(
     vmctx: *mut VMContext,
     delta: u32,
@@ -123,7 +149,6 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_grow(
 }
 
 /// Implementation of memory.size for locally-defined 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_memory32_size(vmctx: *mut VMContext, memory_index: u32) -> u32 {
     let instance = (&mut *vmctx).instance();
     let memory_index = DefinedMemoryIndex::from_u32(memory_index);
@@ -132,7 +157,6 @@ pub unsafe extern "C" fn wasmtime_memory32_size(vmctx: *mut VMContext, memory_in
 }
 
 /// Implementation of memory.size for imported 32-bit memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory32_size(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -143,77 +167,8 @@ pub unsafe extern "C" fn wasmtime_imported_memory32_size(
     instance.imported_memory_size(memory_index)
 }
 
-/// Implementation of `table.copy` when both tables are locally defined.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_defined_defined(
-    vmctx: *mut VMContext,
-    dst_table_index: u32,
-    src_table_index: u32,
-    dst: u32,
-    src: u32,
-    len: u32,
-    source_loc: u32,
-) {
-    let dst_table_index = DefinedTableIndex::from_u32(dst_table_index);
-    let src_table_index = DefinedTableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_defined_table(dst_table_index);
-    let src_table = instance.get_defined_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
-        raise_lib_trap(trap);
-    }
-}
-
-/// Implementation of `table.copy` when the destination table is locally defined
-/// and the source table is imported.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_defined_imported(
-    vmctx: *mut VMContext,
-    dst_table_index: u32,
-    src_table_index: u32,
-    dst: u32,
-    src: u32,
-    len: u32,
-    source_loc: u32,
-) {
-    let dst_table_index = DefinedTableIndex::from_u32(dst_table_index);
-    let src_table_index = TableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_defined_table(dst_table_index);
-    let src_table = instance.get_foreign_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
-        raise_lib_trap(trap);
-    }
-}
-
-/// Implementation of `table.copy` when the destination table is imported
-/// and the source table is locally defined.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_imported_defined(
-    vmctx: *mut VMContext,
-    dst_table_index: u32,
-    src_table_index: u32,
-    dst: u32,
-    src: u32,
-    len: u32,
-    source_loc: u32,
-) {
-    let dst_table_index = TableIndex::from_u32(dst_table_index);
-    let src_table_index = DefinedTableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_foreign_table(dst_table_index);
-    let src_table = instance.get_defined_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
-        raise_lib_trap(trap);
-    }
-}
-
-/// Implementation of `table.copy` when both tables are imported.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_table_copy_imported_imported(
+/// Implementation of `table.copy`.
+pub unsafe extern "C" fn wasmtime_table_copy(
     vmctx: *mut VMContext,
     dst_table_index: u32,
     src_table_index: u32,
@@ -222,19 +177,21 @@ pub unsafe extern "C" fn wasmtime_table_copy_imported_imported(
     len: u32,
     source_loc: u32,
 ) {
-    let dst_table_index = TableIndex::from_u32(dst_table_index);
-    let src_table_index = TableIndex::from_u32(src_table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    let dst_table = instance.get_foreign_table(dst_table_index);
-    let src_table = instance.get_foreign_table(src_table_index);
-    if let Err(trap) = Table::copy(dst_table, src_table, dst, src, len, source_loc) {
+    let result = {
+        let dst_table_index = TableIndex::from_u32(dst_table_index);
+        let src_table_index = TableIndex::from_u32(src_table_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        let dst_table = instance.get_table(dst_table_index);
+        let src_table = instance.get_table(src_table_index);
+        Table::copy(dst_table, src_table, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }
 
 /// Implementation of `table.init`.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_table_init(
     vmctx: *mut VMContext,
     table_index: u32,
@@ -244,19 +201,19 @@ pub unsafe extern "C" fn wasmtime_table_init(
     len: u32,
     source_loc: u32,
 ) {
-    let table_index = TableIndex::from_u32(table_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let elem_index = PassiveElemIndex::from_u32(elem_index);
-    let instance = (&mut *vmctx).instance();
-
-    if let Err(trap) = instance.table_init(table_index, elem_index, dst, src, len, source_loc) {
+    let result = {
+        let table_index = TableIndex::from_u32(table_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let elem_index = PassiveElemIndex::from_u32(elem_index);
+        let instance = (&mut *vmctx).instance();
+        instance.table_init(table_index, elem_index, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }
 
 /// Implementation of `elem.drop`.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_elem_drop(vmctx: *mut VMContext, elem_index: u32) {
     let elem_index = PassiveElemIndex::from_u32(elem_index);
     let instance = (&mut *vmctx).instance();
@@ -264,8 +221,7 @@ pub unsafe extern "C" fn wasmtime_elem_drop(vmctx: *mut VMContext, elem_index: u
 }
 
 /// Implementation of `memory.copy` for locally defined memories.
-#[no_mangle]
-pub unsafe extern "C" fn wasmtime_memory_copy(
+pub unsafe extern "C" fn wasmtime_defined_memory_copy(
     vmctx: *mut VMContext,
     memory_index: u32,
     dst: u32,
@@ -273,16 +229,18 @@ pub unsafe extern "C" fn wasmtime_memory_copy(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.defined_memory_copy(memory_index, dst, src, len, source_loc) {
+    let result = {
+        let memory_index = DefinedMemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.defined_memory_copy(memory_index, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }
 
 /// Implementation of `memory.copy` for imported memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory_copy(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -291,16 +249,18 @@ pub unsafe extern "C" fn wasmtime_imported_memory_copy(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = MemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.imported_memory_copy(memory_index, dst, src, len, source_loc) {
+    let result = {
+        let memory_index = MemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.imported_memory_copy(memory_index, dst, src, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }
 
 /// Implementation of `memory.fill` for locally defined memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_memory_fill(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -309,16 +269,18 @@ pub unsafe extern "C" fn wasmtime_memory_fill(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.defined_memory_fill(memory_index, dst, val, len, source_loc) {
+    let result = {
+        let memory_index = DefinedMemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.defined_memory_fill(memory_index, dst, val, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }
 
 /// Implementation of `memory.fill` for imported memories.
-#[no_mangle]
 pub unsafe extern "C" fn wasmtime_imported_memory_fill(
     vmctx: *mut VMContext,
     memory_index: u32,
@@ -327,10 +289,13 @@ pub unsafe extern "C" fn wasmtime_imported_memory_fill(
     len: u32,
     source_loc: u32,
 ) {
-    let memory_index = MemoryIndex::from_u32(memory_index);
-    let source_loc = ir::SourceLoc::new(source_loc);
-    let instance = (&mut *vmctx).instance();
-    if let Err(trap) = instance.imported_memory_fill(memory_index, dst, val, len, source_loc) {
+    let result = {
+        let memory_index = MemoryIndex::from_u32(memory_index);
+        let source_loc = ir::SourceLoc::new(source_loc);
+        let instance = (&mut *vmctx).instance();
+        instance.imported_memory_fill(memory_index, dst, val, len, source_loc)
+    };
+    if let Err(trap) = result {
         raise_lib_trap(trap);
     }
 }

View File

@@ -3,8 +3,7 @@
 //! `Table` is to WebAssembly tables what `LinearMemory` is to WebAssembly linear memories.
 
 use crate::vmcontext::{VMCallerCheckedAnyfunc, VMTableDefinition};
-use crate::{Trap, TrapDescription};
-use backtrace::Backtrace;
+use crate::Trap;
 use std::cell::RefCell;
 use std::convert::{TryFrom, TryInto};
 use wasmtime_environ::wasm::TableElementType;
@@ -112,13 +111,7 @@ impl Table {
             .checked_add(len)
             .map_or(true, |m| m > dst_table.size())
         {
-            return Err(Trap::Wasm {
-                desc: TrapDescription {
-                    source_loc,
-                    trap_code: ir::TrapCode::TableOutOfBounds,
-                },
-                backtrace: Backtrace::new(),
-            });
+            return Err(Trap::wasm(source_loc, ir::TrapCode::TableOutOfBounds));
         }
 
         let srcs = src_index..src_index + len;
@@ -126,6 +119,8 @@ impl Table {
         // Note on the unwraps: the bounds check above means that these will
         // never panic.
+        //
+        // TODO(#983): investigate replacing this get/set loop with a `memcpy`.
         if dst_index <= src_index {
             for (s, d) in (srcs).zip(dsts) {
                 dst_table.set(d, src_table.get(s).unwrap()).unwrap();

View File

@@ -140,6 +140,20 @@ impl fmt::Display for Trap {
 
 impl std::error::Error for Trap {}
 
+impl Trap {
+    /// Construct a new Wasm trap with the given source location and trap code.
+    ///
+    /// Internally saves a backtrace when constructed.
+    pub fn wasm(source_loc: ir::SourceLoc, trap_code: ir::TrapCode) -> Self {
+        let desc = TrapDescription {
+            source_loc,
+            trap_code,
+        };
+        let backtrace = Backtrace::new();
+        Trap::Wasm { desc, backtrace }
+    }
+}
+
 /// Call the wasm function pointed to by `callee`.
 ///
 /// * `vmctx` - the callee vmctx argument

View File

@@ -549,22 +549,16 @@ impl VMBuiltinFunctionsArray {
         ptrs[BuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
             wasmtime_imported_memory32_size as usize;
-        ptrs[BuiltinFunctionIndex::get_table_copy_defined_defined_index().index() as usize] =
-            wasmtime_table_copy_defined_defined as usize;
-        ptrs[BuiltinFunctionIndex::get_table_copy_defined_imported_index().index() as usize] =
-            wasmtime_table_copy_defined_imported as usize;
-        ptrs[BuiltinFunctionIndex::get_table_copy_imported_defined_index().index() as usize] =
-            wasmtime_table_copy_imported_defined as usize;
-        ptrs[BuiltinFunctionIndex::get_table_copy_imported_imported_index().index() as usize] =
-            wasmtime_table_copy_imported_imported as usize;
+        ptrs[BuiltinFunctionIndex::get_table_copy_index().index() as usize] =
+            wasmtime_table_copy as usize;
         ptrs[BuiltinFunctionIndex::get_table_init_index().index() as usize] =
             wasmtime_table_init as usize;
         ptrs[BuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
             wasmtime_elem_drop as usize;
-        ptrs[BuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
-            wasmtime_memory_copy as usize;
+        ptrs[BuiltinFunctionIndex::get_defined_memory_copy_index().index() as usize] =
+            wasmtime_defined_memory_copy as usize;
         ptrs[BuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
             wasmtime_imported_memory_copy as usize;
         ptrs[BuiltinFunctionIndex::get_memory_fill_index().index() as usize] =

View File

@@ -12,33 +12,19 @@ fn run_wast(wast: &str, strategy: Strategy) -> anyhow::Result<()> {
     let simd = wast.iter().any(|s| s == "simd");
 
+    let bulk_mem = wast.iter().any(|s| s == "bulk-memory-operations");
+
     // Some simd tests assume support for multiple tables, which are introduced
     // by reference types.
     let reftypes = simd || wast.iter().any(|s| s == "reference-types");
 
-    // Reference types assumes support for bulk memory.
-    let bulk_mem = reftypes
-        || wast.iter().any(|s| s == "bulk-memory-operations")
-        || wast.iter().any(|s| s == "table_copy.wast")
-        || wast.iter().any(|s| s == "elem_drop.wast")
-        || wast.iter().any(|s| s == "elem-ref-null.wast")
-        || wast.iter().any(|s| s == "memory-copy.wast")
-        || wast.iter().any(|s| s == "imported-memory-copy.wast")
-        || wast
-            .iter()
-            .any(|s| s == "table_copy_on_imported_tables.wast");
-
-    // And bulk memory also assumes support for reference types (e.g. multiple
-    // tables).
-    let reftypes = reftypes || bulk_mem;
-
     let multi_val = wast.iter().any(|s| s == "multi-value");
 
     let mut cfg = Config::new();
     cfg.wasm_simd(simd)
+        .wasm_bulk_memory(bulk_mem)
         .wasm_reference_types(reftypes)
         .wasm_multi_value(multi_val)
-        .wasm_bulk_memory(bulk_mem)
         .strategy(strategy)?
         .cranelift_debug_verifier(true);