wasmtime/crates/runtime/src/libcalls.rs
Commit 674a6208d8 by Nick Fitzgerald, 2020-03-10: Implement data.drop and memory.init and get the rest of the bulk memory spec tests passing (#1264)
* Enable the already-passing `bulk-memory-operations/imports.wast` test

* Implement support for the `memory.init` instruction and passive data segments

This adds support for passive data segments and the `memory.init` instruction
from the bulk memory operations proposal. Passive data segments are stored on
the Wasm module and then `memory.init` instructions copy their contents into
memory.

* Implement the `data.drop` instruction

This allows wasm modules to deallocate passive data segments that they don't
need anymore. We keep track of which segments have not been dropped on each
`Instance`, and dropping one removes its entry from the instance's hash map.
The module itself keeps all of the segments, since new instantiations need them.
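
To make the shape of this concrete, here is a minimal, self-contained sketch of the ownership story described above. Everything in it (`ModuleSketch`, `InstanceSketch`, the `passive_data` fields, and the `memory_init`/`data_drop` methods) is illustrative naming, not the actual wasmtime types, and bounds failures become `Err` values here where the real libcalls raise wasm traps:

```rust
use std::collections::HashMap;
use std::sync::Arc;

struct ModuleSketch {
    /// Passive data segments are stored on the module, keyed by data index.
    passive_data: HashMap<u32, Arc<[u8]>>,
}

struct InstanceSketch {
    /// Segments this instance has not yet dropped. Entries share the module's
    /// bytes, so "dropping" one is just removing it from this map.
    passive_data: HashMap<u32, Arc<[u8]>>,
    /// The instance's linear memory.
    memory: Vec<u8>,
}

impl InstanceSketch {
    fn new(module: &ModuleSketch, memory_size: usize) -> Self {
        InstanceSketch {
            // Every new instantiation starts with all of the module's segments.
            passive_data: module.passive_data.clone(),
            memory: vec![0; memory_size],
        }
    }

    /// `memory.init`: copy `len` bytes from passive segment `data_index`,
    /// starting at `src`, into linear memory at `dst`.
    fn memory_init(
        &mut self,
        data_index: u32,
        dst: usize,
        src: usize,
        len: usize,
    ) -> Result<(), &'static str> {
        let data = self
            .passive_data
            .get(&data_index)
            .cloned()
            .ok_or("data segment has been dropped")?;
        if src + len > data.len() || dst + len > self.memory.len() {
            return Err("out of bounds memory access");
        }
        self.memory[dst..dst + len].copy_from_slice(&data[src..src + len]);
        Ok(())
    }

    /// `data.drop`: forget this instance's entry for the segment. The module's
    /// copy is untouched, so future instantiations still see every segment.
    fn data_drop(&mut self, data_index: u32) {
        self.passive_data.remove(&data_index);
    }
}
```

In the real code below, the indices are typed (`DataIndex`), the lookups and copies live on `Instance`, and failures are surfaced as traps via `raise_lib_trap` rather than `Err` values, but the division of ownership is the same.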

* Enable final bulk memory operations spec test

This requires special-casing an expected error message for an `assert_trap`,
since the expected error message contains the index of an uninitialized table
element, but our trap implementation doesn't save that diagnostic information
and shepherd it out.

//! Runtime library calls.
//!
//! Note that Wasm compilers may sometimes perform these inline rather than
//! calling them, particularly when CPUs have special instructions which compute
//! them directly.
//!
//! These functions are called by compiled Wasm code, and therefore must take
//! certain care about some things:
//!
//! * They must always be `pub extern "C"` and should only contain basic, raw
//!   i32/i64/f32/f64/pointer parameters that are safe to pass across the system
//!   ABI!
//!
//! * If any nested function propagates an `Err(trap)` out to the library
//!   function frame, we need to raise it. This involves some nasty and quite
//!   unsafe code under the covers! Notably, after raising the trap, drops
//!   **will not** be run for local variables! This can lead to things like
//!   leaking `InstanceHandle`s which leads to never deallocating JIT code,
//!   instances, and modules! Therefore, always use nested blocks to ensure
//!   drops run before raising a trap:
//!
//! ```ignore
//! pub extern "C" fn my_lib_function(...) {
//!     let result = {
//!         // Do everything in here so drops run at the end of the block.
//!         ...
//!     };
//!     if let Err(trap) = result {
//!         // Now we can safely raise the trap without leaking!
//!         raise_lib_trap(trap);
//!     }
//! }
//! ```
use crate::table::Table;
use crate::traphandlers::raise_lib_trap;
use crate::vmcontext::VMContext;
use wasmtime_environ::ir;
use wasmtime_environ::wasm::{DataIndex, DefinedMemoryIndex, ElemIndex, MemoryIndex, TableIndex};
/// Implementation of f32.ceil
pub extern "C" fn wasmtime_f32_ceil(x: f32) -> f32 {
    x.ceil()
}
/// Implementation of f32.floor
pub extern "C" fn wasmtime_f32_floor(x: f32) -> f32 {
    x.floor()
}
/// Implementation of f32.trunc
pub extern "C" fn wasmtime_f32_trunc(x: f32) -> f32 {
    x.trunc()
}
/// Implementation of f32.nearest
#[allow(clippy::float_arithmetic, clippy::float_cmp)]
pub extern "C" fn wasmtime_f32_nearest(x: f32) -> f32 {
    // Rust doesn't have a nearest function, so do it manually.
    if x == 0.0 {
        // Preserve the sign of zero.
        x
    } else {
        // Nearest is either ceil or floor depending on which is nearest or even.
        let u = x.ceil();
        let d = x.floor();
        let um = (x - u).abs();
        let dm = (x - d).abs();
        if um < dm
            || (um == dm && {
                let h = u / 2.;
                h.floor() == h
            })
        {
            u
        } else {
            d
        }
    }
}
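// A quick sanity sketch of the round-to-nearest, ties-to-even behavior
// implemented above; the values are chosen to hit the tie, non-tie, and
// signed-zero paths.
#[cfg(test)]
#[allow(clippy::float_cmp)]
mod f32_nearest_examples {
    use super::wasmtime_f32_nearest;

    #[test]
    fn ties_round_to_even_and_zero_sign_is_kept() {
        assert_eq!(wasmtime_f32_nearest(2.5), 2.0); // tie: rounds to the even neighbor
        assert_eq!(wasmtime_f32_nearest(3.5), 4.0); // tie: rounds to the even neighbor
        assert_eq!(wasmtime_f32_nearest(2.4), 2.0); // no tie: nearest integer wins
        assert!(wasmtime_f32_nearest(-0.0).is_sign_negative()); // sign of zero preserved
    }
}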
/// Implementation of f64.ceil
pub extern "C" fn wasmtime_f64_ceil(x: f64) -> f64 {
    x.ceil()
}
/// Implementation of f64.floor
pub extern "C" fn wasmtime_f64_floor(x: f64) -> f64 {
    x.floor()
}
/// Implementation of f64.trunc
pub extern "C" fn wasmtime_f64_trunc(x: f64) -> f64 {
    x.trunc()
}
/// Implementation of f64.nearest
#[allow(clippy::float_arithmetic, clippy::float_cmp)]
pub extern "C" fn wasmtime_f64_nearest(x: f64) -> f64 {
    // Rust doesn't have a nearest function, so do it manually.
    if x == 0.0 {
        // Preserve the sign of zero.
        x
    } else {
        // Nearest is either ceil or floor depending on which is nearest or even.
        let u = x.ceil();
        let d = x.floor();
        let um = (x - u).abs();
        let dm = (x - d).abs();
        if um < dm
            || (um == dm && {
                let h = u / 2.;
                h.floor() == h
            })
        {
            u
        } else {
            d
        }
    }
}
/// Implementation of memory.grow for locally-defined 32-bit memories.
pub unsafe extern "C" fn wasmtime_memory32_grow(
    vmctx: *mut VMContext,
    delta: u32,
    memory_index: u32,
) -> u32 {
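    // On failure, `memory.grow` returns -1, which is `u32::max_value()` when
    // viewed as an unsigned page count, so a failed grow maps to that sentinel.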
    let instance = (&mut *vmctx).instance();
    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
    instance
        .memory_grow(memory_index, delta)
        .unwrap_or(u32::max_value())
}
/// Implementation of memory.grow for imported 32-bit memories.
pub unsafe extern "C" fn wasmtime_imported_memory32_grow(
    vmctx: *mut VMContext,
    delta: u32,
    memory_index: u32,
) -> u32 {
    let instance = (&mut *vmctx).instance();
    let memory_index = MemoryIndex::from_u32(memory_index);
    instance
        .imported_memory_grow(memory_index, delta)
        .unwrap_or(u32::max_value())
}
/// Implementation of memory.size for locally-defined 32-bit memories.
pub unsafe extern "C" fn wasmtime_memory32_size(vmctx: *mut VMContext, memory_index: u32) -> u32 {
    let instance = (&mut *vmctx).instance();
    let memory_index = DefinedMemoryIndex::from_u32(memory_index);
    instance.memory_size(memory_index)
}
/// Implementation of memory.size for imported 32-bit memories.
pub unsafe extern "C" fn wasmtime_imported_memory32_size(
    vmctx: *mut VMContext,
    memory_index: u32,
) -> u32 {
    let instance = (&mut *vmctx).instance();
    let memory_index = MemoryIndex::from_u32(memory_index);
    instance.imported_memory_size(memory_index)
}
/// Implementation of `table.copy`.
pub unsafe extern "C" fn wasmtime_table_copy(
    vmctx: *mut VMContext,
    dst_table_index: u32,
    src_table_index: u32,
    dst: u32,
    src: u32,
    len: u32,
    source_loc: u32,
) {
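    // As the module docs explain, do all the work inside a block so that local
    // variables are dropped before we potentially raise a trap below.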
    let result = {
        let dst_table_index = TableIndex::from_u32(dst_table_index);
        let src_table_index = TableIndex::from_u32(src_table_index);
        let source_loc = ir::SourceLoc::new(source_loc);
        let instance = (&mut *vmctx).instance();
        let dst_table = instance.get_table(dst_table_index);
        let src_table = instance.get_table(src_table_index);
        Table::copy(dst_table, src_table, dst, src, len, source_loc)
    };
    if let Err(trap) = result {
        raise_lib_trap(trap);
    }
}
/// Implementation of `table.init`.
pub unsafe extern "C" fn wasmtime_table_init(
    vmctx: *mut VMContext,
    table_index: u32,
    elem_index: u32,
    dst: u32,
    src: u32,
    len: u32,
    source_loc: u32,
) {
    let result = {
        let table_index = TableIndex::from_u32(table_index);
        let source_loc = ir::SourceLoc::new(source_loc);
        let elem_index = ElemIndex::from_u32(elem_index);
        let instance = (&mut *vmctx).instance();
        instance.table_init(table_index, elem_index, dst, src, len, source_loc)
    };
    if let Err(trap) = result {
        raise_lib_trap(trap);
    }
}
/// Implementation of `elem.drop`.
pub unsafe extern "C" fn wasmtime_elem_drop(vmctx: *mut VMContext, elem_index: u32) {
    let elem_index = ElemIndex::from_u32(elem_index);
    let instance = (&mut *vmctx).instance();
    instance.elem_drop(elem_index);
}
/// Implementation of `memory.copy` for locally defined memories.
pub unsafe extern "C" fn wasmtime_defined_memory_copy(
    vmctx: *mut VMContext,
    memory_index: u32,
    dst: u32,
    src: u32,
    len: u32,
    source_loc: u32,
) {
    let result = {
        let memory_index = DefinedMemoryIndex::from_u32(memory_index);
        let source_loc = ir::SourceLoc::new(source_loc);
        let instance = (&mut *vmctx).instance();
        instance.defined_memory_copy(memory_index, dst, src, len, source_loc)
    };
    if let Err(trap) = result {
        raise_lib_trap(trap);
    }
}
/// Implementation of `memory.copy` for imported memories.
pub unsafe extern "C" fn wasmtime_imported_memory_copy(
    vmctx: *mut VMContext,
    memory_index: u32,
    dst: u32,
    src: u32,
    len: u32,
    source_loc: u32,
) {
    let result = {
        let memory_index = MemoryIndex::from_u32(memory_index);
        let source_loc = ir::SourceLoc::new(source_loc);
        let instance = (&mut *vmctx).instance();
        instance.imported_memory_copy(memory_index, dst, src, len, source_loc)
    };
    if let Err(trap) = result {
        raise_lib_trap(trap);
    }
}
/// Implementation of `memory.fill` for locally defined memories.
pub unsafe extern "C" fn wasmtime_memory_fill(
    vmctx: *mut VMContext,
    memory_index: u32,
    dst: u32,
    val: u32,
    len: u32,
    source_loc: u32,
) {
    let result = {
        let memory_index = DefinedMemoryIndex::from_u32(memory_index);
        let source_loc = ir::SourceLoc::new(source_loc);
        let instance = (&mut *vmctx).instance();
        instance.defined_memory_fill(memory_index, dst, val, len, source_loc)
    };
    if let Err(trap) = result {
        raise_lib_trap(trap);
    }
}
/// Implementation of `memory.fill` for imported memories.
pub unsafe extern "C" fn wasmtime_imported_memory_fill(
    vmctx: *mut VMContext,
    memory_index: u32,
    dst: u32,
    val: u32,
    len: u32,
    source_loc: u32,
) {
    let result = {
        let memory_index = MemoryIndex::from_u32(memory_index);
        let source_loc = ir::SourceLoc::new(source_loc);
        let instance = (&mut *vmctx).instance();
        instance.imported_memory_fill(memory_index, dst, val, len, source_loc)
    };
    if let Err(trap) = result {
        raise_lib_trap(trap);
    }
}
/// Implementation of `memory.init`.
pub unsafe extern "C" fn wasmtime_memory_init(
    vmctx: *mut VMContext,
    memory_index: u32,
    data_index: u32,
    dst: u32,
    src: u32,
    len: u32,
    source_loc: u32,
) {
    let result = {
        let memory_index = MemoryIndex::from_u32(memory_index);
        let data_index = DataIndex::from_u32(data_index);
        let source_loc = ir::SourceLoc::new(source_loc);
        let instance = (&mut *vmctx).instance();
        instance.memory_init(memory_index, data_index, dst, src, len, source_loc)
    };
    if let Err(trap) = result {
        raise_lib_trap(trap);
    }
}
/// Implementation of `data.drop`.
pub unsafe extern "C" fn wasmtime_data_drop(vmctx: *mut VMContext, data_index: u32) {
    let data_index = DataIndex::from_u32(data_index);
    let instance = (&mut *vmctx).instance();
    instance.data_drop(data_index)
}