Account for fuel before unconditionally trapping Wasm accesses (#5447)
* Account for fuel before unconditionally trapping Wasm accesses

  Fixes #5445

* Add a test for fuel accounting and unconditionally trapping memory accesses
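For context, the guarantee this change adds, seen from the embedder API: with fuel metering enabled, a memory access whose constant offset is statically beyond the memory's maximum still charges the fuel consumed by the instructions that run before the trap. Below is a minimal standalone sketch of that scenario, using the same public Wasmtime APIs as the new test at the end of this diff (`Config::consume_fuel`, `Store::add_fuel`, `Store::fuel_consumed`); the module text and fuel amount are illustrative.

use wasmtime::*;

fn main() {
    // Fuel metering on, plus a small static memory so a large constant
    // offset is statically out of bounds.
    let mut config = Config::new();
    config.consume_fuel(true);
    config.static_memory_maximum_size(0x1_0000);
    let engine = Engine::new(&config).unwrap();

    // `i32.add` executes (and costs fuel) before the load; the load's offset
    // can never be in bounds, so Cranelift compiles it to an unconditional trap.
    let module = Module::new(
        &engine,
        r#"(module
               (memory 1 1)
               (func (export "f") (param i32) (result i32)
                   local.get 0
                   local.get 0
                   i32.add
                   i32.load8_s offset=0xffffffff))"#,
    )
    .unwrap();

    let mut store = Store::new(&engine, ());
    store.add_fuel(1_000).unwrap();
    let instance = Instance::new(&mut store, &module, &[]).unwrap();
    let f = instance.get_typed_func::<i32, i32>(&mut store, "f").unwrap();

    // The call traps out of bounds...
    let err = f.call(&mut store, 0).unwrap_err();
    assert_eq!(err.downcast::<Trap>().unwrap(), Trap::MemoryOutOfBounds);
    // ...but the fuel charged for the instructions before the trap is still
    // visible on the store, which is what this change guarantees.
    assert!(store.fuel_consumed().unwrap() > 0);
}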
@@ -2316,17 +2316,18 @@ where
     // offsets in `memarg` are <=2gb, which means we get the fast path of one
     // `heap_addr` instruction plus a hardcoded i32-offset in memory-related
     // instructions.
+    let heap = environ.heaps()[heap].clone();
     let addr = match u32::try_from(memarg.offset) {
         // If our offset fits within a u32, then we can place the it into the
         // offset immediate of the `heap_addr` instruction.
         Ok(offset) => bounds_checks::bounds_check_and_compute_addr(
             builder,
-            &*environ,
-            &environ.heaps()[heap],
+            environ,
+            &heap,
             index,
             offset,
             access_size,
-        ),
+        )?,
 
         // If the offset doesn't fit within a u32, then we can't pass it
         // directly into `heap_addr`.
@@ -2355,20 +2356,19 @@ where
         // relatively odd/rare. In the future if needed we can look into
         // optimizing this more.
         Err(_) => {
-            let index_type = environ.heaps()[heap].index_type;
-            let offset = builder.ins().iconst(index_type, memarg.offset as i64);
+            let offset = builder.ins().iconst(heap.index_type, memarg.offset as i64);
             let adjusted_index =
                 builder
                     .ins()
                     .uadd_overflow_trap(index, offset, ir::TrapCode::HeapOutOfBounds);
             bounds_checks::bounds_check_and_compute_addr(
                 builder,
-                &*environ,
-                &environ.heaps()[heap],
+                environ,
+                &heap,
                 adjusted_index,
                 0,
                 access_size,
-            )
+            )?
         }
     };
     let addr = match addr {
@@ -8,21 +8,22 @@
 //!
 //! bounds check the memory access and translate it into a native memory access.
 
-use crate::{HeapData, HeapStyle, TargetEnvironment};
+use crate::{FuncEnvironment, HeapData, HeapStyle};
 use cranelift_codegen::{
     cursor::{Cursor, FuncCursor},
     ir::{self, condcodes::IntCC, InstBuilder, RelSourceLoc},
 };
 use cranelift_frontend::FunctionBuilder;
+use wasmtime_types::WasmResult;
 
 /// Helper used to emit bounds checks (as necessary) and compute the native
 /// address of a heap access.
 ///
 /// Returns the `ir::Value` holding the native address of the heap access, or
 /// `None` if the heap access will unconditionally trap.
-pub fn bounds_check_and_compute_addr<TE>(
+pub fn bounds_check_and_compute_addr<Env>(
     builder: &mut FunctionBuilder,
-    env: &TE,
+    env: &mut Env,
     heap: &HeapData,
     // Dynamic operand indexing into the heap.
     index: ir::Value,
@@ -30,9 +31,9 @@ pub fn bounds_check_and_compute_addr<TE>(
     offset: u32,
     // Static size of the heap access.
     access_size: u8,
-) -> Option<ir::Value>
+) -> WasmResult<Option<ir::Value>>
 where
-    TE: TargetEnvironment + ?Sized,
+    Env: FuncEnvironment + ?Sized,
 {
     let index = cast_index_to_pointer_ty(
         index,
@@ -62,7 +63,7 @@ where
     // different bounds checks and optimizations of those bounds checks. It is
     // intentionally written in a straightforward case-matching style that will
     // hopefully make it easy to port to ISLE one day.
-    match heap.style {
+    Ok(match heap.style {
         // ====== Dynamic Memories ======
         //
         // 1. First special case for when `offset + access_size == 1`:
@@ -216,6 +217,7 @@ where
         // bound`, since we will end up being out-of-bounds regardless of the
         // given `index`.
         HeapStyle::Static { bound } if offset_and_size > bound.into() => {
+            env.before_unconditionally_trapping_memory_access(builder)?;
             builder.ins().trap(ir::TrapCode::HeapOutOfBounds);
             None
         }
@@ -325,7 +327,7 @@ where
                 None,
             ))
         }
-    }
+    })
 }
 
 fn cast_index_to_pointer_ty(
@@ -490,6 +490,18 @@ pub trait FuncEnvironment: TargetEnvironment {
         Ok(())
     }
 
+    /// Optional callback for the `FuncEnvironment` performing this translation
+    /// to maintain, prepare, or finalize custom, internal state when we
+    /// statically determine that a Wasm memory access will unconditionally
+    /// trap, rendering the rest of the block unreachable. Called just before
+    /// the unconditional trap is emitted.
+    fn before_unconditionally_trapping_memory_access(
+        &mut self,
+        _builder: &mut FunctionBuilder,
+    ) -> WasmResult<()> {
+        Ok(())
+    }
+
     /// Optional callback for the `FunctionEnvironment` performing this translation to perform work
     /// before the function body is translated.
     fn before_translate_function(
@@ -2078,6 +2078,17 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m
         Ok(())
     }
 
+    fn before_unconditionally_trapping_memory_access(
+        &mut self,
+        builder: &mut FunctionBuilder,
+    ) -> WasmResult<()> {
+        if self.tunables.consume_fuel {
+            self.fuel_increment_var(builder);
+            self.fuel_save_from_var(builder);
+        }
+        Ok(())
+    }
+
     fn before_translate_function(
         &mut self,
         builder: &mut FunctionBuilder,
@@ -183,3 +183,43 @@ fn manual_edge_cases() {
     assert!(store.consume_fuel(i64::MAX as u64 + 1).is_err());
     assert_eq!(store.consume_fuel(i64::MAX as u64).unwrap(), 0);
 }
+
+#[test]
+fn unconditionally_trapping_memory_accesses_save_fuel_before_trapping() {
+    let mut config = Config::new();
+    config.consume_fuel(true);
+    config.static_memory_maximum_size(0x1_0000);
+
+    let engine = Engine::new(&config).unwrap();
+
+    let module = Module::new(
+        &engine,
+        r#"
+            (module
+                (memory 1 1)
+                (func (export "f") (param i32) (result i32)
+                    local.get 0
+                    local.get 0
+                    i32.add
+                    ;; This offset is larger than our memory max size and therefore
+                    ;; will unconditionally trap.
+                    i32.load8_s offset=0xffffffff))
+        "#,
+    )
+    .unwrap();
+
+    let mut store = Store::new(&engine, ());
+    store.add_fuel(1_000).unwrap();
+
+    let instance = Instance::new(&mut store, &module, &[]).unwrap();
+    let f = instance
+        .get_typed_func::<i32, i32>(&mut store, "f")
+        .unwrap();
+
+    let trap = f.call(&mut store, 0).unwrap_err();
+    assert_eq!(trap.downcast::<Trap>().unwrap(), Trap::MemoryOutOfBounds);
+
+    // The `i32.add` consumed some fuel before the unconditionally trapping
+    // memory access.
+    assert!(store.fuel_consumed().unwrap() > 0);
+}