Do not allow partial segment initialization for tables and memories

Nick Fitzgerald
2020-02-26 13:10:52 -08:00
parent 235833ab97
commit 66634cc796
3 changed files with 100 additions and 36 deletions
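
The change has the same shape in both the table and memory initialization paths below: compute the end of each active segment with overflow-checked arithmetic and compare it against the current size of the target before any element or byte is written, so a segment either lands in full or instantiation traps with that segment untouched. A minimal sketch of that predicate, using illustrative names that do not appear in the wasmtime sources:

    /// Returns true when `len` items starting at `start` fit entirely within a
    /// region holding `size` items. Overflow of `start + len` counts as not
    /// fitting.
    fn segment_fits(start: usize, len: usize, size: usize) -> bool {
        start.checked_add(len).map_or(false, |end| end <= size)
    }

The hunks in `initialize_tables` and `initialize_memories` below use the negated form of this predicate to decide whether to return a `HeapOutOfBounds` trap.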


@@ -20,7 +20,6 @@ use more_asserts::assert_lt;
 use std::alloc::{self, Layout};
 use std::any::Any;
 use std::cell::{Cell, RefCell};
-use std::cmp;
 use std::collections::HashMap;
 use std::collections::HashSet;
 use std::convert::TryFrom;
@@ -212,6 +211,16 @@ impl Instance {
         unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
     }
 
+    /// Get a locally defined or imported memory.
+    pub(crate) fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
+        if let Some(defined_index) = self.module.local.defined_memory_index(index) {
+            self.memory(defined_index)
+        } else {
+            let import = self.imported_memory(index);
+            *unsafe { import.from.as_ref().unwrap() }
+        }
+    }
+
     /// Return the indexed `VMMemoryDefinition`.
     fn memory(&self, index: DefinedMemoryIndex) -> VMMemoryDefinition {
         unsafe { *self.memory_ptr(index) }
@@ -1168,10 +1177,10 @@ fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
         let start = get_table_init_start(init, instance);
         let table = instance.get_table(init.table_index);
 
-        // Even if there aren't any elements being initialized, if its out of
-        // bounds, then we report an error. This is required by the bulk memory
-        // proposal and the way that it uses `table.init` during instantiation.
-        if start > table.size() as usize {
+        if start
+            .checked_add(init.elements.len())
+            .map_or(true, |end| end > table.size() as usize)
+        {
             return Err(InstantiationError::Trap(Trap::wasm(
                 ir::SourceLoc::default(),
                 ir::TrapCode::HeapOutOfBounds,
@@ -1182,17 +1191,7 @@ fn initialize_tables(instance: &Instance) -> Result<(), InstantiationError> {
             let anyfunc = instance.get_caller_checked_anyfunc(*func_idx);
             table
                 .set(u32::try_from(start + i).unwrap(), anyfunc)
-                // Note that when bulk memory is disabled, this will never fail
-                // since we bounds check table element initialization before
-                // doing any table slot writes. However when bulk memory is
-                // enabled, these become runtime traps and the intermediate
-                // table slot writes are visible.
-                .map_err(|()| {
-                    InstantiationError::Trap(Trap::wasm(
-                        ir::SourceLoc::default(),
-                        ir::TrapCode::HeapOutOfBounds,
-                    ))
-                })?;
+                .unwrap();
         }
     }
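
A note on why the new condition is written as `checked_add(..).map_or(true, ..)` rather than a plain `start + len > size` comparison: the addition itself can overflow `usize`, and a wrapped sum could look like a small, in-bounds end offset. Treating overflow as out of bounds closes that hole. A small, self-contained illustration (the values are made up, not taken from the test suite):

    fn out_of_bounds(start: usize, len: usize, size: usize) -> bool {
        // Same shape as the check above: overflow counts as out of bounds.
        start.checked_add(len).map_or(true, |end| end > size)
    }

    fn main() {
        assert!(out_of_bounds(9, 3, 10)); // ordinary overrun
        assert!(!out_of_bounds(7, 3, 10)); // fits exactly
        // The end offset overflows `usize`; a wrapping add would have produced
        // 1, which looks in bounds.
        assert_eq!(usize::MAX.wrapping_add(2), 1);
        assert!(out_of_bounds(usize::MAX, 2, 10));
    }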
@@ -1246,29 +1245,24 @@ fn initialize_memories(
     data_initializers: &[DataInitializer<'_>],
 ) -> Result<(), InstantiationError> {
     for init in data_initializers {
+        let memory = instance.get_memory(init.location.memory_index);
+
         let start = get_memory_init_start(init, instance);
-        unsafe {
-            let mem_slice = get_memory_slice(init, instance);
-
-            let end = start.saturating_add(init.data.len());
-            let actual_end = cmp::min(mem_slice.len(), end);
-            let num_uncopied = end - actual_end;
-            let to_init = &mut mem_slice[start..actual_end];
-            to_init.copy_from_slice(&init.data[..init.data.len() - num_uncopied]);
-
-            // Note that if bulk memory is disabled, we'll never hit this case
-            // because we did an eager bounds check in
-            // `check_memory_init_bounds`. However when bulk memory is enabled,
-            // we need the incremental writes up to the OOB trap to be visible,
-            // since this is dictated by the updated spec for the bulk memory
-            // proposal.
-            if num_uncopied > 0 {
-                return Err(InstantiationError::Trap(Trap::wasm(
-                    ir::SourceLoc::default(),
-                    ir::TrapCode::HeapOutOfBounds,
-                )));
-            }
+        if start
+            .checked_add(init.data.len())
+            .map_or(true, |end| end > memory.current_length)
+        {
+            return Err(InstantiationError::Trap(Trap::wasm(
+                ir::SourceLoc::default(),
+                ir::TrapCode::HeapOutOfBounds,
+            )));
+        }
+
+        unsafe {
+            let mem_slice = get_memory_slice(init, instance);
+            let end = start + init.data.len();
+            let to_init = &mut mem_slice[start..end];
+            to_init.copy_from_slice(init.data);
         }
     }
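
The observable effect of this hunk is that a data segment is now copied in full or not at all, where the old code copied the in-bounds prefix and then trapped. A rough stand-in using a plain byte buffer (the helper and error type here are hypothetical, not wasmtime code):

    #[derive(Debug, PartialEq)]
    struct OutOfBounds;

    /// All-or-nothing segment copy: the destination is untouched on error.
    fn init_segment(mem: &mut [u8], start: usize, data: &[u8]) -> Result<(), OutOfBounds> {
        let end = start
            .checked_add(data.len())
            .filter(|&end| end <= mem.len())
            .ok_or(OutOfBounds)?;
        mem[start..end].copy_from_slice(data);
        Ok(())
    }

    fn main() {
        let mut mem = vec![0u8; 8];
        // The segment would run past the end of `mem`, so it is rejected...
        assert_eq!(init_segment(&mut mem, 6, b"abcd"), Err(OutOfBounds));
        // ...and no prefix was written, unlike the behavior this commit removes.
        assert_eq!(mem, vec![0u8; 8]);
    }

The two new `.wast` tests below check exactly this property through an imported memory and an imported table.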


@@ -0,0 +1,35 @@
+(module $m
+  (memory (export "mem") 1)
+
+  (func (export "load") (param i32) (result i32)
+    local.get 0
+    i32.load8_u))
+
+(register "m" $m)
+
+(assert_trap
+  (module
+    (memory (import "m" "mem") 1)
+
+    ;; This is in bounds, and should get written to the memory.
+    (data (i32.const 0) "abc")
+
+    ;; Partially out of bounds. None of these bytes should get written, and
+    ;; instantiation should trap.
+    (data (i32.const 65530) "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz")
+  )
+  "out of bounds"
+)
+
+;; The first data segment got written.
+(assert_return (invoke $m "load" (i32.const 0)) (i32.const 97))
+(assert_return (invoke $m "load" (i32.const 1)) (i32.const 98))
+(assert_return (invoke $m "load" (i32.const 2)) (i32.const 99))
+
+;; The second did not get partially written.
+(assert_return (invoke $m "load" (i32.const 65530)) (i32.const 0))
+(assert_return (invoke $m "load" (i32.const 65531)) (i32.const 0))
+(assert_return (invoke $m "load" (i32.const 65532)) (i32.const 0))
+(assert_return (invoke $m "load" (i32.const 65533)) (i32.const 0))
+(assert_return (invoke $m "load" (i32.const 65534)) (i32.const 0))
+(assert_return (invoke $m "load" (i32.const 65535)) (i32.const 0))


@@ -0,0 +1,35 @@
+(module $m
+  (table (export "table") funcref (elem $zero $zero $zero $zero $zero $zero $zero $zero $zero $zero))
+
+  (func $zero (result i32)
+    (i32.const 0))
+
+  (func (export "indirect-call") (param i32) (result i32)
+    local.get 0
+    call_indirect (result i32)))
+
+(register "m" $m)
+
+(assert_trap
+  (module
+    (table (import "m" "table") 10 funcref)
+
+    (func $one (result i32)
+      (i32.const 1))
+
+    ;; An in-bounds segment that should get initialized in the table.
+    (elem (i32.const 7) $one)
+
+    ;; Part of this segment is out of bounds, so none of its elements should be
+    ;; initialized into the table, and it should trap.
+    (elem (i32.const 9) $one $one $one)
+  )
+  "out of bounds"
+)
+
+;; The first `$one` segment *was* initialized OK.
+(assert_return (invoke "indirect-call" (i32.const 7)) (i32.const 1))
+
+;; The second `$one` segment is partially out of bounds, and therefore none of
+;; its elements were written into the table.
+(assert_return (invoke "indirect-call" (i32.const 9)) (i32.const 0))
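
From the embedder's perspective, the same property shows up as instantiation failing outright when a segment does not fit. A minimal sketch written against a recent wasmtime embedding API (the `Engine`/`Store` types and exact signatures here are the present-day ones and are assumptions, since the public API has changed since this 2020 commit; it also assumes an `anyhow` dependency):

    use wasmtime::{Engine, Instance, Module, Store};

    fn main() -> anyhow::Result<()> {
        let engine = Engine::default();
        let mut store = Store::new(&engine, ());

        // One 64 KiB page of memory, plus a data segment that starts in bounds
        // but runs past the end of that page.
        let module = Module::new(
            &engine,
            r#"(module
                 (memory 1)
                 (data (i32.const 65530) "zzzzzzzzzz"))"#,
        )?;

        // Instantiation fails with an out-of-bounds error instead of writing
        // part of the segment.
        assert!(Instance::new(&mut store, &module, &[]).is_err());
        Ok(())
    }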