Use try_from in place of the cast crate in wasmtime-runtime.

This commit is contained in:
Ari Lotter
2019-06-09 04:50:45 -04:00
committed by Jakub Konka
parent f3f6ab0583
commit 8dc1d90352
4 changed files with 31 additions and 33 deletions

View File

@@ -21,7 +21,6 @@ lazy_static = "1.2.0"
libc = { version = "0.2.48", default-features = false }
errno = "0.2.4"
memoffset = "0.3.0"
cast = { version = "0.2.2", default-features = false }
failure = { version = "0.1.3", default-features = false }
failure_derive = { version = "0.1.3", default-features = false }
indexmap = "1.0.2"

View File

@@ -30,6 +30,7 @@ use indexmap;
use std::borrow::ToOwned;
use std::boxed::Box;
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
use std::rc::Rc;
use std::string::{String, ToString};
use wasmtime_environ::{DataInitializer, Module, TableElements, VMOffsets};
@@ -42,7 +43,7 @@ fn signature_id(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmshared_signature_id(index)));
.add(usize::try_from(offsets.vmctx_vmshared_signature_id(index)).unwrap());
*(ptr as *const VMSharedSignatureIndex)
}
}
@@ -55,7 +56,7 @@ fn imported_function<'vmctx>(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmfunction_import(index)));
.add(usize::try_from(offsets.vmctx_vmfunction_import(index)).unwrap());
&*(ptr as *const VMFunctionImport)
}
}
@@ -68,7 +69,7 @@ fn imported_table<'vmctx>(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmtable_import(index)));
.add(usize::try_from(offsets.vmctx_vmtable_import(index)).unwrap());
&*(ptr as *const VMTableImport)
}
}
@@ -81,7 +82,7 @@ fn imported_memory<'vmctx>(
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmmemory_import(index)));
.add(usize::try_from(offsets.vmctx_vmmemory_import(index)).unwrap());
&*(ptr as *const VMMemoryImport)
}
}
@@ -93,7 +94,7 @@ fn imported_global<'vmctx>(
) -> &'vmctx VMGlobalImport {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmglobal_import(index)));
.add(usize::try_from(offsets.vmctx_vmglobal_import(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMGlobalImport)
}
@@ -106,7 +107,7 @@ fn table<'vmctx>(
) -> &'vmctx VMTableDefinition {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmtable_definition(index)));
.add(usize::try_from(offsets.vmctx_vmtable_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMTableDefinition)
}
@@ -119,7 +120,7 @@ fn table_mut<'vmctx>(
) -> &'vmctx mut VMTableDefinition {
unsafe {
let ptr = (vmctx as *mut VMContext as *mut u8)
.add(cast::usize(offsets.vmctx_vmtable_definition(index)));
.add(usize::try_from(offsets.vmctx_vmtable_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&mut *(ptr as *mut VMTableDefinition)
}
@@ -132,7 +133,7 @@ fn memory<'vmctx>(
) -> &'vmctx VMMemoryDefinition {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmmemory_definition(index)));
.add(usize::try_from(offsets.vmctx_vmmemory_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMMemoryDefinition)
}
@@ -145,7 +146,7 @@ fn memory_mut<'vmctx>(
) -> &'vmctx mut VMMemoryDefinition {
unsafe {
let ptr = (vmctx as *mut VMContext as *mut u8)
.add(cast::usize(offsets.vmctx_vmmemory_definition(index)));
.add(usize::try_from(offsets.vmctx_vmmemory_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&mut *(ptr as *mut VMMemoryDefinition)
}
@@ -158,7 +159,7 @@ fn global<'vmctx>(
) -> &'vmctx VMGlobalDefinition {
unsafe {
let ptr = (vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_vmglobal_definition(index)));
.add(usize::try_from(offsets.vmctx_vmglobal_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&*(ptr as *const VMGlobalDefinition)
}
@@ -171,7 +172,7 @@ fn global_mut<'vmctx>(
) -> &'vmctx mut VMGlobalDefinition {
unsafe {
let ptr = (vmctx as *mut VMContext as *mut u8)
.add(cast::usize(offsets.vmctx_vmglobal_definition(index)));
.add(usize::try_from(offsets.vmctx_vmglobal_definition(index)).unwrap());
#[allow(clippy::cast_ptr_alignment)]
&mut *(ptr as *mut VMGlobalDefinition)
}
@@ -237,7 +238,7 @@ impl Instance {
fn signature_ids_ptr(&mut self) -> *mut VMSharedSignatureIndex {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_signature_ids_begin()))
.add(usize::try_from(self.offsets.vmctx_signature_ids_begin()).unwrap())
as *mut VMSharedSignatureIndex
}
}
@@ -251,7 +252,7 @@ impl Instance {
fn imported_functions_ptr(&mut self) -> *mut VMFunctionImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_functions_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_functions_begin()).unwrap())
as *mut VMFunctionImport
}
}
@@ -266,7 +267,7 @@ impl Instance {
fn imported_tables_ptr(&mut self) -> *mut VMTableImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_tables_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_tables_begin()).unwrap())
as *mut VMTableImport
}
}
@@ -280,7 +281,7 @@ impl Instance {
fn imported_memories_ptr(&mut self) -> *mut VMMemoryImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_memories_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_memories_begin()).unwrap())
as *mut VMMemoryImport
}
}
@@ -294,7 +295,7 @@ impl Instance {
fn imported_globals_ptr(&mut self) -> *mut VMGlobalImport {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_imported_globals_begin()))
.add(usize::try_from(self.offsets.vmctx_imported_globals_begin()).unwrap())
as *mut VMGlobalImport
}
}
@@ -315,7 +316,7 @@ impl Instance {
fn tables_ptr(&mut self) -> *mut VMTableDefinition {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_tables_begin()))
.add(usize::try_from(self.offsets.vmctx_tables_begin()).unwrap())
as *mut VMTableDefinition
}
}
@@ -334,7 +335,7 @@ impl Instance {
fn memories_ptr(&mut self) -> *mut VMMemoryDefinition {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_memories_begin()))
.add(usize::try_from(self.offsets.vmctx_memories_begin()).unwrap())
as *mut VMMemoryDefinition
}
}
@@ -354,7 +355,7 @@ impl Instance {
fn globals_ptr(&mut self) -> *mut VMGlobalDefinition {
unsafe {
(&mut self.vmctx as *mut VMContext as *mut u8)
.add(cast::usize(self.offsets.vmctx_globals_begin()))
.add(usize::try_from(self.offsets.vmctx_globals_begin()).unwrap())
as *mut VMGlobalDefinition
}
}
@@ -508,7 +509,7 @@ impl Instance {
let offsets = &self.offsets;
let begin = unsafe {
(&self.vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_tables_begin()))
.add(usize::try_from(offsets.vmctx_tables_begin()).unwrap())
} as *const VMTableDefinition;
let end: *const VMTableDefinition = table;
// TODO: Use `offset_from` once it stabilizes.
@@ -524,7 +525,7 @@ impl Instance {
let offsets = &self.offsets;
let begin = unsafe {
(&self.vmctx as *const VMContext as *const u8)
.add(cast::usize(offsets.vmctx_memories_begin()))
.add(usize::try_from(offsets.vmctx_memories_begin()).unwrap())
} as *const VMMemoryDefinition;
let end: *const VMMemoryDefinition = memory;
// TODO: Use `offset_from` once it stabilizes.
@@ -653,7 +654,7 @@ impl InstanceHandle {
let mut instance_mmap = Mmap::with_at_least(
mem::size_of::<Instance>()
.checked_add(cast::usize(offsets.size_of_vmctx()))
.checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
.unwrap(),
)
.map_err(InstantiationError::Resource)?;
@@ -990,7 +991,7 @@ fn get_memory_init_start(init: &DataInitializer<'_>, instance: &mut Instance) ->
} else {
instance.imported_global(base).from
};
start += cast::usize(*unsafe { (*global).as_u32() });
start += usize::try_from(*unsafe { (*global).as_u32() }).unwrap();
}
start
@@ -1055,7 +1056,7 @@ fn get_table_init_start(init: &TableElements, instance: &mut Instance) -> usize
} else {
instance.imported_global(base).from
};
start += cast::usize(*unsafe { (*global).as_u32() });
start += usize::try_from(*unsafe { (*global).as_u32() }).unwrap();
}
start

View File

@@ -4,6 +4,7 @@
use crate::mmap::Mmap;
use crate::vmcontext::VMMemoryDefinition;
use std::convert::TryFrom;
use std::string::String;
use wasmtime_environ::{MemoryPlan, MemoryStyle, WASM_MAX_PAGES, WASM_PAGE_SIZE};
@@ -101,9 +102,9 @@ impl LinearMemory {
return None;
}
let delta_bytes = cast::usize(delta) * WASM_PAGE_SIZE as usize;
let prev_bytes = cast::usize(prev_pages) * WASM_PAGE_SIZE as usize;
let new_bytes = cast::usize(new_pages) * WASM_PAGE_SIZE as usize;
let delta_bytes = usize::try_from(delta).unwrap() * WASM_PAGE_SIZE as usize;
let prev_bytes = usize::try_from(prev_pages).unwrap() * WASM_PAGE_SIZE as usize;
let new_bytes = usize::try_from(new_pages).unwrap() * WASM_PAGE_SIZE as usize;
if new_bytes > self.mmap.len() - self.offset_guard_size {
// If the new size is within the declared maximum, but needs more memory than we

View File

@@ -2,9 +2,9 @@
//! signature checking.
use crate::vmcontext::VMSharedSignatureIndex;
use cast;
use cranelift_codegen::ir;
use std::collections::{hash_map, HashMap};
use std::convert::TryFrom;
/// WebAssembly requires that the caller and callee signatures in an indirect
/// call must match. To implement this efficiently, keep a registry of all
@@ -29,10 +29,7 @@ impl SignatureRegistry {
match self.signature_hash.entry(sig.clone()) {
hash_map::Entry::Occupied(entry) => *entry.get(),
hash_map::Entry::Vacant(entry) => {
#[cfg(target_pointer_width = "32")]
let sig_id = VMSharedSignatureIndex::new(cast::u32(len));
#[cfg(target_pointer_width = "64")]
let sig_id = VMSharedSignatureIndex::new(cast::u32(len).unwrap());
let sig_id = VMSharedSignatureIndex::new(u32::try_from(len).unwrap());
entry.insert(sig_id);
sig_id
}