Use cast::usize rather than as usize to avoid silent overflow.
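For context: an `as` cast in Rust always succeeds and silently truncates out-of-range values, while the `cast` crate's conversion functions either return a `Result` or are only defined where the conversion is statically lossless. A minimal sketch of the difference, assuming the `cast` crate from crates.io that these call sites use:

// Requires the `cast` crate as a dependency in Cargo.toml.
fn main() {
    let big: u64 = u64::from(u32::max_value()) + 1;

    // An `as` cast never fails; out-of-range values wrap silently.
    assert_eq!(big as u32, 0);

    // The checked cast returns a Result when the conversion can lose
    // information, so the overflow is reported instead of ignored.
    assert!(cast::u32(big).is_err());

    // When the conversion cannot lose information on the target
    // (u32 to usize on 32- and 64-bit platforms, the case in this
    // commit), `cast` returns the value directly, so the call sites
    // below stay as terse as the `as` versions they replace.
    let offset: u32 = 0x40;
    let byte_offset: usize = cast::usize(offset);
    assert_eq!(byte_offset, 0x40);
}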
@@ -33,7 +33,7 @@ fn signature_id(
     #[allow(clippy::cast_ptr_alignment)]
     unsafe {
         let ptr = (vmctx as *const VMContext as *const u8)
-            .add(offsets.vmctx_vmshared_signature_id(index) as usize);
+            .add(cast::usize(offsets.vmctx_vmshared_signature_id(index)));
         *(ptr as *const VMSharedSignatureIndex)
     }
 }
@@ -46,7 +46,7 @@ fn imported_function<'vmctx>(
     #[allow(clippy::cast_ptr_alignment)]
     unsafe {
         let ptr = (vmctx as *const VMContext as *const u8)
-            .add(offsets.vmctx_vmfunction_import(index) as usize);
+            .add(cast::usize(offsets.vmctx_vmfunction_import(index)));
         &*(ptr as *const VMFunctionImport)
     }
 }
@@ -87,7 +87,7 @@ impl InstanceContents {
     fn signature_ids_ptr(&mut self) -> *mut VMSharedSignatureIndex {
         unsafe {
             (&mut self.vmctx as *mut VMContext as *mut u8)
-                .add(self.offsets.vmctx_signature_ids_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_signature_ids_begin()))
                 as *mut VMSharedSignatureIndex
         }
     }
@@ -101,7 +101,7 @@ impl InstanceContents {
     fn imported_functions_ptr(&mut self) -> *mut VMFunctionImport {
         unsafe {
             (&mut self.vmctx as *mut VMContext as *mut u8)
-                .add(self.offsets.vmctx_imported_functions_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_imported_functions_begin()))
                 as *mut VMFunctionImport
         }
     }
@@ -110,7 +110,7 @@ impl InstanceContents {
     fn imported_table(&self, index: TableIndex) -> &VMTableImport {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *const u8)
-                .add(self.offsets.vmctx_vmtable_import(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmtable_import(index)));
             &*(ptr as *const VMTableImport)
         }
     }
@@ -119,7 +119,7 @@ impl InstanceContents {
     fn imported_tables_ptr(&mut self) -> *mut VMTableImport {
         unsafe {
             (&mut self.vmctx as *mut VMContext as *mut u8)
-                .add(self.offsets.vmctx_imported_tables_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_imported_tables_begin()))
                 as *mut VMTableImport
         }
     }
@@ -128,7 +128,7 @@ impl InstanceContents {
     fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *const u8)
-                .add(self.offsets.vmctx_vmmemory_import(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmmemory_import(index)));
             &*(ptr as *const VMMemoryImport)
         }
     }
@@ -137,7 +137,7 @@ impl InstanceContents {
     fn imported_memories_ptr(&mut self) -> *mut VMMemoryImport {
         unsafe {
             (&mut self.vmctx as *mut VMContext as *mut u8)
-                .add(self.offsets.vmctx_imported_memories_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_imported_memories_begin()))
                 as *mut VMMemoryImport
         }
     }
@@ -146,7 +146,7 @@ impl InstanceContents {
     fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *const u8)
-                .add(self.offsets.vmctx_vmglobal_import(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmglobal_import(index)));
             &*(ptr as *const VMGlobalImport)
         }
     }
@@ -155,7 +155,7 @@ impl InstanceContents {
     fn imported_globals_ptr(&mut self) -> *mut VMGlobalImport {
         unsafe {
             (&mut self.vmctx as *mut VMContext as *mut u8)
-                .add(self.offsets.vmctx_imported_globals_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_imported_globals_begin()))
                 as *mut VMGlobalImport
         }
     }
@@ -165,7 +165,7 @@ impl InstanceContents {
     fn table(&self, index: DefinedTableIndex) -> &VMTableDefinition {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *const u8)
-                .add(self.offsets.vmctx_vmtable_definition(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmtable_definition(index)));
             &*(ptr as *const VMTableDefinition)
         }
     }
@@ -174,7 +174,7 @@ impl InstanceContents {
     fn table_mut(&mut self, index: DefinedTableIndex) -> &mut VMTableDefinition {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *mut u8)
-                .add(self.offsets.vmctx_vmtable_definition(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmtable_definition(index)));
             &mut *(ptr as *mut VMTableDefinition)
         }
     }
@@ -183,7 +183,7 @@ impl InstanceContents {
     fn tables_ptr(&mut self) -> *mut VMTableDefinition {
         unsafe {
             (&self.vmctx as *const VMContext as *mut u8)
-                .add(self.offsets.vmctx_tables_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_tables_begin()))
                 as *mut VMTableDefinition
         }
     }
@@ -192,7 +192,7 @@ impl InstanceContents {
     fn memory(&self, index: DefinedMemoryIndex) -> &VMMemoryDefinition {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *const u8)
-                .add(self.offsets.vmctx_vmmemory_definition(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmmemory_definition(index)));
             &*(ptr as *const VMMemoryDefinition)
         }
     }
@@ -201,7 +201,7 @@ impl InstanceContents {
     fn memory_mut(&mut self, index: DefinedMemoryIndex) -> &mut VMMemoryDefinition {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *mut u8)
-                .add(self.offsets.vmctx_vmmemory_definition(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmmemory_definition(index)));
             &mut *(ptr as *mut VMMemoryDefinition)
         }
     }
@@ -210,7 +210,7 @@ impl InstanceContents {
     fn memories_ptr(&mut self) -> *mut VMMemoryDefinition {
         unsafe {
             (&self.vmctx as *const VMContext as *mut u8)
-                .add(self.offsets.vmctx_memories_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_memories_begin()))
                 as *mut VMMemoryDefinition
         }
     }
@@ -220,7 +220,7 @@ impl InstanceContents {
     fn global(&self, index: DefinedGlobalIndex) -> &VMGlobalDefinition {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *const u8)
-                .add(self.offsets.vmctx_vmglobal_definition(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmglobal_definition(index)));
             &*(ptr as *const VMGlobalDefinition)
         }
     }
@@ -229,7 +229,7 @@ impl InstanceContents {
     fn global_mut(&mut self, index: DefinedGlobalIndex) -> &mut VMGlobalDefinition {
         unsafe {
             let ptr = (&self.vmctx as *const VMContext as *mut u8)
-                .add(self.offsets.vmctx_vmglobal_definition(index) as usize);
+                .add(cast::usize(self.offsets.vmctx_vmglobal_definition(index)));
             &mut *(ptr as *mut VMGlobalDefinition)
         }
     }
@@ -238,7 +238,7 @@ impl InstanceContents {
     fn globals_ptr(&mut self) -> *mut VMGlobalDefinition {
         unsafe {
             (&mut self.vmctx as *mut VMContext as *mut u8)
-                .add(self.offsets.vmctx_globals_begin() as usize)
+                .add(cast::usize(self.offsets.vmctx_globals_begin()))
                 as *mut VMGlobalDefinition
         }
     }
@@ -298,7 +298,8 @@ impl InstanceContents {
     pub(crate) fn table_index(&self, table: &mut VMTableDefinition) -> DefinedTableIndex {
         let offsets = &self.offsets;
         let begin = unsafe {
-            (&self.vmctx as *const VMContext as *mut u8).add(offsets.vmctx_tables_begin() as usize)
+            (&self.vmctx as *const VMContext as *mut u8)
+                .add(cast::usize(offsets.vmctx_tables_begin()))
         } as *mut VMTableDefinition;
         let end: *mut VMTableDefinition = table;
         // TODO: Use `offset_from` once it stablizes.
@@ -314,7 +315,7 @@ impl InstanceContents {
         let offsets = &self.offsets;
         let begin = unsafe {
             (&self.vmctx as *const VMContext as *mut u8)
-                .add(offsets.vmctx_memories_begin() as usize)
+                .add(cast::usize(offsets.vmctx_memories_begin()))
         } as *mut VMMemoryDefinition;
         let end: *mut VMMemoryDefinition = memory;
         // TODO: Use `offset_from` once it stablizes.
@@ -682,7 +683,7 @@ fn check_table_init_bounds(
         } else {
             contents.imported_global(base).from
         };
-        start += unsafe { *(&*global).as_u32() } as usize;
+        start += cast::usize(unsafe { *(&*global).as_u32() });
     }
 
     // TODO: Refactor this.
@@ -721,7 +722,7 @@ fn check_memory_init_bounds(
         } else {
             contents.imported_global(base).from
         };
-        start += unsafe { *(&*global).as_u32() } as usize;
+        start += cast::usize(unsafe { *(&*global).as_u32() });
     }
 
     // TODO: Refactor this.
@@ -773,7 +774,7 @@ fn initialize_tables(
         } else {
             contents.imported_global(base).from
         };
-        start += unsafe { *(&*global).as_i32() } as u32 as usize;
+        start += cast::usize(unsafe { *(&*global).as_u32() });
     }
 
     let slice = if let Some(defined_table_index) = module.defined_table_index(init.table_index)
@@ -837,7 +838,7 @@ fn initialize_memories(
         } else {
             contents.imported_global(base).from
         };
-        start += unsafe { *(&*global).as_i32() } as u32 as usize;
+        start += cast::usize(unsafe { *(&*global).as_u32() });
     }
 
     let memory = if let Some(defined_memory_index) =
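Every hunk applies the same recipe: read a byte offset out of the instance's offsets table as a u32, convert it with `cast::usize`, and offset the raw `vmctx` base pointer by it. A standalone sketch of that recipe, using hypothetical stand-ins for `VMContext` and the offsets table:

// Hypothetical stand-ins for illustration only; the real types are
// `VMContext` and the `self.offsets` table used in the diff above.
#[repr(C)]
struct Ctx {
    _header: u64,
    field: u32,
}

struct Offsets;

impl Offsets {
    // The real offset accessors also return u32 byte offsets.
    fn field_begin(&self) -> u32 {
        8 // offset of `field` within Ctx under repr(C)
    }
}

fn main() {
    let ctx = Ctx { _header: 0, field: 7 };
    let offsets = Offsets;
    // Same shape as the diff: base pointer as *const u8, then `.add`
    // with a checked u32 to usize conversion instead of `as usize`.
    let value = unsafe {
        let ptr = (&ctx as *const Ctx as *const u8).add(cast::usize(offsets.field_begin()));
        *(ptr as *const u32)
    };
    assert_eq!(value, 7);
}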