diff --git a/crates/jit/src/code_memory.rs b/crates/jit/src/code_memory.rs index 49dc86a9e1..49e6d5ee2d 100644 --- a/crates/jit/src/code_memory.rs +++ b/crates/jit/src/code_memory.rs @@ -14,6 +14,11 @@ pub struct CodeMemory { published: usize, } +fn _assert() { + fn _assert_send_sync<T: Send + Sync>() {} + _assert_send_sync::<CodeMemory>(); +} + impl CodeMemory { /// Create a new `CodeMemory` instance. pub fn new() -> Self { diff --git a/crates/runtime/src/mmap.rs b/crates/runtime/src/mmap.rs index 570cfb1a32..8cba107404 100644 --- a/crates/runtime/src/mmap.rs +++ b/crates/runtime/src/mmap.rs @@ -1,11 +1,8 @@ //! Low-level abstraction for allocating and managing zero-filled pages //! of memory. -#[cfg(not(target_os = "windows"))] -use libc; use more_asserts::assert_le; use more_asserts::assert_lt; -use region; use std::io; use std::ptr; use std::slice; @@ -19,7 +16,11 @@ fn round_up_to_page_size(size: usize, page_size: usize) -> usize { /// and initially-zeroed memory and a length. #[derive(Debug)] pub struct Mmap { - ptr: *mut u8, + // Note that this is stored as a `usize` instead of a `*const` or `*mut` + // pointer to allow this structure to be natively `Send` and `Sync` without + // `unsafe impl`. This type is sendable across threads and shareable since + // the coordination all happens at the OS layer. + ptr: usize, len: usize, } @@ -29,8 +30,9 @@ impl Mmap { // Rust's slices require non-null pointers, even when empty. `Vec` // contains code to create a non-null dangling pointer value when // constructed empty, so we reuse that here. 
+ let empty = Vec::<u8>::new(); Self { - ptr: Vec::new().as_mut_ptr(), + ptr: empty.as_ptr() as usize, len: 0, } } @@ -78,7 +80,7 @@ impl Mmap { } Self { - ptr: ptr as *mut u8, + ptr: ptr as usize, len: mapping_size, } } else { @@ -98,7 +100,7 @@ impl Mmap { } let mut result = Self { - ptr: ptr as *mut u8, + ptr: ptr as usize, len: mapping_size, }; @@ -142,7 +144,7 @@ impl Mmap { } Self { - ptr: ptr as *mut u8, + ptr: ptr as usize, len: mapping_size, } } else { @@ -154,7 +156,7 @@ impl Mmap { } let mut result = Self { - ptr: ptr as *mut u8, + ptr: ptr as usize, len: mapping_size, }; @@ -179,7 +181,8 @@ impl Mmap { assert_lt!(start, self.len - len); // Commit the accessible size. - unsafe { region::protect(self.ptr.add(start), len, region::Protection::ReadWrite) } + let ptr = self.ptr as *const u8; + unsafe { region::protect(ptr.add(start), len, region::Protection::ReadWrite) } .map_err(|e| e.to_string()) } @@ -198,9 +201,10 @@ impl Mmap { assert_lt!(start, self.len - len); // Commit the accessible size. + let ptr = self.ptr as *const u8; if unsafe { VirtualAlloc( - self.ptr.add(start) as *mut c_void, + ptr.add(start) as *mut c_void, len, MEM_COMMIT, PAGE_READWRITE, @@ -216,22 +220,22 @@ impl Mmap { /// Return the allocated memory as a slice of u8. pub fn as_slice(&self) -> &[u8] { - unsafe { slice::from_raw_parts(self.ptr, self.len) } + unsafe { slice::from_raw_parts(self.ptr as *const u8, self.len) } } /// Return the allocated memory as a mutable slice of u8. pub fn as_mut_slice(&mut self) -> &mut [u8] { - unsafe { slice::from_raw_parts_mut(self.ptr, self.len) } + unsafe { slice::from_raw_parts_mut(self.ptr as *mut u8, self.len) } } /// Return the allocated memory as a pointer to u8. pub fn as_ptr(&self) -> *const u8 { - self.ptr + self.ptr as *const u8 } /// Return the allocated memory as a mutable pointer to u8. pub fn as_mut_ptr(&mut self) -> *mut u8 { - self.ptr + self.ptr as *mut u8 } /// Return the length of the allocated memory. 
@@ -266,6 +270,11 @@ impl Drop for Mmap { } } +fn _assert() { + fn _assert_send_sync<T: Send + Sync>() {} + _assert_send_sync::<Mmap>(); +} + #[cfg(test)] mod tests { use super::*;