Update CodeMemory to be Send + Sync (#780)

* Update `CodeMemory` to be `Send + Sync`

This commit updates the `CodeMemory` type in wasmtime to be both `Send`
and `Sync` by changing the implementation of `Mmap` to store its base
address as a `usize` rather than a raw pointer. This avoids the need for
an `unsafe impl` and leaves the existing unsafety where it currently is.

* Run rustfmt

* Rename `offset` to `ptr`
Author: Alex Crichton
Date: 2020-01-09 16:22:49 -06:00
Committed by: GitHub
Parent: 9ee05447c7
Commit: 317f598969

2 changed files with 29 additions and 15 deletions
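
For context on the approach: Rust suppresses the automatic `Send` and `Sync` implementations for any type that contains a raw pointer, while a plain integer field carries no such restriction. A minimal sketch of the distinction (type names hypothetical, not from this commit):

```rust
// A raw-pointer field opts a type out of the auto traits entirely:
struct HoldsPtr {
    ptr: *mut u8, // neither `Send` nor `Sync`
}

// Storing the address as an integer keeps both auto traits:
struct HoldsAddr {
    ptr: usize, // `Send + Sync` automatically
}

// The alternative this commit avoids, asserting thread safety by hand:
// unsafe impl Send for HoldsPtr {}
// unsafe impl Sync for HoldsPtr {}
```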

Changed file 1 of 2 — the `CodeMemory` diff:

@@ -14,6 +14,11 @@ pub struct CodeMemory {
     published: usize,
 }
 
+fn _assert() {
+    fn _assert_send_sync<T: Send + Sync>() {}
+    _assert_send_sync::<CodeMemory>();
+}
+
 impl CodeMemory {
     /// Create a new `CodeMemory` instance.
     pub fn new() -> Self {
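
The `_assert` function in the hunk above is a compile-time check rather than a test: it is never called, but naming `CodeMemory` as the type parameter forces the compiler to prove the `Send + Sync` bounds, so a regression breaks the build rather than slipping past CI. The same pattern as a standalone sketch (with a hypothetical `Example` type):

```rust
// Instantiating the generic function type-checks its bounds even though
// nothing ever calls it; both functions are dead code by design.
fn _assert_send_sync<T: Send + Sync>() {}

struct Example {
    len: usize,
}

fn _assert() {
    _assert_send_sync::<Example>();
}
```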

Changed file 2 of 2 — the `Mmap` diff:

@@ -1,11 +1,8 @@
 //! Low-level abstraction for allocating and managing zero-filled pages
 //! of memory.
 
-#[cfg(not(target_os = "windows"))]
-use libc;
 use more_asserts::assert_le;
 use more_asserts::assert_lt;
-use region;
 use std::io;
 use std::ptr;
 use std::slice;
@@ -19,7 +16,11 @@ fn round_up_to_page_size(size: usize, page_size: usize) -> usize {
 /// and initially-zeroed memory and a length.
 #[derive(Debug)]
 pub struct Mmap {
-    ptr: *mut u8,
+    // Note that this is stored as a `usize` instead of a `*const` or `*mut`
+    // pointer to allow this structure to be natively `Send` and `Sync` without
+    // `unsafe impl`. This type is sendable across threads and shareable since
+    // the coordination all happens at the OS layer.
+    ptr: usize,
     len: usize,
 }
 
@@ -29,8 +30,9 @@ impl Mmap {
         // Rust's slices require non-null pointers, even when empty. `Vec`
         // contains code to create a non-null dangling pointer value when
         // constructed empty, so we reuse that here.
+        let empty = Vec::<u8>::new();
         Self {
-            ptr: Vec::new().as_mut_ptr(),
+            ptr: empty.as_ptr() as usize,
             len: 0,
         }
     }
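
The empty case relies on `Vec::new()` returning a non-null, well-aligned dangling pointer for its zero-length allocation, which `slice::from_raw_parts` requires even for empty slices. A hedged standalone sketch of the same guarantee via `std::ptr::NonNull` (the equivalent std API, not what the diff uses):

```rust
use std::ptr::NonNull;
use std::slice;

fn main() {
    // Zero-length slices still need a non-null, well-aligned base pointer.
    let dangling: NonNull<u8> = NonNull::dangling();
    let empty: &[u8] = unsafe { slice::from_raw_parts(dangling.as_ptr(), 0) };
    assert!(empty.is_empty());

    // `Vec::new()` allocates nothing and likewise returns a non-null
    // dangling pointer, which the diff then stores as a `usize`.
    assert!(!Vec::<u8>::new().as_ptr().is_null());
}
```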
@@ -78,7 +80,7 @@ impl Mmap {
             }
 
             Self {
-                ptr: ptr as *mut u8,
+                ptr: ptr as usize,
                 len: mapping_size,
             }
         } else {
@@ -98,7 +100,7 @@ impl Mmap {
             }
 
             let mut result = Self {
-                ptr: ptr as *mut u8,
+                ptr: ptr as usize,
                 len: mapping_size,
             };
 
@@ -142,7 +144,7 @@ impl Mmap {
             }
 
             Self {
-                ptr: ptr as *mut u8,
+                ptr: ptr as usize,
                 len: mapping_size,
             }
         } else {
@@ -154,7 +156,7 @@ impl Mmap {
             }
 
             let mut result = Self {
-                ptr: ptr as *mut u8,
+                ptr: ptr as usize,
                 len: mapping_size,
             };
 
@@ -179,7 +181,8 @@ impl Mmap {
         assert_lt!(start, self.len - len);
 
         // Commit the accessible size.
-        unsafe { region::protect(self.ptr.add(start), len, region::Protection::ReadWrite) }
+        let ptr = self.ptr as *const u8;
+        unsafe { region::protect(ptr.add(start), len, region::Protection::ReadWrite) }
             .map_err(|e| e.to_string())
     }
 
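
Because the base address is now a `usize`, every method that does pointer arithmetic first casts back to a typed pointer, as above. A hedged sketch of that round-trip (the helper name is hypothetical; the real code uses `ptr.add(start)` inside the `unsafe` block, while `wrapping_add` keeps this standalone sketch callable without one):

```rust
// Recompute a typed pointer from a stored address before offsetting.
fn region_start(base_addr: usize, start: usize) -> *const u8 {
    let ptr = base_addr as *const u8; // usize -> pointer cast
    ptr.wrapping_add(start)
}

fn main() {
    let buf = [0u8; 16];
    let base = buf.as_ptr() as usize;
    assert_eq!(region_start(base, 4) as usize, base + 4);
}
```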
@@ -198,9 +201,10 @@ impl Mmap {
         assert_lt!(start, self.len - len);
 
         // Commit the accessible size.
+        let ptr = self.ptr as *const u8;
         if unsafe {
             VirtualAlloc(
-                self.ptr.add(start) as *mut c_void,
+                ptr.add(start) as *mut c_void,
                 len,
                 MEM_COMMIT,
                 PAGE_READWRITE,
@@ -216,22 +220,22 @@ impl Mmap {
 
     /// Return the allocated memory as a slice of u8.
     pub fn as_slice(&self) -> &[u8] {
-        unsafe { slice::from_raw_parts(self.ptr, self.len) }
+        unsafe { slice::from_raw_parts(self.ptr as *const u8, self.len) }
     }
 
     /// Return the allocated memory as a mutable slice of u8.
     pub fn as_mut_slice(&mut self) -> &mut [u8] {
-        unsafe { slice::from_raw_parts_mut(self.ptr, self.len) }
+        unsafe { slice::from_raw_parts_mut(self.ptr as *mut u8, self.len) }
     }
 
     /// Return the allocated memory as a pointer to u8.
     pub fn as_ptr(&self) -> *const u8 {
-        self.ptr
+        self.ptr as *const u8
     }
 
     /// Return the allocated memory as a mutable pointer to u8.
     pub fn as_mut_ptr(&mut self) -> *mut u8 {
-        self.ptr
+        self.ptr as *mut u8
     }
 
     /// Return the length of the allocated memory.
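
The accessors re-derive typed pointers on demand, so callers see no API change from the representation switch. A hedged usage sketch, assuming only what the diff shows (`Mmap::new`, `as_slice`, `as_ptr`, `as_mut_ptr`) and placed inside this crate where `Mmap` is in scope:

```rust
fn accessor_roundtrip() {
    let mut m = Mmap::new();            // empty mapping, dangling base address
    assert_eq!(m.as_slice().len(), 0);  // zero-length slice is valid
    assert!(!m.as_ptr().is_null());     // non-null even without an allocation
    assert_eq!(m.as_ptr() as usize, m.as_mut_ptr() as usize);
}
```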
@@ -266,6 +270,11 @@ impl Drop for Mmap {
     }
 }
 
+fn _assert() {
+    fn _assert_send_sync<T: Send + Sync>() {}
+    _assert_send_sync::<Mmap>();
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
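
Since the point of the change is thread safety of the type itself, a test along these lines could sit in the module above to keep it that way (a sketch, not part of this commit):

```rust
#[test]
fn mmap_is_send_and_sync() {
    fn check<T: Send + Sync>() {}
    check::<Mmap>();
    // Being `Send + Sync` also means shared ownership can cross threads:
    check::<std::sync::Arc<Mmap>>();
}
```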