Initial reorg.

This is largely the same as #305, but updated for the current tree.
Author: Dan Gohman
Date: 2019-11-07 17:11:06 -08:00
parent 2c69546a24
commit 22641de629
351 changed files with 52 additions and 52 deletions

crates/jit/Cargo.toml (new file, 39 lines)

@@ -0,0 +1,39 @@
[package]
name = "wasmtime-jit"
version = "0.2.0"
authors = ["The Wasmtime Project Developers"]
description = "JIT-style execution for WebAsssembly code in Cranelift"
categories = ["wasm"]
keywords = ["webassembly", "wasm"]
repository = "https://github.com/CraneStation/wasmtime"
license = "Apache-2.0 WITH LLVM-exception"
readme = "README.md"
edition = "2018"
[dependencies]
cranelift-codegen = { version = "0.49", features = ["enable-serde"] }
cranelift-entity = { version = "0.49", features = ["enable-serde"] }
cranelift-wasm = { version = "0.49", features = ["enable-serde"] }
cranelift-frontend = { version = "0.49" }
wasmtime-environ = { path = "../environ", default-features = false }
wasmtime-runtime = { path = "../runtime", default-features = false }
wasmtime-debug = { path = "../debug", default-features = false }
region = "2.0.0"
failure = { version = "0.1.3", default-features = false }
thiserror = "1.0.4"
target-lexicon = { version = "0.9.0", default-features = false }
hashbrown = { version = "0.6.0", optional = true }
wasmparser = { version = "0.39.2", default-features = false }
[target.'cfg(target_os = "windows")'.dependencies]
winapi = { version = "0.3.7", features = ["winnt", "impl-default"] }
[features]
default = ["std"]
std = ["cranelift-codegen/std", "cranelift-wasm/std", "wasmtime-environ/std", "wasmtime-debug/std", "wasmtime-runtime/std", "wasmparser/std"]
core = ["hashbrown/nightly", "cranelift-codegen/core", "cranelift-wasm/core", "wasmtime-environ/core", "wasmtime-debug/core", "wasmparser/core"]
lightbeam = ["wasmtime-environ/lightbeam"]
[badges]
maintenance = { status = "experimental" }
travis-ci = { repository = "CraneStation/wasmtime" }

crates/jit/LICENSE (new file, 220 lines)

@@ -0,0 +1,220 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--- LLVM Exceptions to the Apache 2.0 License ----
As an exception, if, as a result of your compiling your source code, portions
of this Software are embedded into an Object form of such source code, you
may redistribute such embedded portions in such Object form without complying
with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
In addition, if you combine or link compiled forms of this Software with
software that is licensed under the GPLv2 ("Combined Software") and if a
court of competent jurisdiction determines that the patent provision (Section
3), the indemnity provision (Section 9) or other Section of the License
conflicts with the conditions of the GPLv2, you may retroactively and
prospectively choose to deem waived or otherwise exclude such Section(s) of
the License, but only in their entirety and only with respect to the Combined
Software.

crates/jit/README.md (new file, 6 lines)

@@ -0,0 +1,6 @@
This is the `wasmtime-jit` crate, which contains JIT-based execution
for wasm, using the wasm ABI defined by [`wasmtime-environ`] and the
runtime support provided by [`wasmtime-runtime`].
[`wasmtime-environ`]: https://crates.io/crates/wasmtime-environ
[`wasmtime-runtime`]: https://crates.io/crates/wasmtime-runtime
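
For orientation, here is a minimal usage sketch of the crate described above. It is not part of this commit: the `TargetIsa` is assumed to come from cranelift-native or similar (not shown in this diff), and the exported function name and arguments are hypothetical.

    use cranelift_codegen::isa::TargetIsa;
    use wasmtime_jit::{ActionOutcome, CompilationStrategy, Context, RuntimeValue};

    fn run(isa: Box<dyn TargetIsa>, wasm: &[u8]) -> Result<(), Box<dyn std::error::Error>> {
        // Compile with Cranelift (`Auto` currently falls back to it) and instantiate.
        let mut context = Context::with_isa(isa, CompilationStrategy::Auto);
        let mut instance = context.instantiate_module(None, wasm)?;
        // Invoke a hypothetical exported function `add(i32, i32) -> i32`.
        match context.invoke(&mut instance, "add", &[RuntimeValue::I32(1), RuntimeValue::I32(2)])? {
            ActionOutcome::Returned { values } => println!("returned {:?}", values),
            ActionOutcome::Trapped { message } => println!("trapped: {}", message),
        }
        Ok(())
    }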

crates/jit/src/action.rs (new file, 298 lines)

@@ -0,0 +1,298 @@
//! Support for performing actions with a wasm module from the outside.
use crate::compiler::Compiler;
use crate::instantiate::SetupError;
use alloc::string::String;
use alloc::vec::Vec;
use core::cmp::max;
use core::{fmt, mem, ptr, slice};
use cranelift_codegen::ir;
use thiserror::Error;
use wasmtime_runtime::{wasmtime_call_trampoline, Export, InstanceHandle, VMInvokeArgument};
/// A runtime value.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum RuntimeValue {
/// A runtime value with type i32.
I32(i32),
/// A runtime value with type i64.
I64(i64),
/// A runtime value with type f32.
F32(u32),
/// A runtime value with type f64.
F64(u64),
/// A runtime value with type v128
V128([u8; 16]),
}
impl RuntimeValue {
/// Return the type of this `RuntimeValue`.
pub fn value_type(self) -> ir::Type {
match self {
Self::I32(_) => ir::types::I32,
Self::I64(_) => ir::types::I64,
Self::F32(_) => ir::types::F32,
Self::F64(_) => ir::types::F64,
Self::V128(_) => ir::types::I8X16,
}
}
/// Assuming this `RuntimeValue` holds an `i32`, return that value.
pub fn unwrap_i32(self) -> i32 {
match self {
Self::I32(x) => x,
_ => panic!("unwrapping value of type {} as i32", self.value_type()),
}
}
/// Assuming this `RuntimeValue` holds an `i64`, return that value.
pub fn unwrap_i64(self) -> i64 {
match self {
Self::I64(x) => x,
_ => panic!("unwrapping value of type {} as i64", self.value_type()),
}
}
/// Assuming this `RuntimeValue` holds an `f32`, return that value.
pub fn unwrap_f32(self) -> f32 {
f32::from_bits(self.unwrap_f32_bits())
}
/// Assuming this `RuntimeValue` holds an `f32`, return the bits of that value as a `u32`.
pub fn unwrap_f32_bits(self) -> u32 {
match self {
Self::F32(x) => x,
_ => panic!("unwrapping value of type {} as f32", self.value_type()),
}
}
/// Assuming this `RuntimeValue` holds an `f64`, return that value.
pub fn unwrap_f64(self) -> f64 {
f64::from_bits(self.unwrap_f64_bits())
}
/// Assuming this `RuntimeValue` holds an `f64`, return the bits of that value as a `u64`.
pub fn unwrap_f64_bits(self) -> u64 {
match self {
Self::F64(x) => x,
_ => panic!("unwrapping value of type {} as f64", self.value_type()),
}
}
}
impl fmt::Display for RuntimeValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::I32(x) => write!(f, "{}: i32", x),
Self::I64(x) => write!(f, "{}: i64", x),
Self::F32(x) => write!(f, "{}: f32", x),
Self::F64(x) => write!(f, "{}: f64", x),
Self::V128(x) => write!(f, "{:?}: v128", x.to_vec()),
}
}
}
/// The result of invoking a wasm function or reading a wasm global.
#[derive(Debug)]
pub enum ActionOutcome {
/// The action returned normally. Its return values are provided.
Returned {
/// The return values.
values: Vec<RuntimeValue>,
},
/// A trap occurred while the action was executing.
Trapped {
/// The trap message.
message: String,
},
}
/// An error detected while invoking a wasm function or reading a wasm global.
/// Note that at this level, traps are not reported as errors, but are rather
/// returned through `ActionOutcome`.
#[derive(Error, Debug)]
pub enum ActionError {
/// An internal implementation error occurred.
#[error("{0}")]
Setup(#[from] SetupError),
/// No field with the specified name was present.
#[error("Unknown field: {0}")]
Field(String),
/// The field was present but was the wrong kind (e.g. function, table, global, or memory).
#[error("Kind error: {0}")]
Kind(String),
/// The field was present but was the wrong type (e.g. i32, i64, f32, or f64).
#[error("Type error: {0}")]
Type(String),
}
/// Invoke a function through an `InstanceHandle` identified by an export name.
pub fn invoke(
compiler: &mut Compiler,
instance: &mut InstanceHandle,
function_name: &str,
args: &[RuntimeValue],
) -> Result<ActionOutcome, ActionError> {
let (address, signature, callee_vmctx) = match instance.lookup(function_name) {
Some(Export::Function {
address,
signature,
vmctx,
}) => (address, signature, vmctx),
Some(_) => {
return Err(ActionError::Kind(format!(
"exported item \"{}\" is not a function",
function_name
)));
}
None => {
return Err(ActionError::Field(format!(
"no export named \"{}\"",
function_name
)));
}
};
for (index, value) in args.iter().enumerate() {
// Add one to account for the leading vmctx argument.
assert_eq!(value.value_type(), signature.params[index + 1].value_type);
}
// TODO: Support values larger than v128. And pack the values into memory
// instead of just using fixed-sized slots.
// Subtract one because we don't pass the vmctx argument in `values_vec`.
let value_size = mem::size_of::<VMInvokeArgument>();
let mut values_vec: Vec<VMInvokeArgument> =
vec![VMInvokeArgument::new(); max(signature.params.len() - 1, signature.returns.len())];
// Store the argument values into `values_vec`.
for (index, arg) in args.iter().enumerate() {
unsafe {
let ptr = values_vec.as_mut_ptr().add(index);
match arg {
RuntimeValue::I32(x) => ptr::write(ptr as *mut i32, *x),
RuntimeValue::I64(x) => ptr::write(ptr as *mut i64, *x),
RuntimeValue::F32(x) => ptr::write(ptr as *mut u32, *x),
RuntimeValue::F64(x) => ptr::write(ptr as *mut u64, *x),
RuntimeValue::V128(x) => ptr::write(ptr as *mut [u8; 16], *x),
}
}
}
// Get the trampoline to call for this function.
let exec_code_buf = compiler
.get_trampoline(address, &signature, value_size)
.map_err(ActionError::Setup)?;
// Make all JIT code produced thus far executable.
compiler.publish_compiled_code();
// Call the trampoline.
if let Err(message) = unsafe {
wasmtime_call_trampoline(
callee_vmctx,
exec_code_buf,
values_vec.as_mut_ptr() as *mut u8,
)
} {
return Ok(ActionOutcome::Trapped { message });
}
// Load the return values out of `values_vec`.
let values = signature
.returns
.iter()
.enumerate()
.map(|(index, abi_param)| unsafe {
let ptr = values_vec.as_ptr().add(index);
match abi_param.value_type {
ir::types::I32 => RuntimeValue::I32(ptr::read(ptr as *const i32)),
ir::types::I64 => RuntimeValue::I64(ptr::read(ptr as *const i64)),
ir::types::F32 => RuntimeValue::F32(ptr::read(ptr as *const u32)),
ir::types::F64 => RuntimeValue::F64(ptr::read(ptr as *const u64)),
ir::types::I8X16 => RuntimeValue::V128(ptr::read(ptr as *const [u8; 16])),
other => panic!("unsupported value type {:?}", other),
}
})
.collect();
Ok(ActionOutcome::Returned { values })
}
/// Returns a slice of the contents of allocated linear memory.
pub fn inspect_memory<'instance>(
instance: &'instance InstanceHandle,
memory_name: &str,
start: usize,
len: usize,
) -> Result<&'instance [u8], ActionError> {
let definition = match unsafe { instance.lookup_immutable(memory_name) } {
Some(Export::Memory {
definition,
memory: _memory,
vmctx: _vmctx,
}) => definition,
Some(_) => {
return Err(ActionError::Kind(format!(
"exported item \"{}\" is not a linear memory",
memory_name
)));
}
None => {
return Err(ActionError::Field(format!(
"no export named \"{}\"",
memory_name
)));
}
};
Ok(unsafe {
let memory_def = &*definition;
&slice::from_raw_parts(memory_def.base, memory_def.current_length)[start..start + len]
})
}
/// Read a global in the given instance identified by an export name.
pub fn get(instance: &InstanceHandle, global_name: &str) -> Result<RuntimeValue, ActionError> {
let (definition, global) = match unsafe { instance.lookup_immutable(global_name) } {
Some(Export::Global {
definition,
vmctx: _,
global,
}) => (definition, global),
Some(_) => {
return Err(ActionError::Kind(format!(
"exported item \"{}\" is not a global variable",
global_name
)));
}
None => {
return Err(ActionError::Field(format!(
"no export named \"{}\"",
global_name
)));
}
};
unsafe {
let global_def = &*definition;
Ok(match global.ty {
ir::types::I32 => RuntimeValue::I32(*global_def.as_i32()),
ir::types::I64 => RuntimeValue::I64(*global_def.as_i64()),
ir::types::F32 => RuntimeValue::F32(*global_def.as_f32_bits()),
ir::types::F64 => RuntimeValue::F64(*global_def.as_f64_bits()),
other => {
return Err(ActionError::Type(format!(
"global with type {} not supported",
other
)));
}
})
}
}
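
A small sketch of the `RuntimeValue` API defined above (not part of this commit). It assumes the caller also depends on `cranelift-codegen` for `ir::types`, as this crate does. Floats are carried as raw bits, and the `unwrap_*` helpers panic on a type mismatch, which is why `invoke` checks argument types up front.

    use cranelift_codegen::ir;
    use wasmtime_jit::RuntimeValue;

    fn round_trip_f32() {
        let v = RuntimeValue::F32(1.5f32.to_bits());
        assert_eq!(v.value_type(), ir::types::F32);
        assert_eq!(v.unwrap_f32(), 1.5);
        // v.unwrap_i64() would panic: "unwrapping value of type f32 as i64".
    }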

crates/jit/src/code_memory.rs (new file, 195 lines)

@@ -0,0 +1,195 @@
//! Memory management for executable code.
use crate::function_table::FunctionTable;
use alloc::boxed::Box;
use alloc::string::String;
use alloc::vec::Vec;
use core::{cmp, mem};
use region;
use wasmtime_environ::{Compilation, CompiledFunction};
use wasmtime_runtime::{Mmap, VMFunctionBody};
/// Memory manager for executable code.
pub struct CodeMemory {
current: (Mmap, FunctionTable),
mmaps: Vec<(Mmap, FunctionTable)>,
position: usize,
published: usize,
}
impl CodeMemory {
/// Create a new `CodeMemory` instance.
pub fn new() -> Self {
Self {
current: (Mmap::new(), FunctionTable::new()),
mmaps: Vec::new(),
position: 0,
published: 0,
}
}
/// Allocate a contiguous memory block for a single compiled function.
/// TODO: Reorganize the code that calls this to emit code directly into the
/// mmap region rather than into a Vec that we need to copy in.
pub fn allocate_for_function(
&mut self,
func: &CompiledFunction,
) -> Result<&mut [VMFunctionBody], String> {
let size = Self::function_allocation_size(func);
let start = self.position as u32;
let (buf, table) = self.allocate(size)?;
let (_, _, _, vmfunc) = Self::copy_function(func, start, buf, table);
Ok(vmfunc)
}
/// Allocate a contiguous memory block for a compilation.
///
/// Allocates memory for both the function bodies and the function unwind data.
pub fn allocate_for_compilation(
&mut self,
compilation: &Compilation,
) -> Result<Box<[&mut [VMFunctionBody]]>, String> {
let total_len = compilation
.into_iter()
.fold(0, |acc, func| acc + Self::function_allocation_size(func));
let mut start = self.position as u32;
let (mut buf, mut table) = self.allocate(total_len)?;
let mut result = Vec::with_capacity(compilation.len());
for func in compilation.into_iter() {
let (next_start, next_buf, next_table, vmfunc) =
Self::copy_function(func, start, buf, table);
result.push(vmfunc);
start = next_start;
buf = next_buf;
table = next_table;
}
Ok(result.into_boxed_slice())
}
/// Make all allocated memory executable.
pub fn publish(&mut self) {
self.push_current(0)
.expect("failed to push current memory map");
for (m, t) in &mut self.mmaps[self.published..] {
if m.len() != 0 {
unsafe {
region::protect(m.as_mut_ptr(), m.len(), region::Protection::ReadExecute)
}
.expect("unable to make memory readonly and executable");
}
t.publish(m.as_ptr() as u64)
.expect("failed to publish function table");
}
self.published = self.mmaps.len();
}
/// Allocate `size` bytes of memory which can be made executable later by
/// calling `publish()`. Note that we allocate the memory as writeable so
/// that it can be written to and patched, though we make it readonly before
/// actually executing from it.
///
/// TODO: Add an alignment flag.
fn allocate(&mut self, size: usize) -> Result<(&mut [u8], &mut FunctionTable), String> {
if self.current.0.len() - self.position < size {
self.push_current(cmp::max(0x10000, size))?;
}
let old_position = self.position;
self.position += size;
Ok((
&mut self.current.0.as_mut_slice()[old_position..self.position],
&mut self.current.1,
))
}
/// Calculates the allocation size of the given compiled function.
fn function_allocation_size(func: &CompiledFunction) -> usize {
if func.unwind_info.is_empty() {
func.body.len()
} else {
// Account for necessary unwind information alignment padding (32-bit)
((func.body.len() + 3) & !3) + func.unwind_info.len()
}
}
/// Copies the data of the compiled function to the given buffer.
///
/// This will also add the function to the current function table.
fn copy_function<'a>(
func: &CompiledFunction,
func_start: u32,
buf: &'a mut [u8],
table: &'a mut FunctionTable,
) -> (
u32,
&'a mut [u8],
&'a mut FunctionTable,
&'a mut [VMFunctionBody],
) {
let func_end = func_start + (func.body.len() as u32);
let (body, remainder) = buf.split_at_mut(func.body.len());
body.copy_from_slice(&func.body);
let vmfunc = Self::view_as_mut_vmfunc_slice(body);
if func.unwind_info.is_empty() {
return (func_end, remainder, table, vmfunc);
}
// Keep unwind information 32-bit aligned (round up to the nearest 4 byte boundary)
let padding = ((func.body.len() + 3) & !3) - func.body.len();
let (unwind, remainder) = remainder.split_at_mut(padding + func.unwind_info.len());
unwind[padding..].copy_from_slice(&func.unwind_info);
let unwind_start = func_end + (padding as u32);
let unwind_end = unwind_start + (func.unwind_info.len() as u32);
table.add_function(func_start, func_end, unwind_start);
(unwind_end, remainder, table, vmfunc)
}
/// Convert a mutable slice from u8 to VMFunctionBody.
fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
let byte_ptr: *mut [u8] = slice;
let body_ptr = byte_ptr as *mut [VMFunctionBody];
unsafe { &mut *body_ptr }
}
/// Pushes the current Mmap (and function table) and allocates a new Mmap of the given size.
fn push_current(&mut self, new_size: usize) -> Result<(), String> {
let previous = mem::replace(
&mut self.current,
(
if new_size == 0 {
Mmap::new()
} else {
Mmap::with_at_least(cmp::max(0x10000, new_size))?
},
FunctionTable::new(),
),
);
if previous.0.len() > 0 {
self.mmaps.push(previous);
} else {
assert!(previous.1.len() == 0);
}
self.position = 0;
Ok(())
}
}
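
The intended lifecycle of `CodeMemory` is: allocate while the pages are still writable, then call `publish()` once to flip everything allocated so far to read+execute (and, on Windows, register the unwind tables). A minimal sketch, not part of this commit, assuming a `CompiledFunction` produced elsewhere (e.g. by the compiler below):

    use wasmtime_environ::CompiledFunction;
    use wasmtime_jit::CodeMemory;

    fn install(func: &CompiledFunction) -> Result<(CodeMemory, *const u8), String> {
        let mut code = CodeMemory::new();
        // Copy the body (and any unwind info) into writable mmap'd memory.
        let body = code.allocate_for_function(func)?;
        let ptr = body.as_ptr() as *const u8;
        // Make everything allocated so far executable.
        code.publish();
        // The pointer is only valid while `code` (which owns the mmaps) is alive.
        Ok((code, ptr))
    }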

crates/jit/src/compiler.rs (new file, 424 lines)

@@ -0,0 +1,424 @@
//! JIT compilation.
use super::HashMap;
use crate::code_memory::CodeMemory;
use crate::instantiate::SetupError;
use crate::target_tunables::target_tunables;
use alloc::boxed::Box;
use alloc::string::String;
use alloc::vec::Vec;
use core::convert::TryFrom;
use cranelift_codegen::ir::InstBuilder;
use cranelift_codegen::isa::{TargetFrontendConfig, TargetIsa};
use cranelift_codegen::Context;
use cranelift_codegen::{binemit, ir};
use cranelift_entity::{EntityRef, PrimaryMap};
use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
use cranelift_wasm::{DefinedFuncIndex, DefinedMemoryIndex, ModuleTranslationState};
use wasmtime_debug::{emit_debugsections_image, DebugInfoData};
use wasmtime_environ::{
Compilation, CompileError, CompiledFunction, Compiler as _C, FunctionBodyData, Module,
ModuleVmctxInfo, Relocations, Traps, Tunables, VMOffsets,
};
use wasmtime_runtime::{
get_mut_trap_registry, InstantiationError, SignatureRegistry, TrapRegistrationGuard,
VMFunctionBody,
};
/// Select which kind of compilation to use.
#[derive(Copy, Clone, Debug)]
pub enum CompilationStrategy {
/// Let Wasmtime pick the strategy.
Auto,
/// Compile all functions with Cranelift.
Cranelift,
/// Compile all functions with Lightbeam.
#[cfg(feature = "lightbeam")]
Lightbeam,
}
/// A WebAssembly code JIT compiler.
///
/// A `Compiler` instance owns the executable memory that it allocates.
///
/// TODO: Evolve this to support streaming rather than requiring a `&[u8]`
/// containing a whole wasm module at once.
///
/// TODO: Consider using cranelift-module.
pub struct Compiler {
isa: Box<dyn TargetIsa>,
code_memory: CodeMemory,
trap_registration_guards: Vec<TrapRegistrationGuard>,
trampoline_park: HashMap<*const VMFunctionBody, *const VMFunctionBody>,
signatures: SignatureRegistry,
strategy: CompilationStrategy,
/// The `FunctionBuilderContext`, shared between trampoline function compilations.
fn_builder_ctx: FunctionBuilderContext,
}
impl Compiler {
/// Construct a new `Compiler`.
pub fn new(isa: Box<dyn TargetIsa>, strategy: CompilationStrategy) -> Self {
Self {
isa,
code_memory: CodeMemory::new(),
trap_registration_guards: Vec::new(),
trampoline_park: HashMap::new(),
signatures: SignatureRegistry::new(),
fn_builder_ctx: FunctionBuilderContext::new(),
strategy,
}
}
}
impl Drop for Compiler {
fn drop(&mut self) {
// We must deregister traps before freeing the code memory.
// Otherwise, we have a race:
// - Compiler #1 dropped code memory, but hasn't deregistered the trap yet
// - Compiler #2 allocated code memory and tries to register a trap,
// but the trap at certain address happens to be already registered,
// since Compiler #1 hasn't deregistered it yet => assertion in trap registry fails.
// Having a custom drop implementation makes us independent of the field order
// in the struct, which reduces the potential for human error.
self.trap_registration_guards.clear();
}
}
impl Compiler {
/// Return the target's frontend configuration settings.
pub fn frontend_config(&self) -> TargetFrontendConfig {
self.isa.frontend_config()
}
/// Return the tunables in use by this engine.
pub fn tunables(&self) -> Tunables {
target_tunables(self.isa.triple())
}
/// Compile the given function bodies.
pub(crate) fn compile<'data>(
&mut self,
module: &Module,
module_translation: &ModuleTranslationState,
function_body_inputs: PrimaryMap<DefinedFuncIndex, FunctionBodyData<'data>>,
debug_data: Option<DebugInfoData>,
) -> Result<
(
PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
PrimaryMap<DefinedFuncIndex, ir::JumpTableOffsets>,
Relocations,
Option<Vec<u8>>,
),
SetupError,
> {
let (compilation, relocations, address_transform, value_ranges, stack_slots, traps) =
match self.strategy {
// For now, interpret `Auto` as `Cranelift` since that's the most stable
// implementation.
CompilationStrategy::Auto | CompilationStrategy::Cranelift => {
wasmtime_environ::cranelift::Cranelift::compile_module(
module,
module_translation,
function_body_inputs,
&*self.isa,
debug_data.is_some(),
)
}
#[cfg(feature = "lightbeam")]
CompilationStrategy::Lightbeam => {
wasmtime_environ::lightbeam::Lightbeam::compile_module(
module,
module_translation,
function_body_inputs,
&*self.isa,
debug_data.is_some(),
)
}
}
.map_err(SetupError::Compile)?;
let allocated_functions =
allocate_functions(&mut self.code_memory, &compilation).map_err(|message| {
SetupError::Instantiate(InstantiationError::Resource(format!(
"failed to allocate memory for functions: {}",
message
)))
})?;
register_traps(
&allocated_functions,
&traps,
&mut self.trap_registration_guards,
);
let dbg = if let Some(debug_data) = debug_data {
let target_config = self.isa.frontend_config();
let triple = self.isa.triple().clone();
let mut funcs = Vec::new();
for (i, allocated) in allocated_functions.into_iter() {
let ptr = (*allocated) as *const u8;
let body_len = compilation.get(i).body.len();
funcs.push((ptr, body_len));
}
let module_vmctx_info = {
let ofs = VMOffsets::new(target_config.pointer_bytes(), &module);
let memory_offset =
ofs.vmctx_vmmemory_definition_base(DefinedMemoryIndex::new(0)) as i64;
ModuleVmctxInfo {
memory_offset,
stack_slots,
}
};
let bytes = emit_debugsections_image(
triple,
&target_config,
&debug_data,
&module_vmctx_info,
&address_transform,
&value_ranges,
&funcs,
)
.map_err(|e| SetupError::DebugInfo(e))?;
Some(bytes)
} else {
None
};
let jt_offsets = compilation.get_jt_offsets();
Ok((allocated_functions, jt_offsets, relocations, dbg))
}
/// Create a trampoline for invoking a function.
pub(crate) fn get_trampoline(
&mut self,
callee_address: *const VMFunctionBody,
signature: &ir::Signature,
value_size: usize,
) -> Result<*const VMFunctionBody, SetupError> {
use super::hash_map::Entry::{Occupied, Vacant};
Ok(match self.trampoline_park.entry(callee_address) {
Occupied(entry) => *entry.get(),
Vacant(entry) => {
let body = make_trampoline(
&*self.isa,
&mut self.code_memory,
&mut self.fn_builder_ctx,
callee_address,
signature,
value_size,
)?;
entry.insert(body);
body
}
})
}
/// Create and publish a trampoline for invoking a function.
pub fn get_published_trampoline(
&mut self,
callee_address: *const VMFunctionBody,
signature: &ir::Signature,
value_size: usize,
) -> Result<*const VMFunctionBody, SetupError> {
let result = self.get_trampoline(callee_address, signature, value_size)?;
self.publish_compiled_code();
Ok(result)
}
/// Make memory containing compiled code executable.
pub(crate) fn publish_compiled_code(&mut self) {
self.code_memory.publish();
}
/// Shared signature registry.
pub fn signatures(&mut self) -> &mut SignatureRegistry {
&mut self.signatures
}
}
/// Create a trampoline for invoking a function.
fn make_trampoline(
isa: &dyn TargetIsa,
code_memory: &mut CodeMemory,
fn_builder_ctx: &mut FunctionBuilderContext,
callee_address: *const VMFunctionBody,
signature: &ir::Signature,
value_size: usize,
) -> Result<*const VMFunctionBody, SetupError> {
let pointer_type = isa.pointer_type();
let mut wrapper_sig = ir::Signature::new(isa.frontend_config().default_call_conv);
// Add the `vmctx` parameter.
wrapper_sig.params.push(ir::AbiParam::special(
pointer_type,
ir::ArgumentPurpose::VMContext,
));
// Add the `values_vec` parameter.
wrapper_sig.params.push(ir::AbiParam::new(pointer_type));
let mut context = Context::new();
context.func = ir::Function::with_name_signature(ir::ExternalName::user(0, 0), wrapper_sig);
{
let mut builder = FunctionBuilder::new(&mut context.func, fn_builder_ctx);
let block0 = builder.create_ebb();
builder.append_ebb_params_for_function_params(block0);
builder.switch_to_block(block0);
builder.seal_block(block0);
let (vmctx_ptr_val, values_vec_ptr_val) = {
let params = builder.func.dfg.ebb_params(block0);
(params[0], params[1])
};
// Load the argument values out of `values_vec`.
let mflags = ir::MemFlags::trusted();
let callee_args = signature
.params
.iter()
.enumerate()
.map(|(i, r)| {
match r.purpose {
// i - 1 because vmctx isn't passed through `values_vec`.
ir::ArgumentPurpose::Normal => builder.ins().load(
r.value_type,
mflags,
values_vec_ptr_val,
((i - 1) * value_size) as i32,
),
ir::ArgumentPurpose::VMContext => vmctx_ptr_val,
other => panic!("unsupported argument purpose {}", other),
}
})
.collect::<Vec<_>>();
let new_sig = builder.import_signature(signature.clone());
// TODO: It's possible to make this a direct call. We just need Cranelift
// to support functions declared with an immediate integer address.
// ExternalName::Absolute(u64). Let's do it.
let callee_value = builder.ins().iconst(pointer_type, callee_address as i64);
let call = builder
.ins()
.call_indirect(new_sig, callee_value, &callee_args);
let results = builder.func.dfg.inst_results(call).to_vec();
// Store the return values into `values_vec`.
let mflags = ir::MemFlags::trusted();
for (i, r) in results.iter().enumerate() {
builder
.ins()
.store(mflags, *r, values_vec_ptr_val, (i * value_size) as i32);
}
builder.ins().return_(&[]);
builder.finalize()
}
let mut code_buf = Vec::new();
let mut unwind_info = Vec::new();
let mut reloc_sink = RelocSink {};
let mut trap_sink = binemit::NullTrapSink {};
let mut stackmap_sink = binemit::NullStackmapSink {};
context
.compile_and_emit(
isa,
&mut code_buf,
&mut reloc_sink,
&mut trap_sink,
&mut stackmap_sink,
)
.map_err(|error| SetupError::Compile(CompileError::Codegen(error)))?;
context.emit_unwind_info(isa, &mut unwind_info);
Ok(code_memory
.allocate_for_function(&CompiledFunction {
body: code_buf,
jt_offsets: context.func.jt_offsets,
unwind_info,
})
.map_err(|message| SetupError::Instantiate(InstantiationError::Resource(message)))?
.as_ptr())
}
fn allocate_functions(
code_memory: &mut CodeMemory,
compilation: &Compilation,
) -> Result<PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>, String> {
let fat_ptrs = code_memory.allocate_for_compilation(compilation)?;
// Then create a PrimaryMap from the resulting vector of pointers.
let mut result = PrimaryMap::with_capacity(compilation.len());
for i in 0..fat_ptrs.len() {
let fat_ptr: *mut [VMFunctionBody] = fat_ptrs[i];
result.push(fat_ptr);
}
Ok(result)
}
fn register_traps(
allocated_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
traps: &Traps,
trap_registration_guards: &mut Vec<TrapRegistrationGuard>,
) {
let mut trap_registry = get_mut_trap_registry();
for (func_addr, func_traps) in allocated_functions.values().zip(traps.values()) {
for trap_desc in func_traps.iter() {
let func_addr = *func_addr as *const u8 as usize;
let offset = usize::try_from(trap_desc.code_offset).unwrap();
let trap_addr = func_addr + offset;
let guard =
trap_registry.register_trap(trap_addr, trap_desc.source_loc, trap_desc.trap_code);
trap_registration_guards.push(guard);
}
}
}
/// We don't expect trampoline compilation to produce any relocations, so
/// this `RelocSink` just asserts that it doesn't receive any.
struct RelocSink {}
impl binemit::RelocSink for RelocSink {
fn reloc_ebb(
&mut self,
_offset: binemit::CodeOffset,
_reloc: binemit::Reloc,
_ebb_offset: binemit::CodeOffset,
) {
panic!("trampoline compilation should not produce ebb relocs");
}
fn reloc_external(
&mut self,
_offset: binemit::CodeOffset,
_reloc: binemit::Reloc,
_name: &ir::ExternalName,
_addend: binemit::Addend,
) {
panic!("trampoline compilation should not produce external symbol relocs");
}
fn reloc_constant(
&mut self,
_code_offset: binemit::CodeOffset,
_reloc: binemit::Reloc,
_constant_offset: ir::ConstantOffset,
) {
panic!("trampoline compilation should not produce constant relocs");
}
fn reloc_jt(
&mut self,
_offset: binemit::CodeOffset,
_reloc: binemit::Reloc,
_jt: ir::JumpTable,
) {
panic!("trampoline compilation should not produce jump table relocs");
}
}
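
For reference, the trampolines built by `make_trampoline` above all have the shape `fn(vmctx, values_vec)`: each wasm argument is loaded from `values_vec` at `(i - 1) * value_size`, one indirect call is made to the real function (whose first parameter is the vmctx), and the results are written back into the same buffer starting at offset 0. A purely illustrative sketch, not part of this commit, of what that generated code does for a hypothetical `(i32, i32) -> i32` function, assuming 16-byte argument slots:

    // Illustrative only: mirrors the IR that make_trampoline emits for a
    // hypothetical `(i32, i32) -> i32` wasm function, assuming value_size == 16.
    unsafe fn trampoline_shape(
        callee: unsafe extern "C" fn(*mut u8, i32, i32) -> i32, // vmctx + wasm params
        vmctx: *mut u8,
        values_vec: *mut u8,
    ) {
        let a = core::ptr::read(values_vec.add(0) as *const i32); // argument slot 0
        let b = core::ptr::read(values_vec.add(16) as *const i32); // argument slot 1
        let r = callee(vmctx, a, b);
        core::ptr::write(values_vec as *mut i32, r); // result reuses slot 0
    }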

crates/jit/src/context.rs (new file, 251 lines)

@@ -0,0 +1,251 @@
use crate::action::{get, inspect_memory, invoke};
use crate::HashMap;
use crate::{
instantiate, ActionError, ActionOutcome, CompilationStrategy, CompiledModule, Compiler,
InstanceHandle, Namespace, RuntimeValue, SetupError,
};
use alloc::boxed::Box;
use alloc::rc::Rc;
use alloc::string::{String, ToString};
use core::cell::RefCell;
use cranelift_codegen::isa::TargetIsa;
use thiserror::Error;
use wasmparser::{validate, OperatorValidatorConfig, ValidatingParserConfig};
/// Indicates an unknown instance was specified.
#[derive(Error, Debug)]
#[error("no instance {instance_name} present")]
pub struct UnknownInstance {
instance_name: String,
}
/// Error message used by `WastContext`.
#[derive(Error, Debug)]
pub enum ContextError {
/// An unknown instance name was used.
#[error("{0}")]
Instance(#[from] UnknownInstance),
/// An error occurred while performing an action.
#[error("{0}")]
Action(#[from] ActionError),
}
/// The collection of features configurable during compilation
#[derive(Clone, Default)]
pub struct Features {
/// marks whether the proposed thread feature is enabled or disabled
pub threads: bool,
/// marks whether the proposed reference type feature is enabled or disabled
pub reference_types: bool,
/// marks whether the proposed SIMD feature is enabled or disabled
pub simd: bool,
/// marks whether the proposed bulk memory feature is enabled or disabled
pub bulk_memory: bool,
/// marks whether the proposed multi-value feature is enabled or disabled
pub multi_value: bool,
}
impl Into<ValidatingParserConfig> for Features {
fn into(self) -> ValidatingParserConfig {
ValidatingParserConfig {
operator_config: OperatorValidatorConfig {
enable_threads: self.threads,
enable_reference_types: self.reference_types,
enable_bulk_memory: self.bulk_memory,
enable_simd: self.simd,
enable_multi_value: self.multi_value,
},
}
}
}
/// A convenient context for compiling and executing WebAssembly instances.
pub struct Context {
namespace: Namespace,
compiler: Box<Compiler>,
global_exports: Rc<RefCell<HashMap<String, Option<wasmtime_runtime::Export>>>>,
debug_info: bool,
features: Features,
}
impl Context {
/// Construct a new instance of `Context`.
pub fn new(compiler: Box<Compiler>) -> Self {
Self {
namespace: Namespace::new(),
compiler,
global_exports: Rc::new(RefCell::new(HashMap::new())),
debug_info: false,
features: Default::default(),
}
}
/// Get debug_info settings.
pub fn debug_info(&self) -> bool {
self.debug_info
}
/// Set debug_info settings.
pub fn set_debug_info(&mut self, value: bool) {
self.debug_info = value;
}
/// Construct a new instance of `Context` with the given target.
pub fn with_isa(isa: Box<dyn TargetIsa>, strategy: CompilationStrategy) -> Self {
Self::new(Box::new(Compiler::new(isa, strategy)))
}
/// Retrieve the context features
pub fn features(&self) -> &Features {
&self.features
}
/// Construct a new instance with the given features from the current `Context`
pub fn with_features(self, features: Features) -> Self {
Self { features, ..self }
}
fn validate(&mut self, data: &[u8]) -> Result<(), String> {
// TODO: Fix Cranelift to be able to perform validation itself, rather
// than calling into wasmparser ourselves here.
validate(data, Some(self.features.clone().into()))
.map_err(|e| format!("module did not validate: {}", e.to_string()))
}
fn instantiate(&mut self, data: &[u8]) -> Result<InstanceHandle, SetupError> {
self.validate(&data).map_err(SetupError::Validate)?;
let debug_info = self.debug_info();
instantiate(
&mut *self.compiler,
&data,
&mut self.namespace,
Rc::clone(&self.global_exports),
debug_info,
)
}
/// Return the instance associated with the given name.
pub fn get_instance(
&mut self,
instance_name: &str,
) -> Result<&mut InstanceHandle, UnknownInstance> {
self.namespace
.get_instance(instance_name)
.ok_or_else(|| UnknownInstance {
instance_name: instance_name.to_string(),
})
}
/// Instantiate a module instance and register the instance.
pub fn instantiate_module(
&mut self,
instance_name: Option<String>,
data: &[u8],
) -> Result<InstanceHandle, ActionError> {
let instance = self.instantiate(data).map_err(ActionError::Setup)?;
self.optionally_name_instance(instance_name, instance.clone());
Ok(instance)
}
/// Compile a module.
pub fn compile_module(&mut self, data: &[u8]) -> Result<CompiledModule, SetupError> {
self.validate(&data).map_err(SetupError::Validate)?;
let debug_info = self.debug_info();
CompiledModule::new(
&mut *self.compiler,
data,
&mut self.namespace,
Rc::clone(&self.global_exports),
debug_info,
)
}
/// If `name` isn't None, register it for the given instance.
pub fn optionally_name_instance(&mut self, name: Option<String>, instance: InstanceHandle) {
if let Some(name) = name {
self.namespace.name_instance(name, instance);
}
}
/// Register a name for the given instance.
pub fn name_instance(&mut self, name: String, instance: InstanceHandle) {
self.namespace.name_instance(name, instance);
}
/// Register an additional name for an existing registered instance.
pub fn alias(&mut self, name: &str, as_name: String) -> Result<(), UnknownInstance> {
let instance = self.get_instance(&name)?.clone();
self.name_instance(as_name, instance);
Ok(())
}
/// Invoke an exported function from a named instance.
pub fn invoke_named(
&mut self,
instance_name: &str,
field: &str,
args: &[RuntimeValue],
) -> Result<ActionOutcome, ContextError> {
let mut instance = self
.get_instance(&instance_name)
.map_err(ContextError::Instance)?
.clone();
self.invoke(&mut instance, field, args)
.map_err(ContextError::Action)
}
/// Invoke an exported function from an instance.
pub fn invoke(
&mut self,
instance: &mut InstanceHandle,
field: &str,
args: &[RuntimeValue],
) -> Result<ActionOutcome, ActionError> {
invoke(&mut *self.compiler, instance, field, &args)
}
/// Get the value of an exported global variable from an instance.
pub fn get_named(
&mut self,
instance_name: &str,
field: &str,
) -> Result<ActionOutcome, ContextError> {
let instance = self
.get_instance(&instance_name)
.map_err(ContextError::Instance)?
.clone();
self.get(&instance, field).map_err(ContextError::Action)
}
/// Get the value of an exported global variable from an instance.
pub fn get(
&mut self,
instance: &InstanceHandle,
field: &str,
) -> Result<ActionOutcome, ActionError> {
get(instance, field).map(|value| ActionOutcome::Returned {
values: vec![value],
})
}
/// Get a slice of memory from an instance.
pub fn inspect_memory<'instance>(
&self,
instance: &'instance InstanceHandle,
field_name: &str,
start: usize,
len: usize,
) -> Result<&'instance [u8], ActionError> {
inspect_memory(instance, field_name, start, len)
}
/// Return a handle to the global_exports mapping, needed by some modules
/// for instantiation.
pub fn get_global_exports(
&mut self,
) -> Rc<RefCell<HashMap<String, Option<wasmtime_runtime::Export>>>> {
Rc::clone(&mut self.global_exports)
}
}
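
`Features` maps one-to-one onto wasmparser's `OperatorValidatorConfig`, so enabling a proposal is just a matter of flipping the corresponding field before validation. A minimal sketch, not part of this commit (ISA setup assumed as in the earlier example):

    use cranelift_codegen::isa::TargetIsa;
    use wasmtime_jit::{CompilationStrategy, Context, Features};

    fn context_with_simd(isa: Box<dyn TargetIsa>) -> Context {
        let features = Features {
            simd: true,           // forwarded to wasmparser's enable_simd
            ..Default::default()  // threads, reference types, bulk memory, multi-value stay off
        };
        Context::with_isa(isa, CompilationStrategy::Auto).with_features(features)
    }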

crates/jit/src/function_table.rs (new file, 134 lines)

@@ -0,0 +1,134 @@
//! Runtime function table.
//!
//! This module is primarily used to track JIT functions on Windows for stack walking and unwind.
/// Represents a runtime function table.
///
/// The runtime function table is not implemented for non-Windows target platforms.
#[cfg(not(target_os = "windows"))]
pub(crate) struct FunctionTable;
#[cfg(not(target_os = "windows"))]
impl FunctionTable {
/// Creates a new function table.
pub fn new() -> Self {
Self
}
/// Returns the number of functions in the table, also referred to as its 'length'.
///
/// For non-Windows platforms, the table will always be empty.
pub fn len(&self) -> usize {
0
}
/// Adds a function to the table based off of the start offset, end offset, and unwind offset.
///
/// The offsets are from the "module base", which is provided when the table is published.
///
/// For non-Windows platforms, this is a no-op.
pub fn add_function(&mut self, _start: u32, _end: u32, _unwind: u32) {}
/// Publishes the function table using the given base address.
///
/// A published function table will automatically be deleted when it is dropped.
///
/// For non-Windows platforms, this is a no-op.
pub fn publish(&mut self, _base_address: u64) -> Result<(), String> {
Ok(())
}
}
/// Represents a runtime function table.
///
/// This is used to register JIT code with the operating system to enable stack walking and unwinding.
#[cfg(all(target_os = "windows", target_arch = "x86_64"))]
pub(crate) struct FunctionTable {
functions: Vec<winapi::um::winnt::RUNTIME_FUNCTION>,
published: bool,
}
#[cfg(all(target_os = "windows", target_arch = "x86_64"))]
impl FunctionTable {
/// Creates a new function table.
pub fn new() -> Self {
Self {
functions: Vec::new(),
published: false,
}
}
/// Returns the number of functions in the table, also referred to as its 'length'.
pub fn len(&self) -> usize {
self.functions.len()
}
/// Adds a function to the table based off of the start offset, end offset, and unwind offset.
///
/// The offsets are from the "module base", which is provided when the table is published.
pub fn add_function(&mut self, start: u32, end: u32, unwind: u32) {
use winapi::um::winnt;
assert!(!self.published, "table has already been published");
let mut entry = winnt::RUNTIME_FUNCTION::default();
entry.BeginAddress = start;
entry.EndAddress = end;
unsafe {
*entry.u.UnwindInfoAddress_mut() = unwind;
}
self.functions.push(entry);
}
/// Publishes the function table using the given base address.
///
/// A published function table will automatically be deleted when it is dropped.
pub fn publish(&mut self, base_address: u64) -> Result<(), String> {
use winapi::um::winnt;
if self.published {
return Err("function table was already published".into());
}
self.published = true;
if self.functions.is_empty() {
return Ok(());
}
unsafe {
// Windows heap allocations are 32-bit aligned, but assert just in case
assert!(
(self.functions.as_mut_ptr() as u64) % 4 == 0,
"function table allocation was not aligned"
);
if winnt::RtlAddFunctionTable(
self.functions.as_mut_ptr(),
self.functions.len() as u32,
base_address,
) == 0
{
return Err("failed to add function table".into());
}
}
Ok(())
}
}
#[cfg(target_os = "windows")]
impl Drop for FunctionTable {
fn drop(&mut self) {
use winapi::um::winnt;
if self.published {
unsafe {
winnt::RtlDeleteFunctionTable(self.functions.as_mut_ptr());
}
}
}
}
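
`FunctionTable` is `pub(crate)`, so it is only driven from `CodeMemory` above: entries are added while functions are copied in, and the table is published exactly once per executable `Mmap`. A within-crate sketch of that ordering, not part of this commit, with hypothetical offsets relative to one mmap's base:

    // Within-crate sketch; offsets and base_address are hypothetical.
    fn register_two_functions(base_address: u64) -> Result<(), String> {
        let mut table = crate::function_table::FunctionTable::new();
        table.add_function(0x00, 0x40, 0x40); // body at [0x00, 0x40), unwind info at 0x40
        table.add_function(0x50, 0x90, 0x90);
        // No-op on non-Windows; RtlAddFunctionTable(base_address) on Windows x86_64.
        table.publish(base_address)?;
        Ok(()) // dropping `table` here would unregister it again on Windows
    }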

crates/jit/src/instantiate.rs (new file, 282 lines)

@@ -0,0 +1,282 @@
//! Define the `instantiate` function, which takes a byte array containing an
//! encoded wasm module and returns a live wasm instance. Also, define
//! `CompiledModule` to allow compiling and instantiating to be done as separate
//! steps.
use super::HashMap;
use crate::compiler::Compiler;
use crate::link::link_module;
use crate::resolver::Resolver;
use alloc::boxed::Box;
use alloc::rc::Rc;
use alloc::string::String;
use alloc::vec::Vec;
use core::cell::RefCell;
use cranelift_entity::{BoxedSlice, PrimaryMap};
use cranelift_wasm::{DefinedFuncIndex, SignatureIndex};
#[cfg(feature = "std")]
use std::io::Write;
use thiserror::Error;
use wasmtime_debug::read_debuginfo;
use wasmtime_environ::{
CompileError, DataInitializer, DataInitializerLocation, Module, ModuleEnvironment,
};
use wasmtime_runtime::{
Export, GdbJitImageRegistration, Imports, InstanceHandle, InstantiationError, VMFunctionBody,
VMSharedSignatureIndex,
};
/// An error condition while setting up a wasm instance, be it validation,
/// compilation, or instantiation.
#[derive(Error, Debug)]
pub enum SetupError {
/// The module did not pass validation.
#[error("Validation error: {0}")]
Validate(String),
/// A wasm translation error occurred.
#[error("WebAssembly compilation error: {0}")]
Compile(#[from] CompileError),
/// Some runtime resource was unavailable or insufficient, or the start function
/// trapped.
#[error("Instantiation error: {0}")]
Instantiate(#[from] InstantiationError),
/// A debug information generation error occurred.
#[error("Debug information error: {0}")]
DebugInfo(failure::Error),
}
/// This is similar to `CompiledModule`, but references the data initializers
/// from the wasm buffer rather than holding its own copy.
struct RawCompiledModule<'data> {
module: Module,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imports: Imports,
data_initializers: Box<[DataInitializer<'data>]>,
signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
dbg_jit_registration: Option<GdbJitImageRegistration>,
}
impl<'data> RawCompiledModule<'data> {
/// Create a new `RawCompiledModule` by compiling the wasm module in `data` and instantiating it.
fn new(
compiler: &mut Compiler,
data: &'data [u8],
resolver: &mut dyn Resolver,
debug_info: bool,
) -> Result<Self, SetupError> {
let environ = ModuleEnvironment::new(compiler.frontend_config(), compiler.tunables());
let translation = environ
.translate(data)
.map_err(|error| SetupError::Compile(CompileError::Wasm(error)))?;
let debug_data = if debug_info {
Some(read_debuginfo(&data))
} else {
None
};
let (allocated_functions, jt_offsets, relocations, dbg_image) = compiler.compile(
&translation.module,
translation.module_translation.as_ref().unwrap(),
translation.function_body_inputs,
debug_data,
)?;
let imports = link_module(
&translation.module,
&allocated_functions,
&jt_offsets,
relocations,
resolver,
)
.map_err(|err| SetupError::Instantiate(InstantiationError::Link(err)))?;
// Gather up the pointers to the compiled functions.
let finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody> =
allocated_functions
.into_iter()
.map(|(_index, allocated)| {
let fatptr: *const [VMFunctionBody] = *allocated;
fatptr as *const VMFunctionBody
})
.collect::<PrimaryMap<_, _>>()
.into_boxed_slice();
// Compute indices into the shared signature table.
let signatures = {
let signature_registry = compiler.signatures();
translation
.module
.signatures
.values()
.map(|sig| signature_registry.register(sig))
.collect::<PrimaryMap<_, _>>()
};
// Make all code compiled thus far executable.
compiler.publish_compiled_code();
#[cfg(feature = "std")]
let dbg_jit_registration = if let Some(img) = dbg_image {
let mut bytes = Vec::new();
bytes.write_all(&img).expect("all written");
let reg = GdbJitImageRegistration::register(bytes);
Some(reg)
} else {
None
};
Ok(Self {
module: translation.module,
finished_functions,
imports,
data_initializers: translation.data_initializers.into_boxed_slice(),
signatures: signatures.into_boxed_slice(),
dbg_jit_registration,
})
}
}
/// A compiled wasm module, ready to be instantiated.
pub struct CompiledModule {
module: Rc<Module>,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imports: Imports,
data_initializers: Box<[OwnedDataInitializer]>,
signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
global_exports: Rc<RefCell<HashMap<String, Option<Export>>>>,
dbg_jit_registration: Option<Rc<GdbJitImageRegistration>>,
}
impl CompiledModule {
/// Compile a data buffer into a `CompiledModule`, which may then be instantiated.
pub fn new<'data>(
compiler: &mut Compiler,
data: &'data [u8],
resolver: &mut dyn Resolver,
global_exports: Rc<RefCell<HashMap<String, Option<Export>>>>,
debug_info: bool,
) -> Result<Self, SetupError> {
let raw = RawCompiledModule::<'data>::new(compiler, data, resolver, debug_info)?;
Ok(Self::from_parts(
raw.module,
global_exports,
raw.finished_functions,
raw.imports,
raw.data_initializers
.iter()
.map(OwnedDataInitializer::new)
.collect::<Vec<_>>()
.into_boxed_slice(),
raw.signatures.clone(),
raw.dbg_jit_registration,
))
}
/// Construct a `CompiledModule` from component parts.
pub fn from_parts(
module: Module,
global_exports: Rc<RefCell<HashMap<String, Option<Export>>>>,
finished_functions: BoxedSlice<DefinedFuncIndex, *const VMFunctionBody>,
imports: Imports,
data_initializers: Box<[OwnedDataInitializer]>,
signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
dbg_jit_registration: Option<GdbJitImageRegistration>,
) -> Self {
Self {
module: Rc::new(module),
global_exports: Rc::clone(&global_exports),
finished_functions,
imports,
data_initializers,
signatures,
dbg_jit_registration: dbg_jit_registration.map(|r| Rc::new(r)),
}
}
/// Create an `Instance` from this `CompiledModule`.
///
/// Note that if only one instance of this module is needed, it may be more
/// efficient to call the top-level `instantiate`, since that avoids copying
/// the data initializers.
pub fn instantiate(&mut self) -> Result<InstanceHandle, InstantiationError> {
let data_initializers = self
.data_initializers
.iter()
.map(|init| DataInitializer {
location: init.location.clone(),
data: &*init.data,
})
.collect::<Vec<_>>();
InstanceHandle::new(
Rc::clone(&self.module),
Rc::clone(&self.global_exports),
self.finished_functions.clone(),
self.imports.clone(),
&data_initializers,
self.signatures.clone(),
self.dbg_jit_registration.as_ref().map(|r| Rc::clone(&r)),
Box::new(()),
)
}
/// Return a reference-counting pointer to a module.
pub fn module(&self) -> Rc<Module> {
self.module.clone()
}
/// Return a reference to a module.
pub fn module_ref(&self) -> &Module {
&self.module
}
}
/// Similar to `DataInitializer`, but owns its own copy of the data rather
/// than holding a slice of the original module.
pub struct OwnedDataInitializer {
/// The location where the initialization is to be performed.
location: DataInitializerLocation,
/// The initialization data.
data: Box<[u8]>,
}
impl OwnedDataInitializer {
fn new(borrowed: &DataInitializer<'_>) -> Self {
Self {
location: borrowed.location.clone(),
data: borrowed.data.to_vec().into_boxed_slice(),
}
}
}
/// Create a new wasm instance by compiling the wasm module in `data` and instantiating it.
///
/// This is equivalent to creating a `CompiledModule` and calling `instantiate()` on it,
/// but avoids creating an intermediate copy of the data initializers.
pub fn instantiate(
compiler: &mut Compiler,
data: &[u8],
resolver: &mut dyn Resolver,
global_exports: Rc<RefCell<HashMap<String, Option<Export>>>>,
debug_info: bool,
) -> Result<InstanceHandle, SetupError> {
let raw = RawCompiledModule::new(compiler, data, resolver, debug_info)?;
InstanceHandle::new(
Rc::new(raw.module),
global_exports,
raw.finished_functions,
raw.imports,
&*raw.data_initializers,
raw.signatures,
raw.dbg_jit_registration.map(|r| Rc::new(r)),
Box::new(()),
)
.map_err(SetupError::Instantiate)
}
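
The free function `instantiate` above is the one-shot path: compile, link, and instantiate without keeping a `CompiledModule` around. A minimal sketch of driving it directly, not part of this commit; the `Compiler` is assumed to be constructed as elsewhere in this crate, and `NullResolver` (re-exported from lib.rs) resolves nothing, so it only suits modules with no imports:

    use std::cell::RefCell;
    use std::collections::HashMap;
    use std::rc::Rc;
    use wasmtime_jit::{instantiate, Compiler, InstanceHandle, NullResolver, SetupError};

    fn instantiate_once(compiler: &mut Compiler, wasm: &[u8]) -> Result<InstanceHandle, SetupError> {
        let mut resolver = NullResolver {};
        // Fresh global_exports map; callers that share exports across instances reuse one Rc.
        let global_exports = Rc::new(RefCell::new(HashMap::new()));
        instantiate(compiler, wasm, &mut resolver, global_exports, /* debug_info = */ false)
    }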

crates/jit/src/lib.rs (new file, 59 lines)

@@ -0,0 +1,59 @@
//! JIT-style runtime for WebAssembly using Cranelift.
#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
#![warn(unused_import_braces)]
#![cfg_attr(feature = "std", deny(unstable_features))]
#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
#![cfg_attr(
feature = "cargo-clippy",
allow(clippy::new_without_default, clippy::new_without_default_derive)
)]
#![cfg_attr(
feature = "cargo-clippy",
warn(
clippy::float_arithmetic,
clippy::mut_mut,
clippy::nonminimal_bool,
clippy::option_map_unwrap_or,
clippy::option_map_unwrap_or_else,
clippy::print_stdout,
clippy::unicode_not_nfc,
clippy::use_self
)
)]
#[macro_use]
extern crate alloc;
#[cfg(not(feature = "std"))]
use hashbrown::{hash_map, HashMap, HashSet};
#[cfg(feature = "std")]
use std::collections::{hash_map, HashMap, HashSet};
mod action;
mod code_memory;
mod compiler;
mod context;
mod function_table;
mod instantiate;
mod link;
mod namespace;
mod resolver;
mod target_tunables;
pub use crate::action::{ActionError, ActionOutcome, RuntimeValue};
pub use crate::code_memory::CodeMemory;
pub use crate::compiler::{CompilationStrategy, Compiler};
pub use crate::context::{Context, ContextError, Features, UnknownInstance};
pub use crate::instantiate::{instantiate, CompiledModule, SetupError};
pub use crate::link::link_module;
pub use crate::namespace::Namespace;
pub use crate::resolver::{NullResolver, Resolver};
pub use crate::target_tunables::target_tunables;
// Re-export `InstanceHandle` so that users won't need to separately depend on
// wasmtime-runtime in common cases.
pub use wasmtime_runtime::{InstanceHandle, InstantiationError};
/// Version number of this crate.
pub const VERSION: &str = env!("CARGO_PKG_VERSION");

412
crates/jit/src/link.rs Normal file

@@ -0,0 +1,412 @@
//! Linking for JIT-compiled code.
use crate::resolver::Resolver;
use crate::HashSet;
use alloc::vec::Vec;
use core::ptr::write_unaligned;
use cranelift_codegen::binemit::Reloc;
use cranelift_codegen::ir::JumpTableOffsets;
use cranelift_entity::PrimaryMap;
use cranelift_wasm::{DefinedFuncIndex, Global, GlobalInit, Memory, Table, TableElementType};
use wasmtime_environ::{
MemoryPlan, MemoryStyle, Module, Relocation, RelocationTarget, Relocations, TablePlan,
};
use wasmtime_runtime::libcalls;
use wasmtime_runtime::{
Export, Imports, InstanceHandle, LinkError, VMFunctionBody, VMFunctionImport, VMGlobalImport,
VMMemoryImport, VMTableImport,
};
/// Links a module that has been compiled with `compiled_module` in `wasmtime-environ`.
pub fn link_module(
module: &Module,
allocated_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
jt_offsets: &PrimaryMap<DefinedFuncIndex, JumpTableOffsets>,
relocations: Relocations,
resolver: &mut dyn Resolver,
) -> Result<Imports, LinkError> {
let mut dependencies = HashSet::new();
let mut function_imports = PrimaryMap::with_capacity(module.imported_funcs.len());
for (index, (ref module_name, ref field)) in module.imported_funcs.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Function {
address,
signature,
vmctx,
} => {
let import_signature = &module.signatures[module.functions[index]];
if signature != *import_signature {
// TODO: If the difference is in the calling convention,
// we could emit a wrapper function to fix it up.
return Err(LinkError(
format!("{}/{}: incompatible import type: exported function with signature {} incompatible with function import with signature {}",
module_name, field,
signature, import_signature)
));
}
dependencies.insert(unsafe { InstanceHandle::from_vmctx(vmctx) });
function_imports.push(VMFunctionImport {
body: address,
vmctx,
});
}
Export::Table { .. } | Export::Memory { .. } | Export::Global { .. } => {
return Err(LinkError(format!(
"{}/{}: incompatible import type: export incompatible with function import",
module_name, field
)));
}
},
None => {
return Err(LinkError(format!(
"{}/{}: unknown import function: function not provided",
module_name, field
)));
}
}
}
let mut table_imports = PrimaryMap::with_capacity(module.imported_tables.len());
for (index, (ref module_name, ref field)) in module.imported_tables.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Table {
definition,
vmctx,
table,
} => {
let import_table = &module.table_plans[index];
if !is_table_compatible(&table, import_table) {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported table incompatible with table import",
module_name, field,
)));
}
dependencies.insert(unsafe { InstanceHandle::from_vmctx(vmctx) });
table_imports.push(VMTableImport {
from: definition,
vmctx,
});
}
Export::Global { .. } | Export::Memory { .. } | Export::Function { .. } => {
return Err(LinkError(format!(
"{}/{}: incompatible import type: export incompatible with table import",
module_name, field
)));
}
},
None => {
return Err(LinkError(format!(
"unknown import: no provided import table for {}/{}",
module_name, field
)));
}
}
}
let mut memory_imports = PrimaryMap::with_capacity(module.imported_memories.len());
for (index, (ref module_name, ref field)) in module.imported_memories.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Memory {
definition,
vmctx,
memory,
} => {
let import_memory = &module.memory_plans[index];
if !is_memory_compatible(&memory, import_memory) {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported memory incompatible with memory import",
module_name, field
)));
}
                    // Sanity-check: Ensure that the imported memory has at least the
                    // guard-page protections that the importing module expects it to have.
match (memory.style, &import_memory.style) {
(
MemoryStyle::Static { bound },
MemoryStyle::Static {
bound: import_bound,
},
) => {
assert!(bound >= *import_bound);
}
_ => (),
}
assert!(memory.offset_guard_size >= import_memory.offset_guard_size);
dependencies.insert(unsafe { InstanceHandle::from_vmctx(vmctx) });
memory_imports.push(VMMemoryImport {
from: definition,
vmctx,
});
}
Export::Table { .. } | Export::Global { .. } | Export::Function { .. } => {
return Err(LinkError(format!(
"{}/{}: incompatible import type: export incompatible with memory import",
module_name, field
)));
}
},
None => {
return Err(LinkError(format!(
"unknown import: no provided import memory for {}/{}",
module_name, field
)));
}
}
}
let mut global_imports = PrimaryMap::with_capacity(module.imported_globals.len());
for (index, (ref module_name, ref field)) in module.imported_globals.iter() {
match resolver.resolve(module_name, field) {
Some(export_value) => match export_value {
Export::Table { .. } | Export::Memory { .. } | Export::Function { .. } => {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported global incompatible with global import",
module_name, field
)));
}
Export::Global {
definition,
vmctx,
global,
} => {
let imported_global = module.globals[index];
if !is_global_compatible(&global, &imported_global) {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported global incompatible with global import",
module_name, field
)));
}
dependencies.insert(unsafe { InstanceHandle::from_vmctx(vmctx) });
global_imports.push(VMGlobalImport { from: definition });
}
},
None => {
return Err(LinkError(format!(
"unknown import: no provided import global for {}/{}",
module_name, field
)));
}
}
}
// Apply relocations, now that we have virtual addresses for everything.
relocate(allocated_functions, jt_offsets, relocations, module);
Ok(Imports::new(
dependencies,
function_imports,
table_imports,
memory_imports,
global_imports,
))
}
fn is_global_compatible(exported: &Global, imported: &Global) -> bool {
match imported.initializer {
GlobalInit::Import => (),
_ => panic!("imported Global should have an Imported initializer"),
}
let Global {
ty: exported_ty,
mutability: exported_mutability,
initializer: _exported_initializer,
} = exported;
let Global {
ty: imported_ty,
mutability: imported_mutability,
initializer: _imported_initializer,
} = imported;
exported_ty == imported_ty && imported_mutability == exported_mutability
}
fn is_table_element_type_compatible(
exported_type: TableElementType,
imported_type: TableElementType,
) -> bool {
match exported_type {
TableElementType::Func => match imported_type {
TableElementType::Func => true,
_ => false,
},
TableElementType::Val(exported_val_ty) => match imported_type {
TableElementType::Val(imported_val_ty) => exported_val_ty == imported_val_ty,
_ => false,
},
}
}
fn is_table_compatible(exported: &TablePlan, imported: &TablePlan) -> bool {
let TablePlan {
table:
Table {
ty: exported_ty,
minimum: exported_minimum,
maximum: exported_maximum,
},
style: _exported_style,
} = exported;
let TablePlan {
table:
Table {
ty: imported_ty,
minimum: imported_minimum,
maximum: imported_maximum,
},
style: _imported_style,
} = imported;
is_table_element_type_compatible(*exported_ty, *imported_ty)
&& imported_minimum <= exported_minimum
&& (imported_maximum.is_none()
|| (!exported_maximum.is_none()
&& imported_maximum.unwrap() >= exported_maximum.unwrap()))
}
fn is_memory_compatible(exported: &MemoryPlan, imported: &MemoryPlan) -> bool {
let MemoryPlan {
memory:
Memory {
minimum: exported_minimum,
maximum: exported_maximum,
shared: exported_shared,
},
style: _exported_style,
offset_guard_size: _exported_offset_guard_size,
} = exported;
let MemoryPlan {
memory:
Memory {
minimum: imported_minimum,
maximum: imported_maximum,
shared: imported_shared,
},
style: _imported_style,
offset_guard_size: _imported_offset_guard_size,
} = imported;
imported_minimum <= exported_minimum
&& (imported_maximum.is_none()
|| (!exported_maximum.is_none()
&& imported_maximum.unwrap() >= exported_maximum.unwrap()))
&& exported_shared == imported_shared
}
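// An illustrative restatement, not used by the linker: the limit-matching
// rule shared by `is_table_compatible` and `is_memory_compatible` above,
// written over plain element/page counts. The export must provide at least
// the imported minimum, and if the import declares a maximum, the export
// must declare one that does not exceed it.
#[allow(dead_code)]
fn limits_compatible(
    exported_min: u32,
    exported_max: Option<u32>,
    imported_min: u32,
    imported_max: Option<u32>,
) -> bool {
    imported_min <= exported_min
        && match (imported_max, exported_max) {
            // The import places no upper bound, so any export satisfies it.
            (None, _) => true,
            // Both declare a maximum: the export's may not exceed the import's.
            (Some(imp_max), Some(exp_max)) => exp_max <= imp_max,
            // The import requires a maximum but the export declares none.
            (Some(_), None) => false,
        }
}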
/// Performs the relocations inside the function bytecode, provided the necessary metadata.
fn relocate(
allocated_functions: &PrimaryMap<DefinedFuncIndex, *mut [VMFunctionBody]>,
jt_offsets: &PrimaryMap<DefinedFuncIndex, JumpTableOffsets>,
relocations: PrimaryMap<DefinedFuncIndex, Vec<Relocation>>,
module: &Module,
) {
for (i, function_relocs) in relocations.into_iter() {
for r in function_relocs {
use self::libcalls::*;
let target_func_address: usize = match r.reloc_target {
RelocationTarget::UserFunc(index) => match module.defined_func_index(index) {
Some(f) => {
let fatptr: *const [VMFunctionBody] = allocated_functions[f];
fatptr as *const VMFunctionBody as usize
}
None => panic!("direct call to import"),
},
RelocationTarget::Memory32Grow => wasmtime_memory32_grow as usize,
RelocationTarget::Memory32Size => wasmtime_memory32_size as usize,
RelocationTarget::ImportedMemory32Grow => wasmtime_imported_memory32_grow as usize,
RelocationTarget::ImportedMemory32Size => wasmtime_imported_memory32_size as usize,
RelocationTarget::LibCall(libcall) => {
use cranelift_codegen::ir::LibCall::*;
match libcall {
CeilF32 => wasmtime_f32_ceil as usize,
FloorF32 => wasmtime_f32_floor as usize,
TruncF32 => wasmtime_f32_trunc as usize,
NearestF32 => wasmtime_f32_nearest as usize,
CeilF64 => wasmtime_f64_ceil as usize,
FloorF64 => wasmtime_f64_floor as usize,
TruncF64 => wasmtime_f64_trunc as usize,
NearestF64 => wasmtime_f64_nearest as usize,
#[cfg(not(target_os = "windows"))]
Probestack => __rust_probestack as usize,
#[cfg(all(target_os = "windows", target_env = "gnu"))]
Probestack => ___chkstk as usize,
#[cfg(all(
target_os = "windows",
target_env = "msvc",
target_pointer_width = "64"
))]
Probestack => __chkstk as usize,
other => panic!("unexpected libcall: {}", other),
}
}
RelocationTarget::JumpTable(func_index, jt) => {
match module.defined_func_index(func_index) {
Some(f) => {
let offset = *jt_offsets
.get(f)
.and_then(|ofs| ofs.get(jt))
.expect("func jump table");
let fatptr: *const [VMFunctionBody] = allocated_functions[f];
fatptr as *const VMFunctionBody as usize + offset as usize
}
None => panic!("func index of jump table"),
}
}
};
let fatptr: *const [VMFunctionBody] = allocated_functions[i];
let body = fatptr as *const VMFunctionBody;
match r.reloc {
#[cfg(target_pointer_width = "64")]
Reloc::Abs8 => unsafe {
let reloc_address = body.add(r.offset as usize) as usize;
let reloc_addend = r.addend as isize;
let reloc_abs = (target_func_address as u64)
.checked_add(reloc_addend as u64)
.unwrap();
write_unaligned(reloc_address as *mut u64, reloc_abs);
},
#[cfg(target_pointer_width = "32")]
Reloc::X86PCRel4 => unsafe {
let reloc_address = body.add(r.offset as usize) as usize;
let reloc_addend = r.addend as isize;
let reloc_delta_u32 = (target_func_address as u32)
.wrapping_sub(reloc_address as u32)
.checked_add(reloc_addend as u32)
.unwrap();
write_unaligned(reloc_address as *mut u32, reloc_delta_u32);
},
#[cfg(target_pointer_width = "32")]
Reloc::X86CallPCRel4 => {
// ignore
}
Reloc::X86PCRelRodata4 => {
// ignore
}
_ => panic!("unsupported reloc kind"),
}
}
}
}
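// An illustrative sketch, not used by `relocate` above: the values it writes
// for the two relocation kinds it patches, factored out over plain integers.
// `Abs8` stores the absolute target address plus the addend; `X86PCRel4`
// stores that sum relative to the relocation site, truncated to 32 bits. As
// in `relocate`, the additions are checked and panic on overflow.
#[allow(dead_code)]
fn abs8_value(target: u64, addend: i64) -> u64 {
    target.checked_add(addend as u64).unwrap()
}
#[allow(dead_code)]
fn x86_pc_rel4_value(target: u32, reloc_address: u32, addend: i32) -> u32 {
    target
        .wrapping_sub(reloc_address)
        .checked_add(addend as u32)
        .unwrap()
}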
/// A declaration for the stack probe function in Rust's standard library, for
/// catching callstack overflow.
extern "C" {
#[cfg(not(target_os = "windows"))]
pub fn __rust_probestack();
#[cfg(all(
target_os = "windows",
target_env = "msvc",
target_pointer_width = "64"
))]
pub fn __chkstk();
// ___chkstk (note the triple underscore) is implemented in compiler-builtins/src/x86_64.rs
// by the Rust compiler for the MinGW target
#[cfg(all(target_os = "windows", target_env = "gnu",))]
pub fn ___chkstk();
}


@@ -0,0 +1,47 @@
//! The core WebAssembly spec does not specify how imports are to be resolved
//! to exports. This file provides one possible way to manage multiple instances
//! and resolve imports to exports among them.
use super::HashMap;
use crate::resolver::Resolver;
use alloc::string::String;
use wasmtime_runtime::{Export, InstanceHandle};
/// A namespace containing instances keyed by name.
///
/// Note that `Namespace` implements the `Resolver` trait, so it can resolve
/// imports using defined exports.
pub struct Namespace {
    /// Mapping from names to instances.
names: HashMap<String, InstanceHandle>,
}
impl Namespace {
/// Construct a new `Namespace`.
pub fn new() -> Self {
Self {
names: HashMap::new(),
}
}
    /// Install a new `InstanceHandle` in this `Namespace` under the given name.
pub fn name_instance(&mut self, name: String, instance: InstanceHandle) {
self.names.insert(name, instance);
}
    /// Get the instance registered with the given `name`.
pub fn get_instance(&mut self, name: &str) -> Option<&mut InstanceHandle> {
self.names.get_mut(name)
}
}
impl Resolver for Namespace {
fn resolve(&mut self, name: &str, field: &str) -> Option<Export> {
if let Some(instance) = self.names.get_mut(name) {
instance.lookup(field)
} else {
None
}
}
}
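// An illustrative sketch, not part of the original module: register an
// instance under a name and resolve one of its exports through the
// `Resolver` impl above. The "env" module name is arbitrary, and the
// `InstanceHandle` is assumed to come from an earlier instantiation.
#[allow(dead_code)]
fn resolve_through_namespace(instance: InstanceHandle, field: &str) -> Option<Export> {
    let mut namespace = Namespace::new();
    namespace.name_instance(String::from("env"), instance);
    namespace.resolve("env", field)
}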


@@ -0,0 +1,19 @@
//! Define the `Resolver` trait, allowing custom resolution for external
//! references.
use wasmtime_runtime::Export;
/// An import resolver connects imports with available exported values.
pub trait Resolver {
/// Resolve the given module/field combo.
fn resolve(&mut self, module: &str, field: &str) -> Option<Export>;
}
/// `Resolver` implementation that always resolves to `None`.
pub struct NullResolver {}
impl Resolver for NullResolver {
fn resolve(&mut self, _module: &str, _field: &str) -> Option<Export> {
None
}
}
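// An illustrative sketch, not part of the original module: a resolver that
// hands out a single pre-resolved export for one module/field pair, which is
// about the simplest non-trivial `Resolver` one can write. The `Export`
// value is assumed to come from an existing instance; it is handed out at
// most once so the sketch does not rely on `Export` being cloneable.
#[allow(dead_code)]
struct SingleResolver {
    module: &'static str,
    field: &'static str,
    export: Option<Export>,
}
impl Resolver for SingleResolver {
    fn resolve(&mut self, module: &str, field: &str) -> Option<Export> {
        if module == self.module && field == self.field {
            self.export.take()
        } else {
            None
        }
    }
}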


@@ -0,0 +1,22 @@
use core::cmp::min;
use target_lexicon::{OperatingSystem, Triple};
use wasmtime_environ::Tunables;
/// Return a `Tunables` instance tuned for the given target platform.
pub fn target_tunables(triple: &Triple) -> Tunables {
let mut result = Tunables::default();
match triple.operating_system {
OperatingSystem::Windows => {
            // For now, use a smaller footprint on Windows so that we don't
            // outstrip the paging file.
// TODO: Make this configurable.
result.static_memory_bound = min(result.static_memory_bound, 0x100);
result.static_memory_offset_guard_size =
min(result.static_memory_offset_guard_size, 0x10000);
}
_ => {}
}
result
}
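// An illustrative sketch, not part of the original module: request tunables
// for the running host. `Triple::host()` is assumed to be available from
// `target_lexicon` here.
#[allow(dead_code)]
fn host_tunables() -> Tunables {
    target_tunables(&Triple::host())
}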