Add reference types R32 and R64

- Add resumable_trap, safepoint, isnull, and null instructions
- Add Stackmap struct and StackmapSink trait

Co-authored-by: Mir Ahmed <mirahmed753@gmail.com>
Co-authored-by: Dan Gohman <sunfish@mozilla.com>
Author: Carmen Kwan, 2019-07-23 16:28:54 -07:00 (committed by Dan Gohman)
Parent: b659262d2a
Commit: 19257f80c1
47 changed files with 1027 additions and 62 deletions
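
As a quick, hedged illustration of the new reference types (not part of the diff below; the helper name is invented), a client could pick the reference type that matches the target's pointer width, much as it would choose between I32 and I64:

use cranelift_codegen::ir::{types, Type};
use cranelift_codegen::isa::TargetIsa;

// Hypothetical helper, for illustration only: select the reference type whose
// width matches the target's pointer size.
fn reference_type(isa: &dyn TargetIsa) -> Type {
    if isa.pointer_bytes() == 8 {
        types::R64
    } else {
        types::R32
    }
}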

cranelift-codegen/src/binemit/memorysink.rs

@@ -13,9 +13,11 @@
//! that a `MemoryCodeSink` will always write binary machine code to raw memory. It forwards any
//! relocations to a `RelocSink` trait object. Relocations are less frequent than the
//! `CodeSink::put*` methods, so the performance impact of the virtual callbacks is less severe.
use super::{Addend, CodeInfo, CodeOffset, CodeSink, Reloc};
use crate::ir::{ExternalName, JumpTable, SourceLoc, TrapCode};
use crate::binemit::stackmap::Stackmap;
use crate::ir::entities::Value;
use crate::ir::{ExternalName, Function, JumpTable, SourceLoc, TrapCode};
use crate::isa::TargetIsa;
use core::ptr::write_unaligned;
/// A `CodeSink` that writes binary machine code directly into memory.
@@ -36,6 +38,7 @@ pub struct MemoryCodeSink<'a> {
offset: isize,
relocs: &'a mut dyn RelocSink,
traps: &'a mut dyn TrapSink,
stackmaps: &'a mut dyn StackmapSink,
/// Information about the generated code and read-only data.
pub info: CodeInfo,
}
@@ -49,6 +52,7 @@ impl<'a> MemoryCodeSink<'a> {
data: *mut u8,
relocs: &'a mut dyn RelocSink,
traps: &'a mut dyn TrapSink,
stackmaps: &'a mut dyn StackmapSink,
) -> Self {
Self {
data,
@@ -61,6 +65,7 @@ impl<'a> MemoryCodeSink<'a> {
},
relocs,
traps,
stackmaps,
}
}
}
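
Since `MemoryCodeSink::new` now takes a fourth, `stackmaps` argument, existing callers need a small update. A minimal sketch with an invented wrapper name; callers that do not want stackmaps can pass the new `NullStackmapSink`:

use cranelift_codegen::binemit::{MemoryCodeSink, NullStackmapSink, NullTrapSink, RelocSink};

// Hypothetical wrapper, for illustration only: forwards the new `stackmaps`
// argument through to the constructor shown above.
fn make_code_sink<'a>(
    buf: &'a mut [u8],
    relocs: &'a mut dyn RelocSink,
    traps: &'a mut NullTrapSink,
    stackmaps: &'a mut NullStackmapSink,
) -> MemoryCodeSink<'a> {
    // Writing through the raw pointer remains the caller's responsibility.
    MemoryCodeSink::new(buf.as_mut_ptr(), relocs, traps, stackmaps)
}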
@@ -149,6 +154,12 @@ impl<'a> CodeSink for MemoryCodeSink<'a> {
self.info.rodata_size = self.offset() - (self.info.jumptables_size + self.info.code_size);
self.info.total_size = self.offset();
}
fn add_stackmap(&mut self, val_list: &[Value], func: &Function, isa: &dyn TargetIsa) {
let ofs = self.offset();
let stackmap = Stackmap::from_values(&val_list, func, isa);
self.stackmaps.add_stackmap(ofs, stackmap);
}
}
/// A `TrapSink` implementation that does nothing, which is convenient when
@@ -158,3 +169,16 @@ pub struct NullTrapSink {}
impl TrapSink for NullTrapSink {
fn trap(&mut self, _offset: CodeOffset, _srcloc: SourceLoc, _code: TrapCode) {}
}
/// A trait for emitting stackmaps.
pub trait StackmapSink {
/// Output a bitmap of the stack representing the live reference variables at this code offset.
fn add_stackmap(&mut self, _: CodeOffset, _: Stackmap);
}
/// Placeholder StackmapSink that does nothing.
pub struct NullStackmapSink {}
impl StackmapSink for NullStackmapSink {
fn add_stackmap(&mut self, _: CodeOffset, _: Stackmap) {}
}
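
Besides the no-op `NullStackmapSink`, a consumer that actually wants the maps can implement the trait itself. A small sketch; the collecting type below is hypothetical and not part of this commit:

use cranelift_codegen::binemit::{CodeOffset, Stackmap, StackmapSink};

// Hypothetical sink, for illustration only: records each stackmap with the
// code offset it was reported at, so a client (e.g. a JIT runtime) can look
// it up later.
struct CollectingStackmapSink {
    stackmaps: Vec<(CodeOffset, Stackmap)>,
}

impl StackmapSink for CollectingStackmapSink {
    fn add_stackmap(&mut self, offset: CodeOffset, map: Stackmap) {
        self.stackmaps.push((offset, map));
    }
}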

cranelift-codegen/src/binemit/mod.rs

@@ -6,13 +6,18 @@
mod memorysink;
mod relaxation;
mod shrink;
mod stackmap;
pub use self::memorysink::{MemoryCodeSink, NullTrapSink, RelocSink, TrapSink};
pub use self::memorysink::{
MemoryCodeSink, NullStackmapSink, NullTrapSink, RelocSink, StackmapSink, TrapSink,
};
pub use self::relaxation::relax_branches;
pub use self::shrink::shrink_instructions;
pub use crate::regalloc::RegDiversions;
pub use self::stackmap::Stackmap;
use crate::ir::entities::Value;
use crate::ir::{ExternalName, Function, Inst, JumpTable, SourceLoc, TrapCode};
use crate::isa::TargetIsa;
pub use crate::regalloc::RegDiversions;
use core::fmt;
#[cfg(feature = "enable-serde")]
use serde::{Deserialize, Serialize};
@@ -141,6 +146,9 @@ pub trait CodeSink {
/// Read-only data output is complete, we're done.
fn end_codegen(&mut self);
/// Add a stackmap at the current code offset.
fn add_stackmap(&mut self, _: &[Value], _: &Function, _: &dyn TargetIsa);
}
/// Report a bad encoding error.
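
To show where the new `CodeSink::add_stackmap` hook would be driven from, here is a hedged sketch of an encoding helper; the function is invented for illustration, since the ISA-specific recipes that call it are not part of this hunk:

use cranelift_codegen::binemit::CodeSink;
use cranelift_codegen::ir::{Function, Value};
use cranelift_codegen::isa::TargetIsa;

// Hypothetical helper, for illustration only: record the live reference values
// at the current offset, then emit the instruction bytes.
fn emit_safepoint_bytes<CS: CodeSink>(
    sink: &mut CS,
    live_refs: &[Value],
    func: &Function,
    isa: &dyn TargetIsa,
) {
    // MemoryCodeSink keys the resulting Stackmap to its current offset.
    sink.add_stackmap(live_refs, func, isa);
    // The actual encoding (sink.put1(), sink.put4(), ...) would follow here.
}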
@@ -157,17 +165,17 @@ pub fn bad_encoding(func: &Function, inst: Inst) -> ! {
///
/// This function is called from the `TargetIsa::emit_function()` implementations with the
/// appropriate instruction emitter.
pub fn emit_function<CS, EI>(func: &Function, emit_inst: EI, sink: &mut CS)
pub fn emit_function<CS, EI>(func: &Function, emit_inst: EI, sink: &mut CS, isa: &dyn TargetIsa)
where
CS: CodeSink,
EI: Fn(&Function, Inst, &mut RegDiversions, &mut CS),
EI: Fn(&Function, Inst, &mut RegDiversions, &mut CS, &dyn TargetIsa),
{
let mut divert = RegDiversions::new();
for ebb in func.layout.ebbs() {
divert.clear();
debug_assert_eq!(func.offsets[ebb], sink.offset());
for inst in func.layout.ebb_insts(ebb) {
emit_inst(func, inst, &mut divert, sink);
emit_inst(func, inst, &mut divert, sink, isa);
}
}
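
Because `emit_function` and its per-instruction callback both gain a `&dyn TargetIsa` parameter, every emitter picks up one extra argument. A sketch with an invented no-op emitter, just to show the new arity:

use cranelift_codegen::binemit::{emit_function, CodeSink, RegDiversions};
use cranelift_codegen::ir::{Function, Inst};
use cranelift_codegen::isa::TargetIsa;

// Hypothetical no-op emitter, for illustration only; a real backend would
// encode `inst` into `sink` here, now with access to `isa`.
fn emit_inst_noop<CS: CodeSink>(
    _func: &Function,
    _inst: Inst,
    _divert: &mut RegDiversions,
    _sink: &mut CS,
    _isa: &dyn TargetIsa,
) {
}

fn emit_with_isa<CS: CodeSink>(func: &Function, sink: &mut CS, isa: &dyn TargetIsa) {
    emit_function(func, emit_inst_noop::<CS>, sink, isa);
}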

cranelift-codegen/src/binemit/stackmap.rs (new file)

@@ -0,0 +1,122 @@
use crate::bitset::BitSet;
use crate::ir;
use crate::isa::TargetIsa;
use std::vec::Vec;
/// Wrapper class for longer bit vectors that cannot be represented by a single BitSet.
#[derive(Clone, Debug)]
pub struct Stackmap {
bitmap: Vec<BitSet<u32>>,
}
impl Stackmap {
/// Create a stackmap based on where references are located on a function's stack.
pub fn from_values(
args: &[ir::entities::Value],
func: &ir::Function,
isa: &dyn TargetIsa,
) -> Self {
let loc = &func.locations;
let mut live_ref_in_stack_slot = std::collections::HashSet::new();
// References can be in registers, and live register values are pushed onto the stack before calls and traps.
// TODO: Implement register maps. If a register containing a reference is spilled and reused after a safepoint,
// it could contain a stale reference value if the garbage collector relocated the value.
for val in args {
if let Some(value_loc) = loc.get(*val) {
match *value_loc {
ir::ValueLoc::Stack(stack_slot) => live_ref_in_stack_slot.insert(stack_slot),
_ => false,
};
}
}
// SpiderMonkey stackmap structure:
// <trap reg dump> + <general spill> + <frame> + <inbound args>
// Bit vector goes from lower addresses to higher addresses.
// TODO: Get trap register layout from Spidermonkey and prepend to bitvector below.
let stack = &func.stack_slots;
let frame_size = stack.frame_size.unwrap();
let word_size = ir::stackslot::StackSize::from(isa.pointer_bytes());
let num_words = (frame_size / word_size) as usize;
let mut vec = std::vec::Vec::with_capacity(num_words);
vec.resize(num_words, false);
// Frame (includes spills and inbound args).
for (ss, ssd) in stack.iter() {
if live_ref_in_stack_slot.contains(&ss) {
// Assumption: a greater offset magnitude implies a higher address.
let index = (((ssd.offset.unwrap().abs() as u32) - ssd.size) / word_size) as usize;
vec[index] = true;
}
}
Stackmap::from_vec(&vec)
}
/// Create a vec of Bitsets from a vec of bools.
pub fn from_vec(vec: &Vec<bool>) -> Self {
let mut rem = vec.len();
let num_word = ((rem as f32) / 32.0).ceil() as usize;
let mut bitmap = Vec::with_capacity(num_word);
for i in 0..num_word {
let mut curr_word = 0;
let count = if rem > 32 { 32 } else { rem };
for j in 0..count {
if vec[i * 32 + j] {
curr_word |= 1 << j;
}
}
bitmap.push(BitSet::<u32>(curr_word));
rem -= count;
}
Self { bitmap }
}
/// Returns a specified bit.
pub fn get_bit(&self, bit_index: usize) -> bool {
assert!(bit_index < 32 * self.bitmap.len());
let word_index = bit_index / 32;
let word_offset = (bit_index % 32) as u8;
self.bitmap[word_index].contains(word_offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn stackmaps() {
let vec: Vec<bool> = Vec::new();
assert!(Stackmap::from_vec(&vec).bitmap.is_empty());
let mut vec: [bool; 32] = Default::default();
let set_true_idx = [5, 7, 24, 31];
for idx in set_true_idx.iter() {
vec[*idx] = true;
}
let mut vec = vec.to_vec();
assert_eq!(
vec![BitSet::<u32>(2164261024)],
Stackmap::from_vec(&vec).bitmap
);
vec.push(false);
vec.push(true);
let res = Stackmap::from_vec(&vec);
assert_eq!(
vec![BitSet::<u32>(2164261024), BitSet::<u32>(2)],
res.bitmap
);
assert!(res.get_bit(5));
assert!(res.get_bit(31));
assert!(res.get_bit(33));
assert!(!res.get_bit(1));
}
}
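
As an illustrative check of the packing `from_vec` uses (bit `j` of word `i` is set when `vec[i * 32 + j]` is true), the indices {5, 7, 24, 31} from the first test word pack to 0x8100_00A0, i.e. the expected 2164261024; similarly, in `from_values`, a live reference in an 8-byte stack slot at offset -16 with an 8-byte word size lands at bit index (16 - 8) / 8 = 1. The arithmetic below is standalone and not part of the test module above:

fn main() {
    // Set bit j for each index, exactly as from_vec ORs 1 << j into a word.
    let word = [5u32, 7, 24, 31]
        .iter()
        .fold(0u32, |acc, &j| acc | (1 << j));
    assert_eq!(word, 2_164_261_024);
    assert_eq!(word, 0x8100_00A0);
}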