Merge pull request #2111 from fitzgen/rename-stackmap-to-stack-map

Rename "Stackmap" to "StackMap"
Nick Fitzgerald, 2020-08-07 10:46:38 -07:00 (committed by GitHub)
44 changed files with 218 additions and 211 deletions


@@ -14,7 +14,7 @@
//! relocations to a `RelocSink` trait object. Relocations are less frequent than the
//! `CodeSink::put*` methods, so the performance impact of the virtual callbacks is less severe.
use super::{Addend, CodeInfo, CodeOffset, CodeSink, Reloc};
use crate::binemit::stackmap::Stackmap;
use crate::binemit::stack_map::StackMap;
use crate::ir::entities::Value;
use crate::ir::{ConstantOffset, ExternalName, Function, JumpTable, Opcode, SourceLoc, TrapCode};
use crate::isa::TargetIsa;
@@ -38,7 +38,7 @@ pub struct MemoryCodeSink<'a> {
offset: isize,
relocs: &'a mut dyn RelocSink,
traps: &'a mut dyn TrapSink,
stackmaps: &'a mut dyn StackmapSink,
stack_maps: &'a mut dyn StackMapSink,
/// Information about the generated code and read-only data.
pub info: CodeInfo,
}
@@ -54,7 +54,7 @@ impl<'a> MemoryCodeSink<'a> {
data: *mut u8,
relocs: &'a mut dyn RelocSink,
traps: &'a mut dyn TrapSink,
stackmaps: &'a mut dyn StackmapSink,
stack_maps: &'a mut dyn StackMapSink,
) -> Self {
Self {
data,
@@ -67,7 +67,7 @@ impl<'a> MemoryCodeSink<'a> {
},
relocs,
traps,
stackmaps,
stack_maps,
}
}
}
@@ -182,10 +182,10 @@ impl<'a> CodeSink for MemoryCodeSink<'a> {
self.info.total_size = self.offset();
}
fn add_stackmap(&mut self, val_list: &[Value], func: &Function, isa: &dyn TargetIsa) {
fn add_stack_map(&mut self, val_list: &[Value], func: &Function, isa: &dyn TargetIsa) {
let ofs = self.offset();
let stackmap = Stackmap::from_values(&val_list, func, isa);
self.stackmaps.add_stackmap(ofs, stackmap);
let stack_map = StackMap::from_values(&val_list, func, isa);
self.stack_maps.add_stack_map(ofs, stack_map);
}
fn add_call_site(&mut self, opcode: Opcode, loc: SourceLoc) {
@@ -227,15 +227,15 @@ impl TrapSink for NullTrapSink {
fn trap(&mut self, _offset: CodeOffset, _srcloc: SourceLoc, _code: TrapCode) {}
}
/// A trait for emitting stackmaps.
pub trait StackmapSink {
/// A trait for emitting stack maps.
pub trait StackMapSink {
/// Output a bitmap of the stack representing the live reference variables at this code offset.
fn add_stackmap(&mut self, _: CodeOffset, _: Stackmap);
fn add_stack_map(&mut self, _: CodeOffset, _: StackMap);
}
/// Placeholder StackmapSink that does nothing.
pub struct NullStackmapSink {}
/// Placeholder StackMapSink that does nothing.
pub struct NullStackMapSink {}
impl StackmapSink for NullStackmapSink {
fn add_stackmap(&mut self, _: CodeOffset, _: Stackmap) {}
impl StackMapSink for NullStackMapSink {
fn add_stack_map(&mut self, _: CodeOffset, _: StackMap) {}
}
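
For an embedder that wants to keep the emitted stack maps instead of discarding them, a sink along the following lines can be passed wherever a `StackMapSink` is expected. This is a minimal sketch: the `CollectingStackMapSink` name and its `Vec`-based storage are illustrative, not part of this change.

```rust
use cranelift_codegen::binemit::{CodeOffset, StackMap, StackMapSink};

/// Illustrative sink that records every (code offset, stack map) pair it is handed.
pub struct CollectingStackMapSink {
    pub stack_maps: Vec<(CodeOffset, StackMap)>,
}

impl StackMapSink for CollectingStackMapSink {
    fn add_stack_map(&mut self, offset: CodeOffset, map: StackMap) {
        self.stack_maps.push((offset, map));
    }
}
```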


@@ -6,15 +6,15 @@
mod memorysink;
mod relaxation;
mod shrink;
mod stackmap;
mod stack_map;
pub use self::memorysink::{
MemoryCodeSink, NullRelocSink, NullStackmapSink, NullTrapSink, RelocSink, StackmapSink,
MemoryCodeSink, NullRelocSink, NullStackMapSink, NullTrapSink, RelocSink, StackMapSink,
TrapSink,
};
pub use self::relaxation::relax_branches;
pub use self::shrink::shrink_instructions;
pub use self::stackmap::Stackmap;
pub use self::stack_map::StackMap;
use crate::ir::entities::Value;
use crate::ir::{
ConstantOffset, ExternalName, Function, Inst, JumpTable, Opcode, SourceLoc, TrapCode,
@@ -164,8 +164,8 @@ pub trait CodeSink {
/// Read-only data output is complete, we're done.
fn end_codegen(&mut self);
/// Add a stackmap at the current code offset.
fn add_stackmap(&mut self, _: &[Value], _: &Function, _: &dyn TargetIsa);
/// Add a stack map at the current code offset.
fn add_stack_map(&mut self, _: &[Value], _: &Function, _: &dyn TargetIsa);
/// Add a call site for a call with the given opcode, returning at the current offset.
fn add_call_site(&mut self, _: Opcode, _: SourceLoc) {


@@ -59,7 +59,7 @@ const NUM_BITS: usize = core::mem::size_of::<Num>() * 8;
/// | ... |
/// ```
///
/// An individual `Stackmap` is associated with just one instruction pointer
/// An individual `StackMap` is associated with just one instruction pointer
/// within the function, contains the size of the stack frame, and represents
/// the stack frame as a bitmap. There is one bit per word in the stack frame,
/// and if the bit is set, then the word contains a live GC reference.
@@ -70,13 +70,14 @@ const NUM_BITS: usize = core::mem::size_of::<Num>() * 8;
/// callee's stack map.
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "enable-serde", derive(serde::Deserialize, serde::Serialize))]
pub struct Stackmap {
pub struct StackMap {
bitmap: Vec<BitSet<Num>>,
mapped_words: u32,
}
impl Stackmap {
/// Create a stackmap based on where references are located on a function's stack.
impl StackMap {
/// Create a `StackMap` based on where references are located on a
/// function's stack.
pub fn from_values(
args: &[ir::entities::Value],
func: &ir::Function,
@@ -101,7 +102,7 @@ impl Stackmap {
let stack = &func.stack_slots;
let info = func.stack_slots.layout_info.unwrap();
// Refer to the doc comment for `Stackmap` above to understand the
// Refer to the doc comment for `StackMap` above to understand the
// bitmap representation used here.
let map_size = (info.frame_size + info.inbound_args_size) as usize;
let word_size = isa.pointer_bytes() as usize;
@@ -171,9 +172,9 @@ mod tests {
use super::*;
#[test]
fn stackmaps() {
fn stack_maps() {
let vec: Vec<bool> = Vec::new();
assert!(Stackmap::from_slice(&vec).bitmap.is_empty());
assert!(StackMap::from_slice(&vec).bitmap.is_empty());
let mut vec: [bool; NUM_BITS] = Default::default();
let set_true_idx = [5, 7, 24, 31];
@@ -185,12 +186,12 @@ mod tests {
let mut vec = vec.to_vec();
assert_eq!(
vec![BitSet::<Num>(2164261024)],
Stackmap::from_slice(&vec).bitmap
StackMap::from_slice(&vec).bitmap
);
vec.push(false);
vec.push(true);
let res = Stackmap::from_slice(&vec);
let res = StackMap::from_slice(&vec);
assert_eq!(
vec![BitSet::<Num>(2164261024), BitSet::<Num>(2)],
res.bitmap
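
To make the bitmap encoding described above concrete: each `bool` handed to `from_slice` stands for one pointer-sized word of the frame, and `true` marks a word holding a live reference. A small sketch, assuming `from_slice` and a `mapped_words` accessor are publicly available as in later Cranelift releases; the three-word frame is made up for illustration.

```rust
use cranelift_codegen::binemit::StackMap;

fn main() {
    // A three-word frame in which only the middle word holds a live GC reference.
    let frame = [false, true, false];
    let map = StackMap::from_slice(&frame);

    // One mapped word per bool passed in; only word 1 is marked live.
    assert_eq!(map.mapped_words(), 3);
}
```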


@@ -10,7 +10,7 @@
//! single ISA instance.
use crate::binemit::{
relax_branches, shrink_instructions, CodeInfo, MemoryCodeSink, RelocSink, StackmapSink,
relax_branches, shrink_instructions, CodeInfo, MemoryCodeSink, RelocSink, StackMapSink,
TrapSink,
};
use crate::dce::do_dce;
@@ -127,13 +127,19 @@ impl Context {
mem: &mut Vec<u8>,
relocs: &mut dyn RelocSink,
traps: &mut dyn TrapSink,
stackmaps: &mut dyn StackmapSink,
stack_maps: &mut dyn StackMapSink,
) -> CodegenResult<CodeInfo> {
let info = self.compile(isa)?;
let old_len = mem.len();
mem.resize(old_len + info.total_size as usize, 0);
let new_info = unsafe {
self.emit_to_memory(isa, mem.as_mut_ptr().add(old_len), relocs, traps, stackmaps)
self.emit_to_memory(
isa,
mem.as_mut_ptr().add(old_len),
relocs,
traps,
stack_maps,
)
};
debug_assert!(new_info == info);
Ok(info)
@@ -222,10 +228,10 @@ impl Context {
mem: *mut u8,
relocs: &mut dyn RelocSink,
traps: &mut dyn TrapSink,
stackmaps: &mut dyn StackmapSink,
stack_maps: &mut dyn StackMapSink,
) -> CodeInfo {
let _tt = timing::binemit();
let mut sink = MemoryCodeSink::new(mem, relocs, traps, stackmaps);
let mut sink = MemoryCodeSink::new(mem, relocs, traps, stack_maps);
if let Some(ref result) = &self.mach_compile_result {
result.buffer.emit(&mut sink);
} else {
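
Callers that do not care about relocations, traps, or stack maps can pass the null sinks; after this change `NullStackMapSink` goes where `NullStackmapSink` used to. A sketch, assuming the surrounding method is `Context::compile_and_emit` and that the caller already has a laid-out `Function` and a `TargetIsa`:

```rust
use cranelift_codegen::binemit::{NullRelocSink, NullStackMapSink, NullTrapSink};
use cranelift_codegen::ir::Function;
use cranelift_codegen::isa::TargetIsa;
use cranelift_codegen::{CodegenResult, Context};

fn emit(func: Function, isa: &dyn TargetIsa) -> CodegenResult<Vec<u8>> {
    let mut ctx = Context::for_function(func);
    let mut code = Vec::new();
    let mut relocs = NullRelocSink {};
    let mut traps = NullTrapSink {};
    let mut stack_maps = NullStackMapSink {};
    ctx.compile_and_emit(isa, &mut code, &mut relocs, &mut traps, &mut stack_maps)?;
    Ok(code)
}
```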


@@ -63,7 +63,7 @@ pub fn is_constant_64bit(func: &Function, inst: Inst) -> Option<u64> {
}
/// Is the given instruction a safepoint (i.e., potentially causes a GC, depending on the
/// embedding, and so requires reftyped values to be enumerated with a stackmap)?
/// embedding, and so requires reftyped values to be enumerated with a stack map)?
pub fn is_safepoint(func: &Function, inst: Inst) -> bool {
let op = func.dfg[inst].opcode();
op.is_resumable_trap() || op.is_call()


@@ -90,7 +90,7 @@
//! - Return v1 in memory at `[P+8]`.
//! - Return v0 in memory at `[P+16]`.
use crate::binemit::Stackmap;
use crate::binemit::StackMap;
use crate::ir;
use crate::ir::types;
use crate::ir::types::*;
@@ -1074,10 +1074,10 @@ impl ABIBody for AArch64ABIBody {
store_stack(MemArg::NominalSPOffset(sp_off, ty), from_reg, ty)
}
fn spillslots_to_stackmap(&self, slots: &[SpillSlot], state: &EmitState) -> Stackmap {
fn spillslots_to_stack_map(&self, slots: &[SpillSlot], state: &EmitState) -> StackMap {
assert!(state.virtual_sp_offset >= 0);
trace!(
"spillslots_to_stackmap: slots = {:?}, state = {:?}",
"spillslots_to_stack_map: slots = {:?}, state = {:?}",
slots,
state
);
@@ -1094,7 +1094,7 @@ impl ABIBody for AArch64ABIBody {
bits[first_spillslot_word + slot] = true;
}
Stackmap::from_slice(&bits[..])
StackMap::from_slice(&bits[..])
}
fn gen_prologue(&mut self) -> Vec<Inst> {


@@ -1,6 +1,6 @@
//! AArch64 ISA: binary code emission.
use crate::binemit::{CodeOffset, Reloc, Stackmap};
use crate::binemit::{CodeOffset, Reloc, StackMap};
use crate::ir::constant::ConstantData;
use crate::ir::types::*;
use crate::ir::TrapCode;
@@ -429,8 +429,8 @@ pub struct EmitState {
pub(crate) virtual_sp_offset: i64,
/// Offset of FP from nominal-SP.
pub(crate) nominal_sp_to_fp: i64,
/// Safepoint stackmap for upcoming instruction, as provided to `pre_safepoint()`.
stackmap: Option<Stackmap>,
/// Safepoint stack map for upcoming instruction, as provided to `pre_safepoint()`.
stack_map: Option<StackMap>,
}
impl MachInstEmitState<Inst> for EmitState {
@@ -438,22 +438,22 @@ impl MachInstEmitState<Inst> for EmitState {
EmitState {
virtual_sp_offset: 0,
nominal_sp_to_fp: abi.frame_size() as i64,
stackmap: None,
stack_map: None,
}
}
fn pre_safepoint(&mut self, stackmap: Stackmap) {
self.stackmap = Some(stackmap);
fn pre_safepoint(&mut self, stack_map: StackMap) {
self.stack_map = Some(stack_map);
}
}
impl EmitState {
fn take_stackmap(&mut self) -> Option<Stackmap> {
self.stackmap.take()
fn take_stack_map(&mut self) -> Option<StackMap> {
self.stack_map.take()
}
fn clear_post_insn(&mut self) {
self.stackmap = None;
self.stack_map = None;
}
}
@@ -1854,8 +1854,8 @@ impl MachInstEmit for Inst {
// Noop; this is just a placeholder for epilogues.
}
&Inst::Call { ref info } => {
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(StackmapExtent::UpcomingBytes(4), s);
if let Some(s) = state.take_stack_map() {
sink.add_stack_map(StackMapExtent::UpcomingBytes(4), s);
}
sink.add_reloc(info.loc, Reloc::Arm64Call, &info.dest, 0);
sink.put4(enc_jump26(0b100101, 0));
@@ -1864,8 +1864,8 @@ impl MachInstEmit for Inst {
}
}
&Inst::CallInd { ref info } => {
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(StackmapExtent::UpcomingBytes(4), s);
if let Some(s) = state.take_stack_map() {
sink.add_stack_map(StackMapExtent::UpcomingBytes(4), s);
}
sink.put4(0b1101011_0001_11111_000000_00000_00000 | (machreg_to_gpr(info.rn) << 5));
if info.opcode.is_call() {
@@ -1922,8 +1922,8 @@ impl MachInstEmit for Inst {
&Inst::Udf { trap_info } => {
let (srcloc, code) = trap_info;
sink.add_trap(srcloc, code);
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(StackmapExtent::UpcomingBytes(4), s);
if let Some(s) = state.take_stack_map() {
sink.add_stack_map(StackMapExtent::UpcomingBytes(4), s);
}
sink.put4(0xd4a00000);
}


@@ -82,7 +82,7 @@ impl CodeSink for TestCodeSink {
fn end_codegen(&mut self) {}
fn add_stackmap(&mut self, _val_list: &[Value], _func: &Function, _isa: &dyn TargetIsa) {}
fn add_stack_map(&mut self, _val_list: &[Value], _func: &Function, _isa: &dyn TargetIsa) {}
fn add_call_site(&mut self, _opcode: Opcode, _srcloc: SourceLoc) {}
}


@@ -1,6 +1,6 @@
//! Implementation of the standard x64 ABI.
use crate::binemit::Stackmap;
use crate::binemit::StackMap;
use crate::ir::{self, types, ArgumentExtension, StackSlot, Type};
use crate::isa::{x64::inst::*, CallConv};
use crate::machinst::*;
@@ -492,10 +492,10 @@ impl ABIBody for X64ABIBody {
)
}
fn spillslots_to_stackmap(&self, slots: &[SpillSlot], state: &EmitState) -> Stackmap {
fn spillslots_to_stack_map(&self, slots: &[SpillSlot], state: &EmitState) -> StackMap {
assert!(state.virtual_sp_offset >= 0);
trace!(
"spillslots_to_stackmap: slots = {:?}, state = {:?}",
"spillslots_to_stack_map: slots = {:?}, state = {:?}",
slots,
state
);
@@ -511,7 +511,7 @@ impl ABIBody for X64ABIBody {
bits[first_spillslot_word + slot] = true;
}
Stackmap::from_slice(&bits[..])
StackMap::from_slice(&bits[..])
}
fn gen_prologue(&mut self) -> Vec<Inst> {


@@ -1349,8 +1349,8 @@ pub(crate) fn emit(
Inst::CallKnown {
dest, loc, opcode, ..
} => {
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(StackmapExtent::UpcomingBytes(5), s);
if let Some(s) = state.take_stack_map() {
sink.add_stack_map(StackMapExtent::UpcomingBytes(5), s);
}
sink.put1(0xE8);
// The addend adjusts for the difference between the end of the instruction and the
@@ -1393,8 +1393,8 @@ pub(crate) fn emit(
);
}
}
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(StackmapExtent::StartedAtOffset(start_offset), s);
if let Some(s) = state.take_stack_map() {
sink.add_stack_map(StackMapExtent::StartedAtOffset(start_offset), s);
}
if opcode.is_call() {
sink.add_call_site(*loc, *opcode);
@@ -2432,8 +2432,8 @@ pub(crate) fn emit(
Inst::Ud2 { trap_info } => {
sink.add_trap(trap_info.0, trap_info.1);
if let Some(s) = state.take_stackmap() {
sink.add_stackmap(StackmapExtent::UpcomingBytes(2), s);
if let Some(s) = state.take_stack_map() {
sink.add_stack_map(StackMapExtent::UpcomingBytes(2), s);
}
sink.put1(0x0f);
sink.put1(0x0b);


@@ -4,7 +4,7 @@
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
use crate::binemit::{CodeOffset, Stackmap};
use crate::binemit::{CodeOffset, StackMap};
use crate::ir::{types, ExternalName, Opcode, SourceLoc, TrapCode, Type};
use crate::machinst::*;
use crate::{settings, settings::Flags, CodegenError, CodegenResult};
@@ -2225,8 +2225,8 @@ pub struct EmitState {
pub(crate) virtual_sp_offset: i64,
/// Offset of FP from nominal-SP.
pub(crate) nominal_sp_to_fp: i64,
/// Safepoint stackmap for upcoming instruction, as provided to `pre_safepoint()`.
stackmap: Option<Stackmap>,
/// Safepoint stack map for upcoming instruction, as provided to `pre_safepoint()`.
stack_map: Option<StackMap>,
}
impl MachInstEmit for Inst {
@@ -2246,22 +2246,22 @@ impl MachInstEmitState<Inst> for EmitState {
EmitState {
virtual_sp_offset: 0,
nominal_sp_to_fp: abi.frame_size() as i64,
stackmap: None,
stack_map: None,
}
}
fn pre_safepoint(&mut self, stackmap: Stackmap) {
self.stackmap = Some(stackmap);
fn pre_safepoint(&mut self, stack_map: StackMap) {
self.stack_map = Some(stack_map);
}
}
impl EmitState {
fn take_stackmap(&mut self) -> Option<Stackmap> {
self.stackmap.take()
fn take_stack_map(&mut self) -> Option<StackMap> {
self.stack_map.take()
}
fn clear_post_insn(&mut self) {
self.stackmap = None;
self.stack_map = None;
}
}


@@ -1,6 +1,6 @@
//! ABI definitions.
use crate::binemit::Stackmap;
use crate::binemit::StackMap;
use crate::ir::StackSlot;
use crate::machinst::*;
use crate::settings;
@@ -96,14 +96,14 @@ pub trait ABIBody {
/// Store to a spillslot.
fn store_spillslot(&self, slot: SpillSlot, ty: Type, from_reg: Reg) -> Self::I;
/// Generate a stackmap, given a list of spillslots and the emission state
/// Generate a stack map, given a list of spillslots and the emission state
/// at a given program point (prior to emission of the safepointing
/// instruction).
fn spillslots_to_stackmap(
fn spillslots_to_stack_map(
&self,
slots: &[SpillSlot],
state: &<Self::I as MachInstEmit>::State,
) -> Stackmap;
) -> StackMap;
/// Generate a prologue, post-regalloc. This should include any stack
/// frame or other setup necessary to use the other methods (`load_arg`,


@@ -140,7 +140,7 @@
//! Given these invariants, we argue why each optimization preserves execution
//! semantics below (grep for "Preserves execution semantics").
use crate::binemit::{Addend, CodeOffset, CodeSink, Reloc, Stackmap};
use crate::binemit::{Addend, CodeOffset, CodeSink, Reloc, StackMap};
use crate::ir::{ExternalName, Opcode, SourceLoc, TrapCode};
use crate::machinst::{BlockIndex, MachInstLabelUse, VCodeInst};
use crate::timing;
@@ -169,8 +169,8 @@ pub struct MachBuffer<I: VCodeInst> {
call_sites: SmallVec<[MachCallSite; 16]>,
/// Any source location mappings referring to this code.
srclocs: SmallVec<[MachSrcLoc; 64]>,
/// Any stackmaps referring to this code.
stackmaps: SmallVec<[MachStackMap; 8]>,
/// Any stack maps referring to this code.
stack_maps: SmallVec<[MachStackMap; 8]>,
/// The current source location in progress (after `start_srcloc()` and
/// before `end_srcloc()`). This is a (start_offset, src_loc) tuple.
cur_srcloc: Option<(CodeOffset, SourceLoc)>,
@@ -234,8 +234,8 @@ pub struct MachBufferFinalized {
call_sites: SmallVec<[MachCallSite; 16]>,
/// Any source location mappings referring to this code.
srclocs: SmallVec<[MachSrcLoc; 64]>,
/// Any stackmaps referring to this code.
stackmaps: SmallVec<[MachStackMap; 8]>,
/// Any stack maps referring to this code.
stack_maps: SmallVec<[MachStackMap; 8]>,
}
static UNKNOWN_LABEL_OFFSET: CodeOffset = 0xffff_ffff;
@@ -261,8 +261,8 @@ impl MachLabel {
}
}
/// A stackmap extent, when creating a stackmap.
pub enum StackmapExtent {
/// A stack map extent, when creating a stack map.
pub enum StackMapExtent {
/// The stack map starts at this instruction, and ends after the number of upcoming bytes
/// (note: this is a code offset diff).
UpcomingBytes(CodeOffset),
@@ -282,7 +282,7 @@ impl<I: VCodeInst> MachBuffer<I> {
traps: SmallVec::new(),
call_sites: SmallVec::new(),
srclocs: SmallVec::new(),
stackmaps: SmallVec::new(),
stack_maps: SmallVec::new(),
cur_srcloc: None,
label_offsets: SmallVec::new(),
label_aliases: SmallVec::new(),
@@ -1151,7 +1151,7 @@ impl<I: VCodeInst> MachBuffer<I> {
traps: self.traps,
call_sites: self.call_sites,
srclocs: self.srclocs,
stackmaps: self.stackmaps,
stack_maps: self.stack_maps,
}
}
@@ -1212,28 +1212,28 @@ impl<I: VCodeInst> MachBuffer<I> {
}
}
/// Add stackmap metadata for this program point: a set of stack offsets (from SP upward) that
/// contain live references.
/// Add stack map metadata for this program point: a set of stack offsets
/// (from SP upward) that contain live references.
///
/// The `offset_to_fp` value is the offset from the nominal SP (at which the `stack_offsets`
/// are based) and the FP value. By subtracting `offset_to_fp` from each `stack_offsets`
/// element, one can obtain live-reference offsets from FP instead.
pub fn add_stackmap(&mut self, extent: StackmapExtent, stackmap: Stackmap) {
pub fn add_stack_map(&mut self, extent: StackMapExtent, stack_map: StackMap) {
let (start, end) = match extent {
StackmapExtent::UpcomingBytes(insn_len) => {
StackMapExtent::UpcomingBytes(insn_len) => {
let start_offset = self.cur_offset();
(start_offset, start_offset + insn_len)
}
StackmapExtent::StartedAtOffset(start_offset) => {
StackMapExtent::StartedAtOffset(start_offset) => {
let end_offset = self.cur_offset();
debug_assert!(end_offset >= start_offset);
(start_offset, end_offset)
}
};
self.stackmaps.push(MachStackMap {
self.stack_maps.push(MachStackMap {
offset: start,
offset_end: end,
stackmap,
stack_map,
});
}
}
@@ -1295,9 +1295,9 @@ impl MachBufferFinalized {
sink.end_codegen();
}
/// Get the stackmap metadata for this code.
pub fn stackmaps(&self) -> &[MachStackMap] {
&self.stackmaps[..]
/// Get the stack map metadata for this code.
pub fn stack_maps(&self) -> &[MachStackMap] {
&self.stack_maps[..]
}
}
@@ -1378,17 +1378,17 @@ pub struct MachSrcLoc {
pub loc: SourceLoc,
}
/// Record of stackmap metadata: stack offsets containing references.
/// Record of stack map metadata: stack offsets containing references.
#[derive(Clone, Debug)]
pub struct MachStackMap {
/// The code offset at which this stackmap applies.
/// The code offset at which this stack map applies.
pub offset: CodeOffset,
/// The code offset just past the "end" of the instruction: that is, the
/// offset of the first byte of the following instruction, or equivalently,
/// the start offset plus the instruction length.
pub offset_end: CodeOffset,
/// The Stackmap itself.
pub stackmap: Stackmap,
/// The stack map itself.
pub stack_map: StackMap,
}
/// Record of branch instruction in the buffer, to facilitate editing.
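
With the new-style backends, the recorded stack maps can be read back from the finalized buffer after emission; each entry carries the renamed `offset`, `offset_end`, and `stack_map` fields. A sketch using the `stack_maps()` accessor shown above, assuming `machinst::buffer` is reachable from the embedder as it is in the crate's layout:

```rust
use cranelift_codegen::machinst::buffer::MachBufferFinalized;

fn dump_stack_maps(buffer: &MachBufferFinalized) {
    for entry in buffer.stack_maps() {
        // Each MachStackMap records the code range it covers and the map itself.
        println!(
            "stack map for code offsets {:#x}..{:#x}: {:?}",
            entry.offset, entry.offset_end, entry.stack_map
        );
    }
}
```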


@@ -23,7 +23,7 @@ where
// Build the lowering context.
let lower = Lower::new(f, abi, block_order)?;
// Lower the IR.
let (mut vcode, stackmap_request_info) = {
let (mut vcode, stack_map_request_info) = {
let _tt = timing::vcode_lower();
lower.lower(b)?
};
@@ -62,11 +62,11 @@ where
// If either there are no reference-typed values, or else there are
// but there are no safepoints at which we need to know about them,
// then we don't need stackmaps.
let sri = if stackmap_request_info.reftyped_vregs.len() > 0
&& stackmap_request_info.safepoint_insns.len() > 0
// then we don't need stack maps.
let sri = if stack_map_request_info.reftyped_vregs.len() > 0
&& stack_map_request_info.safepoint_insns.len() > 0
{
Some(&stackmap_request_info)
Some(&stack_map_request_info)
} else {
None
};


@@ -778,10 +778,10 @@ impl<'func, I: VCodeInst> Lower<'func, I> {
self.copy_bbs_to_vcode();
// Now that we've emitted all instructions into the VCodeBuilder, let's build the VCode.
let (vcode, stackmap_info) = self.vcode.build();
let (vcode, stack_map_info) = self.vcode.build();
debug!("built vcode: {:?}", vcode);
Ok((vcode, stackmap_info))
Ok((vcode, stack_map_info))
}
/// Get the actual inputs for a value. This is the implementation for


@@ -96,7 +96,7 @@
//!
//! ```
use crate::binemit::{CodeInfo, CodeOffset, Stackmap};
use crate::binemit::{CodeInfo, CodeOffset, StackMap};
use crate::ir::condcodes::IntCC;
use crate::ir::{Function, Type};
use crate::result::CodegenResult;
@@ -279,7 +279,7 @@ pub trait MachInstEmitState<I: MachInst>: Default + Clone + Debug {
fn new(abi: &dyn ABIBody<I = I>) -> Self;
/// Update the emission state before emitting an instruction that is a
/// safepoint.
fn pre_safepoint(&mut self, _stackmap: Stackmap) {}
fn pre_safepoint(&mut self, _stack_map: StackMap) {}
}
/// The result of a `MachBackend::compile_function()` call. Contains machine


@@ -94,7 +94,7 @@ pub struct VCode<I: VCodeInst> {
safepoint_insns: Vec<InsnIndex>,
/// For each safepoint entry in `safepoint_insns`, a list of `SpillSlot`s.
/// These are used to generate actual stackmaps at emission. Filled in
/// These are used to generate actual stack maps at emission. Filled in
/// post-regalloc.
safepoint_slots: Vec<Vec<SpillSlot>>,
}
@@ -117,8 +117,8 @@ pub struct VCodeBuilder<I: VCodeInst> {
/// In-progress VCode.
vcode: VCode<I>,
/// In-progress stackmap-request info.
stackmap_info: StackmapRequestInfo,
/// In-progress stack map-request info.
stack_map_info: StackmapRequestInfo,
/// Index of the last block-start in the vcode.
block_start: InsnIndex,
@@ -135,7 +135,7 @@ impl<I: VCodeInst> VCodeBuilder<I> {
pub fn new(abi: Box<dyn ABIBody<I = I>>, block_order: BlockLoweringOrder) -> VCodeBuilder<I> {
let reftype_class = I::ref_type_regclass(abi.flags());
let vcode = VCode::new(abi, block_order);
let stackmap_info = StackmapRequestInfo {
let stack_map_info = StackmapRequestInfo {
reftype_class,
reftyped_vregs: vec![],
safepoint_insns: vec![],
@@ -143,7 +143,7 @@ impl<I: VCodeInst> VCodeBuilder<I> {
VCodeBuilder {
vcode,
stackmap_info,
stack_map_info,
block_start: 0,
succ_start: 0,
cur_srcloc: SourceLoc::default(),
@@ -169,7 +169,7 @@ impl<I: VCodeInst> VCodeBuilder<I> {
}
self.vcode.vreg_types[vreg.get_index()] = ty;
if is_reftype(ty) {
self.stackmap_info.reftyped_vregs.push(vreg);
self.stack_map_info.reftyped_vregs.push(vreg);
self.vcode.have_ref_values = true;
}
}
@@ -222,7 +222,7 @@ impl<I: VCodeInst> VCodeBuilder<I> {
self.vcode.insts.push(insn);
self.vcode.srclocs.push(self.cur_srcloc);
if is_safepoint {
self.stackmap_info
self.stack_map_info
.safepoint_insns
.push(InstIx::new((self.vcode.insts.len() - 1) as u32));
}
@@ -239,12 +239,12 @@ impl<I: VCodeInst> VCodeBuilder<I> {
}
/// Build the final VCode, returning the vcode itself as well as auxiliary
/// information, such as the stackmap request information.
/// information, such as the stack map request information.
pub fn build(self) -> (VCode<I>, StackmapRequestInfo) {
// TODO: come up with an abstraction for "vcode and auxiliary data". The
// auxiliary data needs to be separate from the vcode so that it can be
// referenced as the vcode is mutated (e.g. by the register allocator).
(self.vcode, self.stackmap_info)
(self.vcode, self.stack_map_info)
}
}
@@ -460,11 +460,11 @@ impl<I: VCodeInst> VCode<I> {
&& self.safepoint_insns[safepoint_idx] == iix
{
if self.safepoint_slots[safepoint_idx].len() > 0 {
let stackmap = self.abi.spillslots_to_stackmap(
let stack_map = self.abi.spillslots_to_stack_map(
&self.safepoint_slots[safepoint_idx][..],
&state,
);
state.pre_safepoint(stackmap);
state.pre_safepoint(stack_map);
}
safepoint_idx += 1;
}


@@ -14,7 +14,7 @@ use crate::regalloc::coloring::Coloring;
use crate::regalloc::live_value_tracker::LiveValueTracker;
use crate::regalloc::liveness::Liveness;
use crate::regalloc::reload::Reload;
use crate::regalloc::safepoint::emit_stackmaps;
use crate::regalloc::safepoint::emit_stack_maps;
use crate::regalloc::spilling::Spilling;
use crate::regalloc::virtregs::VirtRegs;
use crate::result::CodegenResult;
@@ -206,7 +206,7 @@ impl Context {
// This function runs after register allocation has taken
// place, meaning values have locations assigned already.
if isa.flags().enable_safepoints() {
emit_stackmaps(func, domtree, &self.liveness, &mut self.tracker, isa);
emit_stack_maps(func, domtree, &self.liveness, &mut self.tracker, isa);
} else {
// Make sure no references are used.
for val in func.dfg.values() {
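
On this old-backend path, stack map emission only happens when the `enable_safepoints` setting is on, so the `emit_stack_maps` call above is reached only for flags built along these lines. A sketch using the crate's settings builder; the flag name is taken from the `isa.flags().enable_safepoints()` check above.

```rust
use cranelift_codegen::settings::{self, Configurable};

fn flags_with_safepoints() -> settings::Flags {
    let mut builder = settings::builder();
    // Request stack maps for reference-typed values at safepoints.
    builder.enable("enable_safepoints").unwrap();
    settings::Flags::new(builder)
}
```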


@@ -23,4 +23,4 @@ mod spilling;
pub use self::context::Context;
pub use self::diversion::{EntryRegDiversions, RegDiversions};
pub use self::register_set::RegisterSet;
pub use self::safepoint::emit_stackmaps;
pub use self::safepoint::emit_stack_maps;


@@ -32,9 +32,9 @@ fn insert_and_encode_safepoint<'f>(
}
}
// The emit_stackmaps() function analyzes each instruction to retrieve the liveness of
// The emit_stack_maps() function analyzes each instruction to retrieve the liveness of
// the defs and operands by traversing a function's blocks in layout order.
pub fn emit_stackmaps(
pub fn emit_stack_maps(
func: &mut Function,
domtree: &DominatorTree,
liveness: &Liveness,