Merge pull request #1729 from cfallin/machinst-branch-opt

Fix MachBuffer branch optimization.
This commit is contained in:
Chris Fallin
2020-05-20 14:43:57 -07:00
committed by GitHub
3 changed files with 447 additions and 81 deletions

View File

@@ -1163,17 +1163,13 @@ impl MachInstEmit for Inst {
} }
&Inst::Jump { ref dest } => { &Inst::Jump { ref dest } => {
let off = sink.cur_offset(); let off = sink.cur_offset();
// Emit the jump itself. // Indicate that the jump uses a label, if so, so that a fixup can occur later.
sink.put4(enc_jump26(0b000101, dest.as_offset26_or_zero()));
// After the jump has been emitted, indicate that it uses a
// label, if so, so that a fixup can occur later. This happens
// after we emit the bytes because the fixup might occur right
// away (so the bytes must actually exist now).
if let Some(l) = dest.as_label() { if let Some(l) = dest.as_label() {
sink.use_label_at_offset(off, l, LabelUse::Branch26); sink.use_label_at_offset(off, l, LabelUse::Branch26);
let cur_off = sink.cur_offset(); sink.add_uncond_branch(off, off + 4, l);
sink.add_uncond_branch(off, cur_off, l);
} }
// Emit the jump itself.
sink.put4(enc_jump26(0b000101, dest.as_offset26_or_zero()));
} }
&Inst::Ret => { &Inst::Ret => {
sink.put4(0xd65f03c0); sink.put4(0xd65f03c0);
@@ -1208,28 +1204,27 @@ impl MachInstEmit for Inst {
} => { } => {
// Conditional part first. // Conditional part first.
let cond_off = sink.cur_offset(); let cond_off = sink.cur_offset();
sink.put4(enc_conditional_br(taken, kind));
if let Some(l) = taken.as_label() { if let Some(l) = taken.as_label() {
sink.use_label_at_offset(cond_off, l, LabelUse::Branch19); sink.use_label_at_offset(cond_off, l, LabelUse::Branch19);
let cur_off = sink.cur_offset();
let inverted = enc_conditional_br(taken, kind.invert()).to_le_bytes(); let inverted = enc_conditional_br(taken, kind.invert()).to_le_bytes();
sink.add_cond_branch(cond_off, cur_off, l, &inverted[..]); sink.add_cond_branch(cond_off, cond_off + 4, l, &inverted[..]);
} }
// Unconditional part. sink.put4(enc_conditional_br(taken, kind));
// Unconditional part next.
let uncond_off = sink.cur_offset(); let uncond_off = sink.cur_offset();
sink.put4(enc_jump26(0b000101, not_taken.as_offset26_or_zero()));
if let Some(l) = not_taken.as_label() { if let Some(l) = not_taken.as_label() {
sink.use_label_at_offset(uncond_off, l, LabelUse::Branch26); sink.use_label_at_offset(uncond_off, l, LabelUse::Branch26);
let cur_off = sink.cur_offset(); sink.add_uncond_branch(uncond_off, uncond_off + 4, l);
sink.add_uncond_branch(uncond_off, cur_off, l);
} }
sink.put4(enc_jump26(0b000101, not_taken.as_offset26_or_zero()));
} }
&Inst::OneWayCondBr { target, kind } => { &Inst::OneWayCondBr { target, kind } => {
let off = sink.cur_offset(); let off = sink.cur_offset();
sink.put4(enc_conditional_br(target, kind));
if let Some(l) = target.as_label() { if let Some(l) = target.as_label() {
sink.use_label_at_offset(off, l, LabelUse::Branch19); sink.use_label_at_offset(off, l, LabelUse::Branch19);
} }
sink.put4(enc_conditional_br(target, kind));
} }
&Inst::IndirectBr { rn, .. } => { &Inst::IndirectBr { rn, .. } => {
sink.put4(enc_br(rn)); sink.put4(enc_br(rn));
@@ -1307,12 +1302,12 @@ impl MachInstEmit for Inst {
for &target in targets.iter() { for &target in targets.iter() {
let word_off = sink.cur_offset(); let word_off = sink.cur_offset();
let off_into_table = word_off - jt_off; let off_into_table = word_off - jt_off;
sink.put4(off_into_table);
sink.use_label_at_offset( sink.use_label_at_offset(
word_off, word_off,
target.as_label().unwrap(), target.as_label().unwrap(),
LabelUse::PCRel32, LabelUse::PCRel32,
); );
sink.put4(off_into_table);
} }
// Lowering produces an EmitIsland before using a JTSequence, so we can safely // Lowering produces an EmitIsland before using a JTSequence, so we can safely

View File

@@ -812,14 +812,14 @@ pub(crate) fn emit(inst: &Inst, sink: &mut MachBuffer<Inst>) {
let disp = dest.as_offset32_or_zero() - 5; let disp = dest.as_offset32_or_zero() - 5;
let disp = disp as u32; let disp = disp as u32;
let br_start = sink.cur_offset(); let br_start = sink.cur_offset();
sink.put1(0xE9); let br_disp_off = br_start + 1;
let br_disp_off = sink.cur_offset(); let br_end = br_start + 5;
sink.put4(disp);
let br_end = sink.cur_offset();
if let Some(l) = dest.as_label() { if let Some(l) = dest.as_label() {
sink.use_label_at_offset(br_disp_off, l, LabelUse::Rel32); sink.use_label_at_offset(br_disp_off, l, LabelUse::Rel32);
sink.add_uncond_branch(br_start, br_end, l); sink.add_uncond_branch(br_start, br_end, l);
} }
sink.put1(0xE9);
sink.put4(disp);
} }
Inst::JmpCondSymm { Inst::JmpCondSymm {
cc, cc,
@@ -835,31 +835,31 @@ pub(crate) fn emit(inst: &Inst, sink: &mut MachBuffer<Inst>) {
let taken_disp = taken.as_offset32_or_zero() - 6; let taken_disp = taken.as_offset32_or_zero() - 6;
let taken_disp = taken_disp as u32; let taken_disp = taken_disp as u32;
let cond_start = sink.cur_offset(); let cond_start = sink.cur_offset();
sink.put1(0x0F); let cond_disp_off = cond_start + 2;
sink.put1(0x80 + cc.get_enc()); let cond_end = cond_start + 6;
let cond_disp_off = sink.cur_offset();
sink.put4(taken_disp);
let cond_end = sink.cur_offset();
if let Some(l) = taken.as_label() { if let Some(l) = taken.as_label() {
sink.use_label_at_offset(cond_disp_off, l, LabelUse::Rel32); sink.use_label_at_offset(cond_disp_off, l, LabelUse::Rel32);
let inverted: [u8; 6] = let inverted: [u8; 6] =
[0x0F, 0x80 + (cc.invert().get_enc()), 0xFA, 0xFF, 0xFF, 0xFF]; [0x0F, 0x80 + (cc.invert().get_enc()), 0xFA, 0xFF, 0xFF, 0xFF];
sink.add_cond_branch(cond_start, cond_end, l, &inverted[..]); sink.add_cond_branch(cond_start, cond_end, l, &inverted[..]);
} }
sink.put1(0x0F);
sink.put1(0x80 + cc.get_enc());
sink.put4(taken_disp);
// Unconditional part. // Unconditional part.
let nt_disp = not_taken.as_offset32_or_zero() - 5; let nt_disp = not_taken.as_offset32_or_zero() - 5;
let nt_disp = nt_disp as u32; let nt_disp = nt_disp as u32;
let uncond_start = sink.cur_offset(); let uncond_start = sink.cur_offset();
sink.put1(0xE9); let uncond_disp_off = uncond_start + 1;
let uncond_disp_off = sink.cur_offset(); let uncond_end = uncond_start + 5;
sink.put4(nt_disp);
let uncond_end = sink.cur_offset();
if let Some(l) = not_taken.as_label() { if let Some(l) = not_taken.as_label() {
sink.use_label_at_offset(uncond_disp_off, l, LabelUse::Rel32); sink.use_label_at_offset(uncond_disp_off, l, LabelUse::Rel32);
sink.add_uncond_branch(uncond_start, uncond_end, l); sink.add_uncond_branch(uncond_start, uncond_end, l);
} }
sink.put1(0xE9);
sink.put4(nt_disp);
} }
Inst::JmpUnknown { target } => { Inst::JmpUnknown { target } => {
match target { match target {

View File

@@ -105,6 +105,40 @@
//! (fixups happen at island emission time, at which point latest-branches are //! (fixups happen at island emission time, at which point latest-branches are
//! cleared, or at the end of emission), so we are sure to catch and redirect //! cleared, or at the end of emission), so we are sure to catch and redirect
//! all possible paths to this instruction. //! all possible paths to this instruction.
//!
//! # Branch-optimization Correctness
//!
//! The branch-optimization mechanism depends on a few data structures with
//! invariants, which are always held outside the scope of top-level public
//! methods:
//!
//! - The latest-branches list. Each entry describes a span of the buffer
//! (start/end offsets), the label target, the corresponding fixup-list entry
//! index, and the bytes (must be the same length) for the inverted form, if
//! conditional. The list of labels that are bound to the start-offset of this
//! branch is *complete* (if any label has a resolved offset equal to `start`
//! and is not an alias, it must appear in this list) and *precise* (no label
//! in this list can be bound to another offset). No label in this list should
//! be an alias. No two branch ranges can overlap, and branches are in
//! ascending-offset order.
//!
//! - The labels-at-tail list. This contains all MachLabels that have been bound
//! to (whose resolved offsets are equal to) the tail offset of the buffer.
//! No label in this list should be an alias.
//!
//! - The label_offsets array, containing the bound offset of a label or
//! UNKNOWN. No label can be bound at an offset greater than the current
//! buffer tail.
//!
//! - The label_aliases array, containing another label to which a label is
//! bound or UNKNOWN. A label's resolved offset is the resolved offset
//! of the label it is aliased to, if this is set.
//!
//! We argue below, at each method, how the invariants in these data structures
//! are maintained (grep for "Post-invariant").
//!
//! Given these invariants, we argue why each optimization preserves execution
//! semantics below (grep for "Preserves execution semantics").
use crate::binemit::{Addend, CodeOffset, CodeSink, Reloc}; use crate::binemit::{Addend, CodeOffset, CodeSink, Reloc};
use crate::ir::{ExternalName, Opcode, SourceLoc, TrapCode}; use crate::ir::{ExternalName, Opcode, SourceLoc, TrapCode};
@@ -162,8 +196,21 @@ pub struct MachBuffer<I: VCodeInst> {
/// Latest branches, to facilitate in-place editing for better fallthrough /// Latest branches, to facilitate in-place editing for better fallthrough
/// behavior and empty-block removal. /// behavior and empty-block removal.
latest_branches: SmallVec<[MachBranch; 4]>, latest_branches: SmallVec<[MachBranch; 4]>,
/// All labels, in offset order. /// All labels at the current offset (emission tail). This is lazily
labels_by_offset: SmallVec<[(MachLabel, CodeOffset); 16]>, /// cleared: it is actually accurate as long as the current offset is
/// `labels_at_tail_off`, but if `cur_offset()` has grown larger, it should
/// be considered as empty.
///
/// For correctness, this *must* be complete (i.e., the vector must contain
/// all labels whose offsets are resolved to the current tail), because we
/// rely on it to update labels when we truncate branches.
labels_at_tail: SmallVec<[MachLabel; 4]>,
/// The last offset at which `labels_at_tail` is valid. It is conceptually
/// always describing the tail of the buffer, but we do not clear
/// `labels_at_tail` eagerly when the tail grows, rather we lazily clear it
/// when the offset has grown past this (`labels_at_tail_off`) point.
/// Always <= `cur_offset()`.
labels_at_tail_off: CodeOffset,
} }
/// A `MachBuffer` once emission is completed: holds generated code and records, /// A `MachBuffer` once emission is completed: holds generated code and records,
@@ -223,10 +270,60 @@ impl<I: VCodeInst> MachBuffer<I> {
island_deadline: UNKNOWN_LABEL_OFFSET, island_deadline: UNKNOWN_LABEL_OFFSET,
island_worst_case_size: 0, island_worst_case_size: 0,
latest_branches: SmallVec::new(), latest_branches: SmallVec::new(),
labels_by_offset: SmallVec::new(), labels_at_tail: SmallVec::new(),
labels_at_tail_off: 0,
} }
} }
/// Debug-only: check invariants of labels and branch-records described
/// under "Branch-optimization Correctness" above.
    #[cfg(debug_assertions)]
fn check_label_branch_invariants(&self) {
let cur_off = self.cur_offset();
// Check that every entry in latest_branches has *correct*
// labels_at_this_branch lists. We do not check completeness because
// that would require building a reverse index, which is too slow even
// for a debug invariant check.
let mut last_end = 0;
for b in &self.latest_branches {
debug_assert!(b.start < b.end);
debug_assert!(b.end <= cur_off);
debug_assert!(b.start >= last_end);
last_end = b.end;
for &l in &b.labels_at_this_branch {
debug_assert_eq!(self.resolve_label_offset(l), b.start);
debug_assert_eq!(self.label_aliases[l.0 as usize], UNKNOWN_LABEL);
}
}
// Check that every label is unresolved, or resolved at or before
// cur_offset. If at cur_offset, must be in `labels_at_tail`.
for (i, &off) in self.label_offsets.iter().enumerate() {
let label = MachLabel(i as u32);
debug_assert!(off == UNKNOWN_LABEL_OFFSET || off <= cur_off);
if off == cur_off {
debug_assert!(
self.labels_at_tail_off == cur_off && self.labels_at_tail.contains(&label)
);
}
}
        // Check that every label in `labels_at_tail` is precise, i.e.,
        // resolves to the cur offset.
debug_assert!(self.labels_at_tail_off <= cur_off);
if self.labels_at_tail_off == cur_off {
for &l in &self.labels_at_tail {
debug_assert_eq!(self.resolve_label_offset(l), cur_off);
debug_assert_eq!(self.label_aliases[l.0 as usize], UNKNOWN_LABEL);
}
}
}
    #[cfg(not(debug_assertions))]
fn check_label_branch_invariants(&self) {
// Nothing.
}
/// Current offset from start of buffer. /// Current offset from start of buffer.
pub fn cur_offset(&self) -> CodeOffset { pub fn cur_offset(&self) -> CodeOffset {
self.data.len() as CodeOffset self.data.len() as CodeOffset
@@ -236,6 +333,15 @@ impl<I: VCodeInst> MachBuffer<I> {
pub fn put1(&mut self, value: u8) { pub fn put1(&mut self, value: u8) {
trace!("MachBuffer: put byte @ {}: {:x}", self.cur_offset(), value); trace!("MachBuffer: put byte @ {}: {:x}", self.cur_offset(), value);
self.data.push(value); self.data.push(value);
// Post-invariant: conceptual-labels_at_tail contains a complete and
// precise list of labels bound at `cur_offset()`. We have advanced
// `cur_offset()`, hence if it had been equal to `labels_at_tail_off`
// before, it is not anymore (and it cannot become equal, because
// `labels_at_tail_off` is always <= `cur_offset()`). Thus the list is
// conceptually empty (even though it is only lazily cleared). No labels
// can be bound at this new offset (by invariant on `label_offsets`).
// Hence the invariant holds.
} }
/// Add 2 bytes. /// Add 2 bytes.
@@ -247,6 +353,8 @@ impl<I: VCodeInst> MachBuffer<I> {
); );
let bytes = value.to_le_bytes(); let bytes = value.to_le_bytes();
self.data.extend_from_slice(&bytes[..]); self.data.extend_from_slice(&bytes[..]);
// Post-invariant: as for `put1()`.
} }
/// Add 4 bytes. /// Add 4 bytes.
@@ -258,6 +366,8 @@ impl<I: VCodeInst> MachBuffer<I> {
); );
let bytes = value.to_le_bytes(); let bytes = value.to_le_bytes();
self.data.extend_from_slice(&bytes[..]); self.data.extend_from_slice(&bytes[..]);
// Post-invariant: as for `put1()`.
} }
/// Add 8 bytes. /// Add 8 bytes.
@@ -269,6 +379,8 @@ impl<I: VCodeInst> MachBuffer<I> {
); );
let bytes = value.to_le_bytes(); let bytes = value.to_le_bytes();
self.data.extend_from_slice(&bytes[..]); self.data.extend_from_slice(&bytes[..]);
// Post-invariant: as for `put1()`.
} }
/// Add a slice of bytes. /// Add a slice of bytes.
@@ -279,6 +391,8 @@ impl<I: VCodeInst> MachBuffer<I> {
data.len() data.len()
); );
self.data.extend_from_slice(data); self.data.extend_from_slice(data);
// Post-invariant: as for `put1()`.
} }
/// Reserve appended space and return a mutable slice referring to it. /// Reserve appended space and return a mutable slice referring to it.
@@ -288,6 +402,8 @@ impl<I: VCodeInst> MachBuffer<I> {
let new_len = self.data.len() + len; let new_len = self.data.len() + len;
self.data.resize(new_len, 0); self.data.resize(new_len, 0);
&mut self.data[off..] &mut self.data[off..]
// Post-invariant: as for `put1()`.
} }
/// Align up to the given alignment. /// Align up to the given alignment.
@@ -297,6 +413,8 @@ impl<I: VCodeInst> MachBuffer<I> {
while self.cur_offset() & (align_to - 1) != 0 { while self.cur_offset() & (align_to - 1) != 0 {
self.put1(0); self.put1(0);
} }
// Post-invariant: as for `put1()`.
} }
/// Allocate a `Label` to refer to some offset. May not be bound to a fixed /// Allocate a `Label` to refer to some offset. May not be bound to a fixed
@@ -307,6 +425,9 @@ impl<I: VCodeInst> MachBuffer<I> {
self.label_aliases.push(UNKNOWN_LABEL); self.label_aliases.push(UNKNOWN_LABEL);
trace!("MachBuffer: new label -> {:?}", MachLabel(l)); trace!("MachBuffer: new label -> {:?}", MachLabel(l));
MachLabel(l) MachLabel(l)
// Post-invariant: the only mutation is to add a new label; it has no
// bound offset yet, so it trivially satisfies all invariants.
} }
/// Reserve the first N MachLabels for blocks. /// Reserve the first N MachLabels for blocks.
@@ -316,19 +437,50 @@ impl<I: VCodeInst> MachBuffer<I> {
self.label_offsets self.label_offsets
.resize(blocks as usize, UNKNOWN_LABEL_OFFSET); .resize(blocks as usize, UNKNOWN_LABEL_OFFSET);
self.label_aliases.resize(blocks as usize, UNKNOWN_LABEL); self.label_aliases.resize(blocks as usize, UNKNOWN_LABEL);
// Post-invariant: as for `get_label()`.
} }
/// Bind a label to the current offset. /// Bind a label to the current offset. A label can only be bound once.
pub fn bind_label(&mut self, label: MachLabel) { pub fn bind_label(&mut self, label: MachLabel) {
trace!( trace!(
"MachBuffer: bind label {:?} at offset {}", "MachBuffer: bind label {:?} at offset {}",
label, label,
self.cur_offset() self.cur_offset()
); );
debug_assert_eq!(self.label_offsets[label.0 as usize], UNKNOWN_LABEL_OFFSET);
debug_assert_eq!(self.label_aliases[label.0 as usize], UNKNOWN_LABEL);
let offset = self.cur_offset(); let offset = self.cur_offset();
self.label_offsets[label.0 as usize] = offset; self.label_offsets[label.0 as usize] = offset;
self.labels_by_offset.push((label, offset)); self.lazily_clear_labels_at_tail();
self.labels_at_tail.push(label);
// Invariants hold: bound offset of label is <= cur_offset (in fact it
// is equal). If the `labels_at_tail` list was complete and precise
// before, it is still, because we have bound this label to the current
// offset and added it to the list (which contains all labels at the
// current offset).
self.check_label_branch_invariants();
self.optimize_branches(); self.optimize_branches();
// Post-invariant: by `optimize_branches()` (see argument there).
self.check_label_branch_invariants();
}
/// Lazily clear `labels_at_tail` if the tail offset has moved beyond the
/// offset that it applies to.
fn lazily_clear_labels_at_tail(&mut self) {
let offset = self.cur_offset();
if offset > self.labels_at_tail_off {
self.labels_at_tail_off = offset;
self.labels_at_tail.clear();
}
// Post-invariant: either labels_at_tail_off was at cur_offset, and
// state is untouched, or was less than cur_offset, in which case the
// labels_at_tail list was conceptually empty, and is now actually
// empty.
} }
/// Resolve a label to an offset, if known. May return `UNKNOWN_LABEL_OFFSET`. /// Resolve a label to an offset, if known. May return `UNKNOWN_LABEL_OFFSET`.
@@ -339,15 +491,16 @@ impl<I: VCodeInst> MachBuffer<I> {
} else { } else {
self.label_offsets[label.0 as usize] self.label_offsets[label.0 as usize]
} }
// Post-invariant: no mutations.
} }
/// Emit a reference to the given label with the given reference type (i.e., /// Emit a reference to the given label with the given reference type (i.e.,
/// branch-instruction format) at the current offset. This is like a /// branch-instruction format) at the current offset. This is like a
/// relocation, but handled internally. /// relocation, but handled internally.
/// ///
/// Because the offset of the label may already be known and the patch may /// This can be called before the branch is actually emitted; fixups will
/// happen immediately, the buffer must already contain bytes at `offset` up /// not happen until an island is emitted or the buffer is finished.
/// to `offset + kind.patch_size()`.
pub fn use_label_at_offset(&mut self, offset: CodeOffset, label: MachLabel, kind: I::LabelUse) { pub fn use_label_at_offset(&mut self, offset: CodeOffset, label: MachLabel, kind: I::LabelUse) {
trace!( trace!(
"MachBuffer: use_label_at_offset: offset {} label {:?} kind {:?}", "MachBuffer: use_label_at_offset: offset {} label {:?} kind {:?}",
@@ -355,7 +508,6 @@ impl<I: VCodeInst> MachBuffer<I> {
label, label,
kind kind
); );
debug_assert!(offset + kind.patch_size() <= self.cur_offset());
// Add the fixup, and update the worst-case island size based on a // Add the fixup, and update the worst-case island size based on a
// veneer for this label use. // veneer for this label use.
@@ -372,26 +524,51 @@ impl<I: VCodeInst> MachBuffer<I> {
if deadline < self.island_deadline { if deadline < self.island_deadline {
self.island_deadline = deadline; self.island_deadline = deadline;
} }
// Post-invariant: no mutations to branches/labels data structures.
self.check_label_branch_invariants();
} }
/// Inform the buffer of an unconditional branch at the given offset, /// Inform the buffer of an unconditional branch at the given offset,
/// targeting the given label. May be used to optimize branches. /// targeting the given label. May be used to optimize branches.
/// The last added label-use must correspond to this branch. /// The last added label-use must correspond to this branch.
/// This must be called when the current offset is equal to `start`; i.e.,
/// before actually emitting the branch. This implies that for a branch that
/// uses a label and is eligible for optimizations by the MachBuffer, the
/// proper sequence is:
///
/// - Call `use_label_at_offset()` to emit the fixup record.
/// - Call `add_uncond_branch()` to make note of the branch.
/// - Emit the bytes for the branch's machine code.
///
/// Additional requirement: no labels may be bound between `start` and `end`
/// (exclusive on both ends).
pub fn add_uncond_branch(&mut self, start: CodeOffset, end: CodeOffset, target: MachLabel) { pub fn add_uncond_branch(&mut self, start: CodeOffset, end: CodeOffset, target: MachLabel) {
assert!(self.cur_offset() == start);
debug_assert!(end > start);
assert!(!self.fixup_records.is_empty()); assert!(!self.fixup_records.is_empty());
let fixup = self.fixup_records.len() - 1; let fixup = self.fixup_records.len() - 1;
self.lazily_clear_labels_at_tail();
self.latest_branches.push(MachBranch { self.latest_branches.push(MachBranch {
start, start,
end, end,
target, target,
fixup, fixup,
inverted: None, inverted: None,
labels_at_this_branch: self.labels_at_tail.clone(),
}); });
// Post-invariant: we asserted branch start is current tail; the list of
// labels at branch is cloned from list of labels at current tail.
self.check_label_branch_invariants();
} }
/// Inform the buffer of a conditional branch at the given offset, /// Inform the buffer of a conditional branch at the given offset,
/// targeting the given label. May be used to optimize branches. /// targeting the given label. May be used to optimize branches.
/// The last added label-use must correspond to this branch. /// The last added label-use must correspond to this branch.
///
/// Additional requirement: no labels may be bound between `start` and `end`
/// (exclusive on both ends).
pub fn add_cond_branch( pub fn add_cond_branch(
&mut self, &mut self,
start: CodeOffset, start: CodeOffset,
@@ -399,47 +576,127 @@ impl<I: VCodeInst> MachBuffer<I> {
target: MachLabel, target: MachLabel,
inverted: &[u8], inverted: &[u8],
) { ) {
assert!(self.cur_offset() == start);
debug_assert!(end > start);
assert!(!self.fixup_records.is_empty()); assert!(!self.fixup_records.is_empty());
debug_assert!(inverted.len() == (end - start) as usize);
let fixup = self.fixup_records.len() - 1; let fixup = self.fixup_records.len() - 1;
let inverted = Some(SmallVec::from(inverted)); let inverted = Some(SmallVec::from(inverted));
self.lazily_clear_labels_at_tail();
self.latest_branches.push(MachBranch { self.latest_branches.push(MachBranch {
start, start,
end, end,
target, target,
fixup, fixup,
inverted, inverted,
labels_at_this_branch: self.labels_at_tail.clone(),
}); });
// Post-invariant: we asserted branch start is current tail; labels at
// branch list is cloned from list of labels at current tail.
self.check_label_branch_invariants();
} }
fn truncate_last_branch(&mut self) { fn truncate_last_branch(&mut self) {
self.lazily_clear_labels_at_tail();
// Invariants hold at this point.
let b = self.latest_branches.pop().unwrap(); let b = self.latest_branches.pop().unwrap();
assert!(b.end == self.cur_offset()); assert!(b.end == self.cur_offset());
// State:
// [PRE CODE]
// Offset b.start, b.labels_at_this_branch:
// [BRANCH CODE]
// cur_off, self.labels_at_tail -->
// (end of buffer)
self.data.truncate(b.start as usize); self.data.truncate(b.start as usize);
self.fixup_records.truncate(b.fixup); self.fixup_records.truncate(b.fixup);
// State:
// [PRE CODE]
// cur_off, Offset b.start, b.labels_at_this_branch:
// (end of buffer)
//
// self.labels_at_tail --> (past end of buffer)
let cur_off = self.cur_offset(); let cur_off = self.cur_offset();
self.labels_at_tail_off = cur_off;
// State:
// [PRE CODE]
// cur_off, Offset b.start, b.labels_at_this_branch,
// self.labels_at_tail:
// (end of buffer)
//
// resolve_label_offset(l) for l in labels_at_tail:
// (past end of buffer)
trace!( trace!(
"truncate_last_branch: truncated {:?}; off now {}", "truncate_last_branch: truncated {:?}; off now {}",
b, b,
cur_off cur_off
); );
for &mut (l, ref mut off) in self.labels_by_offset.iter_mut().rev() {
if *off > cur_off { // Fix up resolved label offsets for labels at tail.
*off = cur_off; for &l in &self.labels_at_tail {
trace!(" -> label {:?} reassigned to {}", l, cur_off);
self.label_offsets[l.0 as usize] = cur_off; self.label_offsets[l.0 as usize] = cur_off;
} else {
break;
}
} }
// Old labels_at_this_branch are now at cur_off.
self.labels_at_tail
.extend(b.labels_at_this_branch.into_iter());
// Post-invariant: this operation is defined to truncate the buffer,
// which moves cur_off backward, and to move labels at the end of the
// buffer back to the start-of-branch offset.
//
// latest_branches satisfies all invariants:
// - it has no branches past the end of the buffer (branches are in
// order, we removed the last one, and we truncated the buffer to just
// before the start of that branch)
// - no labels were moved to lower offsets than the (new) cur_off, so
// the labels_at_this_branch list for any other branch need not change.
//
// labels_at_tail satisfies all invariants:
// - all labels that were at the tail after the truncated branch are
// moved backward to just before the branch, which becomes the new tail;
// thus every element in the list should remain (ensured by `.extend()`
// above).
// - all labels that refer to the new tail, which is the start-offset of
// the truncated branch, must be present. The `labels_at_this_branch`
// list in the truncated branch's record is a complete and precise list
// of exactly these labels; we append these to labels_at_tail.
// - labels_at_tail_off is at cur_off after truncation occurs, so the
// list is valid (not to be lazily cleared).
//
// The stated operation was performed:
// - For each label at the end of the buffer prior to this method, it
// now resolves to the new (truncated) end of the buffer: it must have
// been in `labels_at_tail` (this list is precise and complete, and
// the tail was at the end of the truncated branch on entry), and we
// iterate over this list and set `label_offsets` to the new tail.
// None of these labels could have been an alias (by invariant), so
// `label_offsets` is authoritative for each.
// - No other labels will be past the end of the buffer, because of the
// requirement that no labels be bound to the middle of branch ranges
// (see comments to `add_{cond,uncond}_branch()`).
// - The buffer is truncated to just before the last branch, and the
// fixup record referring to that last branch is removed.
self.check_label_branch_invariants();
} }
fn optimize_branches(&mut self) { fn optimize_branches(&mut self) {
self.lazily_clear_labels_at_tail();
// Invariants valid at this point.
trace!( trace!(
"enter optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}", "enter optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}",
self.latest_branches, self.latest_branches,
self.labels_by_offset, self.labels_at_tail,
self.fixup_records self.fixup_records
); );
// We continue to munch on branches at the tail of the buffer until no
// more rules apply. Note that the loop only continues if a branch is
// actually truncated (or if labels are redirected away from a branch),
// so this always makes progress.
while let Some(b) = self.latest_branches.last() { while let Some(b) = self.latest_branches.last() {
let cur_off = self.cur_offset(); let cur_off = self.cur_offset();
trace!("optimize_branches: last branch {:?} at off {}", b, cur_off); trace!("optimize_branches: last branch {:?} at off {}", b, cur_off);
@@ -451,45 +708,141 @@ impl<I: VCodeInst> MachBuffer<I> {
break; break;
} }
// Invariant: we are looking at a branch that ends at the tail of
// the buffer.
// For any branch, conditional or unconditional:
// - If the target is a label at the current offset, then remove
            // the conditional branch, and reset all labels that targeted
// the current offset (end of branch) to the truncated
// end-of-code.
//
// Preserves execution semantics: a branch to its own fallthrough
// address is equivalent to a no-op; in both cases, nextPC is the
// fallthrough.
if self.resolve_label_offset(b.target) == cur_off {
trace!("branch with target == cur off; truncating");
self.truncate_last_branch();
continue;
}
// If latest is an unconditional branch: // If latest is an unconditional branch:
// - For each label at this point, make the label an alias of //
// the branch target. We can now assume below that the // - If the branch's target is not its own start address, then for
// unconditional branch is reachable only via fallthrough, and we // each label at the start of branch, make the label an alias of the
// are free to remove it in an optimization. // branch target, and remove the label from the "labels at this
// branch" list.
//
// - Preserves execution semantics: an unconditional branch's
// only effect is to set PC to a new PC; this change simply
// collapses one step in the step-semantics.
//
// - Post-invariant: the labels that were bound to the start of
// this branch become aliases, so they must not be present in any
// labels-at-this-branch list or the labels-at-tail list. The
            // labels are removed from the latest-branch record's
// labels-at-this-branch list, and are never placed in the
// labels-at-tail list. Furthermore, it is correct that they are
// not in either list, because they are now aliases, and labels
// that are aliases remain aliases forever.
//
// - If there is a prior unconditional branch that ends just before // - If there is a prior unconditional branch that ends just before
// this one begins, then we can truncate this branch, because it is // this one begins, and this branch has no labels bound to its
// entirely unreachable (due to above). Trim the end of the // start, then we can truncate this branch, because it is entirely
// `labels_by_offset` array and continue around the loop. // unreachable (we have redirected all labels that make it
// reachable otherwise). Do so and continue around the loop.
//
// - Preserves execution semantics: the branch is unreachable,
// because execution can only flow into an instruction from the
// prior instruction's fallthrough or from a branch bound to that
// instruction's start offset. Unconditional branches have no
// fallthrough, so if the prior instruction is an unconditional
// branch, no fallthrough entry can happen. The
// labels-at-this-branch list is complete (by invariant), so if it
// is empty, then the instruction is entirely unreachable. Thus,
// it can be removed.
//
// - Post-invariant: ensured by truncate_last_branch().
//
// - If there is a prior conditional branch whose target label // - If there is a prior conditional branch whose target label
// resolves to the current offset (branches around the // resolves to the current offset (branches around the
// unconditional branch), then remove the unconditional branch, // unconditional branch), then remove the unconditional branch,
// and make the target of the unconditional the target of the // and make the target of the unconditional the target of the
// conditional instead. // conditional instead.
//
// - Preserves execution semantics: previously we had:
//
// L1:
// cond_br L2
// br L3
// L2:
// (end of buffer)
//
// by removing the last branch, we have:
//
// L1:
// cond_br L2
// L2:
// (end of buffer)
//
// we then fix up the records for the conditional branch to
// have:
//
// L1:
// cond_br.inverted L3
// L2:
//
// In the original code, control flow reaches L2 when the
// conditional branch's predicate is true, and L3 otherwise. In
// the optimized code, the same is true.
//
// - Post-invariant: all edits to latest_branches and
// labels_at_tail are performed by `truncate_last_branch()`,
// which maintains the invariants at each step.
if b.is_uncond() { if b.is_uncond() {
// Set any label equal to current branch's start as an alias of // Set any label equal to current branch's start as an alias of
// the branch's target. // the branch's target, if the target is not the branch itself
for &(l, off) in self.labels_by_offset.iter().rev() { // (i.e., an infinite loop).
trace!(" -> uncond: latest label {:?} at off {}", l, off); if self.resolve_label_offset(b.target) != b.start {
if off > b.start { let redirected = b.labels_at_this_branch.len();
continue; for &l in &b.labels_at_this_branch {
} else if off == b.start { trace!(
trace!(" -> setting alias to {:?}", b.target); " -> label at start of branch {:?} redirected to target {:?}",
l,
b.target
);
self.label_aliases[l.0 as usize] = b.target; self.label_aliases[l.0 as usize] = b.target;
} else { // NOTE: we continue to ensure the invariant that labels
break; // pointing to tail of buffer are in `labels_at_tail`
} // because we already ensured above that the last branch
// cannot have a target of `cur_off`; so we never have
// to put the label into `labels_at_tail` when moving it
// here.
} }
// Maintain invariant: all branches have been redirected
// and are no longer pointing at the start of this branch.
let mut_b = self.latest_branches.last_mut().unwrap();
mut_b.labels_at_this_branch.clear();
// If the branch target is the next offset, if redirected > 0 {
trace!(" -> after label redirects, restarting loop");
continue;
}
}
let b = self.latest_branches.last().unwrap();
// Examine any immediately preceding branch. // Examine any immediately preceding branch.
if self.latest_branches.len() > 1 { if self.latest_branches.len() > 1 {
let prev_b = &self.latest_branches[self.latest_branches.len() - 2]; let prev_b = &self.latest_branches[self.latest_branches.len() - 2];
trace!(" -> more than one branch; prev_b = {:?}", prev_b); trace!(" -> more than one branch; prev_b = {:?}", prev_b);
// This uncond is immediately after another uncond; we've // This uncond is immediately after another uncond; we
// already redirected labels to this uncond away; so we can // should have already redirected labels to this uncond away
// truncate this uncond. // (but check to be sure); so we can truncate this uncond.
if prev_b.is_uncond() && prev_b.end == b.start { if prev_b.is_uncond()
&& prev_b.end == b.start
&& b.labels_at_this_branch.is_empty()
{
trace!(" -> uncond follows another uncond; truncating"); trace!(" -> uncond follows another uncond; truncating");
self.truncate_last_branch(); self.truncate_last_branch();
continue; continue;
@@ -506,34 +859,38 @@ impl<I: VCodeInst> MachBuffer<I> {
&& self.resolve_label_offset(prev_b.target) == cur_off && self.resolve_label_offset(prev_b.target) == cur_off
{ {
trace!(" -> uncond follows a conditional, and conditional's target resolves to current offset"); trace!(" -> uncond follows a conditional, and conditional's target resolves to current offset");
// Save the target of the uncond (this becomes the
// target of the cond), and truncate the uncond.
let target = b.target; let target = b.target;
let data = prev_b.inverted.clone().unwrap(); let data = prev_b.inverted.clone().unwrap();
self.truncate_last_branch(); self.truncate_last_branch();
// Mutate the code and cond branch.
let off_before_edit = self.cur_offset();
let prev_b = self.latest_branches.last_mut().unwrap(); let prev_b = self.latest_branches.last_mut().unwrap();
let not_inverted = SmallVec::from( let not_inverted = SmallVec::from(
&self.data[(prev_b.start as usize)..(prev_b.end as usize)], &self.data[(prev_b.start as usize)..(prev_b.end as usize)],
); );
// Low-level edit: replaces bytes of branch with
// inverted form. cur_off remains the same afterward, so
// we do not need to modify label data structures.
self.data.truncate(prev_b.start as usize); self.data.truncate(prev_b.start as usize);
self.data.extend_from_slice(&data[..]); self.data.extend_from_slice(&data[..]);
// Save the original code as the inversion of the
// inverted branch, in case we later edit this branch
// again.
prev_b.inverted = Some(not_inverted); prev_b.inverted = Some(not_inverted);
self.fixup_records[prev_b.fixup].label = target; self.fixup_records[prev_b.fixup].label = target;
trace!(" -> reassigning target of condbr to {:?}", target); trace!(" -> reassigning target of condbr to {:?}", target);
prev_b.target = target; prev_b.target = target;
debug_assert_eq!(off_before_edit, self.cur_offset());
continue; continue;
} }
} }
} }
// For any branch, conditional or unconditional:
// - If the target is a label at the current offset, then remove
// the conditional branch, and reset all labels that targetted
// the current offset (end of branch) to the truncated
// end-of-code.
if self.resolve_label_offset(b.target) == cur_off {
trace!("branch with target == cur off; truncating");
self.truncate_last_branch();
}
// If we couldn't do anything with the last branch, then break. // If we couldn't do anything with the last branch, then break.
break; break;
} }
@@ -543,12 +900,16 @@ impl<I: VCodeInst> MachBuffer<I> {
trace!( trace!(
"leave optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}", "leave optimize_branches:\n b = {:?}\n l = {:?}\n f = {:?}",
self.latest_branches, self.latest_branches,
self.labels_by_offset, self.labels_at_tail,
self.fixup_records self.fixup_records
); );
} }
fn purge_latest_branches(&mut self) { fn purge_latest_branches(&mut self) {
// All of our branch simplification rules work only if a branch ends at
// the tail of the buffer, with no following code; and branches are in
// order in latest_branches; so if the last entry ends prior to
// cur_offset, then clear all entries.
let cur_off = self.cur_offset(); let cur_off = self.cur_offset();
if let Some(l) = self.latest_branches.last() { if let Some(l) = self.latest_branches.last() {
if l.end < cur_off { if l.end < cur_off {
@@ -556,6 +917,10 @@ impl<I: VCodeInst> MachBuffer<I> {
self.latest_branches.clear(); self.latest_branches.clear();
} }
} }
// Post-invariant: no invariant requires any branch to appear in
// `latest_branches`; it is always optional. The list-clear above thus
// preserves all semantics.
} }
/// Emit a constant at some point in the future, binding the given label to /// Emit a constant at some point in the future, binding the given label to
@@ -929,6 +1294,12 @@ struct MachBranch {
target: MachLabel, target: MachLabel,
fixup: usize, fixup: usize,
inverted: Option<SmallVec<[u8; 8]>>, inverted: Option<SmallVec<[u8; 8]>>,
/// All labels pointing to the start of this branch. For correctness, this
/// *must* be complete (i.e., must contain all labels whose resolved offsets
/// are at the start of this branch): we rely on being able to redirect all
/// labels that could jump to this branch before removing it, if it is
/// otherwise unreachable.
labels_at_this_branch: SmallVec<[MachLabel; 4]>,
} }
impl MachBranch { impl MachBranch {