Update x64 backend to use new lowering APIs.
@@ -5,7 +5,6 @@ use std::string::{String, ToString};
 
 use regalloc::{RealRegUniverse, Reg, RegClass, RegUsageCollector};
 
-use crate::binemit::CodeOffset;
 use crate::machinst::*;
 
 use super::regs::show_ireg_sized;
@@ -375,43 +374,27 @@ impl fmt::Debug for CC {
 /// from end of current instruction).
 #[derive(Clone, Copy, Debug)]
 pub enum BranchTarget {
-    /// An unresolved reference to a BlockIndex, as passed into
-    /// `lower_branch_group()`.
-    Block(BlockIndex),
+    /// An unresolved reference to a MachLabel.
+    Label(MachLabel),
 
-    /// A resolved reference to another instruction, after
-    /// `Inst::with_block_offsets()`. This offset is in bytes.
-    ResolvedOffset(BlockIndex, isize),
+    /// A resolved reference to another instruction, in bytes.
+    ResolvedOffset(isize),
 }
 
 impl ShowWithRRU for BranchTarget {
     fn show_rru(&self, _mb_rru: Option<&RealRegUniverse>) -> String {
         match self {
-            BranchTarget::Block(bix) => format!("(Block {})", bix),
-            BranchTarget::ResolvedOffset(bix, offs) => format!("(Block {}, offset {})", bix, offs),
+            BranchTarget::Label(l) => format!("{:?}", l),
+            BranchTarget::ResolvedOffset(offs) => format!("(offset {})", offs),
         }
     }
 }
 
 impl BranchTarget {
-    /// Lower the branch target given offsets of each block.
-    pub fn lower(&mut self, targets: &[CodeOffset], my_offset: CodeOffset) {
+    /// Get the label.
+    pub fn as_label(&self) -> Option<MachLabel> {
         match self {
-            &mut BranchTarget::Block(bix) => {
-                let bix = bix as usize;
-                assert!(bix < targets.len());
-                let block_offset_in_func = targets[bix];
-                let branch_offset = (block_offset_in_func as isize) - (my_offset as isize);
-                *self = BranchTarget::ResolvedOffset(bix as BlockIndex, branch_offset);
-            }
-            &mut BranchTarget::ResolvedOffset(..) => {}
-        }
-    }
-
-    /// Get the block index.
-    pub fn as_block_index(&self) -> Option<BlockIndex> {
-        match self {
-            &BranchTarget::Block(bix) => Some(bix),
+            &BranchTarget::Label(l) => Some(l),
             _ => None,
         }
     }
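
As a reading aid (not part of the commit), here is a minimal standalone sketch of the label-based BranchTarget shown above and how its two accessors behave. MachLabel is reduced to a plain index wrapper here, whereas in Cranelift it comes from the machinst layer, and the main function is illustrative usage only.

// Sketch only: MachLabel is simplified to an index wrapper; the real type
// lives in the machinst layer.
#[derive(Clone, Copy, Debug)]
pub struct MachLabel(u32);

#[derive(Clone, Copy, Debug)]
pub enum BranchTarget {
    /// Not yet resolved: refers to a MachLabel bound later by the buffer.
    Label(MachLabel),
    /// Resolved to a byte offset (see the doc comments in the diff for the
    /// exact convention).
    ResolvedOffset(isize),
}

impl BranchTarget {
    /// The label, if the target is still unresolved.
    pub fn as_label(&self) -> Option<MachLabel> {
        match self {
            &BranchTarget::Label(l) => Some(l),
            _ => None,
        }
    }

    /// The resolved offset as i32; zero while the target is still a label.
    pub fn as_offset32_or_zero(&self) -> i32 {
        match self {
            &BranchTarget::ResolvedOffset(off) => {
                assert!(off >= -0x7FFF_FF00 && off <= 0x7FFF_FF00);
                off as i32
            }
            _ => 0,
        }
    }
}

fn main() {
    // Before branch resolution the target carries a label...
    let unresolved = BranchTarget::Label(MachLabel(3));
    assert!(unresolved.as_label().is_some());
    assert_eq!(unresolved.as_offset32_or_zero(), 0);

    // ...after resolution it carries a byte offset the emitter can encode.
    let resolved = BranchTarget::ResolvedOffset(-16);
    assert!(resolved.as_label().is_none());
    assert_eq!(resolved.as_offset32_or_zero(), -16);
}

The shape of the change is visible in the usage: before resolution a target answers through as_label(), afterwards through as_offset32_or_zero().
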
@@ -421,31 +404,17 @@ impl BranchTarget {
     /// byte of the target. It does not take into account the Intel-specific
     /// rule that a branch offset is encoded as relative to the start of the
     /// following instruction. That is a problem for the emitter to deal
-    /// with.
-    pub fn as_offset_i32(&self) -> Option<i32> {
+    /// with. If a label, returns zero.
+    pub fn as_offset32_or_zero(&self) -> i32 {
         match self {
-            &BranchTarget::ResolvedOffset(_, off) => {
+            &BranchTarget::ResolvedOffset(off) => {
                 // Leave a bit of slack so that the emitter is guaranteed to
                 // be able to add the length of the jump instruction encoding
                 // to this value and still have a value in signed-32 range.
-                if off >= -0x7FFF_FF00isize && off <= 0x7FFF_FF00isize {
-                    Some(off as i32)
-                } else {
-                    None
-                }
+                assert!(off >= -0x7FFF_FF00 && off <= 0x7FFF_FF00);
+                off as i32
             }
-            _ => None,
-        }
-    }
-
-    /// Map the block index given a transform map.
-    pub fn map(&mut self, block_index_map: &[BlockIndex]) {
-        match self {
-            &mut BranchTarget::Block(ref mut bix) => {
-                let n = block_index_map[*bix as usize];
-                *bix = n;
-            }
-            _ => panic!("BranchTarget::map() called on already-lowered BranchTarget!"),
+            _ => 0,
         }
     }
 }
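
The doc comment on as_offset32_or_zero() notes that the returned offset does not account for the Intel rule that a branch offset is encoded relative to the start of the following instruction, and the in-code comment leaves +/-0x7FFF_FF00 of slack so the emitter can fold in the instruction length. Below is a sketch of that adjustment, assuming the stored offset is measured from the first byte of the branch instruction; that convention, and encode_rel32 itself, are assumptions here, not Cranelift API.

// Hypothetical helper, not Cranelift code: assuming `offset_from_inst_start`
// runs from the first byte of the branch to the first byte of the target,
// while x86-64 computes the target as (address of next instruction) + rel32,
// the emitter must subtract the branch's encoded length.
fn encode_rel32(offset_from_inst_start: i32, inst_len: i32) -> i32 {
    // The +/-0x7FFF_FF00 slack checked upstream keeps this subtraction inside
    // the signed 32-bit range for any realistic instruction length.
    offset_from_inst_start - inst_len
}

fn main() {
    // A 5-byte `jmp rel32` whose target starts 32 bytes after the jump's own
    // first byte encodes rel32 = 32 - 5 = 27.
    assert_eq!(encode_rel32(32, 5), 27);
    // A backward branch to 16 bytes before the jump's first byte encodes
    // rel32 = -16 - 5 = -21.
    assert_eq!(encode_rel32(-16, 5), -21);
}
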