use alloc::vec;
use alloc::vec::Vec;
use core::convert::TryFrom;

use smallvec::{smallvec, SmallVec};

use crate::{
    cfg::CFGInfo, Allocation, Block, Edit, Function, Inst, MachineEnv, Operand, OperandConstraint,
    OperandKind, OperandPos, Output, PReg, ProgPoint, RegAllocError, RegClass, SpillSlot, VReg,
};

/// Per-vreg allocation state.
#[derive(Default, Clone, Copy)]
struct VRegData {
    /// Physical register currently holding this vreg, if any.
    pub preg: Option<PReg>,
    /// Index of the stack slot backing this vreg, if one has been allocated.
    pub slot_idx: Option<u32>,
    /// Whether this vreg holds a reference-typed value (tracked for safepoints).
    pub reftype: bool,
}

/// Per-preg allocation state.
#[derive(Default, Clone, Copy)]
struct PRegData {
    /// Index of the vreg currently held in this preg, if any.
    pub vreg: Option<u32>,
    /// Whether this preg is actually a fixed stack slot pseudo-register.
    pub stack_pseudo: bool,
}

#[derive(Default, Clone, Copy)]
struct BlockData {
    pub allocated: bool,
}

struct FastAllocState<'a, F: Function> {
    pub vregs: Vec<VRegData>,
    pub pregs: Vec<PRegData>,
    pub blocks: Vec<BlockData>,
    pub preorder: Vec<Block>,
    pub reg_order_int: Vec<PReg>,
    pub reg_order_float: Vec<PReg>,
    pub cur_stack_slot_idx: u32,
    pub reftype_vregs_in_pregs_count: u32,
    pub stack_slot_count_int: u8,
    pub stack_slot_count_float: u8,
    pub allocs: Vec<Allocation>,
    pub inst_alloc_offsets: Vec<u32>,
    pub edits: Vec<(ProgPoint, Edit)>,
    pub safepoint_slots: Vec<(ProgPoint, Allocation)>,
    pub func: &'a F,
    pub mach_env: &'a MachineEnv,
    pub cfg: &'a CFGInfo,
}

impl<'a, F: Function> FastAllocState<'a, F> {
    pub fn init(func: &'a F, mach_env: &'a MachineEnv, cfg: &'a CFGInfo) -> Self {
        let vregs = {
            let mut vregs = Vec::with_capacity(func.num_vregs());
            vregs.resize(func.num_vregs(), VRegData::default());
            for vreg in func.reftype_vregs() {
                vregs[vreg.vreg()].reftype = true;
            }
            vregs
        };
        let pregs = {
            let mut pregs = Vec::with_capacity(PReg::NUM_INDEX);
            pregs.resize(PReg::NUM_INDEX, PRegData::default());
            for preg in &mach_env.fixed_stack_slots {
                pregs[preg.index()].stack_pseudo = true;
            }
            pregs
        };
        let blocks = {
            let mut blocks = Vec::with_capacity(func.num_blocks());
            blocks.resize(func.num_blocks(), BlockData::default());
            blocks
        };
        // Probe preferred registers before non-preferred ones within each class.
        let reg_order_int = {
            let class = RegClass::Int as usize;
            let amount = mach_env.preferred_regs_by_class[class].len()
                + mach_env.non_preferred_regs_by_class[class].len();
            let mut reg_order = Vec::with_capacity(amount);
            reg_order.extend_from_slice(&mach_env.preferred_regs_by_class[class]);
            reg_order.extend_from_slice(&mach_env.non_preferred_regs_by_class[class]);
            reg_order
        };
        let reg_order_float = {
            let class = RegClass::Float as usize;
            let amount = mach_env.preferred_regs_by_class[class].len()
                + mach_env.non_preferred_regs_by_class[class].len();
            let mut reg_order = Vec::with_capacity(amount);
            reg_order.extend_from_slice(&mach_env.preferred_regs_by_class[class]);
            reg_order.extend_from_slice(&mach_env.non_preferred_regs_by_class[class]);
            reg_order
        };
        let mut inst_alloc_offsets = Vec::with_capacity(func.num_insts());
        inst_alloc_offsets.resize(func.num_insts(), 0);

        Self {
            vregs,
            pregs,
            blocks,
            preorder: Self::calc_preorder(func),
            reg_order_int,
            reg_order_float,
            cur_stack_slot_idx: 0,
            reftype_vregs_in_pregs_count: 0,
            stack_slot_count_int: u8::try_from(func.spillslot_size(RegClass::Int))
                .expect("that's a big integer"),
            stack_slot_count_float: u8::try_from(func.spillslot_size(RegClass::Float))
                .expect("that's a big float"),
            allocs: Vec::new(),
            inst_alloc_offsets,
            edits: Vec::new(),
            safepoint_slots: Vec::new(),
            func,
            mach_env,
            cfg,
        }
    }

    /// Return the stack slot index for `vreg`, allocating one if needed.
    pub fn get_or_alloc_stack_slot(&mut self, vreg: VReg) -> u32 {
        if let Some(idx) = self.vregs[vreg.vreg()].slot_idx {
            return idx;
        }
        self.alloc_stack_slot(vreg)
    }

    /// Allocate a fresh stack slot for `vreg`; panics if one already exists.
    pub fn alloc_stack_slot(&mut self, vreg: VReg) -> u32 {
        let data = &mut self.vregs[vreg.vreg()];
        if data.slot_idx.is_some() {
            panic!("Trying to allocate already allocated stack slot");
        }
        let size = if vreg.class() == RegClass::Int {
            self.stack_slot_count_int
        } else {
            self.stack_slot_count_float
        };
        let idx = self.cur_stack_slot_idx;
        self.cur_stack_slot_idx += size as u32;
        data.slot_idx = Some(idx);
        idx
    }
    /// Record a move of `vreg` from its stack slot into `preg` at `pos`,
    /// evicting whatever currently occupies `preg`.
    pub fn move_to_preg(&mut self, vreg: VReg, preg: PReg, pos: ProgPoint) {
        // Evict the previous occupant of the target preg, if any, keeping the
        // reftype counter consistent.
        if let Some(old_vreg) = self.pregs[preg.index()].vreg {
            let vdata = &mut self.vregs[old_vreg as usize];
            debug_assert!(vdata.preg.is_some());
            debug_assert_eq!(vdata.preg.unwrap(), preg);
            vdata.preg = None;
            if vdata.reftype {
                self.reftype_vregs_in_pregs_count -= 1;
            }
        }
        // Detach `vreg` from the preg it currently lives in, if any.
        if let Some(old_preg) = self.vregs[vreg.vreg()].preg {
            // TODO: allow multiple pregs for a single vreg?
            let pdata = &mut self.pregs[old_preg.index()];
            debug_assert!(pdata.vreg.is_some());
            debug_assert_eq!(pdata.vreg.unwrap(), vreg.vreg() as u32);
            pdata.vreg = None;
            if self.vregs[vreg.vreg()].reftype {
                self.reftype_vregs_in_pregs_count -= 1;
            }
        }
        let vdata = &mut self.vregs[vreg.vreg()];
        let pdata = &mut self.pregs[preg.index()];
        if vdata.slot_idx.is_none() {
            panic!("Trying to move from vreg that has no stack slot to preg");
        }
        self.edits.push((
            pos,
            Edit::Move {
                from: Allocation::stack(SpillSlot::new(vdata.slot_idx.unwrap() as usize)),
                to: Allocation::reg(preg),
            },
        ));
        vdata.preg = Some(preg);
        pdata.vreg = Some(vreg.vreg() as u32);
        if vdata.reftype {
            self.reftype_vregs_in_pregs_count += 1;
        }
    }

    /// Record a move of `vreg` from `preg` back to its stack slot at `pos`.
    /// The register binding itself is left untouched.
    pub fn move_to_stack(&mut self, preg: PReg, vreg: VReg, pos: ProgPoint) {
        let vdata = &mut self.vregs[vreg.vreg()];
        let pdata = &mut self.pregs[preg.index()];
        if pdata.vreg.is_none() || vdata.preg.is_none() {
            panic!("Trying to move from unallocated preg/vreg to stack");
        }
        debug_assert_eq!(vdata.preg.unwrap(), preg);
        debug_assert_eq!(pdata.vreg.unwrap(), vreg.vreg() as u32);
        if vdata.slot_idx.is_none() {
            panic!("Trying to move to vreg without stack slot");
        }
        self.edits.push((
            pos,
            Edit::Move {
                from: Allocation::reg(preg),
                to: Allocation::stack(SpillSlot::new(vdata.slot_idx.unwrap() as usize)),
            },
        ));
    }

    /// Drop the binding of whatever vreg currently lives in `preg`.
    pub fn clear_preg(&mut self, preg: PReg) {
        let pdata = &mut self.pregs[preg.index()];
        if let Some(vreg) = pdata.vreg {
            let vdata = &mut self.vregs[vreg as usize];
            debug_assert_eq!(vdata.preg.unwrap(), preg);
            vdata.preg = None;
            pdata.vreg = None;
            if vdata.reftype {
                self.reftype_vregs_in_pregs_count -= 1;
            }
        }
    }

    /// Drop `vreg` from the preg it currently lives in, if any.
    pub fn clear_vreg_from_reg(&mut self, vreg: VReg) {
        let vdata = &mut self.vregs[vreg.vreg()];
        if let Some(preg) = vdata.preg {
            debug_assert_eq!(self.pregs[preg.index()].vreg.unwrap(), vreg.vreg() as u32);
            self.pregs[preg.index()].vreg = None;
            vdata.preg = None;
            if vdata.reftype {
                self.reftype_vregs_in_pregs_count -= 1;
            }
        }
    }

    /// Compute a depth-first preorder over the CFG, starting at the entry block.
    fn calc_preorder(func: &F) -> Vec<Block> {
        let entry = func.entry_block();
        let mut ret = vec![entry];
        let mut visited = vec![false; func.num_blocks()];
        visited[entry.index()] = true;

        struct State<'a> {
            block: Block,
            succs: &'a [Block],
            next_succ: usize,
        }

        let mut stack: SmallVec<[State; 64]> = smallvec![];
        stack.push(State {
            block: entry,
            succs: func.block_succs(entry),
            next_succ: 0,
        });

        while let Some(state) = stack.last_mut() {
            if state.next_succ >= state.succs.len() {
                stack.pop();
                continue;
            }
            let block = state.succs[state.next_succ];
            state.next_succ += 1;
            if state.next_succ >= state.succs.len() {
                stack.pop();
            }
            // Visit each block only once; without this check, shared successors
            // would be emitted multiple times and back edges would loop forever.
            if visited[block.index()] {
                continue;
            }
            visited[block.index()] = true;
            ret.push(block);
            let succs = func.block_succs(block);
            if !succs.is_empty() {
                stack.push(State {
                    block,
                    succs,
                    next_succ: 0,
                });
            }
        }
        ret
    }
}

pub fn run<F: Function>(
    func: &F,
    mach_env: &MachineEnv,
    cfg: CFGInfo,
) -> Result<Output, RegAllocError> {
    if func.multi_spillslot_named_by_last_slot() {
        panic!("MultiSpillslotIndexPos not supported");
    }
    let mut state = FastAllocState::init(func, mach_env, &cfg);
    todo!("")
}
fn setup_entry_params<'a, F: Function>(
    state: &mut FastAllocState<'a, F>,
) -> Result<(), RegAllocError> {
    // Set the vreg locations for the entry block's parameters and copy them to the stack.
    let entry = state.func.entry_block();
    todo!("")
}
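
// The explicit-stack DFS in `calc_preorder` is easiest to sanity-check in
// isolation. The sketch below is illustrative only, under the assumption that
// block successors can be modelled as a plain adjacency list: the hypothetical
// `preorder` helper and the toy graph are not part of the allocator or the
// `Function` trait, they just mirror the same traversal with `usize` node ids.
#[cfg(test)]
mod preorder_sketch {
    use alloc::vec;
    use alloc::vec::Vec;

    /// Explicit-stack DFS preorder over `succs`, visiting each node at most
    /// once, mirroring `FastAllocState::calc_preorder`.
    fn preorder(entry: usize, succs: &[Vec<usize>]) -> Vec<usize> {
        let mut ret = vec![entry];
        let mut visited = vec![false; succs.len()];
        visited[entry] = true;
        // Stack of (node, index of the next successor to examine).
        let mut stack = vec![(entry, 0usize)];
        while let Some((node, next)) = stack.last_mut() {
            if *next >= succs[*node].len() {
                stack.pop();
                continue;
            }
            let child = succs[*node][*next];
            *next += 1;
            if !visited[child] {
                visited[child] = true;
                ret.push(child);
                stack.push((child, 0));
            }
        }
        ret
    }

    #[test]
    fn diamond_with_back_edge() {
        // 0 -> {1, 2}, 1 -> 3, 2 -> 3, 3 -> 0 (back edge).
        let succs = vec![vec![1, 2], vec![3], vec![3], vec![0]];
        // Every node appears exactly once, in depth-first discovery order.
        assert_eq!(preorder(0, &succs), vec![0, 1, 3, 2]);
    }
}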