From 6e6e408a056a4bc3d6cf3ed556480a7460db95a4 Mon Sep 17 00:00:00 2001
From: Tobias Schwarz
Date: Wed, 24 May 2023 20:00:52 +0200
Subject: [PATCH] WIP

---
 src/ion/fast_alloc2.rs | 170 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 170 insertions(+)

diff --git a/src/ion/fast_alloc2.rs b/src/ion/fast_alloc2.rs
index f5fbbcb..ba9d079 100644
--- a/src/ion/fast_alloc2.rs
+++ b/src/ion/fast_alloc2.rs
@@ -685,6 +685,7 @@ impl<'a, F: Function> FastAlloc<'a, F> {
     }
 
     // TODO: hardcode operand patterns for ISA, e.g. Early Use 1, Early Use 2, Late Reuse(1) for x86 and Early Use 1, Early Use 2, Late Def 1 for ARM
+    // (also find out pattern when memory operand is used)
 
     // we created an ordered list of operands that is divided into six blocks:
     // - fixed uses
@@ -774,6 +775,10 @@
             let op = &operands[op_idx];
             debug_assert_eq!(op.kind(), OperandKind::Use);
 
+            if let Some(preg) = op.as_fixed_nonallocatable() {
+                panic!("TODO")
+            }
+
             match op.constraint() {
                 OperandConstraint::FixedReg(preg) => {
                     if fixed_use_regs.contains(preg) {
@@ -892,8 +897,160 @@
         }
 
         trace!("Fourth alloc pass: Non-fixed uses and early defs");
+        while op_lookup_idx < nf_use_end {
+            let op_idx = op_lookup[op_lookup_idx] as usize;
+            op_lookup_idx += 1;
+            let op = &operands[op_idx];
+            debug_assert!(op.kind() == OperandKind::Use || op.pos() == OperandPos::Early);
+            debug_assert_eq!(op.constraint(), OperandConstraint::Reg);
+
+            match op.constraint() {
+                OperandConstraint::Reg => {
+                    if op.kind() == OperandKind::Use {
+                        match self.vregs[op.vreg().vreg()].preg() {
+                            Some(preg) => {
+                                self.allocs[alloc_idx + op_idx] = Allocation::reg(preg);
+                                regs_allocated.add(preg);
+
+                                if op.pos() == OperandPos::Late {
+                                    late_def_disallow.add(preg);
+                                }
+                                trace!(" -> Allocated op {} to {}", op_idx, preg);
+                            }
+                            None => {
+                                let preg = self.find_free_reg(
+                                    op.vreg().class(),
+                                    regs_allocated,
+                                    true,
+                                    inst,
+                                    block,
+                                    block_last_inst,
+                                )?;
+                                debug_assert!(self.pregs[preg.index()].vreg().is_none());
+                                debug_assert!(self.vregs[op.vreg().vreg()]
+                                    .stack_slot()
+                                    .is_none());
+                                self.move_vreg_to_preg(
+                                    op.vreg(),
+                                    preg,
+                                    ProgPoint::before(inst),
+                                    inst,
+                                    block,
+                                    block_last_inst,
+                                );
+                                trace!(" -> Allocated op {} to {}", op_idx, preg);
+                            }
+                        }
+                        self.vregs[op.vreg().vreg()].cur_use_idx -= 1;
+                    } else {
+                        let preg = self.find_free_reg(
+                            op.vreg().class(),
+                            regs_allocated,
+                            true,
+                            inst,
+                            block,
+                            block_last_inst,
+                        )?;
+                        debug_assert!(self.pregs[preg.index()].vreg().is_none());
+                        debug_assert!(self.vregs[op.vreg().vreg()].stack_slot().is_none());
+                        self.allocs[alloc_idx + op_idx] = Allocation::reg(preg);
+                        self.assign_preg(preg, op.vreg());
+                        regs_allocated.add(preg);
+                        trace!(" -> Allocated op {} to {}", op_idx, preg);
+                    }
+                }
+                _ => unreachable!(),
+            }
+        }
+        trace!("Fifth alloc pass: Non-fixed defs and reuses");
+        // need to handle reuses first
+        let op_lookup_idx_bak = op_lookup_idx;
+        while op_lookup_idx < nf_def_end {
+            let op_idx = op_lookup[op_lookup_idx] as usize;
+            op_lookup_idx += 1;
+            let op = &operands[op_idx];
+            debug_assert!(op.kind() == OperandKind::Def);
+
+            match op.constraint() {
+                OperandConstraint::Reg => {
+                    continue;
+                }
+                OperandConstraint::Reuse(reuse_idx) => {
+                    let alloc = self.allocs[alloc_idx + reuse_idx];
+                    debug_assert!(alloc.is_reg());
+                    let preg = alloc.as_reg().unwrap();
+                    late_def_disallow.add(preg);
+                    self.allocs[alloc_idx + op_idx] = alloc;
+                    self.assign_preg(preg, op.vreg());
+                    trace!(" -> Allocated op {} to {}", op_idx, preg);
+                }
+                _ => unreachable!(),
+            }
+        }
+
+        op_lookup_idx = op_lookup_idx_bak;
+        while op_lookup_idx < nf_def_end {
+            let op_idx = op_lookup[op_lookup_idx] as usize;
+            op_lookup_idx += 1;
+            let op = &operands[op_idx];
+            debug_assert!(op.kind() == OperandKind::Def);
+
+            match op.constraint() {
+                OperandConstraint::Reg => {
+                    let preg = self.find_free_reg(
+                        op.vreg().class(),
+                        late_def_disallow,
+                        true,
+                        inst,
+                        block,
+                        block_last_inst,
+                    )?;
+                    self.allocs[alloc_idx + op_idx] = Allocation::reg(preg);
+                    late_def_disallow.add(preg);
+                    self.assign_preg(preg, op.vreg());
+                    trace!(" -> Allocated op {} to {}", op_idx, preg);
+                }
+                OperandConstraint::Reuse(_) => {
+                    continue;
+                }
+                _ => unreachable!(),
+            }
+        }
+        trace!("Sixth alloc pass: Any defs");
+        while op_lookup_idx < op_lookup.len() {
+            let op_idx = op_lookup[op_lookup_idx] as usize;
+            op_lookup_idx += 1;
+            let op = &operands[op_idx];
+            debug_assert_eq!(op.kind(), OperandKind::Def);
+            assert_eq!(op.constraint(), OperandConstraint::Any);
+
+            let mut reg_blacklist = late_def_disallow;
+            if op.pos() == OperandPos::Early {
+                reg_blacklist.union_from(regs_allocated);
+            }
+            let preg = self.find_free_reg(
+                op.vreg().class(),
+                reg_blacklist,
+                false,
+                inst,
+                block,
+                block_last_inst,
+            );
+
+            if let Ok(preg) = preg {
+                self.allocs[alloc_idx + op_idx] = Allocation::reg(preg);
+                late_def_disallow.add(preg);
+                self.assign_preg(preg, op.vreg());
+                trace!(" -> Allocated op {} to {}", op_idx, preg);
+            } else {
+                let slot = self.alloc_stack_slot(op.vreg());
+                self.allocs[alloc_idx + op_idx] =
+                    Allocation::stack(SpillSlot::new(slot as usize));
+                trace!(" -> Allocated op {} to slot {}", op_idx, slot);
+            }
+        }
 
         todo!("")
     }
 
     todo!("")
@@ -963,6 +1120,19 @@
 
     // Helper functions
 
+    // spills if necessary
+    fn find_free_reg(
+        &mut self,
+        reg_class: RegClass,
+        reg_blacklist: PRegSet,
+        spill: bool,
+        inst: Inst,
+        block: Block,
+        block_last_inst: usize,
+    ) -> Result<PReg, RegAllocError> {
+        todo!("")
+    }
+
     // Moving
 
     // Moves vreg to preg and while spilling any vreg alive in preg
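
Review note (not part of the patch): the passes above consume `op_lookup` in
contiguous blocks delimited by `nf_use_end` and `nf_def_end`, as promised by
the "ordered list of operands ... divided into six blocks" comment. A minimal
standalone sketch of that partitioning follows; the `Category` enum only
models the blocks visible in these hunks (the full six-block list is cut off
at the hunk boundary), so its names and order are assumptions, not the actual
WIP code.

    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    enum Category {
        Fixed,               // fixed uses/defs, handled by the earlier passes
        NonFixedUseEarlyDef, // fourth pass
        NonFixedDefOrReuse,  // fifth pass (scanned twice: reuses, then Reg defs)
        AnyDef,              // sixth pass
    }

    /// Buckets operand indices by category and returns
    /// (op_lookup, nf_use_end, nf_def_end) as the loops above use them.
    fn order_operands(categories: &[Category]) -> (Vec<u8>, usize, usize) {
        // operand indices are stored as u8, like op_lookup in the patch
        debug_assert!(categories.len() <= u8::MAX as usize);
        let mut op_lookup: Vec<u8> = (0..categories.len() as u8).collect();
        op_lookup.sort_by_key(|&i| categories[i as usize]);

        let nf_use_end = op_lookup
            .iter()
            .position(|&i| categories[i as usize] > Category::NonFixedUseEarlyDef)
            .unwrap_or(op_lookup.len());
        let nf_def_end = op_lookup
            .iter()
            .position(|&i| categories[i as usize] > Category::NonFixedDefOrReuse)
            .unwrap_or(op_lookup.len());
        (op_lookup, nf_use_end, nf_def_end)
    }

Ordering the operands once up front keeps every pass a single linear scan over
a contiguous range, which is what makes this single-pass allocator cheap per
instruction.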
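Review note (not part of the patch): `find_free_reg` is declared under
"Helper functions" but still stubbed out with todo!(""). The following
standalone model spells out the contract its call sites imply; the first-fit
pick, the eviction choice, and every name in it (`Pregs`, `NoRegAvailable`)
are illustrative assumptions, not the planned implementation. The real helper
takes a `PRegSet` blacklist plus `inst`/`block`/`block_last_inst` for placing
spill moves; the model drops those.

    #[derive(Clone, Copy, Debug)]
    struct PReg(usize);

    #[derive(Debug)]
    struct NoRegAvailable;

    struct Pregs {
        // holder[i] = Some(vreg) if physical register i currently holds a vreg
        holder: Vec<Option<u32>>,
    }

    impl Pregs {
        fn find_free_reg(
            &mut self,
            blacklist: &[bool], // blacklist[i]: register i may not be used
            spill: bool,
        ) -> Result<PReg, NoRegAvailable> {
            // First choice: a register that is allowed and currently empty.
            if let Some(i) =
                (0..self.holder.len()).find(|&i| !blacklist[i] && self.holder[i].is_none())
            {
                return Ok(PReg(i));
            }
            if !spill {
                return Err(NoRegAvailable);
            }
            // Otherwise evict some allowed register; the real allocator would
            // also record a spill move for the displaced vreg here.
            let victim = (0..self.holder.len())
                .find(|&i| !blacklist[i])
                .ok_or(NoRegAvailable)?;
            self.holder[victim] = None;
            Ok(PReg(victim))
        }
    }

This matches how the call sites use the spill flag: the fourth and fifth
passes pass `true` and still propagate failure with `?`, while the sixth pass
passes `false` and falls back to a fresh stack slot for Any-constrained defs.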