Removed implicit indirection when computing heap base. (#363)
Fix expected legalized heap_addr

Committed by: Dan Gohman
Parent: f97ad59991
Commit: b3c3ca331b
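
In terms of the legalized code that the test expectations below check, the change removes one load: the heap base used to sit behind a pointer at a fixed offset from the context value (v999), so the legalizer emitted an address computation followed by a load; now the same iadd_imm yields the base itself. A small, runnable Rust model of the two shapes (illustrative only, not Cranelift code; the 64-byte offset and slot layout are taken from the test):

/// Old shape: `iadd_imm` produced the *address* of the heap base, which then
/// had to be loaded before the offset could be added.
fn heap_addr_old(ctx: &[u64; 16], offset: u64) -> u64 {
    // Models `$(haddr) = iadd_imm v999, 64` followed by
    // `$(hbase) = load.i64 notrap aligned $haddr`.
    let hbase = ctx[8];
    hbase.wrapping_add(offset) // v1 = iadd $hbase, $xoff
}

/// New shape: `iadd_imm` produces the heap base directly; the load is gone.
fn heap_addr_new(ctx: &[u64; 16], offset: u64) -> u64 {
    let hbase = (ctx.as_ptr() as u64).wrapping_add(64); // $(hbase) = iadd_imm v999, 64
    hbase.wrapping_add(offset) // v1 = iadd $hbase, $xoff
}

fn main() {
    let mut ctx = [0u64; 16];
    ctx[8] = 0x1000; // old layout: heap base stored in the context
    assert_eq!(heap_addr_old(&ctx, 16), 0x1010);
    assert_eq!(heap_addr_new(&ctx, 16), ctx.as_ptr() as u64 + 64 + 16);
    println!("ok");
}
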
@@ -53,8 +53,7 @@ ebb0(v0: i32, v999: i64):
 ; Boundscheck should be eliminated.
 ; Checks here are assuming that no pipehole opts fold the load offsets.
 ; nextln: $(xoff=$V) = uextend.i64 v0
-; nextln: $(haddr=$V) = iadd_imm v999, 64
-; nextln: $(hbase=$V) = load.i64 notrap aligned $haddr
+; nextln: $(hbase=$V) = iadd_imm v999, 64
 ; nextln: v1 = iadd $hbase, $xoff
 v2 = load.f32 v1+16
 ; nextln: v2 = load.f32 v1+16
@@ -101,8 +100,7 @@ ebb0(v0: i32, v999: i64):
 ; check: $ok:
 ; Checks here are assuming that no pipehole opts fold the load offsets.
 ; nextln: $(xoff=$V) = uextend.i64 v0
-; nextln: $(haddr=$V) = iadd_imm.i64 v999, 64
-; nextln: $(hbase=$V) = load.i64 notrap aligned $haddr
+; nextln: $(hbase=$V) = iadd_imm.i64 v999, 64
 ; nextln: v1 = iadd $hbase, $xoff
 v2 = load.f32 v1+0x7fff_ffff
 ; nextln: v2 = load.f32 v1+0x7fff_ffff

@@ -6,7 +6,7 @@
 use cursor::{Cursor, FuncCursor};
 use flowgraph::ControlFlowGraph;
 use ir::condcodes::IntCC;
-use ir::{self, InstBuilder, MemFlags};
+use ir::{self, InstBuilder};
 use isa::TargetIsa;
 
 /// Expand a `heap_addr` instruction according to the definition of the heap.
@@ -57,13 +57,7 @@ fn dynamic_addr(
     pos.use_srcloc(inst);
 
     // Start with the bounds check. Trap if `offset + size > bound`.
-    let bound_addr = pos.ins().global_value(addr_ty, bound_gv);
-    let mut mflags = MemFlags::new();
-    // The bound variable is requied to be accessible and aligned.
-    mflags.set_notrap();
-    mflags.set_aligned();
-    let bound = pos.ins().load(offset_ty, mflags, bound_addr, 0);
-
+    let bound = pos.ins().global_value(addr_ty, bound_gv);
     let oob;
     if size == 1 {
         // `offset > bound - 1` is the same as `offset >= bound`.
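
The only change to `dynamic_addr` above is where `bound` comes from: it is now the result of `global_value` rather than a load through `bound_addr`. The bounds check it goes on to emit is untouched; as a reference point, here is a standalone sketch of that check's logic, including the `size == 1` simplification from the comment (an illustrative model only; how overflow is avoided here is this sketch's choice, not necessarily the legalizer's):

/// Model of the condition `dynamic_addr` guards a heap access with:
/// the access is out of bounds when `offset + size > bound`.
fn is_oob(offset: u64, size: u64, bound: u64) -> bool {
    if size == 1 {
        // `offset > bound - 1` is the same as `offset >= bound`,
        // and it avoids computing `bound - 1` (which underflows at bound == 0).
        offset >= bound
    } else {
        // Checked add so `offset + size` cannot wrap around.
        offset.checked_add(size).map_or(true, |end| end > bound)
    }
}

fn main() {
    assert!(!is_oob(0, 1, 4096));
    assert!(is_oob(4096, 1, 4096)); // one past the end
    assert!(is_oob(4081, 16, 4096)); // straddles the bound
    assert!(!is_oob(4080, 16, 4096)); // last in-bounds 16-byte access
    println!("ok");
}
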
@@ -163,12 +157,7 @@ fn offset_addr(
     match pos.func.heaps[heap].base {
         ir::HeapBase::ReservedReg => unimplemented!(),
         ir::HeapBase::GlobalValue(base_gv) => {
-            let base_addr = pos.ins().global_value(addr_ty, base_gv);
-            let mut mflags = MemFlags::new();
-            // The base address variable is requied to be accessible and aligned.
-            mflags.set_notrap();
-            mflags.set_aligned();
-            let base = pos.ins().load(addr_ty, mflags, base_addr, 0);
+            let base = pos.ins().global_value(addr_ty, base_gv);
             pos.func.dfg.replace(inst).iadd(base, offset);
         }
     }
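
`offset_addr` follows the same pattern: for `HeapBase::GlobalValue`, the base is now the global value's result itself, so the expansion collapses to a single `iadd`. A minimal standalone model of that match (hypothetical enum and values, not the real `ir::HeapBase`):

/// Simplified stand-in for the heap-base description.
#[allow(dead_code)]
enum HeapBase {
    /// Base lives in a reserved register; still unimplemented in the legalizer.
    ReservedReg,
    /// The global value's computed result is the heap base itself.
    GlobalValue(u64),
}

/// Model of the new expansion: base + offset, with no intermediate load.
fn offset_addr(base: HeapBase, offset: u64) -> u64 {
    match base {
        HeapBase::ReservedReg => unimplemented!(),
        HeapBase::GlobalValue(base) => base.wrapping_add(offset),
    }
}

fn main() {
    let heap = HeapBase::GlobalValue(0x10_0000);
    assert_eq!(offset_addr(heap, 0x20), 0x10_0020);
    println!("ok");
}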