Split EntityMap into entity::PrimaryMap and entity::EntityMap.
The new PrimaryMap replaces the primary EntityMap and the PrimaryEntityData marker trait which was causing some confusion. We now have a clear division between the two types of maps: - PrimaryMap is used to assign entity numbers to the primary data for an entity. - EntityMap is a secondary mapping adding additional info. The split also means that the secondary EntityMap can now behave as if all keys have a default value. This means that we can get rid of the annoying ensure() and get_or_default() methods that were used everywhere instead of indexing. Just use normal indexing now; non-existent keys will return the default value.
This commit is contained in:
@@ -107,11 +107,11 @@ impl SubTest for TestBinEmit {
|
|||||||
// Give an encoding to any instruction that doesn't already have one.
|
// Give an encoding to any instruction that doesn't already have one.
|
||||||
for ebb in func.layout.ebbs() {
|
for ebb in func.layout.ebbs() {
|
||||||
for inst in func.layout.ebb_insts(ebb) {
|
for inst in func.layout.ebb_insts(ebb) {
|
||||||
if !func.encodings.get_or_default(inst).is_legal() {
|
if !func.encodings[inst].is_legal() {
|
||||||
if let Ok(enc) = isa.encode(&func.dfg,
|
if let Ok(enc) = isa.encode(&func.dfg,
|
||||||
&func.dfg[inst],
|
&func.dfg[inst],
|
||||||
func.dfg.ctrl_typevar(inst)) {
|
func.dfg.ctrl_typevar(inst)) {
|
||||||
*func.encodings.ensure(inst) = enc;
|
func.encodings[inst] = enc;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -158,7 +158,7 @@ impl SubTest for TestBinEmit {
|
|||||||
ebb);
|
ebb);
|
||||||
for inst in func.layout.ebb_insts(ebb) {
|
for inst in func.layout.ebb_insts(ebb) {
|
||||||
sink.text.clear();
|
sink.text.clear();
|
||||||
let enc = func.encodings.get_or_default(inst);
|
let enc = func.encodings[inst];
|
||||||
|
|
||||||
// Send legal encodings into the emitter.
|
// Send legal encodings into the emitter.
|
||||||
if enc.is_legal() {
|
if enc.is_legal() {
|
||||||
|
|||||||
@@ -67,7 +67,7 @@ pub fn relax_branches(func: &mut Function, isa: &TargetIsa) -> Result<CodeOffset
|
|||||||
}
|
}
|
||||||
|
|
||||||
while let Some(inst) = cur.next_inst() {
|
while let Some(inst) = cur.next_inst() {
|
||||||
let enc = cur.func.encodings.get_or_default(inst);
|
let enc = cur.func.encodings[inst];
|
||||||
let size = encinfo.bytes(enc);
|
let size = encinfo.bytes(enc);
|
||||||
|
|
||||||
// See if this might be a branch that is out of range.
|
// See if this might be a branch that is out of range.
|
||||||
|
|||||||
@@ -157,7 +157,7 @@ impl<'c, 'f> ir::InstInserterBase<'c> for &'c mut EncCursor<'f> {
|
|||||||
// Assign an encoding.
|
// Assign an encoding.
|
||||||
match self.isa
|
match self.isa
|
||||||
.encode(&self.func.dfg, &self.func.dfg[inst], ctrl_typevar) {
|
.encode(&self.func.dfg, &self.func.dfg[inst], ctrl_typevar) {
|
||||||
Ok(e) => *self.func.encodings.ensure(inst) = e,
|
Ok(e) => self.func.encodings[inst] = e,
|
||||||
Err(_) => panic!("can't encode {}", self.display_inst(inst)),
|
Err(_) => panic!("can't encode {}", self.display_inst(inst)),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
//! A Dominator Tree represented as mappings of Ebbs to their immediate dominator.
|
//! A Dominator Tree represented as mappings of Ebbs to their immediate dominator.
|
||||||
|
|
||||||
use entity_map::EntityMap;
|
use entity::EntityMap;
|
||||||
use flowgraph::{ControlFlowGraph, BasicBlock};
|
use flowgraph::{ControlFlowGraph, BasicBlock};
|
||||||
use ir::{Ebb, Inst, Function, Layout, ProgramOrder, ExpandedProgramPoint};
|
use ir::{Ebb, Inst, Function, Layout, ProgramOrder, ExpandedProgramPoint};
|
||||||
use packed_option::PackedOption;
|
use packed_option::PackedOption;
|
||||||
@@ -339,7 +339,7 @@ impl DominatorTree {
|
|||||||
pub fn recompute_split_ebb(&mut self, old_ebb: Ebb, new_ebb: Ebb, split_jump_inst: Inst) {
|
pub fn recompute_split_ebb(&mut self, old_ebb: Ebb, new_ebb: Ebb, split_jump_inst: Inst) {
|
||||||
if !self.is_reachable(old_ebb) {
|
if !self.is_reachable(old_ebb) {
|
||||||
// old_ebb is unreachable, it stays so and new_ebb is unreachable too
|
// old_ebb is unreachable, it stays so and new_ebb is unreachable too
|
||||||
*self.nodes.ensure(new_ebb) = Default::default();
|
self.nodes[new_ebb] = Default::default();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// We use the RPO comparison on the postorder list so we invert the operands of the
|
// We use the RPO comparison on the postorder list so we invert the operands of the
|
||||||
@@ -350,7 +350,7 @@ impl DominatorTree {
|
|||||||
.binary_search_by(|probe| self.rpo_cmp_ebb(old_ebb, *probe))
|
.binary_search_by(|probe| self.rpo_cmp_ebb(old_ebb, *probe))
|
||||||
.expect("the old ebb is not declared to the dominator tree");
|
.expect("the old ebb is not declared to the dominator tree");
|
||||||
let new_ebb_rpo = self.insert_after_rpo(old_ebb, old_ebb_postorder_index, new_ebb);
|
let new_ebb_rpo = self.insert_after_rpo(old_ebb, old_ebb_postorder_index, new_ebb);
|
||||||
*self.nodes.ensure(new_ebb) = DomNode {
|
self.nodes[new_ebb] = DomNode {
|
||||||
rpo_number: new_ebb_rpo,
|
rpo_number: new_ebb_rpo,
|
||||||
idom: Some(split_jump_inst).into(),
|
idom: Some(split_jump_inst).into(),
|
||||||
};
|
};
|
||||||
|
|||||||
55
lib/cretonne/src/entity/keys.rs
Normal file
55
lib/cretonne/src/entity/keys.rs
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
//! A double-ended iterator over entity references.
|
||||||
|
|
||||||
|
use entity::EntityRef;
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
|
||||||
|
/// Iterate over all keys in order.
|
||||||
|
pub struct Keys<K: EntityRef> {
|
||||||
|
pos: usize,
|
||||||
|
rev_pos: usize,
|
||||||
|
unused: PhantomData<K>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EntityRef> Keys<K> {
|
||||||
|
/// Create a `Keys` iterator that visits `count` entities starting from 0.
|
||||||
|
pub fn new(count: usize) -> Keys<K> {
|
||||||
|
Keys {
|
||||||
|
pos: 0,
|
||||||
|
rev_pos: count,
|
||||||
|
unused: PhantomData,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EntityRef> Iterator for Keys<K> {
|
||||||
|
type Item = K;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
if self.pos < self.rev_pos {
|
||||||
|
let k = K::new(self.pos);
|
||||||
|
self.pos += 1;
|
||||||
|
Some(k)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
let size = self.rev_pos - self.pos;
|
||||||
|
(size, Some(size))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EntityRef> DoubleEndedIterator for Keys<K> {
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
if self.rev_pos > self.pos {
|
||||||
|
let k = K::new(self.rev_pos - 1);
|
||||||
|
self.rev_pos -= 1;
|
||||||
|
Some(k)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: EntityRef> ExactSizeIterator for Keys<K> {}
|
||||||
135
lib/cretonne/src/entity/map.rs
Normal file
135
lib/cretonne/src/entity/map.rs
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
//! Densely numbered entity references as mapping keys.
|
||||||
|
//!
|
||||||
|
//! The `EntityMap` data structure uses the dense index space to implement a map with a vector.
|
||||||
|
//! Unlike `PrimaryMap`, and `EntityMap` can't be used to allocate entity references. It is used to
|
||||||
|
//! associate secondary information with entities.
|
||||||
|
|
||||||
|
use entity::{EntityRef, Keys};
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::ops::{Index, IndexMut};
|
||||||
|
|
||||||
|
/// A mapping `K -> V` for densely indexed entity references.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct EntityMap<K, V>
|
||||||
|
where K: EntityRef,
|
||||||
|
V: Clone
|
||||||
|
{
|
||||||
|
elems: Vec<V>,
|
||||||
|
default: V,
|
||||||
|
unused: PhantomData<K>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Shared `EntityMap` implementation for all value types.
|
||||||
|
impl<K, V> EntityMap<K, V>
|
||||||
|
where K: EntityRef,
|
||||||
|
V: Clone
|
||||||
|
{
|
||||||
|
/// Create a new empty map.
|
||||||
|
pub fn new() -> Self
|
||||||
|
where V: Default
|
||||||
|
{
|
||||||
|
EntityMap {
|
||||||
|
elems: Vec::new(),
|
||||||
|
default: Default::default(),
|
||||||
|
unused: PhantomData,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the element at `k` if it exists.
|
||||||
|
pub fn get(&self, k: K) -> Option<&V> {
|
||||||
|
self.elems.get(k.index())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Is this map completely empty?
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.elems.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove all entries from this map.
|
||||||
|
pub fn clear(&mut self) {
|
||||||
|
self.elems.clear()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over all the keys in this map.
|
||||||
|
pub fn keys(&self) -> Keys<K> {
|
||||||
|
Keys::new(self.elems.len())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resize the map to have `n` entries by adding default entries as needed.
|
||||||
|
pub fn resize(&mut self, n: usize) {
|
||||||
|
self.elems.resize(n, self.default.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Immutable indexing into an `EntityMap`.
|
||||||
|
///
|
||||||
|
/// All keys are permitted. Untouched entries have the default value.
|
||||||
|
impl<K, V> Index<K> for EntityMap<K, V>
|
||||||
|
where K: EntityRef,
|
||||||
|
V: Clone
|
||||||
|
{
|
||||||
|
type Output = V;
|
||||||
|
|
||||||
|
fn index(&self, k: K) -> &V {
|
||||||
|
self.get(k).unwrap_or(&self.default)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mutable indexing into an `EntityMap`.
|
||||||
|
///
|
||||||
|
/// The map grows as needed to accommodate new keys.
|
||||||
|
impl<K, V> IndexMut<K> for EntityMap<K, V>
|
||||||
|
where K: EntityRef,
|
||||||
|
V: Clone
|
||||||
|
{
|
||||||
|
fn index_mut(&mut self, k: K) -> &mut V {
|
||||||
|
let i = k.index();
|
||||||
|
if i >= self.elems.len() {
|
||||||
|
self.resize(i + 1);
|
||||||
|
}
|
||||||
|
&mut self.elems[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
// `EntityRef` impl for testing.
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||||
|
struct E(u32);
|
||||||
|
|
||||||
|
impl EntityRef for E {
|
||||||
|
fn new(i: usize) -> Self {
|
||||||
|
E(i as u32)
|
||||||
|
}
|
||||||
|
fn index(self) -> usize {
|
||||||
|
self.0 as usize
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn basic() {
|
||||||
|
let r0 = E(0);
|
||||||
|
let r1 = E(1);
|
||||||
|
let r2 = E(2);
|
||||||
|
let mut m = EntityMap::new();
|
||||||
|
|
||||||
|
let v: Vec<E> = m.keys().collect();
|
||||||
|
assert_eq!(v, []);
|
||||||
|
|
||||||
|
m[r2] = 3;
|
||||||
|
m[r1] = 5;
|
||||||
|
|
||||||
|
assert_eq!(m[r1], 5);
|
||||||
|
assert_eq!(m[r2], 3);
|
||||||
|
|
||||||
|
let v: Vec<E> = m.keys().collect();
|
||||||
|
assert_eq!(v, [r0, r1, r2]);
|
||||||
|
|
||||||
|
let shared = &m;
|
||||||
|
assert_eq!(shared[r0], 0);
|
||||||
|
assert_eq!(shared[r1], 5);
|
||||||
|
assert_eq!(shared[r2], 3);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -5,6 +5,14 @@
|
|||||||
//!
|
//!
|
||||||
//! Various data structures based on the entity references are defined in sub-modules.
|
//! Various data structures based on the entity references are defined in sub-modules.
|
||||||
|
|
||||||
|
mod keys;
|
||||||
|
mod map;
|
||||||
|
mod primary;
|
||||||
|
|
||||||
|
pub use self::keys::Keys;
|
||||||
|
pub use self::map::EntityMap;
|
||||||
|
pub use self::primary::PrimaryMap;
|
||||||
|
|
||||||
/// A type wrapping a small integer index should implement `EntityRef` so it can be used as the key
|
/// A type wrapping a small integer index should implement `EntityRef` so it can be used as the key
|
||||||
/// of an `EntityMap` or `SparseMap`.
|
/// of an `EntityMap` or `SparseMap`.
|
||||||
pub trait EntityRef: Copy + Eq {
|
pub trait EntityRef: Copy + Eq {
|
||||||
|
|||||||
141
lib/cretonne/src/entity/primary.rs
Normal file
141
lib/cretonne/src/entity/primary.rs
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
//! Densely numbered entity references as mapping keys.
|
||||||
|
//!
|
||||||
|
//! The `PrimaryMap` data structure uses the dense index space to implement a map with a vector.
|
||||||
|
//!
|
||||||
|
//! A primary map contains the main definition of an entity, and it can be used to allocate new
|
||||||
|
//! entity references with the `push` method.
|
||||||
|
//!
|
||||||
|
//! There should only be a single `PrimaryMap` instance for a given `EntityRef` type, otherwise
|
||||||
|
//! conflicting references will be created.
|
||||||
|
|
||||||
|
use entity::{EntityRef, Keys};
|
||||||
|
use std::marker::PhantomData;
|
||||||
|
use std::ops::{Index, IndexMut};
|
||||||
|
|
||||||
|
/// A mapping `K -> V` for densely indexed entity references.
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct PrimaryMap<K, V>
|
||||||
|
where K: EntityRef
|
||||||
|
{
|
||||||
|
elems: Vec<V>,
|
||||||
|
unused: PhantomData<K>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K, V> PrimaryMap<K, V>
|
||||||
|
where K: EntityRef
|
||||||
|
{
|
||||||
|
/// Create a new empty map.
|
||||||
|
pub fn new() -> Self {
|
||||||
|
PrimaryMap {
|
||||||
|
elems: Vec::new(),
|
||||||
|
unused: PhantomData,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if `k` is a valid key in the map.
|
||||||
|
pub fn is_valid(&self, k: K) -> bool {
|
||||||
|
k.index() < self.elems.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the element at `k` if it exists.
|
||||||
|
pub fn get(&self, k: K) -> Option<&V> {
|
||||||
|
self.elems.get(k.index())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Is this map completely empty?
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.elems.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the total number of entity references created.
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.elems.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Iterate over all the keys in this map.
|
||||||
|
pub fn keys(&self) -> Keys<K> {
|
||||||
|
Keys::new(self.elems.len())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove all entries from this map.
|
||||||
|
pub fn clear(&mut self) {
|
||||||
|
self.elems.clear()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the key that will be assigned to the next pushed value.
|
||||||
|
pub fn next_key(&self) -> K {
|
||||||
|
K::new(self.elems.len())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Append `v` to the mapping, assigning a new key which is returned.
|
||||||
|
pub fn push(&mut self, v: V) -> K {
|
||||||
|
let k = self.next_key();
|
||||||
|
self.elems.push(v);
|
||||||
|
k
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Immutable indexing into an `PrimaryMap`.
|
||||||
|
/// The indexed value must be in the map.
|
||||||
|
impl<K, V> Index<K> for PrimaryMap<K, V>
|
||||||
|
where K: EntityRef
|
||||||
|
{
|
||||||
|
type Output = V;
|
||||||
|
|
||||||
|
fn index(&self, k: K) -> &V {
|
||||||
|
&self.elems[k.index()]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mutable indexing into an `PrimaryMap`.
|
||||||
|
impl<K, V> IndexMut<K> for PrimaryMap<K, V>
|
||||||
|
where K: EntityRef
|
||||||
|
{
|
||||||
|
fn index_mut(&mut self, k: K) -> &mut V {
|
||||||
|
&mut self.elems[k.index()]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
// `EntityRef` impl for testing.
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||||
|
struct E(u32);
|
||||||
|
|
||||||
|
impl EntityRef for E {
|
||||||
|
fn new(i: usize) -> Self {
|
||||||
|
E(i as u32)
|
||||||
|
}
|
||||||
|
fn index(self) -> usize {
|
||||||
|
self.0 as usize
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn basic() {
|
||||||
|
let r0 = E(0);
|
||||||
|
let r1 = E(1);
|
||||||
|
let m = PrimaryMap::<E, isize>::new();
|
||||||
|
|
||||||
|
let v: Vec<E> = m.keys().collect();
|
||||||
|
assert_eq!(v, []);
|
||||||
|
|
||||||
|
assert!(!m.is_valid(r0));
|
||||||
|
assert!(!m.is_valid(r1));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn push() {
|
||||||
|
let mut m = PrimaryMap::new();
|
||||||
|
let k1: E = m.push(12);
|
||||||
|
let k2 = m.push(33);
|
||||||
|
|
||||||
|
assert_eq!(m[k1], 12);
|
||||||
|
assert_eq!(m[k2], 33);
|
||||||
|
|
||||||
|
let v: Vec<E> = m.keys().collect();
|
||||||
|
assert_eq!(v, [k1, k2]);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,263 +0,0 @@
|
|||||||
//! Densely numbered entity references as mapping keys.
|
|
||||||
//!
|
|
||||||
//! The `EntityMap` data structure uses the dense index space to implement a map with a vector.
|
|
||||||
//! There are primary and secondary entity maps:
|
|
||||||
//!
|
|
||||||
//! - A *primary* `EntityMap` contains the main definition of an entity, and it can be used to
|
|
||||||
//! allocate new entity references with the `push` method. The values stores in a primary map
|
|
||||||
//! must implement the `PrimaryEntityData` marker trait.
|
|
||||||
//! - A *secondary* `EntityMap` contains additional data about entities kept in a primary map. The
|
|
||||||
//! values need to implement `Clone + Default` traits so the map can be grown with `ensure`.
|
|
||||||
|
|
||||||
use entity::EntityRef;
|
|
||||||
use std::marker::PhantomData;
|
|
||||||
use std::ops::{Index, IndexMut};
|
|
||||||
|
|
||||||
/// A mapping `K -> V` for densely indexed entity references.
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub struct EntityMap<K, V>
|
|
||||||
where K: EntityRef
|
|
||||||
{
|
|
||||||
elems: Vec<V>,
|
|
||||||
unused: PhantomData<K>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Shared `EntityMap` implementation for all value types.
|
|
||||||
impl<K, V> EntityMap<K, V>
|
|
||||||
where K: EntityRef
|
|
||||||
{
|
|
||||||
/// Create a new empty map.
|
|
||||||
pub fn new() -> Self {
|
|
||||||
EntityMap {
|
|
||||||
elems: Vec::new(),
|
|
||||||
unused: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Check if `k` is a valid key in the map.
|
|
||||||
pub fn is_valid(&self, k: K) -> bool {
|
|
||||||
k.index() < self.elems.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the element at `k` if it exists.
|
|
||||||
pub fn get(&self, k: K) -> Option<&V> {
|
|
||||||
self.elems.get(k.index())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Is this map completely empty?
|
|
||||||
pub fn is_empty(&self) -> bool {
|
|
||||||
self.elems.is_empty()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Remove all entries from this map.
|
|
||||||
pub fn clear(&mut self) {
|
|
||||||
self.elems.clear()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over all the keys in this map.
|
|
||||||
pub fn keys(&self) -> Keys<K> {
|
|
||||||
Keys {
|
|
||||||
pos: 0,
|
|
||||||
rev_pos: self.elems.len(),
|
|
||||||
unused: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A marker trait for data stored in primary entity maps.
|
|
||||||
///
|
|
||||||
/// A primary entity map can be used to allocate new entity references with the `push` method. It
|
|
||||||
/// is important that entity references can't be created anywhere else, so the data stored in a
|
|
||||||
/// primary entity map must be tagged as `PrimaryEntityData` to unlock the `push` method.
|
|
||||||
pub trait PrimaryEntityData {}
|
|
||||||
|
|
||||||
/// Additional methods for primary entry maps only.
|
|
||||||
///
|
|
||||||
/// These are identified by the `PrimaryEntityData` marker trait.
|
|
||||||
impl<K, V> EntityMap<K, V>
|
|
||||||
where K: EntityRef,
|
|
||||||
V: PrimaryEntityData
|
|
||||||
{
|
|
||||||
/// Get the key that will be assigned to the next pushed value.
|
|
||||||
pub fn next_key(&self) -> K {
|
|
||||||
K::new(self.elems.len())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Append `v` to the mapping, assigning a new key which is returned.
|
|
||||||
pub fn push(&mut self, v: V) -> K {
|
|
||||||
let k = self.next_key();
|
|
||||||
self.elems.push(v);
|
|
||||||
k
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the total number of entity references created.
|
|
||||||
pub fn len(&self) -> usize {
|
|
||||||
self.elems.len()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Additional methods for value types that implement `Clone` and `Default`.
|
|
||||||
///
|
|
||||||
/// When the value type implements these additional traits, the `EntityMap` can be resized
|
|
||||||
/// explicitly with the `ensure` method.
|
|
||||||
///
|
|
||||||
/// Use this for secondary maps that are mapping keys created by another primary map.
|
|
||||||
impl<K, V> EntityMap<K, V>
|
|
||||||
where K: EntityRef,
|
|
||||||
V: Clone + Default
|
|
||||||
{
|
|
||||||
/// Create a new secondary `EntityMap` that is prepared to hold `n` elements.
|
|
||||||
///
|
|
||||||
/// Use this when the length of the primary map is known:
|
|
||||||
/// ```
|
|
||||||
/// let secondary_map = EntityMap::with_capacity(primary_map.len());
|
|
||||||
/// ```
|
|
||||||
pub fn with_capacity(n: usize) -> Self {
|
|
||||||
let mut map = EntityMap {
|
|
||||||
elems: Vec::with_capacity(n),
|
|
||||||
unused: PhantomData,
|
|
||||||
};
|
|
||||||
map.elems.resize(n, V::default());
|
|
||||||
map
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resize the map to have `n` entries by adding default entries as needed.
|
|
||||||
pub fn resize(&mut self, n: usize) {
|
|
||||||
self.elems.resize(n, V::default());
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Ensure that `k` is a valid key but adding default entries if necessary.
|
|
||||||
///
|
|
||||||
/// Return a mutable reference to the corresponding entry.
|
|
||||||
pub fn ensure(&mut self, k: K) -> &mut V {
|
|
||||||
if !self.is_valid(k) {
|
|
||||||
self.resize(k.index() + 1)
|
|
||||||
}
|
|
||||||
&mut self.elems[k.index()]
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the element at `k` or the default value if `k` is out of range.
|
|
||||||
pub fn get_or_default(&self, k: K) -> V {
|
|
||||||
self.elems.get(k.index()).cloned().unwrap_or_default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Immutable indexing into an `EntityMap`.
|
|
||||||
/// The indexed value must be in the map, either because it was created by `push`, or the key was
|
|
||||||
/// passed to `ensure`.
|
|
||||||
impl<K, V> Index<K> for EntityMap<K, V>
|
|
||||||
where K: EntityRef
|
|
||||||
{
|
|
||||||
type Output = V;
|
|
||||||
|
|
||||||
fn index(&self, k: K) -> &V {
|
|
||||||
&self.elems[k.index()]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Mutable indexing into an `EntityMap`.
|
|
||||||
/// Use `ensure` instead if the key is not known to be valid.
|
|
||||||
impl<K, V> IndexMut<K> for EntityMap<K, V>
|
|
||||||
where K: EntityRef
|
|
||||||
{
|
|
||||||
fn index_mut(&mut self, k: K) -> &mut V {
|
|
||||||
&mut self.elems[k.index()]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterate over all keys in order.
|
|
||||||
pub struct Keys<K>
|
|
||||||
where K: EntityRef
|
|
||||||
{
|
|
||||||
pos: usize,
|
|
||||||
rev_pos: usize,
|
|
||||||
unused: PhantomData<K>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K> Iterator for Keys<K>
|
|
||||||
where K: EntityRef
|
|
||||||
{
|
|
||||||
type Item = K;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
if self.pos < self.rev_pos {
|
|
||||||
let k = K::new(self.pos);
|
|
||||||
self.pos += 1;
|
|
||||||
Some(k)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<K> DoubleEndedIterator for Keys<K>
|
|
||||||
where K: EntityRef
|
|
||||||
{
|
|
||||||
fn next_back(&mut self) -> Option<Self::Item> {
|
|
||||||
if self.rev_pos > self.pos {
|
|
||||||
let k = K::new(self.rev_pos - 1);
|
|
||||||
self.rev_pos -= 1;
|
|
||||||
Some(k)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
// `EntityRef` impl for testing.
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
|
||||||
struct E(u32);
|
|
||||||
|
|
||||||
impl EntityRef for E {
|
|
||||||
fn new(i: usize) -> Self {
|
|
||||||
E(i as u32)
|
|
||||||
}
|
|
||||||
fn index(self) -> usize {
|
|
||||||
self.0 as usize
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PrimaryEntityData for isize {}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn basic() {
|
|
||||||
let r0 = E(0);
|
|
||||||
let r1 = E(1);
|
|
||||||
let r2 = E(2);
|
|
||||||
let mut m = EntityMap::new();
|
|
||||||
|
|
||||||
let v: Vec<E> = m.keys().collect();
|
|
||||||
assert_eq!(v, []);
|
|
||||||
|
|
||||||
assert!(!m.is_valid(r0));
|
|
||||||
m.ensure(r2);
|
|
||||||
m[r2] = 3;
|
|
||||||
assert!(m.is_valid(r1));
|
|
||||||
m[r1] = 5;
|
|
||||||
|
|
||||||
assert_eq!(m[r1], 5);
|
|
||||||
assert_eq!(m[r2], 3);
|
|
||||||
|
|
||||||
let v: Vec<E> = m.keys().collect();
|
|
||||||
assert_eq!(v, [r0, r1, r2]);
|
|
||||||
|
|
||||||
let shared = &m;
|
|
||||||
assert_eq!(shared[r0], 0);
|
|
||||||
assert_eq!(shared[r1], 5);
|
|
||||||
assert_eq!(shared[r2], 3);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn push() {
|
|
||||||
let mut m = EntityMap::new();
|
|
||||||
let k1: E = m.push(12);
|
|
||||||
let k2 = m.push(33);
|
|
||||||
|
|
||||||
assert_eq!(m[k1], 12);
|
|
||||||
assert_eq!(m[k2], 33);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -25,7 +25,7 @@
|
|||||||
|
|
||||||
use ir::{Function, Inst, Ebb};
|
use ir::{Function, Inst, Ebb};
|
||||||
use ir::instructions::BranchInfo;
|
use ir::instructions::BranchInfo;
|
||||||
use entity_map::EntityMap;
|
use entity::EntityMap;
|
||||||
use std::mem;
|
use std::mem;
|
||||||
|
|
||||||
/// A basic block denoted by its enclosing Ebb and last instruction.
|
/// A basic block denoted by its enclosing Ebb and last instruction.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
//! Data flow graph tracking Instructions, Values, and EBBs.
|
//! Data flow graph tracking Instructions, Values, and EBBs.
|
||||||
|
|
||||||
use entity_map::{EntityMap, PrimaryEntityData};
|
use entity::{PrimaryMap, EntityMap};
|
||||||
use isa::TargetIsa;
|
use isa::TargetIsa;
|
||||||
use ir::builder::{InsertBuilder, ReplaceBuilder};
|
use ir::builder::{InsertBuilder, ReplaceBuilder};
|
||||||
use ir::extfunc::ExtFuncData;
|
use ir::extfunc::ExtFuncData;
|
||||||
@@ -27,7 +27,7 @@ pub struct DataFlowGraph {
|
|||||||
/// Data about all of the instructions in the function, including opcodes and operands.
|
/// Data about all of the instructions in the function, including opcodes and operands.
|
||||||
/// The instructions in this map are not in program order. That is tracked by `Layout`, along
|
/// The instructions in this map are not in program order. That is tracked by `Layout`, along
|
||||||
/// with the EBB containing each instruction.
|
/// with the EBB containing each instruction.
|
||||||
insts: EntityMap<Inst, InstructionData>,
|
insts: PrimaryMap<Inst, InstructionData>,
|
||||||
|
|
||||||
/// List of result values for each instruction.
|
/// List of result values for each instruction.
|
||||||
///
|
///
|
||||||
@@ -38,7 +38,7 @@ pub struct DataFlowGraph {
|
|||||||
/// Extended basic blocks in the function and their arguments.
|
/// Extended basic blocks in the function and their arguments.
|
||||||
/// This map is not in program order. That is handled by `Layout`, and so is the sequence of
|
/// This map is not in program order. That is handled by `Layout`, and so is the sequence of
|
||||||
/// instructions contained in each EBB.
|
/// instructions contained in each EBB.
|
||||||
ebbs: EntityMap<Ebb, EbbData>,
|
ebbs: PrimaryMap<Ebb, EbbData>,
|
||||||
|
|
||||||
/// Memory pool of value lists.
|
/// Memory pool of value lists.
|
||||||
///
|
///
|
||||||
@@ -50,33 +50,27 @@ pub struct DataFlowGraph {
|
|||||||
pub value_lists: ValueListPool,
|
pub value_lists: ValueListPool,
|
||||||
|
|
||||||
/// Primary value table with entries for all values.
|
/// Primary value table with entries for all values.
|
||||||
values: EntityMap<Value, ValueData>,
|
values: PrimaryMap<Value, ValueData>,
|
||||||
|
|
||||||
/// Function signature table. These signatures are referenced by indirect call instructions as
|
/// Function signature table. These signatures are referenced by indirect call instructions as
|
||||||
/// well as the external function references.
|
/// well as the external function references.
|
||||||
pub signatures: EntityMap<SigRef, Signature>,
|
pub signatures: PrimaryMap<SigRef, Signature>,
|
||||||
|
|
||||||
/// External function references. These are functions that can be called directly.
|
/// External function references. These are functions that can be called directly.
|
||||||
pub ext_funcs: EntityMap<FuncRef, ExtFuncData>,
|
pub ext_funcs: PrimaryMap<FuncRef, ExtFuncData>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrimaryEntityData for InstructionData {}
|
|
||||||
impl PrimaryEntityData for EbbData {}
|
|
||||||
impl PrimaryEntityData for ValueData {}
|
|
||||||
impl PrimaryEntityData for Signature {}
|
|
||||||
impl PrimaryEntityData for ExtFuncData {}
|
|
||||||
|
|
||||||
impl DataFlowGraph {
|
impl DataFlowGraph {
|
||||||
/// Create a new empty `DataFlowGraph`.
|
/// Create a new empty `DataFlowGraph`.
|
||||||
pub fn new() -> DataFlowGraph {
|
pub fn new() -> DataFlowGraph {
|
||||||
DataFlowGraph {
|
DataFlowGraph {
|
||||||
insts: EntityMap::new(),
|
insts: PrimaryMap::new(),
|
||||||
results: EntityMap::new(),
|
results: EntityMap::new(),
|
||||||
ebbs: EntityMap::new(),
|
ebbs: PrimaryMap::new(),
|
||||||
value_lists: ValueListPool::new(),
|
value_lists: ValueListPool::new(),
|
||||||
values: EntityMap::new(),
|
values: PrimaryMap::new(),
|
||||||
signatures: EntityMap::new(),
|
signatures: PrimaryMap::new(),
|
||||||
ext_funcs: EntityMap::new(),
|
ext_funcs: PrimaryMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -115,7 +109,7 @@ impl DataFlowGraph {
|
|||||||
/// Resolve value aliases.
|
/// Resolve value aliases.
|
||||||
///
|
///
|
||||||
/// Find the original SSA value that `value` aliases.
|
/// Find the original SSA value that `value` aliases.
|
||||||
fn resolve_aliases(values: &EntityMap<Value, ValueData>, value: Value) -> Value {
|
fn resolve_aliases(values: &PrimaryMap<Value, ValueData>, value: Value) -> Value {
|
||||||
let mut v = value;
|
let mut v = value;
|
||||||
|
|
||||||
// Note that values may be empty here.
|
// Note that values may be empty here.
|
||||||
|
|||||||
@@ -3,9 +3,10 @@
|
|||||||
//! The `Function` struct defined in this module owns all of its extended basic blocks and
|
//! The `Function` struct defined in this module owns all of its extended basic blocks and
|
||||||
//! instructions.
|
//! instructions.
|
||||||
|
|
||||||
use entity_map::{EntityMap, PrimaryEntityData};
|
use entity::{PrimaryMap, EntityMap};
|
||||||
use ir::{FunctionName, CallConv, Signature, JumpTableData, GlobalVarData, DataFlowGraph, Layout};
|
use ir;
|
||||||
use ir::{JumpTables, InstEncodings, ValueLocations, StackSlots, GlobalVars, EbbOffsets};
|
use ir::{FunctionName, CallConv, Signature, DataFlowGraph, Layout};
|
||||||
|
use ir::{InstEncodings, ValueLocations, JumpTables, StackSlots, EbbOffsets};
|
||||||
use isa::TargetIsa;
|
use isa::TargetIsa;
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use write::write_function;
|
use write::write_function;
|
||||||
@@ -26,7 +27,7 @@ pub struct Function {
|
|||||||
pub stack_slots: StackSlots,
|
pub stack_slots: StackSlots,
|
||||||
|
|
||||||
/// Global variables referenced.
|
/// Global variables referenced.
|
||||||
pub global_vars: GlobalVars,
|
pub global_vars: PrimaryMap<ir::GlobalVar, ir::GlobalVarData>,
|
||||||
|
|
||||||
/// Jump tables used in this function.
|
/// Jump tables used in this function.
|
||||||
pub jump_tables: JumpTables,
|
pub jump_tables: JumpTables,
|
||||||
@@ -52,9 +53,6 @@ pub struct Function {
|
|||||||
pub offsets: EbbOffsets,
|
pub offsets: EbbOffsets,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrimaryEntityData for JumpTableData {}
|
|
||||||
impl PrimaryEntityData for GlobalVarData {}
|
|
||||||
|
|
||||||
impl Function {
|
impl Function {
|
||||||
/// Create a function with the given name and signature.
|
/// Create a function with the given name and signature.
|
||||||
pub fn with_name_signature(name: FunctionName, sig: Signature) -> Function {
|
pub fn with_name_signature(name: FunctionName, sig: Signature) -> Function {
|
||||||
@@ -62,8 +60,8 @@ impl Function {
|
|||||||
name,
|
name,
|
||||||
signature: sig,
|
signature: sig,
|
||||||
stack_slots: StackSlots::new(),
|
stack_slots: StackSlots::new(),
|
||||||
global_vars: GlobalVars::new(),
|
global_vars: PrimaryMap::new(),
|
||||||
jump_tables: EntityMap::new(),
|
jump_tables: PrimaryMap::new(),
|
||||||
dfg: DataFlowGraph::new(),
|
dfg: DataFlowGraph::new(),
|
||||||
layout: Layout::new(),
|
layout: Layout::new(),
|
||||||
encodings: EntityMap::new(),
|
encodings: EntityMap::new(),
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
|
|
||||||
use std::cmp;
|
use std::cmp;
|
||||||
use std::iter::{Iterator, IntoIterator};
|
use std::iter::{Iterator, IntoIterator};
|
||||||
use entity_map::EntityMap;
|
use entity::EntityMap;
|
||||||
use packed_option::PackedOption;
|
use packed_option::PackedOption;
|
||||||
use ir::{Ebb, Inst, Type, DataFlowGraph};
|
use ir::{Ebb, Inst, Type, DataFlowGraph};
|
||||||
use ir::builder::InstInserterBase;
|
use ir::builder::InstInserterBase;
|
||||||
@@ -278,7 +278,7 @@ impl Layout {
|
|||||||
impl Layout {
|
impl Layout {
|
||||||
/// Is `ebb` currently part of the layout?
|
/// Is `ebb` currently part of the layout?
|
||||||
pub fn is_ebb_inserted(&self, ebb: Ebb) -> bool {
|
pub fn is_ebb_inserted(&self, ebb: Ebb) -> bool {
|
||||||
Some(ebb) == self.first_ebb || (self.ebbs.is_valid(ebb) && self.ebbs[ebb].prev.is_some())
|
Some(ebb) == self.first_ebb || self.ebbs[ebb].prev.is_some()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Insert `ebb` as the last EBB in the layout.
|
/// Insert `ebb` as the last EBB in the layout.
|
||||||
@@ -286,7 +286,7 @@ impl Layout {
|
|||||||
assert!(!self.is_ebb_inserted(ebb),
|
assert!(!self.is_ebb_inserted(ebb),
|
||||||
"Cannot append EBB that is already in the layout");
|
"Cannot append EBB that is already in the layout");
|
||||||
{
|
{
|
||||||
let node = self.ebbs.ensure(ebb);
|
let node = &mut self.ebbs[ebb];
|
||||||
assert!(node.first_inst.is_none() && node.last_inst.is_none());
|
assert!(node.first_inst.is_none() && node.last_inst.is_none());
|
||||||
node.prev = self.last_ebb.into();
|
node.prev = self.last_ebb.into();
|
||||||
node.next = None.into();
|
node.next = None.into();
|
||||||
@@ -308,7 +308,7 @@ impl Layout {
|
|||||||
"EBB Insertion point not in the layout");
|
"EBB Insertion point not in the layout");
|
||||||
let after = self.ebbs[before].prev;
|
let after = self.ebbs[before].prev;
|
||||||
{
|
{
|
||||||
let node = self.ebbs.ensure(ebb);
|
let node = &mut self.ebbs[ebb];
|
||||||
node.next = before.into();
|
node.next = before.into();
|
||||||
node.prev = after;
|
node.prev = after;
|
||||||
}
|
}
|
||||||
@@ -328,7 +328,7 @@ impl Layout {
|
|||||||
"EBB Insertion point not in the layout");
|
"EBB Insertion point not in the layout");
|
||||||
let before = self.ebbs[after].next;
|
let before = self.ebbs[after].next;
|
||||||
{
|
{
|
||||||
let node = self.ebbs.ensure(ebb);
|
let node = &mut self.ebbs[ebb];
|
||||||
node.next = before;
|
node.next = before;
|
||||||
node.prev = after.into();
|
node.prev = after.into();
|
||||||
}
|
}
|
||||||
@@ -406,11 +406,7 @@ impl<'f> IntoIterator for &'f Layout {
|
|||||||
impl Layout {
|
impl Layout {
|
||||||
/// Get the EBB containing `inst`, or `None` if `inst` is not inserted in the layout.
|
/// Get the EBB containing `inst`, or `None` if `inst` is not inserted in the layout.
|
||||||
pub fn inst_ebb(&self, inst: Inst) -> Option<Ebb> {
|
pub fn inst_ebb(&self, inst: Inst) -> Option<Ebb> {
|
||||||
if self.insts.is_valid(inst) {
|
|
||||||
self.insts[inst].ebb.into()
|
self.insts[inst].ebb.into()
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the EBB containing the program point `pp`. Panic if `pp` is not in the layout.
|
/// Get the EBB containing the program point `pp`. Panic if `pp` is not in the layout.
|
||||||
@@ -433,7 +429,7 @@ impl Layout {
|
|||||||
{
|
{
|
||||||
let ebb_node = &mut self.ebbs[ebb];
|
let ebb_node = &mut self.ebbs[ebb];
|
||||||
{
|
{
|
||||||
let inst_node = self.insts.ensure(inst);
|
let inst_node = &mut self.insts[inst];
|
||||||
inst_node.ebb = ebb.into();
|
inst_node.ebb = ebb.into();
|
||||||
inst_node.prev = ebb_node.last_inst;
|
inst_node.prev = ebb_node.last_inst;
|
||||||
assert!(inst_node.next.is_none());
|
assert!(inst_node.next.is_none());
|
||||||
@@ -465,7 +461,7 @@ impl Layout {
|
|||||||
.expect("Instruction before insertion point not in the layout");
|
.expect("Instruction before insertion point not in the layout");
|
||||||
let after = self.insts[before].prev;
|
let after = self.insts[before].prev;
|
||||||
{
|
{
|
||||||
let inst_node = self.insts.ensure(inst);
|
let inst_node = &mut self.insts[inst];
|
||||||
inst_node.ebb = ebb.into();
|
inst_node.ebb = ebb.into();
|
||||||
inst_node.next = before.into();
|
inst_node.next = before.into();
|
||||||
inst_node.prev = after;
|
inst_node.prev = after;
|
||||||
@@ -543,7 +539,7 @@ impl Layout {
|
|||||||
let next_ebb = self.ebbs[old_ebb].next;
|
let next_ebb = self.ebbs[old_ebb].next;
|
||||||
let last_inst = self.ebbs[old_ebb].last_inst;
|
let last_inst = self.ebbs[old_ebb].last_inst;
|
||||||
{
|
{
|
||||||
let node = self.ebbs.ensure(new_ebb);
|
let node = &mut self.ebbs[new_ebb];
|
||||||
node.prev = old_ebb.into();
|
node.prev = old_ebb.into();
|
||||||
node.next = next_ebb;
|
node.next = next_ebb;
|
||||||
node.first_inst = before.into();
|
node.first_inst = before.into();
|
||||||
|
|||||||
@@ -36,20 +36,17 @@ pub use ir::types::Type;
|
|||||||
pub use ir::valueloc::{ValueLoc, ArgumentLoc};
|
pub use ir::valueloc::{ValueLoc, ArgumentLoc};
|
||||||
|
|
||||||
use binemit;
|
use binemit;
|
||||||
use entity_map::EntityMap;
|
use entity::{PrimaryMap, EntityMap};
|
||||||
use isa;
|
use isa;
|
||||||
|
|
||||||
/// Map of value locations.
|
/// Map of value locations.
|
||||||
pub type ValueLocations = EntityMap<Value, ValueLoc>;
|
pub type ValueLocations = EntityMap<Value, ValueLoc>;
|
||||||
|
|
||||||
/// Map of jump tables.
|
/// Map of jump tables.
|
||||||
pub type JumpTables = EntityMap<JumpTable, JumpTableData>;
|
pub type JumpTables = PrimaryMap<JumpTable, JumpTableData>;
|
||||||
|
|
||||||
/// Map of instruction encodings.
|
/// Map of instruction encodings.
|
||||||
pub type InstEncodings = EntityMap<Inst, isa::Encoding>;
|
pub type InstEncodings = EntityMap<Inst, isa::Encoding>;
|
||||||
|
|
||||||
/// Code offsets for EBBs.
|
/// Code offsets for EBBs.
|
||||||
pub type EbbOffsets = EntityMap<Ebb, binemit::CodeOffset>;
|
pub type EbbOffsets = EntityMap<Ebb, binemit::CodeOffset>;
|
||||||
|
|
||||||
/// Map of global variables.
|
|
||||||
pub type GlobalVars = EntityMap<GlobalVar, GlobalVarData>;
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
//! The `StackSlotData` struct keeps track of a single stack slot in a function.
|
//! The `StackSlotData` struct keeps track of a single stack slot in a function.
|
||||||
//!
|
//!
|
||||||
|
|
||||||
use entity_map::{EntityMap, PrimaryEntityData, Keys};
|
use entity::{PrimaryMap, Keys};
|
||||||
use ir::{Type, StackSlot};
|
use ir::{Type, StackSlot};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::ops::Index;
|
use std::ops::Index;
|
||||||
@@ -124,15 +124,13 @@ impl fmt::Display for StackSlotData {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrimaryEntityData for StackSlotData {}
|
|
||||||
|
|
||||||
/// Stack frame manager.
|
/// Stack frame manager.
|
||||||
///
|
///
|
||||||
/// Keep track of all the stack slots used by a function.
|
/// Keep track of all the stack slots used by a function.
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct StackSlots {
|
pub struct StackSlots {
|
||||||
/// All allocated stack slots.
|
/// All allocated stack slots.
|
||||||
slots: EntityMap<StackSlot, StackSlotData>,
|
slots: PrimaryMap<StackSlot, StackSlotData>,
|
||||||
|
|
||||||
/// All the outgoing stack slots, ordered by offset.
|
/// All the outgoing stack slots, ordered by offset.
|
||||||
outgoing: Vec<StackSlot>,
|
outgoing: Vec<StackSlot>,
|
||||||
@@ -152,7 +150,7 @@ impl StackSlots {
|
|||||||
/// Create an empty stack slot manager.
|
/// Create an empty stack slot manager.
|
||||||
pub fn new() -> StackSlots {
|
pub fn new() -> StackSlots {
|
||||||
StackSlots {
|
StackSlots {
|
||||||
slots: EntityMap::new(),
|
slots: PrimaryMap::new(),
|
||||||
outgoing: Vec::new(),
|
outgoing: Vec::new(),
|
||||||
frame_size: None,
|
frame_size: None,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -585,7 +585,7 @@ fn spill_entry_arguments(func: &mut Function, entry: Ebb) {
|
|||||||
.zip(func.dfg.ebb_args(entry)) {
|
.zip(func.dfg.ebb_args(entry)) {
|
||||||
if let ArgumentLoc::Stack(offset) = abi.location {
|
if let ArgumentLoc::Stack(offset) = abi.location {
|
||||||
let ss = func.stack_slots.make_incoming_arg(abi.value_type, offset);
|
let ss = func.stack_slots.make_incoming_arg(abi.value_type, offset);
|
||||||
*func.locations.ensure(arg) = ValueLoc::Stack(ss);
|
func.locations[arg] = ValueLoc::Stack(ss);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -646,7 +646,7 @@ fn spill_call_arguments(dfg: &mut DataFlowGraph,
|
|||||||
// Insert the spill instructions and rewrite call arguments.
|
// Insert the spill instructions and rewrite call arguments.
|
||||||
for (idx, arg, ss) in arglist {
|
for (idx, arg, ss) in arglist {
|
||||||
let stack_val = dfg.ins(pos).spill(arg);
|
let stack_val = dfg.ins(pos).spill(arg);
|
||||||
*locations.ensure(stack_val) = ValueLoc::Stack(ss);
|
locations[stack_val] = ValueLoc::Stack(ss);
|
||||||
dfg.inst_variable_args_mut(inst)[idx] = stack_val;
|
dfg.inst_variable_args_mut(inst)[idx] = stack_val;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -73,7 +73,7 @@ pub fn legalize_function(func: &mut ir::Function,
|
|||||||
match isa.encode(&pos.func.dfg,
|
match isa.encode(&pos.func.dfg,
|
||||||
&pos.func.dfg[inst],
|
&pos.func.dfg[inst],
|
||||||
pos.func.dfg.ctrl_typevar(inst)) {
|
pos.func.dfg.ctrl_typevar(inst)) {
|
||||||
Ok(encoding) => *pos.func.encodings.ensure(inst) = encoding,
|
Ok(encoding) => pos.func.encodings[inst] = encoding,
|
||||||
Err(action) => {
|
Err(action) => {
|
||||||
// We should transform the instruction into legal equivalents.
|
// We should transform the instruction into legal equivalents.
|
||||||
let changed = action(inst, pos.func, cfg);
|
let changed = action(inst, pos.func, cfg);
|
||||||
|
|||||||
@@ -19,7 +19,6 @@ pub mod binemit;
|
|||||||
pub mod bitset;
|
pub mod bitset;
|
||||||
pub mod dominator_tree;
|
pub mod dominator_tree;
|
||||||
pub mod entity_list;
|
pub mod entity_list;
|
||||||
pub mod entity_map;
|
|
||||||
pub mod flowgraph;
|
pub mod flowgraph;
|
||||||
pub mod ir;
|
pub mod ir;
|
||||||
pub mod isa;
|
pub mod isa;
|
||||||
|
|||||||
@@ -2,7 +2,8 @@
|
|||||||
//! and parent in the loop tree.
|
//! and parent in the loop tree.
|
||||||
|
|
||||||
use dominator_tree::DominatorTree;
|
use dominator_tree::DominatorTree;
|
||||||
use entity_map::{EntityMap, PrimaryEntityData, Keys};
|
use entity::{PrimaryMap, Keys};
|
||||||
|
use entity::EntityMap;
|
||||||
use flowgraph::ControlFlowGraph;
|
use flowgraph::ControlFlowGraph;
|
||||||
use ir::{Function, Ebb, Layout};
|
use ir::{Function, Ebb, Layout};
|
||||||
use packed_option::PackedOption;
|
use packed_option::PackedOption;
|
||||||
@@ -17,7 +18,7 @@ entity_impl!(Loop, "loop");
|
|||||||
/// Loops are referenced by the Loop object, and for each loop you can access its header EBB,
|
/// Loops are referenced by the Loop object, and for each loop you can access its header EBB,
|
||||||
/// its eventual parent in the loop tree and all the EBB belonging to the loop.
|
/// its eventual parent in the loop tree and all the EBB belonging to the loop.
|
||||||
pub struct LoopAnalysis {
|
pub struct LoopAnalysis {
|
||||||
loops: EntityMap<Loop, LoopData>,
|
loops: PrimaryMap<Loop, LoopData>,
|
||||||
ebb_loop_map: EntityMap<Ebb, PackedOption<Loop>>,
|
ebb_loop_map: EntityMap<Ebb, PackedOption<Loop>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -26,8 +27,6 @@ struct LoopData {
|
|||||||
parent: PackedOption<Loop>,
|
parent: PackedOption<Loop>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrimaryEntityData for LoopData {}
|
|
||||||
|
|
||||||
impl LoopData {
|
impl LoopData {
|
||||||
/// Creates a `LoopData` object with the loop header and its eventual parent in the loop tree.
|
/// Creates a `LoopData` object with the loop header and its eventual parent in the loop tree.
|
||||||
pub fn new(header: Ebb, parent: Option<Loop>) -> LoopData {
|
pub fn new(header: Ebb, parent: Option<Loop>) -> LoopData {
|
||||||
@@ -44,7 +43,7 @@ impl LoopAnalysis {
|
|||||||
/// a function.
|
/// a function.
|
||||||
pub fn new() -> LoopAnalysis {
|
pub fn new() -> LoopAnalysis {
|
||||||
LoopAnalysis {
|
LoopAnalysis {
|
||||||
loops: EntityMap::new(),
|
loops: PrimaryMap::new(),
|
||||||
ebb_loop_map: EntityMap::new(),
|
ebb_loop_map: EntityMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -707,7 +707,7 @@ impl<'a> Context<'a> {
|
|||||||
self.divert.regmove(m.value, m.from, m.to);
|
self.divert.regmove(m.value, m.from, m.to);
|
||||||
let inst = dfg.ins(pos).regmove(m.value, m.from, m.to);
|
let inst = dfg.ins(pos).regmove(m.value, m.from, m.to);
|
||||||
match self.isa.encode(dfg, &dfg[inst], ty) {
|
match self.isa.encode(dfg, &dfg[inst], ty) {
|
||||||
Ok(encoding) => *encodings.ensure(inst) = encoding,
|
Ok(encoding) => encodings[inst] = encoding,
|
||||||
_ => panic!("Can't encode {} {}", m.rc, dfg.display_inst(inst, self.isa)),
|
_ => panic!("Can't encode {} {}", m.rc, dfg.display_inst(inst, self.isa)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
|
|||||||
//! These register diversions are local to an EBB. No values can be diverted when entering a new
|
//! These register diversions are local to an EBB. No values can be diverted when entering a new
|
||||||
//! EBB.
|
//! EBB.
|
||||||
|
|
||||||
use entity_map::EntityMap;
|
use entity::EntityMap;
|
||||||
use ir::{Value, ValueLoc};
|
use ir::{Value, ValueLoc};
|
||||||
use isa::RegUnit;
|
use isa::RegUnit;
|
||||||
|
|
||||||
|
|||||||
@@ -453,7 +453,7 @@ impl<'a> Context<'a> {
|
|||||||
.make_spill_slot(self.cur.func.dfg.value_type(value));
|
.make_spill_slot(self.cur.func.dfg.value_type(value));
|
||||||
for &v in self.virtregs.congruence_class(&value) {
|
for &v in self.virtregs.congruence_class(&value) {
|
||||||
self.liveness.spill(v);
|
self.liveness.spill(v);
|
||||||
*self.cur.func.locations.ensure(v) = ValueLoc::Stack(ss);
|
self.cur.func.locations[v] = ValueLoc::Stack(ss);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,7 @@
|
|||||||
//! memory-to-memory copies when a spilled value is passed as an EBB argument.
|
//! memory-to-memory copies when a spilled value is passed as an EBB argument.
|
||||||
|
|
||||||
use entity_list::{EntityList, ListPool};
|
use entity_list::{EntityList, ListPool};
|
||||||
use entity_map::{EntityMap, PrimaryEntityData, Keys};
|
use entity::{PrimaryMap, EntityMap, Keys};
|
||||||
use ir::Value;
|
use ir::Value;
|
||||||
use packed_option::PackedOption;
|
use packed_option::PackedOption;
|
||||||
use ref_slice::ref_slice;
|
use ref_slice::ref_slice;
|
||||||
@@ -23,7 +23,6 @@ pub struct VirtReg(u32);
|
|||||||
entity_impl!(VirtReg, "vreg");
|
entity_impl!(VirtReg, "vreg");
|
||||||
|
|
||||||
type ValueList = EntityList<Value>;
|
type ValueList = EntityList<Value>;
|
||||||
impl PrimaryEntityData for ValueList {}
|
|
||||||
|
|
||||||
/// Collection of virtual registers.
|
/// Collection of virtual registers.
|
||||||
///
|
///
|
||||||
@@ -37,7 +36,7 @@ pub struct VirtRegs {
|
|||||||
///
|
///
|
||||||
/// The list of values ion a virtual register is kept sorted according to the dominator tree's
|
/// The list of values ion a virtual register is kept sorted according to the dominator tree's
|
||||||
/// RPO of the value defs.
|
/// RPO of the value defs.
|
||||||
vregs: EntityMap<VirtReg, ValueList>,
|
vregs: PrimaryMap<VirtReg, ValueList>,
|
||||||
|
|
||||||
/// Each value belongs to at most one virtual register.
|
/// Each value belongs to at most one virtual register.
|
||||||
value_vregs: EntityMap<Value, PackedOption<VirtReg>>,
|
value_vregs: EntityMap<Value, PackedOption<VirtReg>>,
|
||||||
@@ -49,7 +48,7 @@ impl VirtRegs {
|
|||||||
pub fn new() -> VirtRegs {
|
pub fn new() -> VirtRegs {
|
||||||
VirtRegs {
|
VirtRegs {
|
||||||
pool: ListPool::new(),
|
pool: ListPool::new(),
|
||||||
vregs: EntityMap::new(),
|
vregs: PrimaryMap::new(),
|
||||||
value_vregs: EntityMap::new(),
|
value_vregs: EntityMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -63,7 +62,7 @@ impl VirtRegs {
|
|||||||
|
|
||||||
/// Get the virtual register containing `value`, if any.
|
/// Get the virtual register containing `value`, if any.
|
||||||
pub fn get(&self, value: Value) -> Option<VirtReg> {
|
pub fn get(&self, value: Value) -> Option<VirtReg> {
|
||||||
self.value_vregs.get_or_default(value).into()
|
self.value_vregs[value].into()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the list of values in `vreg`. The values are ordered according to `DomTree::rpo_cmp` of
|
/// Get the list of values in `vreg`. The values are ordered according to `DomTree::rpo_cmp` of
|
||||||
@@ -133,7 +132,7 @@ impl VirtRegs {
|
|||||||
|
|
||||||
self.vregs[vreg].extend(values.iter().cloned(), &mut self.pool);
|
self.vregs[vreg].extend(values.iter().cloned(), &mut self.pool);
|
||||||
for &v in values {
|
for &v in values {
|
||||||
*self.value_vregs.ensure(v) = vreg.into();
|
self.value_vregs[v] = vreg.into();
|
||||||
}
|
}
|
||||||
|
|
||||||
vreg
|
vreg
|
||||||
|
|||||||
@@ -35,8 +35,7 @@
|
|||||||
//! - `SparseMap` requires the values to implement `SparseMapValue<K>` which means that they must
|
//! - `SparseMap` requires the values to implement `SparseMapValue<K>` which means that they must
|
||||||
//! contain their own key.
|
//! contain their own key.
|
||||||
|
|
||||||
use entity_map::EntityMap;
|
use entity::{EntityRef, EntityMap};
|
||||||
use entity::EntityRef;
|
|
||||||
use std::mem;
|
use std::mem;
|
||||||
use std::slice;
|
use std::slice;
|
||||||
use std::u32;
|
use std::u32;
|
||||||
@@ -151,7 +150,7 @@ impl<K, V> SparseMap<K, V>
|
|||||||
let idx = self.dense.len();
|
let idx = self.dense.len();
|
||||||
assert!(idx <= u32::MAX as usize, "SparseMap overflow");
|
assert!(idx <= u32::MAX as usize, "SparseMap overflow");
|
||||||
self.dense.push(value);
|
self.dense.push(value);
|
||||||
*self.sparse.ensure(key) = idx as u32;
|
self.sparse[key] = idx as u32;
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -63,7 +63,7 @@ impl<'a> LivenessVerifier<'a> {
|
|||||||
fn check_insts(&self) -> Result {
|
fn check_insts(&self) -> Result {
|
||||||
for ebb in self.func.layout.ebbs() {
|
for ebb in self.func.layout.ebbs() {
|
||||||
for inst in self.func.layout.ebb_insts(ebb) {
|
for inst in self.func.layout.ebb_insts(ebb) {
|
||||||
let encoding = self.func.encodings.get_or_default(inst);
|
let encoding = self.func.encodings[inst];
|
||||||
|
|
||||||
// Check the defs.
|
// Check the defs.
|
||||||
for &val in self.func.dfg.inst_results(inst) {
|
for &val in self.func.dfg.inst_results(inst) {
|
||||||
|
|||||||
@@ -688,7 +688,7 @@ impl<'a> Verifier<'a> {
|
|||||||
for (&arg, &abi) in args.iter().zip(expected_args) {
|
for (&arg, &abi) in args.iter().zip(expected_args) {
|
||||||
// Value types have already been checked by `typecheck_variable_args_iterator()`.
|
// Value types have already been checked by `typecheck_variable_args_iterator()`.
|
||||||
if let ArgumentLoc::Stack(offset) = abi.location {
|
if let ArgumentLoc::Stack(offset) = abi.location {
|
||||||
let arg_loc = self.func.locations.get_or_default(arg);
|
let arg_loc = self.func.locations[arg];
|
||||||
if let ValueLoc::Stack(ss) = arg_loc {
|
if let ValueLoc::Stack(ss) = arg_loc {
|
||||||
// Argument value is assigned to a stack slot as expected.
|
// Argument value is assigned to a stack slot as expected.
|
||||||
self.verify_stack_slot(inst, ss)?;
|
self.verify_stack_slot(inst, ss)?;
|
||||||
@@ -811,7 +811,7 @@ impl<'a> Verifier<'a> {
|
|||||||
None => return Ok(()),
|
None => return Ok(()),
|
||||||
};
|
};
|
||||||
|
|
||||||
let encoding = self.func.encodings.get_or_default(inst);
|
let encoding = self.func.encodings[inst];
|
||||||
if encoding.is_legal() {
|
if encoding.is_legal() {
|
||||||
let verify_encoding =
|
let verify_encoding =
|
||||||
isa.encode(&self.func.dfg,
|
isa.encode(&self.func.dfg,
|
||||||
|
|||||||
@@ -194,7 +194,7 @@ fn write_instruction(w: &mut Write,
|
|||||||
if !func.locations.is_empty() {
|
if !func.locations.is_empty() {
|
||||||
let regs = isa.register_info();
|
let regs = isa.register_info();
|
||||||
for &r in func.dfg.inst_results(inst) {
|
for &r in func.dfg.inst_results(inst) {
|
||||||
write!(s, ",{}", func.locations.get_or_default(r).display(®s))?
|
write!(s, ",{}", func.locations[r].display(®s))?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
write!(s, "]")?;
|
write!(s, "]")?;
|
||||||
|
|||||||
@@ -6,8 +6,7 @@ use cretonne::ir::instructions::BranchInfo;
|
|||||||
use cretonne::ir::function::DisplayFunction;
|
use cretonne::ir::function::DisplayFunction;
|
||||||
use cretonne::isa::TargetIsa;
|
use cretonne::isa::TargetIsa;
|
||||||
use ssa::{SSABuilder, SideEffects, Block};
|
use ssa::{SSABuilder, SideEffects, Block};
|
||||||
use cretonne::entity_map::{EntityMap, PrimaryEntityData};
|
use cretonne::entity::{EntityRef, EntityMap};
|
||||||
use cretonne::entity::EntityRef;
|
|
||||||
use std::hash::Hash;
|
use std::hash::Hash;
|
||||||
|
|
||||||
/// Permanent structure used for translating into Cretonne IL.
|
/// Permanent structure used for translating into Cretonne IL.
|
||||||
@@ -38,8 +37,6 @@ struct EbbData {
|
|||||||
user_arg_count: usize,
|
user_arg_count: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrimaryEntityData for EbbData {}
|
|
||||||
|
|
||||||
struct Position {
|
struct Position {
|
||||||
ebb: Ebb,
|
ebb: Ebb,
|
||||||
basic_block: Block,
|
basic_block: Block,
|
||||||
@@ -231,7 +228,7 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
pub fn create_ebb(&mut self) -> Ebb {
|
pub fn create_ebb(&mut self) -> Ebb {
|
||||||
let ebb = self.func.dfg.make_ebb();
|
let ebb = self.func.dfg.make_ebb();
|
||||||
self.builder.ssa.declare_ebb_header_block(ebb);
|
self.builder.ssa.declare_ebb_header_block(ebb);
|
||||||
*self.builder.ebbs.ensure(ebb) = EbbData {
|
self.builder.ebbs[ebb] = EbbData {
|
||||||
filled: false,
|
filled: false,
|
||||||
pristine: true,
|
pristine: true,
|
||||||
user_arg_count: 0,
|
user_arg_count: 0,
|
||||||
@@ -286,7 +283,7 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
|
|
||||||
/// In order to use a variable in a `use_var`, you need to declare its type with this method.
|
/// In order to use a variable in a `use_var`, you need to declare its type with this method.
|
||||||
pub fn declare_var(&mut self, var: Variable, ty: Type) {
|
pub fn declare_var(&mut self, var: Variable, ty: Type) {
|
||||||
*self.builder.types.ensure(var) = ty;
|
self.builder.types[var] = ty;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the Cretonne IL value corresponding to the utilization at the current program
|
/// Returns the Cretonne IL value corresponding to the utilization at the current program
|
||||||
@@ -560,7 +557,7 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
|
|
||||||
fn handle_ssa_side_effects(&mut self, side_effects: SideEffects) {
|
fn handle_ssa_side_effects(&mut self, side_effects: SideEffects) {
|
||||||
for split_ebb in side_effects.split_ebbs_created {
|
for split_ebb in side_effects.split_ebbs_created {
|
||||||
self.builder.ebbs.ensure(split_ebb).filled = true
|
self.builder.ebbs[split_ebb].filled = true
|
||||||
}
|
}
|
||||||
for modified_ebb in side_effects.instructions_added_to_ebbs {
|
for modified_ebb in side_effects.instructions_added_to_ebbs {
|
||||||
self.builder.ebbs[modified_ebb].pristine = false
|
self.builder.ebbs[modified_ebb].pristine = false
|
||||||
|
|||||||
@@ -9,8 +9,7 @@ use cretonne::ir::{Ebb, Value, Inst, Type, DataFlowGraph, JumpTables, Layout, Cu
|
|||||||
InstBuilder};
|
InstBuilder};
|
||||||
use cretonne::ir::instructions::BranchInfo;
|
use cretonne::ir::instructions::BranchInfo;
|
||||||
use std::hash::Hash;
|
use std::hash::Hash;
|
||||||
use cretonne::entity_map::{EntityMap, PrimaryEntityData};
|
use cretonne::entity::{EntityRef, PrimaryMap, EntityMap};
|
||||||
use cretonne::entity::EntityRef;
|
|
||||||
use cretonne::packed_option::PackedOption;
|
use cretonne::packed_option::PackedOption;
|
||||||
use cretonne::packed_option::ReservedValue;
|
use cretonne::packed_option::ReservedValue;
|
||||||
use std::u32;
|
use std::u32;
|
||||||
@@ -41,7 +40,7 @@ pub struct SSABuilder<Variable>
|
|||||||
variables: EntityMap<Variable, HashMap<Block, Value>>,
|
variables: EntityMap<Variable, HashMap<Block, Value>>,
|
||||||
// Records the position of the basic blocks and the list of values used but not defined in the
|
// Records the position of the basic blocks and the list of values used but not defined in the
|
||||||
// block.
|
// block.
|
||||||
blocks: EntityMap<Block, BlockData<Variable>>,
|
blocks: PrimaryMap<Block, BlockData<Variable>>,
|
||||||
// Records the basic blocks at the beginning of the `Ebb`s.
|
// Records the basic blocks at the beginning of the `Ebb`s.
|
||||||
ebb_headers: EntityMap<Ebb, PackedOption<Block>>,
|
ebb_headers: EntityMap<Ebb, PackedOption<Block>>,
|
||||||
}
|
}
|
||||||
@@ -66,7 +65,6 @@ enum BlockData<Variable> {
|
|||||||
// The block is implicitely sealed at creation.
|
// The block is implicitely sealed at creation.
|
||||||
EbbBody { predecessor: Block },
|
EbbBody { predecessor: Block },
|
||||||
}
|
}
|
||||||
impl<Variable> PrimaryEntityData for BlockData<Variable> {}
|
|
||||||
|
|
||||||
impl<Variable> BlockData<Variable> {
|
impl<Variable> BlockData<Variable> {
|
||||||
fn add_predecessor(&mut self, pred: Block, inst: Inst) {
|
fn add_predecessor(&mut self, pred: Block, inst: Inst) {
|
||||||
@@ -135,7 +133,7 @@ impl<Variable> SSABuilder<Variable>
|
|||||||
pub fn new() -> SSABuilder<Variable> {
|
pub fn new() -> SSABuilder<Variable> {
|
||||||
SSABuilder {
|
SSABuilder {
|
||||||
variables: EntityMap::new(),
|
variables: EntityMap::new(),
|
||||||
blocks: EntityMap::new(),
|
blocks: PrimaryMap::new(),
|
||||||
ebb_headers: EntityMap::new(),
|
ebb_headers: EntityMap::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -193,7 +191,7 @@ impl<Variable> SSABuilder<Variable>
|
|||||||
/// The SSA value is passed as an argument because it should be created with
|
/// The SSA value is passed as an argument because it should be created with
|
||||||
/// `ir::DataFlowGraph::append_result`.
|
/// `ir::DataFlowGraph::append_result`.
|
||||||
pub fn def_var(&mut self, var: Variable, val: Value, block: Block) {
|
pub fn def_var(&mut self, var: Variable, val: Value, block: Block) {
|
||||||
self.variables.ensure(var).insert(block, val);
|
self.variables[var].insert(block, val);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Declares a use of a variable in a given basic block. Returns the SSA value corresponding
|
/// Declares a use of a variable in a given basic block. Returns the SSA value corresponding
|
||||||
@@ -296,7 +294,7 @@ impl<Variable> SSABuilder<Variable>
|
|||||||
ebb: ebb,
|
ebb: ebb,
|
||||||
undef_variables: Vec::new(),
|
undef_variables: Vec::new(),
|
||||||
}));
|
}));
|
||||||
*self.ebb_headers.ensure(ebb) = block.into();
|
self.ebb_headers[ebb] = block.into();
|
||||||
block
|
block
|
||||||
}
|
}
|
||||||
/// Gets the header block corresponding to an Ebb, panics if the Ebb or the header block
|
/// Gets the header block corresponding to an Ebb, panics if the Ebb or the header block
|
||||||
|
|||||||
@@ -1388,7 +1388,7 @@ impl<'a> Parser<'a> {
|
|||||||
.expect("duplicate inst references created");
|
.expect("duplicate inst references created");
|
||||||
|
|
||||||
if let Some(encoding) = encoding {
|
if let Some(encoding) = encoding {
|
||||||
*ctx.function.encodings.ensure(inst) = encoding;
|
ctx.function.encodings[inst] = encoding;
|
||||||
}
|
}
|
||||||
|
|
||||||
if results.len() != num_results {
|
if results.len() != num_results {
|
||||||
@@ -1421,7 +1421,7 @@ impl<'a> Parser<'a> {
|
|||||||
.inst_results(inst)
|
.inst_results(inst)
|
||||||
.iter()
|
.iter()
|
||||||
.zip(result_locations) {
|
.zip(result_locations) {
|
||||||
*ctx.function.locations.ensure(value) = loc;
|
ctx.function.locations[value] = loc;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user