Flatten the Value reference representation.
All values are now references into the value table, so drop the distinction between direct and table values. Direct values don't exist any more. Also remove the parser support for the 'vxNN' syntax. Only 'vNN' values can be parsed now.
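
Put differently: a value reference no longer records whether it names an instruction result directly or an entry in the value table; there is a single index space, and the textual form is always 'vNN'. As a rough illustration only (the real cretonne Value type, its encoding limits, and its Display impl are not part of this diff and will differ), the flattening amounts to something like this Rust sketch:

// Hypothetical sketch; not the actual cretonne types.

// Before: two namespaces, two textual prefixes.
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum OldValue {
    Direct(u32), // printed as "vNN"
    Table(u32),  // printed as "vxNN"
}

// After: one flat index into the value table, always printed as "vNN".
#[derive(Debug, PartialEq, Clone, Copy)]
struct NewValue(u32);

impl NewValue {
    // Plays the role of the Value::with_number constructor seen in the diff:
    // one constructor, one namespace. The exact encoding limit is assumed.
    fn with_number(n: u32) -> Option<Self> {
        if n < (1u32 << 30) { Some(NewValue(n)) } else { None }
    }
}

fn main() {
    // 'vx7' is no longer a value token; only plain 'v' numbers exist.
    assert_eq!(NewValue::with_number(7), Some(NewValue(7)));
    println!("v{}", NewValue::with_number(7).unwrap().0);
}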
@@ -34,7 +34,7 @@ pub enum Token<'a> {
     Float(&'a str), // Floating point immediate
     Integer(&'a str), // Integer immediate
     Type(types::Type), // i32, f32, b32x4, ...
-    Value(Value), // v12, vx7
+    Value(Value), // v12, v7
     Ebb(Ebb), // ebb3
     StackSlot(u32), // ss3
     JumpTable(u32), // jt2
@@ -306,8 +306,7 @@ impl<'a> Lexer<'a> {
     // decoded token.
     fn numbered_entity(prefix: &str, number: u32) -> Option<Token<'a>> {
         match prefix {
-            "v" => Value::direct_with_number(number).map(|v| Token::Value(v)),
-            "vx" => Value::table_with_number(number).map(|v| Token::Value(v)),
+            "v" => Value::with_number(number).map(|v| Token::Value(v)),
             "ebb" => Ebb::with_number(number).map(|ebb| Token::Ebb(ebb)),
             "ss" => Some(Token::StackSlot(number)),
             "jt" => Some(Token::JumpTable(number)),
@@ -531,15 +530,14 @@ mod tests {
         let mut lex = Lexer::new("v0 v00 vx01 ebb1234567890 ebb5234567890 v1x vx1 vxvx4 \
                                   function0 function b1 i32x4 f32x5");
         assert_eq!(lex.next(),
-                   token(Token::Value(Value::direct_with_number(0).unwrap()), 1));
+                   token(Token::Value(Value::with_number(0).unwrap()), 1));
         assert_eq!(lex.next(), token(Token::Identifier("v00"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("vx01"), 1));
         assert_eq!(lex.next(),
                    token(Token::Ebb(Ebb::with_number(1234567890).unwrap()), 1));
         assert_eq!(lex.next(), token(Token::Identifier("ebb5234567890"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("v1x"), 1));
-        assert_eq!(lex.next(),
-                   token(Token::Value(Value::table_with_number(1).unwrap()), 1));
+        assert_eq!(lex.next(), token(Token::Identifier("vx1"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("vxvx4"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("function0"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("function"), 1));
@@ -1011,7 +1011,7 @@ impl<'a> Parser<'a> {
         // We need to parse instruction results here because they are shared
         // between the parsing of value aliases and the parsing of instructions.
         //
-        // inst-results ::= Value(v) { "," Value(vx) }
+        // inst-results ::= Value(v) { "," Value(v) }
         let results = self.parse_inst_results()?;

         match self.token() {
@@ -1032,7 +1032,7 @@ impl<'a> Parser<'a> {
     }

     // Parse parenthesized list of EBB arguments. Returns a vector of (u32, Type) pairs with the
-    // source vx numbers of the defined values and the defined types.
+    // source value numbers of the defined values and the defined types.
     //
     // ebb-args ::= * "(" ebb-arg { "," ebb-arg } ")"
     fn parse_ebb_args(&mut self, ctx: &mut Context, ebb: Ebb) -> Result<()> {
@@ -1056,19 +1056,19 @@ impl<'a> Parser<'a> {

     // Parse a single EBB argument declaration, and append it to `ebb`.
     //
-    // ebb-arg ::= * Value(vx) ":" Type(t)
+    // ebb-arg ::= * Value(v) ":" Type(t)
     //
     fn parse_ebb_arg(&mut self, ctx: &mut Context, ebb: Ebb) -> Result<()> {
-        // ebb-arg ::= * Value(vx) ":" Type(t)
-        let vx = self.match_value("EBB argument must be a value")?;
-        let vx_location = self.loc;
-        // ebb-arg ::= Value(vx) * ":" Type(t)
+        // ebb-arg ::= * Value(v) ":" Type(t)
+        let v = self.match_value("EBB argument must be a value")?;
+        let v_location = self.loc;
+        // ebb-arg ::= Value(v) * ":" Type(t)
         self.match_token(Token::Colon, "expected ':' after EBB argument")?;
-        // ebb-arg ::= Value(vx) ":" * Type(t)
+        // ebb-arg ::= Value(v) ":" * Type(t)
         let t = self.match_type("expected EBB argument type")?;
         // Allocate the EBB argument and add the mapping.
         let value = ctx.function.dfg.append_ebb_arg(ebb, t);
-        ctx.map.def_value(vx, value, &vx_location)
+        ctx.map.def_value(v, value, &v_location)
     }

     fn parse_value_location(&mut self, ctx: &Context) -> Result<ValueLoc> {
@@ -1147,21 +1147,21 @@ impl<'a> Parser<'a> {

     // Parse instruction results and return them.
     //
-    // inst-results ::= Value(v) { "," Value(vx) }
+    // inst-results ::= Value(v) { "," Value(v) }
     //
     fn parse_inst_results(&mut self) -> Result<Vec<Value>> {
         // Result value numbers.
         let mut results = Vec::new();

         // instruction ::= * [inst-results "="] Opcode(opc) ["." Type] ...
-        // inst-results ::= * Value(v) { "," Value(vx) }
+        // inst-results ::= * Value(v) { "," Value(v) }
         if let Some(Token::Value(v)) = self.token() {
             self.consume();
             results.push(v);

-            // inst-results ::= Value(v) * { "," Value(vx) }
+            // inst-results ::= Value(v) * { "," Value(v) }
             while self.optional(Token::Comma) {
-                // inst-results ::= Value(v) { "," * Value(vx) }
+                // inst-results ::= Value(v) { "," * Value(v) }
                 results.push(self.match_value("expected result value")?);
             }
         }
@@ -1171,7 +1171,7 @@ impl<'a> Parser<'a> {

     // Parse a value alias, and append it to `ebb`.
     //
-    // value_alias ::= [inst-results] "->" Value(vx)
+    // value_alias ::= [inst-results] "->" Value(v)
     //
     fn parse_value_alias(&mut self, results: Vec<Value>, ctx: &mut Context) -> Result<()> {
         if results.len() != 1 {
@@ -1711,19 +1711,23 @@ mod tests {
         let (func, details) = Parser::new("function qux() {
                                            ebb0:
                                                v4 = iconst.i8 6
-                                               vx3 -> v4
-                                               v1 = iadd_imm vx3, 17
+                                               v3 -> v4
+                                               v1 = iadd_imm v3, 17
                                            }")
             .parse_function(None)
             .unwrap();
         assert_eq!(func.name.to_string(), "qux");
         let v4 = details.map.lookup_str("v4").unwrap();
-        assert_eq!(v4.to_string(), "vx0");
-        let vx3 = details.map.lookup_str("vx3").unwrap();
-        assert_eq!(vx3.to_string(), "vx2");
-        let aliased_to = func.dfg
-            .resolve_aliases(Value::table_with_number(0).unwrap());
-        assert_eq!(aliased_to.to_string(), "vx0");
+        assert_eq!(v4.to_string(), "v0");
+        let v3 = details.map.lookup_str("v3").unwrap();
+        assert_eq!(v3.to_string(), "v2");
+        match v3 {
+            AnyEntity::Value(v3) => {
+                let aliased_to = func.dfg.resolve_aliases(v3);
+                assert_eq!(aliased_to.to_string(), "v0");
+            }
+            _ => panic!("expected value: {}", v3),
+        }
     }

     #[test]
@@ -1789,7 +1793,7 @@ mod tests {
     fn ebb_header() {
         let (func, _) = Parser::new("function ebbs() {
                                      ebb0:
-                                     ebb4(vx3: i32):
+                                     ebb4(v3: i32):
                                      }")
             .parse_function(None)
             .unwrap();
@@ -16,7 +16,7 @@ use lexer::split_entity_name;
 /// Mapping from source entity names to entity references that are valid in the parsed function.
 #[derive(Debug)]
 pub struct SourceMap {
-    values: HashMap<Value, Value>, // vNN, vxNN
+    values: HashMap<Value, Value>, // vNN
     ebbs: HashMap<Ebb, Ebb>, // ebbNN
     stack_slots: HashMap<u32, StackSlot>, // ssNN
     signatures: HashMap<u32, SigRef>, // sigNN
@@ -64,12 +64,7 @@ impl SourceMap {
     pub fn lookup_str(&self, name: &str) -> Option<AnyEntity> {
         split_entity_name(name).and_then(|(ent, num)| match ent {
             "v" => {
-                Value::direct_with_number(num)
-                    .and_then(|v| self.get_value(v))
-                    .map(AnyEntity::Value)
-            }
-            "vx" => {
-                Value::table_with_number(num)
+                Value::with_number(num)
                     .and_then(|v| self.get_value(v))
                     .map(AnyEntity::Value)
             }
@@ -230,8 +225,8 @@ mod tests {
         let tf = parse_test("function detail() {
                              ss10 = stack_slot 13
                              jt10 = jump_table ebb0
-                             ebb0(v4: i32, vx7: i32):
-                             v10 = iadd v4, vx7
+                             ebb0(v4: i32, v7: i32):
+                             v10 = iadd v4, v7
                              }")
             .unwrap();
         let map = &tf.functions[0].1.map;
@@ -241,8 +236,8 @@ mod tests {
         assert_eq!(map.lookup_str("ss10").unwrap().to_string(), "ss0");
         assert_eq!(map.lookup_str("jt10").unwrap().to_string(), "jt0");
         assert_eq!(map.lookup_str("ebb0").unwrap().to_string(), "ebb0");
-        assert_eq!(map.lookup_str("v4").unwrap().to_string(), "vx0");
-        assert_eq!(map.lookup_str("vx7").unwrap().to_string(), "vx1");
-        assert_eq!(map.lookup_str("v10").unwrap().to_string(), "vx2");
+        assert_eq!(map.lookup_str("v4").unwrap().to_string(), "v0");
+        assert_eq!(map.lookup_str("v7").unwrap().to_string(), "v1");
+        assert_eq!(map.lookup_str("v10").unwrap().to_string(), "v2");
     }
 }
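
A side note on the sourcemap hunks above: after this change, lookup_str only needs the single "v" arm, because a source name is split into an alphabetic prefix plus a decimal number and only the plain "v" prefix can yield a value. The snippet below is a self-contained sketch of that dispatch with split_entity_name re-implemented under assumed behavior; the real helper lives in the lexer module and its exact rules (for example how it rejects leading zeros, as in the "v00" lexer test) may differ.

// Stand-in for lexer::split_entity_name with an assumed signature; shown only
// to illustrate why a single "v" arm is enough after the flattening.
fn split_entity_name(name: &str) -> Option<(&str, u32)> {
    let idx = name.find(|c: char| c.is_ascii_digit())?;
    let (prefix, digits) = name.split_at(idx);
    // Assumed rule: leading zeros are rejected, so "v00" stays an identifier.
    if digits.len() > 1 && digits.starts_with('0') {
        return None;
    }
    digits.parse().ok().map(|num| (prefix, num))
}

fn main() {
    assert_eq!(split_entity_name("v12"), Some(("v", 12))); // matches the "v" arm
    assert_eq!(split_entity_name("vx7"), Some(("vx", 7))); // no arm for "vx" any more
    assert_eq!(split_entity_name("v00"), None);            // lexed as a plain identifier
}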