Move library crates under 'lib/'.

Give each of these crates a more standard directory layout, with sources in
a 'src' subdirectory and Cargo.toml in the top-level lib/foo directory.

Add license and description fields to each.

The build script for the cretonne crate now lives in
'lib/cretonne/build.rs', separating it from the normal library sources
under 'lib/cretonne/src'.
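
For example, the layout now looks like this (abridged to the paths touched
by this commit):

    lib/cretonne/Cargo.toml
    lib/cretonne/build.rs
    lib/cretonne/src/*.rs
    lib/reader/Cargo.toml
    lib/reader/src/*.rs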
Author: Jakob Stoklund Olesen
Date:   2016-10-17 14:44:43 -07:00
Parent: e7f30a40b4
Commit: 0764df28b5

52 changed files with 64 additions and 64 deletions

lib/reader/Cargo.toml (new file)

@@ -0,0 +1,15 @@
[package]
authors = ["The Cretonne Project Developers"]
name = "cretonne-reader"
version = "0.0.0"
description = "Cretonne textual IL reader"
license = "Apache-2.0"
documentation = "https://cretonne.readthedocs.io/"
repository = "https://github.com/stoklund/cretonne"
publish = false
[lib]
name = "cton_reader"
[dependencies]
cretonne = { path = "../cretonne" }

lib/reader/src/error.rs (new file)

@@ -0,0 +1,44 @@
//! Define the `Location`, `Error`, and `Result` types.
#![macro_use]
use std::fmt;
use std::result;
/// The location of a `Token` or `Error`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Location {
pub line_number: usize,
}
/// A parse error is returned when the parse failed.
#[derive(Debug)]
pub struct Error {
pub location: Location,
pub message: String,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}: {}", self.location.line_number, self.message)
}
}
pub type Result<T> = result::Result<T, Error>;
// Create an `Err` variant of `Result<X>` from a location and `format!` args.
macro_rules! err {
( $loc:expr, $msg:expr ) => {
Err($crate::Error {
location: $loc.clone(),
message: String::from($msg),
})
};
( $loc:expr, $fmt:expr, $( $arg:expr ),+ ) => {
Err($crate::Error {
location: $loc.clone(),
message: format!( $fmt, $( $arg ),+ ),
})
};
}
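// Example usage (a sketch; `check_flag` is a hypothetical helper):
//
//     fn check_flag(name: &str, loc: &Location) -> Result<()> {
//         if name.is_empty() {
//             return err!(loc, "empty flag name");
//         }
//         err!(loc, "unknown flag '{}'", name)
//     }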

lib/reader/src/isaspec.rs (new file)

@@ -0,0 +1,49 @@
//! Parsed representation of `set` and `isa` commands.
//!
//! A test case file can contain `set` commands that set ISA-independent settings, and it can
//! contain `isa` commands that select an ISA and apply ISA-specific settings.
//!
//! If a test case file contains `isa` commands, the tests will only be run against the specified
//! ISAs. If the file contains no `isa` commands, the tests will be run against all supported ISAs.
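//!
//! For example, a test file might begin with lines like these (an illustrative sketch; the
//! available settings and ISA names depend on the cretonne crate):
//!
//! ```text
//! test cat
//! set opt_level=best
//! isa riscv supports_m=1
//! ```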
use cretonne::settings::{Flags, Configurable, Error as SetError};
use cretonne::isa::TargetIsa;
use error::{Result, Location};
use testcommand::TestOption;
/// The ISA specifications in a `.cton` file.
pub enum IsaSpec {
/// The parsed file does not contain any `isa` commands, but it may contain `set` commands
/// which are reflected in the finished `Flags` object.
None(Flags),
/// The parsed file does contain `isa` commands.
/// Each `isa` command is used to configure a `TargetIsa` trait object.
Some(Vec<Box<TargetIsa>>),
}
/// Parse an iterator of command line options and apply them to `config`.
pub fn parse_options<'a, I>(iter: I, config: &mut Configurable, loc: &Location) -> Result<()>
where I: Iterator<Item = &'a str>
{
for opt in iter.map(TestOption::new) {
match opt {
TestOption::Flag(name) => {
match config.set_bool(name, true) {
Ok(_) => {}
Err(SetError::BadName) => return err!(loc, "unknown flag '{}'", opt),
Err(_) => return err!(loc, "not a boolean flag: '{}'", opt),
}
}
TestOption::Value(name, value) => {
match config.set(name, value) {
Ok(_) => {}
Err(SetError::BadName) => return err!(loc, "unknown setting '{}'", opt),
Err(SetError::BadType) => return err!(loc, "invalid setting type: '{}'", opt),
Err(SetError::BadValue) => return err!(loc, "invalid setting value: '{}'", opt),
}
}
}
}
Ok(())
}

lib/reader/src/lexer.rs (new file)

@@ -0,0 +1,489 @@
//! Lexical analysis for `.cton` files.
use std::str::CharIndices;
use std::u16;
use cretonne::ir::types;
use cretonne::ir::{Value, Ebb};
use error::Location;
/// A Token returned from the `Lexer`.
///
/// Some variants may contain references to the original source text, so the `Token` has the same
/// lifetime as the source.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Token<'a> {
Comment(&'a str),
LPar, // '('
RPar, // ')'
LBrace, // '{'
RBrace, // '}'
Comma, // ','
Dot, // '.'
Colon, // ':'
Equal, // '='
Arrow, // '->'
Float(&'a str), // Floating point immediate
Integer(&'a str), // Integer immediate
Type(types::Type), // i32, f32, b32x4, ...
Value(Value), // v12, vx7
Ebb(Ebb), // ebb3
StackSlot(u32), // ss3
JumpTable(u32), // jt2
FuncRef(u32), // fn2
SigRef(u32), // sig2
Identifier(&'a str), // Unrecognized identifier (opcode, enumerator, ...)
}
/// A `Token` with an associated location.
#[derive(Debug, PartialEq, Eq)]
pub struct LocatedToken<'a> {
pub token: Token<'a>,
pub location: Location,
}
/// Wrap up a `Token` with the given location.
fn token<'a>(token: Token<'a>, loc: Location) -> Result<LocatedToken<'a>, LocatedError> {
Ok(LocatedToken {
token: token,
location: loc,
})
}
/// An error from the lexical analysis.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Error {
InvalidChar,
}
/// An `Error` with an associated Location.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LocatedError {
pub error: Error,
pub location: Location,
}
/// Wrap up an `Error` with the given location.
fn error<'a>(error: Error, loc: Location) -> Result<LocatedToken<'a>, LocatedError> {
Err(LocatedError {
error: error,
location: loc,
})
}
/// Get the number of decimal digits at the end of `s`.
fn trailing_digits(s: &str) -> usize {
// It's faster to iterate backwards over bytes, and we're only counting ASCII digits.
s.as_bytes().iter().rev().cloned().take_while(|&b| b'0' <= b && b <= b'9').count()
}
/// Pre-parse a supposed entity name by splitting it into two parts: a head of lowercase ASCII
/// letters and a numeric tail.
pub fn split_entity_name(name: &str) -> Option<(&str, u32)> {
let (head, tail) = name.split_at(name.len() - trailing_digits(name));
if tail.len() > 1 && tail.starts_with('0') {
None
} else {
tail.parse().ok().map(|n| (head, n))
}
}
/// Lexical analysis.
///
/// A `Lexer` reads text from a `&str` and provides a sequence of tokens.
///
/// It also keeps track of a line number for error reporting.
///
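/// # Example
///
/// A minimal usage sketch:
///
/// ```ignore
/// let mut lex = Lexer::new("ebb0:");
/// while let Some(result) = lex.next() {
///     match result {
///         Ok(t) => println!("{:?} at line {}", t.token, t.location.line_number),
///         Err(e) => println!("{:?} at line {}", e.error, e.location.line_number),
///     }
/// }
/// ```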
pub struct Lexer<'a> {
// Complete source being processed.
source: &'a str,
// Iterator into `source`.
chars: CharIndices<'a>,
// Next character to be processed, or `None` at the end.
lookahead: Option<char>,
// Index into `source` of lookahead character.
pos: usize,
// Current line number.
line_number: usize,
}
impl<'a> Lexer<'a> {
pub fn new(s: &'a str) -> Lexer {
let mut lex = Lexer {
source: s,
chars: s.char_indices(),
lookahead: None,
pos: 0,
line_number: 1,
};
// Advance to the first char.
lex.next_ch();
lex
}
// Advance to the next character.
// Return the next lookahead character, or None when the end is encountered.
// Always update `lookahead` and `pos` to reflect the new position.
fn next_ch(&mut self) -> Option<char> {
if self.lookahead == Some('\n') {
self.line_number += 1;
}
match self.chars.next() {
Some((idx, ch)) => {
self.pos = idx;
self.lookahead = Some(ch);
}
None => {
self.pos = self.source.len();
self.lookahead = None;
}
}
self.lookahead
}
// Get the location corresponding to `lookahead`.
fn loc(&self) -> Location {
Location { line_number: self.line_number }
}
// Starting from `lookahead`, are we looking at `prefix`?
fn looking_at(&self, prefix: &str) -> bool {
self.source[self.pos..].starts_with(prefix)
}
// Scan a single-char token.
fn scan_char(&mut self, tok: Token<'a>) -> Result<LocatedToken<'a>, LocatedError> {
assert!(self.lookahead != None);
let loc = self.loc();
self.next_ch();
token(tok, loc)
}
// Scan a multi-char token.
fn scan_chars(&mut self,
count: usize,
tok: Token<'a>)
-> Result<LocatedToken<'a>, LocatedError> {
let loc = self.loc();
for _ in 0..count {
assert!(self.lookahead != None);
self.next_ch();
}
token(tok, loc)
}
/// Get the rest of the current line.
/// The next token returned by `next()` will be from the following lines.
pub fn rest_of_line(&mut self) -> &'a str {
let begin = self.pos;
loop {
match self.next_ch() {
None | Some('\n') => return &self.source[begin..self.pos],
_ => {}
}
}
}
// Scan a comment extending to the end of the current line.
fn scan_comment(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
let loc = self.loc();
let text = self.rest_of_line();
token(Token::Comment(text), loc)
}
// Scan a number token which can represent either an integer or floating point number.
//
// Accept the following forms:
//
// - `10`: Integer
// - `-10`: Integer
// - `0xff_00`: Integer
// - `0.0`: Float
// - `0x1.f`: Float
// - `-0x2.4`: Float
// - `0x0.4p-34`: Float
//
// This function does not filter out all invalid numbers. It depends on context-sensitive
// decoding of the text for that. For example, the number of allowed digits in an `Ieee32`
// and an `Ieee64` constant differs.
fn scan_number(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
let begin = self.pos;
let loc = self.loc();
let mut is_float = false;
// Skip a leading sign.
if self.lookahead == Some('-') {
self.next_ch();
}
// Check for NaNs with payloads.
if self.looking_at("NaN:") || self.looking_at("sNaN:") {
// Skip the `NaN:` prefix, the loop below won't accept it.
// We expect a hexadecimal number to follow the colon.
while self.next_ch() != Some(':') {}
is_float = true;
} else if self.looking_at("NaN") || self.looking_at("Inf") {
// This is Inf or a default quiet NaN.
is_float = true;
}
// Look for the end of this number. Detect the radix point if there is one.
loop {
match self.next_ch() {
Some('-') | Some('_') => {}
Some('.') => is_float = true,
Some(ch) if ch.is_alphanumeric() => {}
_ => break,
}
}
let text = &self.source[begin..self.pos];
if is_float {
token(Token::Float(text), loc)
} else {
token(Token::Integer(text), loc)
}
}
// Scan a 'word', which is an identifier-like sequence of characters beginning with '_' or an
// alphabetic char, followed by zero or more alphanumeric or '_' characters.
fn scan_word(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
let begin = self.pos;
let loc = self.loc();
assert!(self.lookahead == Some('_') || self.lookahead.unwrap().is_alphabetic());
loop {
match self.next_ch() {
Some('_') => {}
Some(ch) if ch.is_alphanumeric() => {}
_ => break,
}
}
let text = &self.source[begin..self.pos];
// Look for numbered well-known entities like ebb15, v45, ...
token(split_entity_name(text)
.and_then(|(prefix, number)| {
Self::numbered_entity(prefix, number)
.or_else(|| Self::value_type(text, prefix, number))
})
.unwrap_or(Token::Identifier(text)),
loc)
}
// If prefix is a well-known entity prefix and suffix is a valid entity number, return the
// decoded token.
fn numbered_entity(prefix: &str, number: u32) -> Option<Token<'a>> {
match prefix {
"v" => Value::direct_with_number(number).map(|v| Token::Value(v)),
"vx" => Value::table_with_number(number).map(|v| Token::Value(v)),
"ebb" => Ebb::with_number(number).map(|ebb| Token::Ebb(ebb)),
"ss" => Some(Token::StackSlot(number)),
"jt" => Some(Token::JumpTable(number)),
"fn" => Some(Token::FuncRef(number)),
"sig" => Some(Token::SigRef(number)),
_ => None,
}
}
// Recognize a scalar or vector type.
fn value_type(text: &str, prefix: &str, number: u32) -> Option<Token<'a>> {
let is_vector = prefix.ends_with('x');
let scalar = if is_vector {
&prefix[0..prefix.len() - 1]
} else {
text
};
let base_type = match scalar {
"i8" => types::I8,
"i16" => types::I16,
"i32" => types::I32,
"i64" => types::I64,
"f32" => types::F32,
"f64" => types::F64,
"b1" => types::B1,
"b8" => types::B8,
"b16" => types::B16,
"b32" => types::B32,
"b64" => types::B64,
_ => return None,
};
if is_vector {
if number <= u16::MAX as u32 {
base_type.by(number as u16).map(|t| Token::Type(t))
} else {
None
}
} else {
Some(Token::Type(base_type))
}
}
/// Get the next token or a lexical error.
///
/// Return None when the end of the source is encountered.
pub fn next(&mut self) -> Option<Result<LocatedToken<'a>, LocatedError>> {
loop {
let loc = self.loc();
return match self.lookahead {
None => None,
Some(';') => Some(self.scan_comment()),
Some('(') => Some(self.scan_char(Token::LPar)),
Some(')') => Some(self.scan_char(Token::RPar)),
Some('{') => Some(self.scan_char(Token::LBrace)),
Some('}') => Some(self.scan_char(Token::RBrace)),
Some(',') => Some(self.scan_char(Token::Comma)),
Some('.') => Some(self.scan_char(Token::Dot)),
Some(':') => Some(self.scan_char(Token::Colon)),
Some('=') => Some(self.scan_char(Token::Equal)),
Some('-') => {
if self.looking_at("->") {
Some(self.scan_chars(2, Token::Arrow))
} else {
Some(self.scan_number())
}
}
Some(ch) if ch.is_digit(10) => Some(self.scan_number()),
Some(ch) if ch.is_alphabetic() => Some(self.scan_word()),
Some(ch) if ch.is_whitespace() => {
self.next_ch();
continue;
}
_ => {
// Skip invalid char, return error.
self.next_ch();
Some(error(Error::InvalidChar, loc))
}
};
}
}
}
#[cfg(test)]
mod tests {
use super::trailing_digits;
use super::*;
use cretonne::ir::types;
use cretonne::ir::{Value, Ebb};
use error::Location;
#[test]
fn digits() {
assert_eq!(trailing_digits(""), 0);
assert_eq!(trailing_digits("x"), 0);
assert_eq!(trailing_digits("0x"), 0);
assert_eq!(trailing_digits("x1"), 1);
assert_eq!(trailing_digits("1x1"), 1);
assert_eq!(trailing_digits("1x01"), 2);
}
#[test]
fn entity_name() {
assert_eq!(split_entity_name(""), None);
assert_eq!(split_entity_name("x"), None);
assert_eq!(split_entity_name("x+"), None);
assert_eq!(split_entity_name("x+1"), Some(("x+", 1)));
assert_eq!(split_entity_name("x-1"), Some(("x-", 1)));
assert_eq!(split_entity_name("1"), Some(("", 1)));
assert_eq!(split_entity_name("x1"), Some(("x", 1)));
assert_eq!(split_entity_name("xy0"), Some(("xy", 0)));
// Reject this non-canonical form.
assert_eq!(split_entity_name("inst01"), None);
}
fn token<'a>(token: Token<'a>, line: usize) -> Option<Result<LocatedToken<'a>, LocatedError>> {
Some(super::token(token, Location { line_number: line }))
}
fn error<'a>(error: Error, line: usize) -> Option<Result<LocatedToken<'a>, LocatedError>> {
Some(super::error(error, Location { line_number: line }))
}
#[test]
fn make_lexer() {
let mut l1 = Lexer::new("");
let mut l2 = Lexer::new(" ");
let mut l3 = Lexer::new("\n ");
assert_eq!(l1.next(), None);
assert_eq!(l2.next(), None);
assert_eq!(l3.next(), None);
}
#[test]
fn lex_comment() {
let mut lex = Lexer::new("; hello");
assert_eq!(lex.next(), token(Token::Comment("; hello"), 1));
assert_eq!(lex.next(), None);
lex = Lexer::new("\n ;hello\n;foo");
assert_eq!(lex.next(), token(Token::Comment(";hello"), 2));
assert_eq!(lex.next(), token(Token::Comment(";foo"), 3));
assert_eq!(lex.next(), None);
// Scan a comment after an invalid char.
let mut lex = Lexer::new("#; hello");
assert_eq!(lex.next(), error(Error::InvalidChar, 1));
assert_eq!(lex.next(), token(Token::Comment("; hello"), 1));
assert_eq!(lex.next(), None);
}
#[test]
fn lex_chars() {
let mut lex = Lexer::new("(); hello\n = :{, }.");
assert_eq!(lex.next(), token(Token::LPar, 1));
assert_eq!(lex.next(), token(Token::RPar, 1));
assert_eq!(lex.next(), token(Token::Comment("; hello"), 1));
assert_eq!(lex.next(), token(Token::Equal, 2));
assert_eq!(lex.next(), token(Token::Colon, 2));
assert_eq!(lex.next(), token(Token::LBrace, 2));
assert_eq!(lex.next(), token(Token::Comma, 2));
assert_eq!(lex.next(), token(Token::RBrace, 2));
assert_eq!(lex.next(), token(Token::Dot, 2));
assert_eq!(lex.next(), None);
}
#[test]
fn lex_numbers() {
let mut lex = Lexer::new(" 0 2_000 -1,0xf -0x0 0.0 0x0.4p-34");
assert_eq!(lex.next(), token(Token::Integer("0"), 1));
assert_eq!(lex.next(), token(Token::Integer("2_000"), 1));
assert_eq!(lex.next(), token(Token::Integer("-1"), 1));
assert_eq!(lex.next(), token(Token::Comma, 1));
assert_eq!(lex.next(), token(Token::Integer("0xf"), 1));
assert_eq!(lex.next(), token(Token::Integer("-0x0"), 1));
assert_eq!(lex.next(), token(Token::Float("0.0"), 1));
assert_eq!(lex.next(), token(Token::Float("0x0.4p-34"), 1));
assert_eq!(lex.next(), None);
}
#[test]
fn lex_identifiers() {
let mut lex = Lexer::new("v0 v00 vx01 ebb1234567890 ebb5234567890 v1x vx1 vxvx4 \
function0 function b1 i32x4 f32x5");
assert_eq!(lex.next(),
token(Token::Value(Value::direct_with_number(0).unwrap()), 1));
assert_eq!(lex.next(), token(Token::Identifier("v00"), 1));
assert_eq!(lex.next(), token(Token::Identifier("vx01"), 1));
assert_eq!(lex.next(),
token(Token::Ebb(Ebb::with_number(1234567890).unwrap()), 1));
assert_eq!(lex.next(), token(Token::Identifier("ebb5234567890"), 1));
assert_eq!(lex.next(), token(Token::Identifier("v1x"), 1));
assert_eq!(lex.next(),
token(Token::Value(Value::table_with_number(1).unwrap()), 1));
assert_eq!(lex.next(), token(Token::Identifier("vxvx4"), 1));
assert_eq!(lex.next(), token(Token::Identifier("function0"), 1));
assert_eq!(lex.next(), token(Token::Identifier("function"), 1));
assert_eq!(lex.next(), token(Token::Type(types::B1), 1));
assert_eq!(lex.next(), token(Token::Type(types::I32.by(4).unwrap()), 1));
assert_eq!(lex.next(), token(Token::Identifier("f32x5"), 1));
assert_eq!(lex.next(), None);
}
}

lib/reader/src/lib.rs (new file)

@@ -0,0 +1,21 @@
//! Cretonne file reader library.
//!
//! The cton_reader library supports reading .cton files. This functionality is needed for testing
//! Cretonne, but is not essential for a JIT compiler.
extern crate cretonne;
pub use error::{Location, Result, Error};
pub use parser::{parse_functions, parse_test};
pub use testcommand::{TestCommand, TestOption};
pub use testfile::{TestFile, Details};
pub use isaspec::IsaSpec;
pub use sourcemap::SourceMap;
mod error;
mod lexer;
mod parser;
mod testcommand;
mod isaspec;
mod testfile;
mod sourcemap;

lib/reader/src/parser.rs (new file, 1376 lines)

File diff suppressed because it is too large.

lib/reader/src/sourcemap.rs (new file)

@@ -0,0 +1,220 @@
//! Source map for translating source entity names to parsed entities.
//!
//! When the parser reads in a source file, entities like instructions, EBBs, and values get new
//! entity numbers. The parser maintains a mapping from the entity names in the source to the final
//! entity references.
//!
//! The `SourceMap` struct defined in this module makes the same mapping available to parser
//! clients.
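//!
//! A sketch of client usage (mirroring the test at the bottom of this file):
//!
//! ```ignore
//! let map = &test_file.functions[0].1.map;
//! // The source name `v10` may have been renumbered during parsing.
//! if let Some(entity) = map.lookup_str("v10") {
//!     println!("v10 parsed as {}", entity);
//! }
//! ```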
use std::collections::HashMap;
use cretonne::ir::{StackSlot, JumpTable, Ebb, Value, Inst};
use cretonne::ir::entities::AnyEntity;
use error::{Result, Location};
use lexer::split_entity_name;
/// Mapping from source entity names to entity references that are valid in the parsed function.
#[derive(Debug)]
pub struct SourceMap {
values: HashMap<Value, Value>, // vNN, vxNN
ebbs: HashMap<Ebb, Ebb>, // ebbNN
stack_slots: HashMap<u32, StackSlot>, // ssNN
jump_tables: HashMap<u32, JumpTable>, // jtNN
// Store locations for entities, including instructions.
locations: HashMap<AnyEntity, Location>,
}
/// Read-only interface which is exposed outside the parser crate.
impl SourceMap {
/// Look up a value entity by its source number.
pub fn get_value(&self, src: Value) -> Option<Value> {
self.values.get(&src).cloned()
}
/// Look up an EBB entity by its source number.
pub fn get_ebb(&self, src: Ebb) -> Option<Ebb> {
self.ebbs.get(&src).cloned()
}
/// Look up a stack slot entity by its source number.
pub fn get_ss(&self, src_num: u32) -> Option<StackSlot> {
self.stack_slots.get(&src_num).cloned()
}
/// Look up a jump table entity by its source number.
pub fn get_jt(&self, src_num: u32) -> Option<JumpTable> {
self.jump_tables.get(&src_num).cloned()
}
/// Look up an entity by source name.
/// Returns the entity reference corresponding to `name`, if it exists.
pub fn lookup_str(&self, name: &str) -> Option<AnyEntity> {
split_entity_name(name).and_then(|(ent, num)| {
match ent {
"v" => {
Value::direct_with_number(num)
.and_then(|v| self.get_value(v))
.map(AnyEntity::Value)
}
"vx" => {
Value::table_with_number(num)
.and_then(|v| self.get_value(v))
.map(AnyEntity::Value)
}
"ebb" => Ebb::with_number(num).and_then(|e| self.get_ebb(e)).map(AnyEntity::Ebb),
"ss" => self.get_ss(num).map(AnyEntity::StackSlot),
"jt" => self.get_jt(num).map(AnyEntity::JumpTable),
_ => None,
}
})
}
/// Get the source location where an entity was defined.
/// This looks up entities in the parsed function, not the source entity numbers.
pub fn location(&self, entity: AnyEntity) -> Option<Location> {
self.locations.get(&entity).cloned()
}
/// Rewrite an Ebb reference.
pub fn rewrite_ebb(&self, ebb: &mut Ebb, loc: AnyEntity) -> Result<()> {
match self.get_ebb(*ebb) {
Some(new) => {
*ebb = new;
Ok(())
}
None => {
err!(self.location(loc).unwrap_or_default(),
"undefined reference: {}",
ebb)
}
}
}
/// Rewrite a value reference.
pub fn rewrite_value(&self, val: &mut Value, loc: AnyEntity) -> Result<()> {
match self.get_value(*val) {
Some(new) => {
*val = new;
Ok(())
}
None => {
err!(self.location(loc).unwrap_or_default(),
"undefined reference: {}",
val)
}
}
}
/// Rewrite a slice of value references.
pub fn rewrite_values(&self, vals: &mut [Value], loc: AnyEntity) -> Result<()> {
for val in vals {
try!(self.rewrite_value(val, loc));
}
Ok(())
}
}
/// Interface for mutating a source map.
///
/// This interface is provided for the parser itself; it is not made available outside the crate.
pub trait MutableSourceMap {
fn new() -> Self;
/// Define a value mapping from the source name `src` to the final `entity`.
fn def_value(&mut self, src: Value, entity: Value, loc: &Location) -> Result<()>;
fn def_ebb(&mut self, src: Ebb, entity: Ebb, loc: &Location) -> Result<()>;
fn def_ss(&mut self, src_num: u32, entity: StackSlot, loc: &Location) -> Result<()>;
fn def_jt(&mut self, src_num: u32, entity: JumpTable, loc: &Location) -> Result<()>;
/// Define an instruction. Since instruction numbers never appear in source, only the location
/// is recorded.
fn def_inst(&mut self, entity: Inst, loc: &Location) -> Result<()>;
}
impl MutableSourceMap for SourceMap {
fn new() -> SourceMap {
SourceMap {
values: HashMap::new(),
ebbs: HashMap::new(),
stack_slots: HashMap::new(),
jump_tables: HashMap::new(),
locations: HashMap::new(),
}
}
fn def_value(&mut self, src: Value, entity: Value, loc: &Location) -> Result<()> {
if self.values.insert(src, entity).is_some() {
err!(loc, "duplicate value: {}", src)
} else if self.locations.insert(entity.into(), loc.clone()).is_some() {
err!(loc, "duplicate entity: {}", entity)
} else {
Ok(())
}
}
fn def_ebb(&mut self, src: Ebb, entity: Ebb, loc: &Location) -> Result<()> {
if self.ebbs.insert(src, entity).is_some() {
err!(loc, "duplicate EBB: {}", src)
} else if self.locations.insert(entity.into(), loc.clone()).is_some() {
err!(loc, "duplicate entity: {}", entity)
} else {
Ok(())
}
}
fn def_ss(&mut self, src_num: u32, entity: StackSlot, loc: &Location) -> Result<()> {
if self.stack_slots.insert(src_num, entity).is_some() {
err!(loc, "duplicate stack slot: ss{}", src_num)
} else if self.locations.insert(entity.into(), loc.clone()).is_some() {
err!(loc, "duplicate entity: {}", entity)
} else {
Ok(())
}
}
fn def_jt(&mut self, src_num: u32, entity: JumpTable, loc: &Location) -> Result<()> {
if self.jump_tables.insert(src_num, entity).is_some() {
err!(loc, "duplicate jump table: jt{}", src_num)
} else if self.locations.insert(entity.into(), loc.clone()).is_some() {
err!(loc, "duplicate entity: {}", entity)
} else {
Ok(())
}
}
fn def_inst(&mut self, entity: Inst, loc: &Location) -> Result<()> {
if self.locations.insert(entity.into(), loc.clone()).is_some() {
err!(loc, "duplicate entity: {}", entity)
} else {
Ok(())
}
}
}
#[cfg(test)]
mod tests {
use parse_test;
#[test]
fn details() {
let tf = parse_test("function detail() {
ss10 = stack_slot 13
jt10 = jump_table ebb0
ebb0(v4: i32, vx7: i32):
v10 = iadd v4, vx7
}")
.unwrap();
let map = &tf.functions[0].1.map;
assert_eq!(map.lookup_str("v0"), None);
assert_eq!(map.lookup_str("ss1"), None);
assert_eq!(map.lookup_str("ss10").unwrap().to_string(), "ss0");
assert_eq!(map.lookup_str("jt10").unwrap().to_string(), "jt0");
assert_eq!(map.lookup_str("ebb0").unwrap().to_string(), "ebb0");
assert_eq!(map.lookup_str("v4").unwrap().to_string(), "vx0");
assert_eq!(map.lookup_str("vx7").unwrap().to_string(), "vx1");
assert_eq!(map.lookup_str("v10").unwrap().to_string(), "v0");
}
}

lib/reader/src/testcommand.rs (new file)

@@ -0,0 +1,88 @@
//! Test commands.
//!
//! A `.cton` file can begin with one or more *test commands* which specify what is to be tested.
//! The general syntax is:
//!
//! <pre>
//! test <i>&lt;command&gt;</i> <i>[options]</i>...
//! </pre>
//!
//! The options are either a single identifier flag, or setting values like `identifier=value`.
//!
//! The parser does not understand the test commands or which options are valid. It simply parses
//! the general format into a `TestCommand` data structure.
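//!
//! For example (matching the tests below), `TestCommand::new("cat one=4 two")` produces the
//! command `"cat"` with the options `[Value("one", "4"), Flag("two")]`.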
use std::fmt::{self, Display, Formatter};
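/// A parsed test command: the command name and a list of options.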
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct TestCommand<'a> {
pub command: &'a str,
pub options: Vec<TestOption<'a>>,
}
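/// An option on a test command: either a bare flag or a `key=value` setting.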
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum TestOption<'a> {
Flag(&'a str),
Value(&'a str, &'a str),
}
impl<'a> TestCommand<'a> {
pub fn new(s: &'a str) -> TestCommand<'a> {
let mut parts = s.split_whitespace();
let cmd = parts.next().unwrap_or("");
TestCommand {
command: cmd,
options: parts.filter(|s| !s.is_empty()).map(TestOption::new).collect(),
}
}
}
impl<'a> Display for TestCommand<'a> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
try!(write!(f, "{}", self.command));
for opt in &self.options {
try!(write!(f, " {}", opt));
}
writeln!(f, "")
}
}
impl<'a> TestOption<'a> {
pub fn new(s: &'a str) -> TestOption<'a> {
match s.find('=') {
None => TestOption::Flag(s),
Some(p) => TestOption::Value(&s[0..p], &s[p + 1..]),
}
}
}
impl<'a> Display for TestOption<'a> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
TestOption::Flag(s) => write!(f, "{}", s),
TestOption::Value(s, v) => write!(f, "{}={}", s, v),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_option() {
assert_eq!(TestOption::new(""), TestOption::Flag(""));
assert_eq!(TestOption::new("foo"), TestOption::Flag("foo"));
assert_eq!(TestOption::new("foo=bar"), TestOption::Value("foo", "bar"));
}
#[test]
fn parse_command() {
assert_eq!(&TestCommand::new("").to_string(), "\n");
assert_eq!(&TestCommand::new("cat").to_string(), "cat\n");
assert_eq!(&TestCommand::new("cat ").to_string(), "cat\n");
assert_eq!(&TestCommand::new("cat 1 ").to_string(), "cat 1\n");
assert_eq!(&TestCommand::new("cat one=4 two t").to_string(),
"cat one=4 two t\n");
}
}

lib/reader/src/testfile.rs (new file)

@@ -0,0 +1,47 @@
//! Data structures representing a parsed test file.
//!
//! A test file is a `.cton` file which contains test commands and settings for running a
//! file-based test case.
//!
use cretonne::ir::Function;
use cretonne::ir::entities::AnyEntity;
use testcommand::TestCommand;
use isaspec::IsaSpec;
use sourcemap::SourceMap;
use error::Location;
/// A parsed test case.
///
/// This is the result of parsing a `.cton` file which contains a number of test commands and ISA
/// specs followed by the functions that should be tested.
pub struct TestFile<'a> {
/// `test foo ...` lines.
pub commands: Vec<TestCommand<'a>>,
/// `isa bar ...` lines.
pub isa_spec: IsaSpec,
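/// Parsed functions, each with the details collected while parsing it.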
pub functions: Vec<(Function, Details<'a>)>,
}
/// Additional details about a function parsed from a text string.
/// These are useful for detecting test commands embedded in comments etc.
/// The details do not affect the semantics of the function.
#[derive(Debug)]
pub struct Details<'a> {
pub location: Location,
pub comments: Vec<Comment<'a>>,
pub map: SourceMap,
}
/// A comment in a parsed function.
///
/// The comment belongs to the immediately preceding entity, whether that is an EBB header, an
/// instruction, or one of the preamble declarations.
///
/// Comments appearing inside the function but before the preamble, as well as comments appearing
/// after the function are tagged as `AnyEntity::Function`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Comment<'a> {
pub entity: AnyEntity,
pub text: &'a str,
}