moved crates in lib/ to src/, renamed crates, modified some files' text (#660)
moved crates in lib/ to src/, renamed crates, modified some files' text (#660)
This commit is contained in:
18
cranelift/reader/Cargo.toml
Normal file
18
cranelift/reader/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[package]
|
||||
authors = ["The Cranelift Project Developers"]
|
||||
name = "cranelift-reader"
|
||||
version = "0.28.0"
|
||||
description = "Cranelift textual IR reader"
|
||||
license = "Apache-2.0 WITH LLVM-exception"
|
||||
documentation = "https://cranelift.readthedocs.io/"
|
||||
repository = "https://github.com/CraneStation/cranelift"
|
||||
readme = "README.md"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
cranelift-codegen = { path = "../cranelift-codegen", version = "0.28.0" }
|
||||
target-lexicon = "0.2.0"
|
||||
|
||||
[badges]
|
||||
maintenance = { status = "experimental" }
|
||||
travis-ci = { repository = "CraneStation/cranelift" }
|
||||
220
cranelift/reader/LICENSE
Normal file
220
cranelift/reader/LICENSE
Normal file
@@ -0,0 +1,220 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
|
||||
--- LLVM Exceptions to the Apache 2.0 License ----
|
||||
|
||||
As an exception, if, as a result of your compiling your source code, portions
|
||||
of this Software are embedded into an Object form of such source code, you
|
||||
may redistribute such embedded portions in such Object form without complying
|
||||
with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
|
||||
|
||||
In addition, if you combine or link compiled forms of this Software with
|
||||
software that is licensed under the GPLv2 ("Combined Software") and if a
|
||||
court of competent jurisdiction determines that the patent provision (Section
|
||||
3), the indemnity provision (Section 9) or other Section of the License
|
||||
conflicts with the conditions of the GPLv2, you may retroactively and
|
||||
prospectively choose to deem waived or otherwise exclude such Section(s) of
|
||||
the License, but only in their entirety and only with respect to the Combined
|
||||
Software.
|
||||
|
||||
3
cranelift/reader/README.md
Normal file
3
cranelift/reader/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
This library supports reading .clif files. This functionality is needed
for testing [Cranelift](https://crates.io/crates/cranelift), but is not essential
for a JIT compiler.
|
||||
52
cranelift/reader/src/error.rs
Normal file
52
cranelift/reader/src/error.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
//! Define the `Location`, `ParseError`, and `ParseResult` types.
|
||||
|
||||
#![macro_use]
|
||||
|
||||
use std::fmt;
|
||||
|
||||
/// The location of a `Token` or `Error`.
///
/// Currently only tracks a line number; there is no column information.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Location {
    /// Line number. Command-line arguments are line 0 and source file
    /// lines start from 1.
    pub line_number: usize,
}
|
||||
|
||||
/// A parse error is returned when the parse failed.
///
/// Carries both the position of the failure and a human-readable message;
/// see the `Display` impl for how the two are rendered together.
#[derive(Debug)]
pub struct ParseError {
    /// Location of the error.
    pub location: Location,
    /// Error message.
    pub message: String,
}
|
||||
|
||||
impl fmt::Display for ParseError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
if self.location.line_number == 0 {
|
||||
write!(f, "command-line arguments: {}", self.message)
|
||||
} else {
|
||||
write!(f, "{}: {}", self.location.line_number, self.message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Result of a parser operation. The `ParseError` variant includes a location.
pub type ParseResult<T> = Result<T, ParseError>;

// Create an `Err` variant of `ParseResult<X>` from a location and `format!` args.
macro_rules! err {
    // Plain message form: anything with a `to_string()` method.
    ( $loc:expr, $msg:expr ) => {
        Err($crate::ParseError {
            location: $loc.clone(),
            message: $msg.to_string(),
        })
    };

    // Format-string form: forwards the remaining arguments to `format!`.
    ( $loc:expr, $fmt:expr, $( $arg:expr ),+ ) => {
        Err($crate::ParseError {
            location: $loc.clone(),
            message: format!( $fmt, $( $arg ),+ ),
        })
    };
}
|
||||
65
cranelift/reader/src/isaspec.rs
Normal file
65
cranelift/reader/src/isaspec.rs
Normal file
@@ -0,0 +1,65 @@
|
||||
//! Parsed representation of `set` and `isa` commands.
|
||||
//!
|
||||
//! A test case file can contain `set` commands that set ISA-independent settings, and it can
|
||||
//! contain `isa` commands that select an ISA and applies ISA-specific settings.
|
||||
//!
|
||||
//! If a test case file contains `isa` commands, the tests will only be run against the specified
|
||||
//! ISAs. If the file contains no `isa` commands, the tests will be run against all supported ISAs.
|
||||
|
||||
use crate::error::{Location, ParseResult};
|
||||
use crate::testcommand::TestOption;
|
||||
use cranelift_codegen::isa::TargetIsa;
|
||||
use cranelift_codegen::settings::{Configurable, Flags, SetError};
|
||||
|
||||
/// The ISA specifications in a `.clif` file.
pub enum IsaSpec {
    /// The parsed file does not contain any `isa` commands, but it may contain `set` commands
    /// which are reflected in the finished `Flags` object.
    None(Flags),

    /// The parsed file does contain `isa` commands.
    /// Each `isa` command is used to configure a `TargetIsa` trait object.
    Some(Vec<Box<TargetIsa>>),
}
|
||||
|
||||
impl IsaSpec {
|
||||
/// If the `IsaSpec` contains exactly 1 `TargetIsa` we return a reference to it
|
||||
pub fn unique_isa(&self) -> Option<&TargetIsa> {
|
||||
if let IsaSpec::Some(ref isa_vec) = *self {
|
||||
if isa_vec.len() == 1 {
|
||||
return Some(&*isa_vec[0]);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse an iterator of command line options and apply them to `config`.
///
/// Each option is turned into a `TestOption` and applied via the `Configurable`
/// trait. Any failure is converted into a `ParseError` anchored at `loc`.
pub fn parse_options<'a, I>(iter: I, config: &mut Configurable, loc: Location) -> ParseResult<()>
where
    I: Iterator<Item = &'a str>,
{
    for opt in iter.map(TestOption::new) {
        match opt {
            // A bare flag enables a boolean setting.
            TestOption::Flag(name) => match config.enable(name) {
                Ok(_) => {}
                Err(SetError::BadName(name)) => return err!(loc, "unknown flag '{}'", name),
                Err(_) => return err!(loc, "not a boolean flag: '{}'", opt),
            },
            // An option carrying a value sets a named setting.
            TestOption::Value(name, value) => match config.set(name, value) {
                Ok(_) => {}
                Err(SetError::BadName(name)) => return err!(loc, "unknown setting '{}'", name),
                Err(SetError::BadType) => return err!(loc, "invalid setting type: '{}'", opt),
                Err(SetError::BadValue(expected)) => {
                    return err!(
                        loc,
                        "invalid setting value for '{}', expected {}",
                        opt,
                        expected
                    );
                }
            },
        }
    }
    Ok(())
}
|
||||
646
cranelift/reader/src/lexer.rs
Normal file
646
cranelift/reader/src/lexer.rs
Normal file
@@ -0,0 +1,646 @@
|
||||
//! Lexical analysis for .clif files.
|
||||
|
||||
use crate::error::Location;
|
||||
use cranelift_codegen::ir::types;
|
||||
use cranelift_codegen::ir::{Ebb, Value};
|
||||
#[allow(unused_imports, deprecated)]
|
||||
use std::ascii::AsciiExt;
|
||||
use std::str::CharIndices;
|
||||
use std::u16;
|
||||
|
||||
/// A Token returned from the `Lexer`.
///
/// Some variants may contain references to the original source text, so the `Token` has the same
/// lifetime as the source.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Token<'a> {
    Comment(&'a str),     // ';' to the end of the line
    LPar,                 // '('
    RPar,                 // ')'
    LBrace,               // '{'
    RBrace,               // '}'
    LBracket,             // '['
    RBracket,             // ']'
    Minus,                // '-'
    Plus,                 // '+'
    Comma,                // ','
    Dot,                  // '.'
    Colon,                // ':'
    Equal,                // '='
    Arrow,                // '->'
    Float(&'a str),       // Floating point immediate
    Integer(&'a str),     // Integer immediate
    Type(types::Type),    // i32, f32, b32x4, ...
    Value(Value),         // v12, v7
    Ebb(Ebb),             // ebb3
    StackSlot(u32),       // ss3
    GlobalValue(u32),     // gv3
    Heap(u32),            // heap2
    Table(u32),           // table2
    JumpTable(u32),       // jt2
    FuncRef(u32),         // fn2
    SigRef(u32),          // sig2
    UserRef(u32),         // u345
    Name(&'a str),        // %9arbitrary_alphanum, %x3, %0, %function ...
    HexSequence(&'a str), // #89AF
    Identifier(&'a str),  // Unrecognized identifier (opcode, enumerator, ...)
    SourceLoc(&'a str),   // @00c7
}
|
||||
|
||||
/// A `Token` with an associated location.
#[derive(Debug, PartialEq, Eq)]
pub struct LocatedToken<'a> {
    /// The token itself.
    pub token: Token<'a>,
    /// The source location where the token starts.
    pub location: Location,
}
|
||||
|
||||
/// Wrap up a `Token` with the given location.
|
||||
fn token(token: Token, loc: Location) -> Result<LocatedToken, LocatedError> {
|
||||
Ok(LocatedToken {
|
||||
token,
|
||||
location: loc,
|
||||
})
|
||||
}
|
||||
|
||||
/// An error from the lexical analysis.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LexError {
    /// A character that cannot begin any token was encountered.
    InvalidChar,
}
|
||||
|
||||
/// A `LexError` with an associated Location.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LocatedError {
    /// The error itself.
    pub error: LexError,
    /// The source location where the error occurred.
    pub location: Location,
}
|
||||
|
||||
/// Wrap up a `LexError` with the given location.
|
||||
fn error<'a>(error: LexError, loc: Location) -> Result<LocatedToken<'a>, LocatedError> {
|
||||
Err(LocatedError {
|
||||
error,
|
||||
location: loc,
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the number of decimal digits at the end of `s`.
///
/// Returns 0 when `s` is empty or does not end in an ASCII digit.
fn trailing_digits(s: &str) -> usize {
    // It's faster to iterate backwards over bytes, and we're only counting ASCII digits.
    // `u8::is_ascii_digit` expresses the `b'0'..=b'9'` range test idiomatically.
    s.as_bytes()
        .iter()
        .rev()
        .take_while(|b| b.is_ascii_digit())
        .count()
}
|
||||
|
||||
/// Pre-parse a supposed entity name by splitting it into two parts: A head of lowercase ASCII
|
||||
/// letters and numeric tail.
|
||||
pub fn split_entity_name(name: &str) -> Option<(&str, u32)> {
|
||||
let (head, tail) = name.split_at(name.len() - trailing_digits(name));
|
||||
if tail.len() > 1 && tail.starts_with('0') {
|
||||
None
|
||||
} else {
|
||||
tail.parse().ok().map(|n| (head, n))
|
||||
}
|
||||
}
|
||||
|
||||
/// Lexical analysis.
///
/// A `Lexer` reads text from a `&str` and provides a sequence of tokens.
///
/// Also keep track of a line number for error reporting.
///
pub struct Lexer<'a> {
    // Complete source being processed.
    source: &'a str,

    // Iterator into `source`.
    chars: CharIndices<'a>,

    // Next character to be processed, or `None` at the end.
    lookahead: Option<char>,

    // Index into `source` of lookahead character.
    pos: usize,

    // Current line number (source lines are 1-based; see `Location`).
    line_number: usize,
}
|
||||
|
||||
impl<'a> Lexer<'a> {
    /// Create a new `Lexer` over `s`, positioned at the first character.
    pub fn new(s: &'a str) -> Self {
        let mut lex = Self {
            source: s,
            chars: s.char_indices(),
            lookahead: None,
            pos: 0,
            line_number: 1,
        };
        // Advance to the first char.
        lex.next_ch();
        lex
    }

    // Advance to the next character.
    // Return the next lookahead character, or None when the end is encountered.
    // Always updates `lookahead` and `pos` to reflect the new position, and bumps
    // `line_number` when the previous lookahead was a newline.
    fn next_ch(&mut self) -> Option<char> {
        if self.lookahead == Some('\n') {
            self.line_number += 1;
        }
        match self.chars.next() {
            Some((idx, ch)) => {
                self.pos = idx;
                self.lookahead = Some(ch);
            }
            None => {
                // Past the end: `pos` points one past the last byte.
                self.pos = self.source.len();
                self.lookahead = None;
            }
        }
        self.lookahead
    }

    // Get the location corresponding to `lookahead`.
    fn loc(&self) -> Location {
        Location {
            line_number: self.line_number,
        }
    }

    // Starting from `lookahead`, are we looking at `prefix`?
    fn looking_at(&self, prefix: &str) -> bool {
        self.source[self.pos..].starts_with(prefix)
    }

    // Starting from `lookahead`, are we looking at a number?
    // Accepts a digit, a sign, a radix point, or the `NaN`/`Inf`/`sNaN` keywords.
    fn looking_at_numeric(&self) -> bool {
        if let Some(c) = self.lookahead {
            if c.is_digit(10) {
                return true;
            }
            match c {
                '-' => return true,
                '+' => return true,
                '.' => return true,
                _ => {}
            }
            if self.looking_at("NaN") || self.looking_at("Inf") || self.looking_at("sNaN") {
                return true;
            }
        }
        false
    }

    // Scan a single-char token.
    fn scan_char(&mut self, tok: Token<'a>) -> Result<LocatedToken<'a>, LocatedError> {
        assert_ne!(self.lookahead, None);
        let loc = self.loc();
        self.next_ch();
        token(tok, loc)
    }

    // Scan a multi-char token.
    fn scan_chars(
        &mut self,
        count: usize,
        tok: Token<'a>,
    ) -> Result<LocatedToken<'a>, LocatedError> {
        let loc = self.loc();
        for _ in 0..count {
            assert_ne!(self.lookahead, None);
            self.next_ch();
        }
        token(tok, loc)
    }

    /// Get the rest of the current line.
    /// The next token returned by `next()` will be from the following lines.
    pub fn rest_of_line(&mut self) -> &'a str {
        let begin = self.pos;
        loop {
            match self.next_ch() {
                None | Some('\n') => return &self.source[begin..self.pos],
                _ => {}
            }
        }
    }

    // Scan a comment extending to the end of the current line.
    fn scan_comment(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
        let loc = self.loc();
        let text = self.rest_of_line();
        token(Token::Comment(text), loc)
    }

    // Scan a number token which can represent either an integer or floating point number.
    //
    // Accept the following forms:
    //
    // - `10`: Integer
    // - `-10`: Integer
    // - `0xff_00`: Integer
    // - `0.0`: Float
    // - `0x1.f`: Float
    // - `-0x2.4`: Float
    // - `0x0.4p-34`: Float
    //
    // This function does not filter out all invalid numbers. It depends in the context-sensitive
    // decoding of the text for that. For example, the number of allowed digits in an `Ieee32` and
    // an `Ieee64` constant are different.
    fn scan_number(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
        let begin = self.pos;
        let loc = self.loc();
        let mut is_float = false;

        // Skip a leading sign.
        match self.lookahead {
            Some('-') => {
                self.next_ch();
                if !self.looking_at_numeric() {
                    // If the next characters won't parse as a number, we return Token::Minus
                    return token(Token::Minus, loc);
                }
            }
            Some('+') => {
                self.next_ch();
                if !self.looking_at_numeric() {
                    // If the next characters won't parse as a number, we return Token::Plus
                    return token(Token::Plus, loc);
                }
            }
            _ => {}
        }

        // Check for NaNs with payloads.
        if self.looking_at("NaN:") || self.looking_at("sNaN:") {
            // Skip the `NaN:` prefix, the loop below won't accept it.
            // We expect a hexadecimal number to follow the colon.
            while self.next_ch() != Some(':') {}
            is_float = true;
        } else if self.looking_at("NaN") || self.looking_at("Inf") {
            // This is Inf or a default quiet NaN.
            is_float = true;
        }

        // Look for the end of this number. Detect the radix point if there is one.
        loop {
            match self.next_ch() {
                Some('-') | Some('_') => {}
                Some('.') => is_float = true,
                Some(ch) if ch.is_alphanumeric() => {}
                _ => break,
            }
        }
        let text = &self.source[begin..self.pos];
        if is_float {
            token(Token::Float(text), loc)
        } else {
            token(Token::Integer(text), loc)
        }
    }

    // Scan a 'word', which is an identifier-like sequence of characters beginning with '_' or an
    // alphabetic char, followed by zero or more alphanumeric or '_' characters.
    fn scan_word(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
        let begin = self.pos;
        let loc = self.loc();

        assert!(self.lookahead == Some('_') || self.lookahead.unwrap().is_alphabetic());
        loop {
            match self.next_ch() {
                Some('_') => {}
                Some(ch) if ch.is_alphanumeric() => {}
                _ => break,
            }
        }
        let text = &self.source[begin..self.pos];

        // Look for numbered well-known entities like ebb15, v45, ...
        token(
            split_entity_name(text)
                .and_then(|(prefix, number)| {
                    Self::numbered_entity(prefix, number)
                        .or_else(|| Self::value_type(text, prefix, number))
                })
                .unwrap_or_else(|| match text {
                    "iflags" => Token::Type(types::IFLAGS),
                    "fflags" => Token::Type(types::FFLAGS),
                    _ => Token::Identifier(text),
                }),
            loc,
        )
    }

    // If prefix is a well-known entity prefix and suffix is a valid entity number, return the
    // decoded token.
    fn numbered_entity(prefix: &str, number: u32) -> Option<Token<'a>> {
        match prefix {
            "v" => Value::with_number(number).map(Token::Value),
            "ebb" => Ebb::with_number(number).map(Token::Ebb),
            "ss" => Some(Token::StackSlot(number)),
            "gv" => Some(Token::GlobalValue(number)),
            "heap" => Some(Token::Heap(number)),
            "table" => Some(Token::Table(number)),
            "jt" => Some(Token::JumpTable(number)),
            "fn" => Some(Token::FuncRef(number)),
            "sig" => Some(Token::SigRef(number)),
            "u" => Some(Token::UserRef(number)),
            _ => None,
        }
    }

    // Recognize a scalar or vector type.
    fn value_type(text: &str, prefix: &str, number: u32) -> Option<Token<'a>> {
        // A vector type like `i32x4` is split as prefix "i32x" and number 4.
        let is_vector = prefix.ends_with('x');
        let scalar = if is_vector {
            &prefix[0..prefix.len() - 1]
        } else {
            text
        };
        let base_type = match scalar {
            "i8" => types::I8,
            "i16" => types::I16,
            "i32" => types::I32,
            "i64" => types::I64,
            "f32" => types::F32,
            "f64" => types::F64,
            "b1" => types::B1,
            "b8" => types::B8,
            "b16" => types::B16,
            "b32" => types::B32,
            "b64" => types::B64,
            _ => return None,
        };
        if is_vector {
            // Lane counts must fit in a u16; see `Type::by`.
            if number <= u32::from(u16::MAX) {
                base_type.by(number as u16).map(Token::Type)
            } else {
                None
            }
        } else {
            Some(Token::Type(base_type))
        }
    }

    // Scan a '%'-prefixed name; the '%' itself is not part of the token text.
    fn scan_name(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
        let loc = self.loc();
        let begin = self.pos + 1;

        assert_eq!(self.lookahead, Some('%'));

        while let Some(c) = self.next_ch() {
            if !(c.is_ascii() && c.is_alphanumeric() || c == '_') {
                break;
            }
        }

        let end = self.pos;
        token(Token::Name(&self.source[begin..end]), loc)
    }

    // Scan a '#'-prefixed hexadecimal sequence; the '#' is not part of the token text.
    fn scan_hex_sequence(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
        let loc = self.loc();
        let begin = self.pos + 1;

        assert_eq!(self.lookahead, Some('#'));

        while let Some(c) = self.next_ch() {
            if !char::is_digit(c, 16) {
                break;
            }
        }

        let end = self.pos;
        token(Token::HexSequence(&self.source[begin..end]), loc)
    }

    // Scan an '@'-prefixed source location; the '@' is not part of the token text.
    fn scan_srcloc(&mut self) -> Result<LocatedToken<'a>, LocatedError> {
        let loc = self.loc();
        let begin = self.pos + 1;

        assert_eq!(self.lookahead, Some('@'));

        while let Some(c) = self.next_ch() {
            if !char::is_digit(c, 16) {
                break;
            }
        }

        let end = self.pos;
        token(Token::SourceLoc(&self.source[begin..end]), loc)
    }

    /// Get the next token or a lexical error.
    ///
    /// Return None when the end of the source is encountered.
    pub fn next(&mut self) -> Option<Result<LocatedToken<'a>, LocatedError>> {
        loop {
            let loc = self.loc();
            return match self.lookahead {
                None => None,
                Some(';') => Some(self.scan_comment()),
                Some('(') => Some(self.scan_char(Token::LPar)),
                Some(')') => Some(self.scan_char(Token::RPar)),
                Some('{') => Some(self.scan_char(Token::LBrace)),
                Some('}') => Some(self.scan_char(Token::RBrace)),
                Some('[') => Some(self.scan_char(Token::LBracket)),
                Some(']') => Some(self.scan_char(Token::RBracket)),
                Some(',') => Some(self.scan_char(Token::Comma)),
                Some('.') => Some(self.scan_char(Token::Dot)),
                Some(':') => Some(self.scan_char(Token::Colon)),
                Some('=') => Some(self.scan_char(Token::Equal)),
                Some('+') => Some(self.scan_number()),
                Some('-') => {
                    if self.looking_at("->") {
                        Some(self.scan_chars(2, Token::Arrow))
                    } else {
                        Some(self.scan_number())
                    }
                }
                Some(ch) if ch.is_digit(10) => Some(self.scan_number()),
                Some(ch) if ch.is_alphabetic() => Some(self.scan_word()),
                Some('%') => Some(self.scan_name()),
                Some('#') => Some(self.scan_hex_sequence()),
                Some('@') => Some(self.scan_srcloc()),
                Some(ch) if ch.is_whitespace() => {
                    // Whitespace produces no token; keep looping.
                    self.next_ch();
                    continue;
                }
                _ => {
                    // Skip invalid char, return error.
                    self.next_ch();
                    Some(error(LexError::InvalidChar, loc))
                }
            };
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::trailing_digits;
    use super::*;
    use crate::error::Location;
    use cranelift_codegen::ir::types;
    use cranelift_codegen::ir::{Ebb, Value};

    #[test]
    fn digits() {
        // Strings with no trailing decimal digits.
        assert_eq!(trailing_digits(""), 0);
        assert_eq!(trailing_digits("x"), 0);
        assert_eq!(trailing_digits("0x"), 0);
        // Only digits at the very end of the string are counted.
        assert_eq!(trailing_digits("x1"), 1);
        assert_eq!(trailing_digits("1x1"), 1);
        assert_eq!(trailing_digits("1x01"), 2);
    }

    #[test]
    fn entity_name() {
        // Names without a numeric suffix are rejected.
        assert_eq!(split_entity_name(""), None);
        assert_eq!(split_entity_name("x"), None);
        assert_eq!(split_entity_name("x+"), None);
        // The prefix keeps trailing sign characters; the suffix is the number.
        assert_eq!(split_entity_name("x+1"), Some(("x+", 1)));
        assert_eq!(split_entity_name("x-1"), Some(("x-", 1)));
        assert_eq!(split_entity_name("1"), Some(("", 1)));
        assert_eq!(split_entity_name("x1"), Some(("x", 1)));
        assert_eq!(split_entity_name("xy0"), Some(("xy", 0)));
        // Reject this non-canonical form.
        assert_eq!(split_entity_name("inst01"), None);
    }

    /// Build the expected `Some(Ok(...))` result of `Lexer::next` for `token`
    /// on the given 1-based `line`.
    fn token<'a>(token: Token<'a>, line: usize) -> Option<Result<LocatedToken<'a>, LocatedError>> {
        Some(super::token(token, Location { line_number: line }))
    }

    /// Build the expected `Some(Err(...))` result of `Lexer::next` for `error`
    /// on the given 1-based `line`.
    fn error<'a>(error: LexError, line: usize) -> Option<Result<LocatedToken<'a>, LocatedError>> {
        Some(super::error(error, Location { line_number: line }))
    }

    #[test]
    fn make_lexer() {
        // Whitespace-only sources produce no tokens at all.
        for &src in &["", " ", "\n "] {
            assert_eq!(Lexer::new(src).next(), None);
        }
    }

    #[test]
    fn lex_comment() {
        let mut lexer = Lexer::new("; hello");
        assert_eq!(lexer.next(), token(Token::Comment("; hello"), 1));
        assert_eq!(lexer.next(), None);

        // Comments keep their exact text and report the line they start on.
        lexer = Lexer::new("\n ;hello\n;foo");
        assert_eq!(lexer.next(), token(Token::Comment(";hello"), 2));
        assert_eq!(lexer.next(), token(Token::Comment(";foo"), 3));
        assert_eq!(lexer.next(), None);

        // Scan a comment after an invalid char.
        let mut lexer = Lexer::new("$; hello");
        assert_eq!(lexer.next(), error(LexError::InvalidChar, 1));
        assert_eq!(lexer.next(), token(Token::Comment("; hello"), 1));
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn lex_chars() {
        let mut lexer = Lexer::new("(); hello\n = :{, }.");
        assert_eq!(lexer.next(), token(Token::LPar, 1));
        assert_eq!(lexer.next(), token(Token::RPar, 1));
        assert_eq!(lexer.next(), token(Token::Comment("; hello"), 1));
        assert_eq!(lexer.next(), token(Token::Equal, 2));
        assert_eq!(lexer.next(), token(Token::Colon, 2));
        assert_eq!(lexer.next(), token(Token::LBrace, 2));
        assert_eq!(lexer.next(), token(Token::Comma, 2));
        assert_eq!(lexer.next(), token(Token::RBrace, 2));
        assert_eq!(lexer.next(), token(Token::Dot, 2));
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn lex_numbers() {
        let mut lexer = Lexer::new(" 0 2_000 -1,0xf -0x0 0.0 0x0.4p-34 +5");
        assert_eq!(lexer.next(), token(Token::Integer("0"), 1));
        assert_eq!(lexer.next(), token(Token::Integer("2_000"), 1));
        assert_eq!(lexer.next(), token(Token::Integer("-1"), 1));
        assert_eq!(lexer.next(), token(Token::Comma, 1));
        assert_eq!(lexer.next(), token(Token::Integer("0xf"), 1));
        assert_eq!(lexer.next(), token(Token::Integer("-0x0"), 1));
        assert_eq!(lexer.next(), token(Token::Float("0.0"), 1));
        assert_eq!(lexer.next(), token(Token::Float("0x0.4p-34"), 1));
        assert_eq!(lexer.next(), token(Token::Integer("+5"), 1));
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn lex_identifiers() {
        let mut lexer = Lexer::new(
            "v0 v00 vx01 ebb1234567890 ebb5234567890 v1x vx1 vxvx4 function0 function b1 i32x4 f32x5 iflags fflags iflagss",
        );
        // A canonical value reference lexes as a `Value` token...
        assert_eq!(
            lexer.next(),
            token(Token::Value(Value::with_number(0).unwrap()), 1)
        );
        // ...while near-misses stay plain identifiers.
        assert_eq!(lexer.next(), token(Token::Identifier("v00"), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("vx01"), 1));
        assert_eq!(
            lexer.next(),
            token(Token::Ebb(Ebb::with_number(1234567890).unwrap()), 1)
        );
        assert_eq!(lexer.next(), token(Token::Identifier("ebb5234567890"), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("v1x"), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("vx1"), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("vxvx4"), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("function0"), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("function"), 1));
        // Known type names lex as types; unknown shapes stay identifiers.
        assert_eq!(lexer.next(), token(Token::Type(types::B1), 1));
        assert_eq!(lexer.next(), token(Token::Type(types::I32X4), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("f32x5"), 1));
        assert_eq!(lexer.next(), token(Token::Type(types::IFLAGS), 1));
        assert_eq!(lexer.next(), token(Token::Type(types::FFLAGS), 1));
        assert_eq!(lexer.next(), token(Token::Identifier("iflagss"), 1));
        assert_eq!(lexer.next(), None);
    }

    #[test]
    fn lex_hex_sequences() {
        let mut lexer = Lexer::new("#0 #DEADbeef123 #789");

        assert_eq!(lexer.next(), token(Token::HexSequence("0"), 1));
        assert_eq!(lexer.next(), token(Token::HexSequence("DEADbeef123"), 1));
        assert_eq!(lexer.next(), token(Token::HexSequence("789"), 1));
    }

    #[test]
    fn lex_names() {
        let mut lexer = Lexer::new("%0 %x3 %function %123_abc %ss0 %v3 %ebb11 %_");

        assert_eq!(lexer.next(), token(Token::Name("0"), 1));
        assert_eq!(lexer.next(), token(Token::Name("x3"), 1));
        assert_eq!(lexer.next(), token(Token::Name("function"), 1));
        assert_eq!(lexer.next(), token(Token::Name("123_abc"), 1));
        assert_eq!(lexer.next(), token(Token::Name("ss0"), 1));
        assert_eq!(lexer.next(), token(Token::Name("v3"), 1));
        assert_eq!(lexer.next(), token(Token::Name("ebb11"), 1));
        assert_eq!(lexer.next(), token(Token::Name("_"), 1));
    }

    #[test]
    fn lex_userrefs() {
        let mut lexer = Lexer::new("u0 u1 u234567890 u9:8765");

        assert_eq!(lexer.next(), token(Token::UserRef(0), 1));
        assert_eq!(lexer.next(), token(Token::UserRef(1), 1));
        assert_eq!(lexer.next(), token(Token::UserRef(234567890), 1));
        assert_eq!(lexer.next(), token(Token::UserRef(9), 1));
        assert_eq!(lexer.next(), token(Token::Colon, 1));
        assert_eq!(lexer.next(), token(Token::Integer("8765"), 1));
        assert_eq!(lexer.next(), None);
    }
}
|
||||
42
cranelift/reader/src/lib.rs
Normal file
42
cranelift/reader/src/lib.rs
Normal file
@@ -0,0 +1,42 @@
|
||||
//! Cranelift file reader library.
|
||||
//!
|
||||
//! The `cranelift_reader` library supports reading .clif files. This functionality is needed for
|
||||
//! testing Cranelift, but is not essential for a JIT compiler.
|
||||
|
||||
#![deny(
|
||||
missing_docs,
|
||||
trivial_numeric_casts,
|
||||
unused_extern_crates,
|
||||
unstable_features
|
||||
)]
|
||||
#![warn(unused_import_braces)]
|
||||
#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))]
|
||||
#![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))]
|
||||
#![cfg_attr(
|
||||
feature = "cargo-clippy",
|
||||
warn(
|
||||
clippy::float_arithmetic,
|
||||
clippy::mut_mut,
|
||||
clippy::nonminimal_bool,
|
||||
clippy::option_map_unwrap_or,
|
||||
clippy::option_map_unwrap_or_else,
|
||||
clippy::print_stdout,
|
||||
clippy::unicode_not_nfc,
|
||||
clippy::use_self
|
||||
)
|
||||
)]
|
||||
|
||||
pub use crate::error::{Location, ParseError, ParseResult};
|
||||
pub use crate::isaspec::{parse_options, IsaSpec};
|
||||
pub use crate::parser::{parse_functions, parse_test};
|
||||
pub use crate::sourcemap::SourceMap;
|
||||
pub use crate::testcommand::{TestCommand, TestOption};
|
||||
pub use crate::testfile::{Comment, Details, TestFile};
|
||||
|
||||
mod error;
|
||||
mod isaspec;
|
||||
mod lexer;
|
||||
mod parser;
|
||||
mod sourcemap;
|
||||
mod testcommand;
|
||||
mod testfile;
|
||||
2987
cranelift/reader/src/parser.rs
Normal file
2987
cranelift/reader/src/parser.rs
Normal file
File diff suppressed because it is too large
Load Diff
240
cranelift/reader/src/sourcemap.rs
Normal file
240
cranelift/reader/src/sourcemap.rs
Normal file
@@ -0,0 +1,240 @@
|
||||
//! Source map associating entities with their source locations.
|
||||
//!
|
||||
//! When the parser reads in a source file, it records the locations of the
|
||||
//! definitions of entities like instructions, EBBs, and values.
|
||||
//!
|
||||
//! The `SourceMap` struct defined in this module makes this mapping available
|
||||
//! to parser clients.
|
||||
|
||||
use crate::error::{Location, ParseResult};
|
||||
use crate::lexer::split_entity_name;
|
||||
use cranelift_codegen::ir::entities::AnyEntity;
|
||||
use cranelift_codegen::ir::{
|
||||
Ebb, FuncRef, GlobalValue, Heap, JumpTable, SigRef, StackSlot, Table, Value,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Mapping from entity names to source locations.
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SourceMap {
|
||||
// Store locations for entities, including instructions.
|
||||
locations: HashMap<AnyEntity, Location>,
|
||||
}
|
||||
|
||||
/// Read-only interface which is exposed outside the parser crate.
|
||||
impl SourceMap {
|
||||
/// Look up a value entity.
|
||||
pub fn contains_value(&self, v: Value) -> bool {
|
||||
self.locations.contains_key(&v.into())
|
||||
}
|
||||
|
||||
/// Look up a EBB entity.
|
||||
pub fn contains_ebb(&self, ebb: Ebb) -> bool {
|
||||
self.locations.contains_key(&ebb.into())
|
||||
}
|
||||
|
||||
/// Look up a stack slot entity.
|
||||
pub fn contains_ss(&self, ss: StackSlot) -> bool {
|
||||
self.locations.contains_key(&ss.into())
|
||||
}
|
||||
|
||||
/// Look up a global value entity.
|
||||
pub fn contains_gv(&self, gv: GlobalValue) -> bool {
|
||||
self.locations.contains_key(&gv.into())
|
||||
}
|
||||
|
||||
/// Look up a heap entity.
|
||||
pub fn contains_heap(&self, heap: Heap) -> bool {
|
||||
self.locations.contains_key(&heap.into())
|
||||
}
|
||||
|
||||
/// Look up a table entity.
|
||||
pub fn contains_table(&self, table: Table) -> bool {
|
||||
self.locations.contains_key(&table.into())
|
||||
}
|
||||
|
||||
/// Look up a signature entity.
|
||||
pub fn contains_sig(&self, sig: SigRef) -> bool {
|
||||
self.locations.contains_key(&sig.into())
|
||||
}
|
||||
|
||||
/// Look up a function entity.
|
||||
pub fn contains_fn(&self, fn_: FuncRef) -> bool {
|
||||
self.locations.contains_key(&fn_.into())
|
||||
}
|
||||
|
||||
/// Look up a jump table entity.
|
||||
pub fn contains_jt(&self, jt: JumpTable) -> bool {
|
||||
self.locations.contains_key(&jt.into())
|
||||
}
|
||||
|
||||
/// Look up an entity by source name.
|
||||
/// Returns the entity reference corresponding to `name`, if it exists.
|
||||
pub fn lookup_str(&self, name: &str) -> Option<AnyEntity> {
|
||||
split_entity_name(name).and_then(|(ent, num)| match ent {
|
||||
"v" => Value::with_number(num).and_then(|v| {
|
||||
if !self.contains_value(v) {
|
||||
None
|
||||
} else {
|
||||
Some(v.into())
|
||||
}
|
||||
}),
|
||||
"ebb" => Ebb::with_number(num).and_then(|ebb| {
|
||||
if !self.contains_ebb(ebb) {
|
||||
None
|
||||
} else {
|
||||
Some(ebb.into())
|
||||
}
|
||||
}),
|
||||
"ss" => StackSlot::with_number(num).and_then(|ss| {
|
||||
if !self.contains_ss(ss) {
|
||||
None
|
||||
} else {
|
||||
Some(ss.into())
|
||||
}
|
||||
}),
|
||||
"gv" => GlobalValue::with_number(num).and_then(|gv| {
|
||||
if !self.contains_gv(gv) {
|
||||
None
|
||||
} else {
|
||||
Some(gv.into())
|
||||
}
|
||||
}),
|
||||
"heap" => Heap::with_number(num).and_then(|heap| {
|
||||
if !self.contains_heap(heap) {
|
||||
None
|
||||
} else {
|
||||
Some(heap.into())
|
||||
}
|
||||
}),
|
||||
"table" => Table::with_number(num).and_then(|table| {
|
||||
if !self.contains_table(table) {
|
||||
None
|
||||
} else {
|
||||
Some(table.into())
|
||||
}
|
||||
}),
|
||||
"sig" => SigRef::with_number(num).and_then(|sig| {
|
||||
if !self.contains_sig(sig) {
|
||||
None
|
||||
} else {
|
||||
Some(sig.into())
|
||||
}
|
||||
}),
|
||||
"fn" => FuncRef::with_number(num).and_then(|fn_| {
|
||||
if !self.contains_fn(fn_) {
|
||||
None
|
||||
} else {
|
||||
Some(fn_.into())
|
||||
}
|
||||
}),
|
||||
"jt" => JumpTable::with_number(num).and_then(|jt| {
|
||||
if !self.contains_jt(jt) {
|
||||
None
|
||||
} else {
|
||||
Some(jt.into())
|
||||
}
|
||||
}),
|
||||
_ => None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the source location where an entity was defined.
|
||||
pub fn location(&self, entity: AnyEntity) -> Option<Location> {
|
||||
self.locations.get(&entity).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceMap {
|
||||
/// Create a new empty `SourceMap`.
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
locations: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Define the value `entity`.
|
||||
pub fn def_value(&mut self, entity: Value, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the ebb `entity`.
|
||||
pub fn def_ebb(&mut self, entity: Ebb, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the stack slot `entity`.
|
||||
pub fn def_ss(&mut self, entity: StackSlot, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the global value `entity`.
|
||||
pub fn def_gv(&mut self, entity: GlobalValue, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the heap `entity`.
|
||||
pub fn def_heap(&mut self, entity: Heap, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the table `entity`.
|
||||
pub fn def_table(&mut self, entity: Table, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the signature `entity`.
|
||||
pub fn def_sig(&mut self, entity: SigRef, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the external function `entity`.
|
||||
pub fn def_fn(&mut self, entity: FuncRef, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define the jump table `entity`.
|
||||
pub fn def_jt(&mut self, entity: JumpTable, loc: Location) -> ParseResult<()> {
|
||||
self.def_entity(entity.into(), loc)
|
||||
}
|
||||
|
||||
/// Define an entity. This can be used for instructions whose numbers never
|
||||
/// appear in source, or implicitly defined signatures.
|
||||
pub fn def_entity(&mut self, entity: AnyEntity, loc: Location) -> ParseResult<()> {
|
||||
if self.locations.insert(entity, loc).is_some() {
|
||||
err!(loc, "duplicate entity: {}", entity)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::parse_test;
|
||||
|
|
||||
// Parse a small function and verify that the resulting source map
// resolves exactly the entities defined in the text (and nothing else).
#[test]
|
||||
fn details() {
|
||||
let tf = parse_test(
|
||||
"function %detail() {
|
||||
ss10 = incoming_arg 13
|
||||
jt10 = jump_table [ebb0]
|
||||
ebb0(v4: i32, v7: i32):
|
||||
v10 = iadd v4, v7
|
||||
}",
|
||||
None,
|
||||
None,
|
||||
)
|
||||
.unwrap();
|
||||
let map = &tf.functions[0].1.map;
|
||||
|
|
||||
// Names that were never defined in the function must not resolve;
// defined entities round-trip through lookup_str back to their name.
assert_eq!(map.lookup_str("v0"), None);
|
||||
assert_eq!(map.lookup_str("ss1"), None);
|
||||
assert_eq!(map.lookup_str("ss10").unwrap().to_string(), "ss10");
|
||||
assert_eq!(map.lookup_str("jt10").unwrap().to_string(), "jt10");
|
||||
assert_eq!(map.lookup_str("ebb0").unwrap().to_string(), "ebb0");
|
||||
assert_eq!(map.lookup_str("v4").unwrap().to_string(), "v4");
|
||||
assert_eq!(map.lookup_str("v7").unwrap().to_string(), "v7");
|
||||
assert_eq!(map.lookup_str("v10").unwrap().to_string(), "v10");
|
||||
}
|
||||
}
|
||||
103
cranelift/reader/src/testcommand.rs
Normal file
103
cranelift/reader/src/testcommand.rs
Normal file
@@ -0,0 +1,103 @@
|
||||
//! Test commands.
|
||||
//!
|
||||
//! A `.clif` file can begin with one or more *test commands* which specify what is to be tested.
|
||||
//! The general syntax is:
|
||||
//!
|
||||
//! <pre>
|
||||
//! test <i>&lt;command&gt;</i> <i>[options]</i>...
|
||||
//! </pre>
|
||||
//!
|
||||
//! The options are either a single identifier flag, or setting values like `identifier=value`.
|
||||
//!
|
||||
//! The parser does not understand the test commands or which options are valid. It simply parses
|
||||
//! the general format into a `TestCommand` data structure.
|
||||
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
|
||||
/// A command appearing in a test file.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct TestCommand<'a> {
|
||||
/// The command name as a string.
|
||||
pub command: &'a str,
|
||||
/// The options following the command name.
|
||||
pub options: Vec<TestOption<'a>>,
|
||||
}
|
||||
|
||||
/// An option on a test command.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum TestOption<'a> {
    /// A bare identifier flag: `foo`.
    Flag(&'a str),
    /// A value assigned to an identifier: `foo=bar`.
    Value(&'a str, &'a str),
}
|
||||
|
||||
impl<'a> TestCommand<'a> {
|
||||
/// Create a new TestCommand by parsing `s`.
|
||||
/// The returned command contains references into `s`.
|
||||
pub fn new(s: &'a str) -> Self {
|
||||
let mut parts = s.split_whitespace();
|
||||
let cmd = parts.next().unwrap_or("");
|
||||
Self {
|
||||
command: cmd,
|
||||
options: parts
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(TestOption::new)
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Display for TestCommand<'a> {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self.command)?;
|
||||
for opt in &self.options {
|
||||
write!(f, " {}", opt)?;
|
||||
}
|
||||
writeln!(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> TestOption<'a> {
|
||||
/// Create a new TestOption by parsing `s`.
|
||||
/// The returned option contains references into `s`.
|
||||
pub fn new(s: &'a str) -> Self {
|
||||
match s.find('=') {
|
||||
None => TestOption::Flag(s),
|
||||
Some(p) => TestOption::Value(&s[0..p], &s[p + 1..]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Display for TestOption<'a> {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
TestOption::Flag(s) => write!(f, "{}", s),
|
||||
TestOption::Value(s, v) => write!(f, "{}={}", s, v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_option() {
        // An empty string parses as a flag with an empty name.
        assert_eq!(TestOption::new(""), TestOption::Flag(""));
        assert_eq!(TestOption::new("foo"), TestOption::Flag("foo"));
        assert_eq!(TestOption::new("foo=bar"), TestOption::Value("foo", "bar"));
    }

    #[test]
    fn parse_command() {
        // Parsing then displaying normalizes whitespace and appends a newline.
        let cases = [
            ("", "\n"),
            ("cat", "cat\n"),
            ("cat ", "cat\n"),
            ("cat 1 ", "cat 1\n"),
            ("cat one=4 two t", "cat one=4 two t\n"),
        ];
        for &(input, expected) in &cases {
            assert_eq!(TestCommand::new(input).to_string(), expected);
        }
    }
}
|
||||
57
cranelift/reader/src/testfile.rs
Normal file
57
cranelift/reader/src/testfile.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
//! Data structures representing a parsed test file.
|
||||
//!
|
||||
//! A test file is a `.clif` file which contains test commands and settings for running a
|
||||
//! file-based test case.
|
||||
//!
|
||||
|
||||
use crate::error::Location;
|
||||
use crate::isaspec::IsaSpec;
|
||||
use crate::sourcemap::SourceMap;
|
||||
use crate::testcommand::TestCommand;
|
||||
use cranelift_codegen::ir::entities::AnyEntity;
|
||||
use cranelift_codegen::ir::Function;
|
||||
|
||||
/// A parsed test case.
|
||||
///
|
||||
/// This is the result of parsing a `.clif` file which contains a number of test commands and ISA
|
||||
/// specs followed by the functions that should be tested.
|
||||
pub struct TestFile<'a> {
|
||||
/// `test foo ...` lines.
|
||||
pub commands: Vec<TestCommand<'a>>,
|
||||
/// `isa bar ...` lines.
|
||||
pub isa_spec: IsaSpec,
|
||||
/// Comments appearing before the first function.
|
||||
/// These are all tagged as 'Function' scope for lack of a better entity.
|
||||
pub preamble_comments: Vec<Comment<'a>>,
|
||||
/// Parsed functions and additional details about each function.
|
||||
pub functions: Vec<(Function, Details<'a>)>,
|
||||
}
|
||||
|
||||
/// Additional details about a function parsed from a text string.
|
||||
/// These are useful for detecting test commands embedded in comments etc.
|
||||
/// The details to not affect the semantics of the function.
|
||||
#[derive(Debug)]
|
||||
pub struct Details<'a> {
|
||||
/// Location of the `function` keyword that begins this function.
|
||||
pub location: Location,
|
||||
/// Annotation comments that appeared inside or after the function.
|
||||
pub comments: Vec<Comment<'a>>,
|
||||
/// Mapping of entity numbers to source locations.
|
||||
pub map: SourceMap,
|
||||
}
|
||||
|
||||
/// A comment in a parsed function.
|
||||
///
|
||||
/// The comment belongs to the immediately preceding entity, whether that is an EBB header, and
|
||||
/// instruction, or one of the preamble declarations.
|
||||
///
|
||||
/// Comments appearing inside the function but before the preamble, as well as comments appearing
|
||||
/// after the function are tagged as `AnyEntity::Function`.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct Comment<'a> {
|
||||
/// The entity this comment is attached to.
|
||||
/// Comments always follow their entity.
|
||||
pub entity: AnyEntity,
|
||||
/// Text of the comment, including the leading `;`.
|
||||
pub text: &'a str,
|
||||
}
|
||||
Reference in New Issue
Block a user