Update rustfmt to 0.9.0.

Dan Gohman
2017-08-31 10:44:59 -07:00
parent 46fb64cbb4
commit 2efdc0ed37
111 changed files with 4692 additions and 3379 deletions
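
Almost all of the churn below is mechanical restyling rather than a functional change: as the hunks show, the new rustfmt output replaces visually aligned continuation lines with block indentation, moves `where` onto its own line with the bounds indented beneath it, and adds trailing commas to multi-line argument lists. A minimal before/after sketch of that style shift, using a made-up helper (`find_prefixed` is not code from this repository):

// Roughly how the previous rustfmt laid out a long signature (visual alignment):
//
//     fn find_prefixed<'a, I>(mut iter: I,
//                             key: &str)
//                             -> Option<&'a str>
//         where I: Iterator<Item = &'a str>
//     {
//         iter.find(|s| s.starts_with(key))
//     }

// The same item as formatted after this update (block indentation,
// standalone `where`, trailing commas):
fn find_prefixed<'a, I>(
    mut iter: I,
    key: &str,
) -> Option<&'a str>
where
    I: Iterator<Item = &'a str>,
{
    // The body is untouched; only line breaking and indentation change.
    iter.find(|s| s.starts_with(key))
}

In the hunks shown here, the code is identical before and after; only the way long signatures, calls, and struct literals are broken across lines differs.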

@@ -36,7 +36,8 @@ impl IsaSpec {
 /// Parse an iterator of command line options and apply them to `config`.
 pub fn parse_options<'a, I>(iter: I, config: &mut Configurable, loc: &Location) -> Result<()>
-    where I: Iterator<Item = &'a str>
+where
+    I: Iterator<Item = &'a str>,
 {
     for opt in iter.map(TestOption::new) {
         match opt {

@@ -57,9 +57,9 @@ pub struct LocatedToken<'a> {
 /// Wrap up a `Token` with the given location.
 fn token<'a>(token: Token<'a>, loc: Location) -> Result<LocatedToken<'a>, LocatedError> {
     Ok(LocatedToken {
-           token,
-           location: loc,
-       })
+        token,
+        location: loc,
+    })
 }

 /// An error from the lexical analysis.
@@ -78,9 +78,9 @@ pub struct LocatedError {
 /// Wrap up an `Error` with the given location.
 fn error<'a>(error: Error, loc: Location) -> Result<LocatedToken<'a>, LocatedError> {
     Err(LocatedError {
-           error,
-           location: loc,
-       })
+        error,
+        location: loc,
+    })
 }

 /// Get the number of decimal digits at the end of `s`.
@@ -180,10 +180,11 @@ impl<'a> Lexer<'a> {
     }

     // Scan a multi-char token.
-    fn scan_chars(&mut self,
-                  count: usize,
-                  tok: Token<'a>)
-                  -> Result<LocatedToken<'a>, LocatedError> {
+    fn scan_chars(
+        &mut self,
+        count: usize,
+        tok: Token<'a>,
+    ) -> Result<LocatedToken<'a>, LocatedError> {
         let loc = self.loc();
         for _ in 0..count {
             assert_ne!(self.lookahead, None);
@@ -294,13 +295,16 @@ impl<'a> Lexer<'a> {
         let text = &self.source[begin..self.pos];

         // Look for numbered well-known entities like ebb15, v45, ...
-        token(split_entity_name(text)
-                  .and_then(|(prefix, number)| {
-                      Self::numbered_entity(prefix, number)
-                          .or_else(|| Self::value_type(text, prefix, number))
-                  })
-                  .unwrap_or(Token::Identifier(text)),
-              loc)
+        token(
+            split_entity_name(text)
+                .and_then(|(prefix, number)| {
+                    Self::numbered_entity(prefix, number).or_else(|| {
+                        Self::value_type(text, prefix, number)
+                    })
+                })
+                .unwrap_or(Token::Identifier(text)),
+            loc,
+        )
     }

     // If prefix is a well-known entity prefix and suffix is a valid entity number, return the
@@ -391,40 +395,40 @@ impl<'a> Lexer<'a> {
         loop {
             let loc = self.loc();
             return match self.lookahead {
-                       None => None,
-                       Some(';') => Some(self.scan_comment()),
-                       Some('(') => Some(self.scan_char(Token::LPar)),
-                       Some(')') => Some(self.scan_char(Token::RPar)),
-                       Some('{') => Some(self.scan_char(Token::LBrace)),
-                       Some('}') => Some(self.scan_char(Token::RBrace)),
-                       Some('[') => Some(self.scan_char(Token::LBracket)),
-                       Some(']') => Some(self.scan_char(Token::RBracket)),
-                       Some(',') => Some(self.scan_char(Token::Comma)),
-                       Some('.') => Some(self.scan_char(Token::Dot)),
-                       Some(':') => Some(self.scan_char(Token::Colon)),
-                       Some('=') => Some(self.scan_char(Token::Equal)),
-                       Some('+') => Some(self.scan_number()),
-                       Some('-') => {
-                           if self.looking_at("->") {
-                               Some(self.scan_chars(2, Token::Arrow))
-                           } else {
-                               Some(self.scan_number())
-                           }
-                       }
-                       Some(ch) if ch.is_digit(10) => Some(self.scan_number()),
-                       Some(ch) if ch.is_alphabetic() => Some(self.scan_word()),
-                       Some('%') => Some(self.scan_name()),
-                       Some('#') => Some(self.scan_hex_sequence()),
-                       Some(ch) if ch.is_whitespace() => {
-                           self.next_ch();
-                           continue;
-                       }
-                       _ => {
-                           // Skip invalid char, return error.
-                           self.next_ch();
-                           Some(error(Error::InvalidChar, loc))
-                       }
-                   };
+                None => None,
+                Some(';') => Some(self.scan_comment()),
+                Some('(') => Some(self.scan_char(Token::LPar)),
+                Some(')') => Some(self.scan_char(Token::RPar)),
+                Some('{') => Some(self.scan_char(Token::LBrace)),
+                Some('}') => Some(self.scan_char(Token::RBrace)),
+                Some('[') => Some(self.scan_char(Token::LBracket)),
+                Some(']') => Some(self.scan_char(Token::RBracket)),
+                Some(',') => Some(self.scan_char(Token::Comma)),
+                Some('.') => Some(self.scan_char(Token::Dot)),
+                Some(':') => Some(self.scan_char(Token::Colon)),
+                Some('=') => Some(self.scan_char(Token::Equal)),
+                Some('+') => Some(self.scan_number()),
+                Some('-') => {
+                    if self.looking_at("->") {
+                        Some(self.scan_chars(2, Token::Arrow))
+                    } else {
+                        Some(self.scan_number())
+                    }
+                }
+                Some(ch) if ch.is_digit(10) => Some(self.scan_number()),
+                Some(ch) if ch.is_alphabetic() => Some(self.scan_word()),
+                Some('%') => Some(self.scan_name()),
+                Some('#') => Some(self.scan_hex_sequence()),
+                Some(ch) if ch.is_whitespace() => {
+                    self.next_ch();
+                    continue;
+                }
+                _ => {
+                    // Skip invalid char, return error.
+                    self.next_ch();
+                    Some(error(Error::InvalidChar, loc))
+                }
+            };
         }
     }
 }
@@ -530,14 +534,20 @@ mod tests {

     #[test]
     fn lex_identifiers() {
-        let mut lex = Lexer::new("v0 v00 vx01 ebb1234567890 ebb5234567890 v1x vx1 vxvx4 \
-                                  function0 function b1 i32x4 f32x5");
-        assert_eq!(lex.next(),
-                   token(Token::Value(Value::with_number(0).unwrap()), 1));
+        let mut lex = Lexer::new(
+            "v0 v00 vx01 ebb1234567890 ebb5234567890 v1x vx1 vxvx4 \
+             function0 function b1 i32x4 f32x5",
+        );
+        assert_eq!(
+            lex.next(),
+            token(Token::Value(Value::with_number(0).unwrap()), 1)
+        );
         assert_eq!(lex.next(), token(Token::Identifier("v00"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("vx01"), 1));
-        assert_eq!(lex.next(),
-                   token(Token::Ebb(Ebb::with_number(1234567890).unwrap()), 1));
+        assert_eq!(
+            lex.next(),
+            token(Token::Ebb(Ebb::with_number(1234567890).unwrap()), 1)
+        );
         assert_eq!(lex.next(), token(Token::Identifier("ebb5234567890"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("v1x"), 1));
         assert_eq!(lex.next(), token(Token::Identifier("vx1"), 1));

File diff suppressed because it is too large.

@@ -76,24 +76,24 @@ impl SourceMap {
     /// Returns the entity reference corresponding to `name`, if it exists.
     pub fn lookup_str(&self, name: &str) -> Option<AnyEntity> {
         split_entity_name(name).and_then(|(ent, num)| match ent {
-                                             "v" => {
-                                                 Value::with_number(num)
-                                                     .and_then(|v| self.get_value(v))
-                                                     .map(AnyEntity::Value)
-                                             }
-                                             "ebb" => {
-                                                 Ebb::with_number(num)
-                                                     .and_then(|e| self.get_ebb(e))
-                                                     .map(AnyEntity::Ebb)
-                                             }
-                                             "ss" => self.get_ss(num).map(AnyEntity::StackSlot),
-                                             "gv" => self.get_gv(num).map(AnyEntity::GlobalVar),
-                                             "heap" => self.get_heap(num).map(AnyEntity::Heap),
-                                             "sig" => self.get_sig(num).map(AnyEntity::SigRef),
-                                             "fn" => self.get_fn(num).map(AnyEntity::FuncRef),
-                                             "jt" => self.get_jt(num).map(AnyEntity::JumpTable),
-                                             _ => None,
-                                         })
+            "v" => {
+                Value::with_number(num)
+                    .and_then(|v| self.get_value(v))
+                    .map(AnyEntity::Value)
+            }
+            "ebb" => {
+                Ebb::with_number(num).and_then(|e| self.get_ebb(e)).map(
+                    AnyEntity::Ebb,
+                )
+            }
+            "ss" => self.get_ss(num).map(AnyEntity::StackSlot),
+            "gv" => self.get_gv(num).map(AnyEntity::GlobalVar),
+            "heap" => self.get_heap(num).map(AnyEntity::Heap),
+            "sig" => self.get_sig(num).map(AnyEntity::SigRef),
+            "fn" => self.get_fn(num).map(AnyEntity::FuncRef),
+            "jt" => self.get_jt(num).map(AnyEntity::JumpTable),
+            _ => None,
+        })
     }

     /// Get the source location where an entity was defined.
@@ -110,9 +110,11 @@ impl SourceMap {
                 Ok(())
             }
             None => {
-                err!(self.location(loc).unwrap_or_default(),
-                     "undefined reference: {}",
-                     ebb)
+                err!(
+                    self.location(loc).unwrap_or_default(),
+                    "undefined reference: {}",
+                    ebb
+                )
             }
         }
     }
@@ -125,9 +127,11 @@ impl SourceMap {
                 Ok(())
             }
             None => {
-                err!(self.location(loc).unwrap_or_default(),
-                     "undefined reference: {}",
-                     val)
+                err!(
+                    self.location(loc).unwrap_or_default(),
+                    "undefined reference: {}",
+                    val
+                )
             }
         }
     }
@@ -148,9 +152,11 @@ impl SourceMap {
                 Ok(())
             }
             None => {
-                err!(self.location(loc).unwrap_or_default(),
-                     "undefined reference: {}",
-                     gv)
+                err!(
+                    self.location(loc).unwrap_or_default(),
+                    "undefined reference: {}",
+                    gv
+                )
             }
         }
     }
@@ -272,13 +278,14 @@ mod tests {

     #[test]
     fn details() {
-        let tf = parse_test("function %detail() {
+        let tf = parse_test(
+            "function %detail() {
                                ss10 = incoming_arg 13
                                jt10 = jump_table ebb0
                              ebb0(v4: i32, v7: i32):
                                  v10 = iadd v4, v7
-                             }")
-                .unwrap();
+                             }",
+        ).unwrap();

         let map = &tf.functions[0].1.map;
         assert_eq!(map.lookup_str("v0"), None);

@@ -95,7 +95,9 @@ mod tests {
         assert_eq!(&TestCommand::new("cat").to_string(), "cat\n");
         assert_eq!(&TestCommand::new("cat ").to_string(), "cat\n");
         assert_eq!(&TestCommand::new("cat 1 ").to_string(), "cat 1\n");
-        assert_eq!(&TestCommand::new("cat one=4 two t").to_string(),
-                   "cat one=4 two t\n");
+        assert_eq!(
+            &TestCommand::new("cat one=4 two t").to_string(),
+            "cat one=4 two t\n"
+        );
     }
}