Upgrade to rustfmt 0.8.0.

Lots of changes this time.

Worked around what looks like a rustfmt bug in parse_inst_operands where
a large match was nested inside Ok().
Jakob Stoklund Olesen
2017-03-14 10:48:05 -07:00
parent 477fb4d5da
commit 010861d58e
37 changed files with 462 additions and 377 deletions
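For reference, the parse_inst_operands workaround is simply hoisting the big match out of the Ok() call: bind the match result to a local, then return Ok(local). A minimal sketch of that shape, not the actual parser code; the Kind and Data enums here are made-up stand-ins for the real Opcode/InstructionData types:

    // Hypothetical stand-ins for the parser's real Opcode/InstructionData types.
    enum Kind {
        Nullary,
        Jump,
    }

    enum Data {
        Nullary,
        Jump,
    }

    // The workaround pattern: bind the large match to a local first, then wrap
    // it in Ok(), instead of nesting the whole match inside Ok(...).
    fn parse(kind: Kind) -> Result<Data, String> {
        let data = match kind {
            Kind::Nullary => Data::Nullary,
            Kind::Jump => Data::Jump,
        };
        Ok(data)
    }

    fn main() {
        assert!(matches!(parse(Kind::Jump), Ok(Data::Jump)));
    }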

View File

@@ -51,11 +51,7 @@ pub type CommandResult = Result<(), String>;
 fn cton_util() -> CommandResult {
     // Parse comand line arguments.
     let args: Args = Docopt::new(USAGE)
-        .and_then(|d| {
-            d.help(true)
-                .version(Some(format!("Cretonne {}", VERSION)))
-                .decode()
-        })
+        .and_then(|d| d.help(true).version(Some(format!("Cretonne {}", VERSION))).decode())
         .unwrap_or_else(|e| e.exit());

     // Find the sub-command to execute.

View File

@@ -89,7 +89,10 @@ impl SubTest for TestDomtree {
         // Now we know that everything in `expected` is consistent with `domtree`.
         // All other EBB's should be either unreachable or the entry block.
-        for ebb in func.layout.ebbs().skip(1).filter(|ebb| !expected.contains_key(&ebb)) {
+        for ebb in func.layout
+            .ebbs()
+            .skip(1)
+            .filter(|ebb| !expected.contains_key(&ebb)) {
             if let Some(got_inst) = domtree.idom(ebb) {
                 return Err(format!("mismatching idoms for renumbered {}:\n\
                                     want: unrechable, got: {}",

View File

@@ -303,10 +303,10 @@ impl TestRunner {
             return;
         }
-        for t in self.tests
-            .iter()
-            .filter(|entry| match **entry {
-                QueueEntry { state: State::Done(Ok(dur)), .. } => dur > cut,
-                _ => false,
-            }) {
+        for t in self.tests.iter().filter(|entry| match **entry {
+            QueueEntry { state: State::Done(Ok(dur)), .. } => {
+                dur > cut
+            }
+            _ => false,
+        }) {
             println!("slow: {}", t)

View File

@@ -25,7 +25,10 @@ pub fn run(path: &Path) -> TestResult {
     }

     // Parse the test commands.
-    let mut tests = testfile.commands.iter().map(new_subtest).collect::<Result<Vec<_>>>()?;
+    let mut tests = testfile.commands
+        .iter()
+        .map(new_subtest)
+        .collect::<Result<Vec<_>>>()?;

     // Flags to use for those tests that don't need an ISA.
     // This is the cumulative effect of all the `set` commands in the file.

View File

@@ -66,7 +66,10 @@ pub trait SubTest {
 /// match 'inst10'.
 impl<'a> filecheck::VariableMap for Context<'a> {
     fn lookup(&self, varname: &str) -> Option<FCValue> {
-        self.details.map.lookup_str(varname).map(|e| FCValue::Regex(format!(r"\b{}\b", e).into()))
+        self.details
+            .map
+            .lookup_str(varname)
+            .map(|e| FCValue::Regex(format!(r"\b{}\b", e).into()))
     }
 }
@@ -77,8 +80,7 @@ pub fn run_filecheck(text: &str, context: &Context) -> Result<()> {
         Ok(())
     } else {
         // Filecheck mismatch. Emit an explanation as output.
-        let (_, explain) = checker.explain(&text, context)
-            .map_err(|e| format!("explain: {}", e))?;
+        let (_, explain) = checker.explain(&text, context).map_err(|e| format!("explain: {}", e))?;
         Err(format!("filecheck failed:\n{}{}", checker, explain))
     }
 }

View File

@@ -21,8 +21,7 @@ pub fn run(files: Vec<String>, verbose: bool) -> CommandResult {
     io::stdin().read_to_string(&mut buffer).map_err(|e| format!("stdin: {}", e))?;
     if verbose {
-        let (success, explain) = checker.explain(&buffer, NO_VARIABLES)
-            .map_err(|e| e.to_string())?;
+        let (success, explain) = checker.explain(&buffer, NO_VARIABLES).map_err(|e| e.to_string())?;
         print!("{}", explain);
         if success {
             println!("OK");

View File

@@ -30,7 +30,7 @@ function banner() {
 # rustfmt is installed.
 #
 # This version should always be bumped to the newest version available.
-RUSTFMT_VERSION="0.7.1"
+RUSTFMT_VERSION="0.8.0"
 if cargo install --list | grep -q "^rustfmt v$RUSTFMT_VERSION"; then
     banner "Rust formatting"

View File

@@ -9,9 +9,7 @@ use self::cretonne::entity_map::EntityMap;
 fn test_reverse_postorder_traversal(function_source: &str, ebb_order: Vec<u32>) {
     let func = &parse_functions(function_source).unwrap()[0];
     let cfg = ControlFlowGraph::with_function(&func);
-    let ebbs = ebb_order.iter()
-        .map(|n| Ebb::with_number(*n).unwrap())
-        .collect::<Vec<Ebb>>();
+    let ebbs = ebb_order.iter().map(|n| Ebb::with_number(*n).unwrap()).collect::<Vec<Ebb>>();
     let mut postorder_ebbs = cfg.postorder_ebbs();
     let mut postorder_map = EntityMap::with_capacity(postorder_ebbs.len());

View File

@@ -26,7 +26,8 @@ fn main() {
     // Make sure we rebuild is this build script changes.
     // I guess that won't happen if you have non-UTF8 bytes in your path names.
     // The `build.py` script prints out its own dependencies.
-    println!("cargo:rerun-if-changed={}", crate_dir.join("build.rs").to_string_lossy());
+    println!("cargo:rerun-if-changed={}",
+             crate_dir.join("build.rs").to_string_lossy());

     // Scripts are in `$crate_dir/meta`.
     let meta_dir = crate_dir.join("meta");

View File

@@ -191,12 +191,14 @@ impl DominatorTree {
         // Get an iterator with just the reachable predecessors to `ebb`.
         // Note that during the first pass, `is_reachable` returns false for blocks that haven't
         // been visited yet.
-        let mut reachable_preds =
-            cfg.get_predecessors(ebb).iter().cloned().filter(|&(ebb, _)| self.is_reachable(ebb));
+        let mut reachable_preds = cfg.get_predecessors(ebb)
+            .iter()
+            .cloned()
+            .filter(|&(ebb, _)| self.is_reachable(ebb));

         // The RPO must visit at least one predecessor before this node.
-        let mut idom = reachable_preds.next()
-            .expect("EBB node must have one reachable predecessor");
+        let mut idom =
+            reachable_preds.next().expect("EBB node must have one reachable predecessor");
         for pred in reachable_preds {
             idom = self.common_dominator(idom, pred, layout);

View File

@@ -184,7 +184,8 @@ impl DataFlowGraph {
         match self[inst] {
             InstructionData::Unary { opcode, arg, .. } => {
                 match opcode {
-                    Opcode::Copy | Opcode::Spill | Opcode::Fill => arg,
+                    Opcode::Copy | Opcode::Spill |
+                    Opcode::Fill => arg,
                     _ => return v,
                 }
             }

View File

@@ -50,7 +50,9 @@ impl Signature {
         let bytes = self.argument_types
             .iter()
             .filter_map(|arg| match arg.location {
-                ArgumentLoc::Stack(offset) => Some(offset + arg.value_type.bits() as u32 / 8),
+                ArgumentLoc::Stack(offset) => {
+                    Some(offset + arg.value_type.bits() as u32 / 8)
+                }
                 _ => None,
             })
             .fold(0, cmp::max);

View File

@@ -66,7 +66,10 @@ impl JumpTableData {
     ///
     /// This returns an iterator that skips any empty slots in the table.
     pub fn entries<'a>(&'a self) -> Entries {
-        Entries(self.table.iter().cloned().enumerate())
+        Entries(self.table
+            .iter()
+            .cloned()
+            .enumerate())
     }

     /// Access the whole table as a mutable slice.
@@ -101,7 +104,10 @@ impl Display for JumpTableData {
             Some(first) => write!(fmt, "jump_table {}", first)?,
         }
-        for dest in self.table.iter().skip(1).map(|e| e.expand()) {
+        for dest in self.table
+            .iter()
+            .skip(1)
+            .map(|e| e.expand()) {
             match dest {
                 None => write!(fmt, ", 0")?,
                 Some(ebb) => write!(fmt, ", {}", ebb)?,

View File

@@ -125,10 +125,7 @@ impl Layout {
     /// Get the last sequence number in `ebb`.
     fn last_ebb_seq(&self, ebb: Ebb) -> SequenceNumber {
         // Get the seq of the last instruction if it exists, otherwise use the EBB header seq.
-        self.ebbs[ebb]
-            .last_inst
-            .map(|inst| self.insts[inst].seq)
-            .unwrap_or(self.ebbs[ebb].seq)
+        self.ebbs[ebb].last_inst.map(|inst| self.insts[inst].seq).unwrap_or(self.ebbs[ebb].seq)
     }

     /// Assign a valid sequence number to `ebb` such that the numbers are still monotonic. This may
@@ -439,8 +436,8 @@ impl Layout {
     /// Insert `inst` before the instruction `before` in the same EBB.
     pub fn insert_inst(&mut self, inst: Inst, before: Inst) {
         assert_eq!(self.inst_ebb(inst), None);
-        let ebb = self.inst_ebb(before)
-            .expect("Instruction before insertion point not in the layout");
+        let ebb =
+            self.inst_ebb(before).expect("Instruction before insertion point not in the layout");
         let after = self.insts[before].prev;
         {
             let inst_node = self.insts.ensure(inst);
@@ -488,8 +485,8 @@ impl Layout {
     /// i4
     /// ```
     pub fn split_ebb(&mut self, new_ebb: Ebb, before: Inst) {
-        let old_ebb = self.inst_ebb(before)
-            .expect("The `before` instruction must be in the layout");
+        let old_ebb =
+            self.inst_ebb(before).expect("The `before` instruction must be in the layout");
         assert!(!self.is_ebb_inserted(new_ebb));

         // Insert new_ebb after old_ebb.

View File

@@ -348,7 +348,12 @@ mod tests {
         assert_eq!(big.bits(), 64 * 256);
         assert_eq!(big.half_vector().unwrap().to_string(), "f64x128");
-        assert_eq!(B1.by(2).unwrap().half_vector().unwrap().to_string(), "b1");
+        assert_eq!(B1.by(2)
+                       .unwrap()
+                       .half_vector()
+                       .unwrap()
+                       .to_string(),
+                   "b1");
         assert_eq!(I32.half_vector(), None);
         assert_eq!(VOID.half_vector(), None);
@@ -378,7 +383,12 @@ mod tests {
         assert_eq!(B1.by(8).unwrap().to_string(), "b1x8");
         assert_eq!(B8.by(1).unwrap().to_string(), "b8");
         assert_eq!(B16.by(256).unwrap().to_string(), "b16x256");
-        assert_eq!(B32.by(4).unwrap().by(2).unwrap().to_string(), "b32x8");
+        assert_eq!(B32.by(4)
+                       .unwrap()
+                       .by(2)
+                       .unwrap()
+                       .to_string(),
+                   "b32x8");
         assert_eq!(B64.by(8).unwrap().to_string(), "b64x8");
         assert_eq!(I8.by(64).unwrap().to_string(), "i8x64");
         assert_eq!(F64.by(2).unwrap().to_string(), "f64x2");

View File

@@ -199,7 +199,10 @@ impl RegInfo {
     /// Try to parse a regunit name. The name is not expected to begin with `%`.
     pub fn parse_regunit(&self, name: &str) -> Option<RegUnit> {
-        self.banks.iter().filter_map(|b| b.parse_regunit(name)).next()
+        self.banks
+            .iter()
+            .filter_map(|b| b.parse_regunit(name))
+            .next()
     }

     /// Make a temporary object that can display a register unit.

View File

@@ -35,7 +35,10 @@ mod tests {
     fn check(x: &[u32], want: &[u32]) {
         assert_eq!(x.len(), want.len());
-        let want_count = want.iter().cloned().filter(|&x| x % 10 == 0).count();
+        let want_count = want.iter()
+            .cloned()
+            .filter(|&x| x % 10 == 0)
+            .count();
         let mut v = Vec::new();
         v.extend(x.iter().cloned());
         let count = partition_slice(&mut v[..], |&x| x % 10 == 0);

View File

@@ -200,7 +200,10 @@ impl<'a> Context<'a> {
         for lv in liveins {
             let value = lv.value;
-            let affinity = self.liveness.get(value).expect("No live range for live-in").affinity;
+            let affinity = self.liveness
+                .get(value)
+                .expect("No live range for live-in")
+                .affinity;
             if let Affinity::Reg(rc_index) = affinity {
                 let regclass = self.reginfo.rc(rc_index);
                 match func.locations[value] {

View File

@@ -167,8 +167,7 @@ impl LiveValueTracker {
             self.idom_sets.get(&idom).expect("No stored live set for dominator");
         // Get just the values that are live-in to `ebb`.
         for &value in idom_live_list.as_slice(&self.idom_pool) {
-            let lr = liveness.get(value)
-                .expect("Immediate dominator value has no live range");
+            let lr = liveness.get(value).expect("Immediate dominator value has no live range");

             // Check if this value is live-in here.
             if let Some(endpoint) = lr.livein_local_end(ebb, program_order) {
@@ -260,7 +259,10 @@ impl LiveValueTracker {
     /// Save the current set of live values so it is associated with `idom`.
     fn save_idom_live_set(&mut self, idom: Inst) {
-        let values = self.live.values.iter().map(|lv| lv.value);
+        let values = self.live
+            .values
+            .iter()
+            .map(|lv| lv.value);
         let pool = &mut self.idom_pool;
         // If there already is a set saved for `idom`, just keep it.
         self.idom_sets.entry(idom).or_insert_with(|| {

View File

@@ -315,8 +315,10 @@ impl Liveness {
             let recipe = func.encodings[inst].recipe();
             // Iterator of constraints, one per value operand.
             // TODO: Should we fail here if the instruction doesn't have a valid encoding?
-            let mut operand_constraints =
-                recipe_constraints.get(recipe).map(|c| c.ins).unwrap_or(&[]).iter();
+            let mut operand_constraints = recipe_constraints.get(recipe)
+                .map(|c| c.ins)
+                .unwrap_or(&[])
+                .iter();
             for &arg in func.dfg[inst].arguments(&func.dfg.value_lists) {
                 // Get the live range, create it as a dead range if necessary.

View File

@@ -221,9 +221,7 @@ impl LiveRange {
     /// Return `Ok(n)` if `liveins[n]` already contains `ebb`.
     /// Otherwise, return `Err(n)` with the index where such an interval should be inserted.
     fn find_ebb_interval<PO: ProgramOrder>(&self, ebb: Ebb, order: &PO) -> Result<usize, usize> {
-        self.liveins
-            .binary_search_by(|intv| order.cmp(intv.begin, ebb))
-            .or_else(|n| {
+        self.liveins.binary_search_by(|intv| order.cmp(intv.begin, ebb)).or_else(|n| {
             // The interval at `n-1` may cover `ebb`.
             if n > 0 && order.cmp(self.liveins[n - 1].end, ebb) == Ordering::Greater {
                 Ok(n - 1)

View File

@@ -157,9 +157,8 @@ impl<'a> Verifier<'a> {
         let fixed_results = inst_data.opcode().constraints().fixed_results();
         // var_results is 0 if we aren't a call instruction
-        let var_results = dfg.call_signature(inst)
-            .map(|sig| dfg.signatures[sig].return_types.len())
-            .unwrap_or(0);
+        let var_results =
+            dfg.call_signature(inst).map(|sig| dfg.signatures[sig].return_types.len()).unwrap_or(0);
         let total_results = fixed_results + var_results;

         if total_results == 0 {
@@ -247,7 +246,10 @@ impl<'a> Verifier<'a> {
     }

     fn verify_sig_ref(&self, inst: Inst, s: SigRef) -> Result<()> {
-        if !self.func.dfg.signatures.is_valid(s) {
+        if !self.func
+            .dfg
+            .signatures
+            .is_valid(s) {
             err!(inst, "invalid signature reference {}", s)
         } else {
             Ok(())
@@ -255,7 +257,10 @@ impl<'a> Verifier<'a> {
     }

     fn verify_func_ref(&self, inst: Inst, f: FuncRef) -> Result<()> {
-        if !self.func.dfg.ext_funcs.is_valid(f) {
+        if !self.func
+            .dfg
+            .ext_funcs
+            .is_valid(f) {
             err!(inst, "invalid function reference {}", f)
         } else {
             Ok(())

View File

@@ -188,7 +188,11 @@ fn write_instruction(w: &mut Write,
         for r in func.dfg.inst_results(inst) {
             write!(s,
                    ",{}",
-                   func.locations.get(r).cloned().unwrap_or_default().display(&regs))?
+                   func.locations
+                       .get(r)
+                       .cloned()
+                       .unwrap_or_default()
+                       .display(&regs))?
         }
     }
     write!(s, "]")?;

View File

@@ -270,7 +270,10 @@ impl<'a> State<'a> {
     // Get the offset following the match that defined `var`, or 0 if var is an environment
     // variable or unknown.
     fn def_offset(&self, var: &str) -> usize {
-        self.vars.get(var).map(|&VarDef { offset, .. }| offset).unwrap_or(0)
+        self.vars
+            .get(var)
+            .map(|&VarDef { offset, .. }| offset)
+            .unwrap_or(0)
     }

     // Get the offset of the beginning of the next line after `pos`.

View File

@@ -84,7 +84,12 @@ fn error<'a>(error: Error, loc: Location) -> Result<LocatedToken<'a>, LocatedErr
 /// Get the number of decimal digits at the end of `s`.
 fn trailing_digits(s: &str) -> usize {
     // It's faster to iterate backwards over bytes, and we're only counting ASCII digits.
-    s.as_bytes().iter().rev().cloned().take_while(|&b| b'0' <= b && b <= b'9').count()
+    s.as_bytes()
+        .iter()
+        .rev()
+        .cloned()
+        .take_while(|&b| b'0' <= b && b <= b'9')
+        .count()
 }

 /// Pre-parse a supposed entity name by splitting it into two parts: A head of lowercase ASCII

View File

@@ -28,7 +28,12 @@ use sourcemap::{SourceMap, MutableSourceMap};
 ///
 /// Any test commands or ISA declarations are ignored.
 pub fn parse_functions(text: &str) -> Result<Vec<Function>> {
-    parse_test(text).map(|file| file.functions.into_iter().map(|(func, _)| func).collect())
+    parse_test(text).map(|file| {
+        file.functions
+            .into_iter()
+            .map(|(func, _)| func)
+            .collect()
+    })
 }

 /// Parse the entire `text` as a test case file.
@@ -114,7 +119,12 @@ impl<'a> Context<'a> {
     // Allocate a new signature and add a mapping number -> SigRef.
     fn add_sig(&mut self, number: u32, data: Signature, loc: &Location) -> Result<()> {
-        self.map.def_sig(number, self.function.dfg.signatures.push(data), loc)
+        self.map.def_sig(number,
+                         self.function
+                             .dfg
+                             .signatures
+                             .push(data),
+                         loc)
     }

     // Resolve a reference to a signature.
@@ -127,7 +137,12 @@ impl<'a> Context<'a> {
     // Allocate a new external function and add a mapping number -> FuncRef.
     fn add_fn(&mut self, number: u32, data: ExtFuncData, loc: &Location) -> Result<()> {
-        self.map.def_fn(number, self.function.dfg.ext_funcs.push(data), loc)
+        self.map.def_fn(number,
+                        self.function
+                            .dfg
+                            .ext_funcs
+                            .push(data),
+                        loc)
     }

     // Resolve a reference to a function.
@@ -777,23 +792,35 @@ impl<'a> Parser<'a> {
         match self.token() {
             Some(Token::StackSlot(..)) => {
                 self.gather_comments(ctx.function.stack_slots.next_key());
-                self.parse_stack_slot_decl()
-                    .and_then(|(num, dat)| ctx.add_ss(num, dat, &self.loc))
+                self.parse_stack_slot_decl().and_then(|(num, dat)| {
+                    ctx.add_ss(num, dat, &self.loc)
+                })
             }
             Some(Token::SigRef(..)) => {
-                self.gather_comments(ctx.function.dfg.signatures.next_key());
-                self.parse_signature_decl(ctx.unique_isa)
-                    .and_then(|(num, dat)| ctx.add_sig(num, dat, &self.loc))
+                self.gather_comments(ctx.function
+                    .dfg
+                    .signatures
+                    .next_key());
+                self.parse_signature_decl(ctx.unique_isa).and_then(|(num, dat)| {
+                    ctx.add_sig(num,
+                                dat,
+                                &self.loc)
+                })
             }
             Some(Token::FuncRef(..)) => {
-                self.gather_comments(ctx.function.dfg.ext_funcs.next_key());
-                self.parse_function_decl(ctx)
-                    .and_then(|(num, dat)| ctx.add_fn(num, dat, &self.loc))
+                self.gather_comments(ctx.function
+                    .dfg
+                    .ext_funcs
+                    .next_key());
+                self.parse_function_decl(ctx).and_then(|(num, dat)| {
+                    ctx.add_fn(num, dat, &self.loc)
+                })
             }
             Some(Token::JumpTable(..)) => {
                 self.gather_comments(ctx.function.jump_tables.next_key());
-                self.parse_jump_table_decl()
-                    .and_then(|(num, dat)| ctx.add_jt(num, dat, &self.loc))
+                self.parse_jump_table_decl().and_then(|(num, dat)| {
+                    ctx.add_jt(num, dat, &self.loc)
+                })
             }
             // More to come..
             _ => return Ok(()),
@@ -852,7 +879,10 @@ impl<'a> Parser<'a> {
         let data = match self.token() {
             Some(Token::Identifier("function")) => {
                 let (loc, name, sig) = self.parse_function_spec(ctx.unique_isa)?;
-                let sigref = ctx.function.dfg.signatures.push(sig);
+                let sigref = ctx.function
+                    .dfg
+                    .signatures
+                    .push(sig);
                 ctx.map.def_entity(sigref.into(), &loc).expect("duplicate SigRef entities created");
                 ExtFuncData {
                     name: name,
@@ -1161,7 +1191,10 @@ impl<'a> Parser<'a> {
                               ctx.function.dfg.inst_results(inst))?;
         if let Some(result_locations) = result_locations {
-            for (value, loc) in ctx.function.dfg.inst_results(inst).zip(result_locations) {
+            for (value, loc) in ctx.function
+                .dfg
+                .inst_results(inst)
+                .zip(result_locations) {
                 *ctx.function.locations.ensure(value) = loc;
             }
         }
@@ -1290,7 +1323,7 @@ impl<'a> Parser<'a> {
                            ctx: &mut Context,
                            opcode: Opcode)
                            -> Result<InstructionData> {
-        Ok(match opcode.format() {
+        let idata = match opcode.format() {
             InstructionFormat::Nullary => {
                 InstructionData::Nullary {
                     opcode: opcode,
@@ -1502,7 +1535,8 @@ impl<'a> Parser<'a> {
                     table: table,
                 }
             }
-        })
+        };
+        Ok(idata)
     }
 }
@@ -1602,8 +1636,7 @@ mod tests {
     #[test]
     fn comments() {
-        let (func, Details { comments, .. }) =
-            Parser::new("; before
+        let (func, Details { comments, .. }) = Parser::new("; before
                          function comment() { ; decl
                             ss10 = stack_slot 13 ; stackslot.
                             ; Still stackslot.

View File

@@ -73,7 +73,11 @@ impl SourceMap {
                 .and_then(|v| self.get_value(v))
                 .map(AnyEntity::Value)
             }
-            "ebb" => Ebb::with_number(num).and_then(|e| self.get_ebb(e)).map(AnyEntity::Ebb),
+            "ebb" => {
+                Ebb::with_number(num)
+                    .and_then(|e| self.get_ebb(e))
+                    .map(AnyEntity::Ebb)
+            }
             "ss" => self.get_ss(num).map(AnyEntity::StackSlot),
             "sig" => self.get_sig(num).map(AnyEntity::SigRef),
             "fn" => self.get_fn(num).map(AnyEntity::FuncRef),