Update rustfmt to 0.9.0.
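This commit is a mechanical re-run of rustfmt; the diff below is not intended to change behavior. As a rough, hypothetical sketch of the 0.9.0 style shift visible throughout the hunks (the function and argument names here are made up, not taken from this commit), long call arguments move onto their own lines with a trailing comma and a closing parenthesis on its own line, and `where` clauses get one bound per line:

    // Before (0.8.x-style wrapping):
    do_work(first_argument,
            second_argument,
            third_argument);

    // After (0.9.0-style wrapping):
    do_work(
        first_argument,
        second_argument,
        third_argument,
    );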
@@ -15,7 +15,7 @@
 # With the --install option, also tries to install the right version.

 # This version should always be bumped to the newest version available.
-VERS="0.8.4"
+VERS="0.9.0"

 if cargo install --list | grep -q "^rustfmt v$VERS"; then
 exit 0
@@ -21,10 +21,12 @@ pub fn run(files: Vec<String>) -> CommandResult {
 }

 fn cat_one(filename: String) -> CommandResult {
-let buffer = read_to_string(&filename)
-.map_err(|e| format!("{}: {}", filename, e))?;
-let items = parse_functions(&buffer)
-.map_err(|e| format!("{}: {}", filename, e))?;
+let buffer = read_to_string(&filename).map_err(
+|e| format!("{}: {}", filename, e),
+)?;
+let items = parse_functions(&buffer).map_err(
+|e| format!("{}: {}", filename, e),
+)?;

 for (idx, func) in items.into_iter().enumerate() {
 if idx != 0 {
@@ -64,10 +64,10 @@ fn cton_util() -> CommandResult {
 // Parse command line arguments.
 let args: Args = Docopt::new(USAGE)
 .and_then(|d| {
 d.help(true)
 .version(Some(format!("Cretonne {}", VERSION)))
 .deserialize()
 })
 .unwrap_or_else(|e| e.exit());

 // Find the sub-command to execute.
@@ -80,10 +80,12 @@ fn cton_util() -> CommandResult {
 } else if args.cmd_print_cfg {
 print_cfg::run(args.arg_file)
 } else if args.cmd_wasm {
-wasm::run(args.arg_file,
-args.flag_verbose,
-args.flag_optimize,
-args.flag_check)
+wasm::run(
+args.arg_file,
+args.flag_verbose,
+args.flag_optimize,
+args.flag_check,
+)
 } else {
 // Debugging / shouldn't happen with proper command line handling above.
 Err(format!("Unhandled args: {:?}", args))
@@ -108,9 +108,12 @@ impl SubTest for TestBinEmit {
 for ebb in func.layout.ebbs() {
 for inst in func.layout.ebb_insts(ebb) {
 if !func.encodings[inst].is_legal() {
-if let Ok(enc) = isa.encode(&func.dfg,
-&func.dfg[inst],
-func.dfg.ctrl_typevar(inst)) {
+if let Ok(enc) = isa.encode(
+&func.dfg,
+&func.dfg[inst],
+func.dfg.ctrl_typevar(inst),
+)
+{
 func.encodings[inst] = enc;
 }
 }
@@ -118,8 +121,9 @@ impl SubTest for TestBinEmit {
 }

 // Relax branches and compute EBB offsets based on the encodings.
-let code_size = binemit::relax_branches(&mut func, isa)
-.map_err(|e| pretty_error(&func, context.isa, e))?;
+let code_size = binemit::relax_branches(&mut func, isa).map_err(|e| {
+pretty_error(&func, context.isa, e)
+})?;

 // Collect all of the 'bin:' directives on instructions.
 let mut bins = HashMap::new();
@@ -128,16 +132,20 @@ impl SubTest for TestBinEmit {
 match comment.entity {
 AnyEntity::Inst(inst) => {
 if let Some(prev) = bins.insert(inst, want) {
-return Err(format!("multiple 'bin:' directives on {}: '{}' and '{}'",
-func.dfg.display_inst(inst, isa),
-prev,
-want));
+return Err(format!(
+"multiple 'bin:' directives on {}: '{}' and '{}'",
+func.dfg.display_inst(inst, isa),
+prev,
+want
+));
 }
 }
 _ => {
-return Err(format!("'bin:' directive on non-inst {}: {}",
-comment.entity,
-comment.text))
+return Err(format!(
+"'bin:' directive on non-inst {}: {}",
+comment.entity,
+comment.text
+))
 }
 }
 }
@@ -152,10 +160,12 @@ impl SubTest for TestBinEmit {
 for ebb in func.layout.ebbs() {
 divert.clear();
 // Correct header offsets should have been computed by `relax_branches()`.
-assert_eq!(sink.offset,
-func.offsets[ebb],
-"Inconsistent {} header offset",
-ebb);
+assert_eq!(
+sink.offset,
+func.offsets[ebb],
+"Inconsistent {} header offset",
+ebb
+);
 for inst in func.layout.ebb_insts(ebb) {
 sink.text.clear();
 let enc = func.encodings[inst];
@@ -166,34 +176,44 @@ impl SubTest for TestBinEmit {
 isa.emit_inst(&func, inst, &mut divert, &mut sink);
 let emitted = sink.offset - before;
 // Verify the encoding recipe sizes against the ISAs emit_inst implementation.
-assert_eq!(emitted,
-encinfo.bytes(enc),
-"Inconsistent size for [{}] {}",
-encinfo.display(enc),
-func.dfg.display_inst(inst, isa));
+assert_eq!(
+emitted,
+encinfo.bytes(enc),
+"Inconsistent size for [{}] {}",
+encinfo.display(enc),
+func.dfg.display_inst(inst, isa)
+);
 }

 // Check against bin: directives.
 if let Some(want) = bins.remove(&inst) {
 if !enc.is_legal() {
-return Err(format!("{} can't be encoded: {}",
-inst,
-func.dfg.display_inst(inst, isa)));
+return Err(format!(
+"{} can't be encoded: {}",
+inst,
+func.dfg.display_inst(inst, isa)
+));
 }
 let have = sink.text.trim();
 if have != want {
-return Err(format!("Bad machine code for {}: {}\nWant: {}\nGot: {}",
-inst,
-func.dfg.display_inst(inst, isa),
-want,
-have));
+return Err(format!(
+"Bad machine code for {}: {}\nWant: {}\nGot: {}",
+inst,
+func.dfg.display_inst(inst, isa),
+want,
+have
+));
 }
 }
 }
 }

 if sink.offset != code_size {
-return Err(format!("Expected code size {}, got {}", code_size, sink.offset));
+return Err(format!(
+"Expected code size {}, got {}",
+code_size,
+sink.offset
+));
 }

 Ok(())
@@ -41,22 +41,30 @@ impl SubTest for TestCompile {
 let mut comp_ctx = cretonne::Context::new();
 comp_ctx.func = func.into_owned();

-let code_size = comp_ctx
-.compile(isa)
-.map_err(|e| pretty_error(&comp_ctx.func, context.isa, e))?;
+let code_size = comp_ctx.compile(isa).map_err(|e| {
+pretty_error(&comp_ctx.func, context.isa, e)
+})?;

-dbg!("Generated {} bytes of code:\n{}",
-code_size,
-comp_ctx.func.display(isa));
+dbg!(
+"Generated {} bytes of code:\n{}",
+code_size,
+comp_ctx.func.display(isa)
+);

 // Finally verify that the returned code size matches the emitted bytes.
 let mut sink = SizeSink { offset: 0 };
-binemit::emit_function(&comp_ctx.func,
-|func, inst, div, sink| isa.emit_inst(func, inst, div, sink),
-&mut sink);
+binemit::emit_function(
+&comp_ctx.func,
+|func, inst, div, sink| isa.emit_inst(func, inst, div, sink),
+&mut sink,
+);

 if sink.offset != code_size {
-return Err(format!("Expected code size {}, got {}", code_size, sink.offset));
+return Err(format!(
+"Expected code size {}, got {}",
+code_size,
+sink.offset
+));
 }

 Ok(())
@@ -46,7 +46,9 @@ impl ConcurrentRunner {
 heartbeat_thread(reply_tx.clone());

 let handles = (0..num_cpus::get())
-.map(|num| worker_thread(num, request_mutex.clone(), reply_tx.clone()))
+.map(|num| {
+worker_thread(num, request_mutex.clone(), reply_tx.clone())
+})
 .collect();

 ConcurrentRunner {
@@ -97,16 +99,17 @@ fn heartbeat_thread(replies: Sender<Reply>) -> thread::JoinHandle<()> {
 thread::Builder::new()
 .name("heartbeat".to_string())
 .spawn(move || while replies.send(Reply::Tick).is_ok() {
 thread::sleep(Duration::from_secs(1));
 })
 .unwrap()
 }

 /// Spawn a worker thread running tests.
-fn worker_thread(thread_num: usize,
-requests: Arc<Mutex<Receiver<Request>>>,
-replies: Sender<Reply>)
--> thread::JoinHandle<()> {
+fn worker_thread(
+thread_num: usize,
+requests: Arc<Mutex<Receiver<Request>>>,
+replies: Sender<Reply>,
+) -> thread::JoinHandle<()> {
 thread::Builder::new()
 .name(format!("worker #{}", thread_num))
 .spawn(move || {
@@ -50,9 +50,11 @@ impl SubTest for TestDomtree {
 let inst = match comment.entity {
 AnyEntity::Inst(inst) => inst,
 _ => {
-return Err(format!("annotation on non-inst {}: {}",
-comment.entity,
-comment.text))
+return Err(format!(
+"annotation on non-inst {}: {}",
+comment.entity,
+comment.text
+))
 }
 };
 for src_ebb in tail.split_whitespace() {
@@ -69,17 +71,21 @@ impl SubTest for TestDomtree {
 // Compare to computed domtree.
 match domtree.idom(ebb) {
 Some(got_inst) if got_inst != inst => {
-return Err(format!("mismatching idoms for {}:\n\
+return Err(format!(
+"mismatching idoms for {}:\n\
 want: {}, got: {}",
 src_ebb,
 inst,
-got_inst));
+got_inst
+));
 }
 None => {
-return Err(format!("mismatching idoms for {}:\n\
+return Err(format!(
+"mismatching idoms for {}:\n\
 want: {}, got: unreachable",
 src_ebb,
-inst));
+inst
+));
 }
 _ => {}
 }
@@ -89,15 +95,17 @@ impl SubTest for TestDomtree {

 // Now we know that everything in `expected` is consistent with `domtree`.
 // All other EBB's should be either unreachable or the entry block.
-for ebb in func.layout
-.ebbs()
-.skip(1)
-.filter(|ebb| !expected.contains_key(&ebb)) {
+for ebb in func.layout.ebbs().skip(1).filter(
+|ebb| !expected.contains_key(&ebb),
+)
+{
 if let Some(got_inst) = domtree.idom(ebb) {
-return Err(format!("mismatching idoms for renumbered {}:\n\
+return Err(format!(
+"mismatching idoms for renumbered {}:\n\
 want: unrechable, got: {}",
 ebb,
-got_inst));
+got_inst
+));
 }
 }

@@ -41,9 +41,9 @@ impl SubTest for TestLegalizer {
 let isa = context.isa.expect("legalizer needs an ISA");

 comp_ctx.flowgraph();
-comp_ctx
-.legalize(isa)
-.map_err(|e| pretty_error(&comp_ctx.func, context.isa, e))?;
+comp_ctx.legalize(isa).map_err(|e| {
+pretty_error(&comp_ctx.func, context.isa, e)
+})?;

 let mut text = String::new();
 write!(&mut text, "{}", &comp_ctx.func.display(Some(isa)))
@@ -39,13 +39,14 @@ impl SubTest for TestLICM {
 comp_ctx.func = func.into_owned();

 comp_ctx.flowgraph();
-comp_ctx
-.licm()
-.map_err(|e| pretty_error(&comp_ctx.func, context.isa, e))?;
+comp_ctx.licm().map_err(|e| {
+pretty_error(&comp_ctx.func, context.isa, e)
+})?;

 let mut text = String::new();
-write!(&mut text, "{}", &comp_ctx.func)
-.map_err(|e| e.to_string())?;
+write!(&mut text, "{}", &comp_ctx.func).map_err(
+|e| e.to_string(),
+)?;
 run_filecheck(&text, context)
 }
 }
@@ -46,12 +46,12 @@ impl SubTest for TestRegalloc {

 comp_ctx.flowgraph();
 // TODO: Should we have an option to skip legalization?
-comp_ctx
-.legalize(isa)
-.map_err(|e| pretty_error(&comp_ctx.func, context.isa, e))?;
-comp_ctx
-.regalloc(isa)
-.map_err(|e| pretty_error(&comp_ctx.func, context.isa, e))?;
+comp_ctx.legalize(isa).map_err(|e| {
+pretty_error(&comp_ctx.func, context.isa, e)
+})?;
+comp_ctx.regalloc(isa).map_err(|e| {
+pretty_error(&comp_ctx.func, context.isa, e)
+})?;

 let mut text = String::new();
 write!(&mut text, "{}", &comp_ctx.func.display(Some(isa)))
@@ -41,11 +41,13 @@ impl Display for QueueEntry {
 let p = self.path.to_string_lossy();
 match self.state {
 State::Done(Ok(dur)) => {
-write!(f,
-"{}.{:03} {}",
-dur.as_secs(),
-dur.subsec_nanos() / 1000000,
-p)
+write!(
+f,
+"{}.{:03} {}",
+dur.as_secs(),
+dur.subsec_nanos() / 1000000,
+p
+)
 }
 State::Done(Err(ref e)) => write!(f, "FAIL {}: {}", p, e),
 _ => write!(f, "{}", p),
@@ -104,11 +106,10 @@ impl TestRunner {
 ///
 /// Any problems reading `file` as a test case file will be reported as a test failure.
 pub fn push_test<P: Into<PathBuf>>(&mut self, file: P) {
-self.tests
-.push(QueueEntry {
-path: file.into(),
-state: State::New,
-});
+self.tests.push(QueueEntry {
+path: file.into(),
+state: State::New,
+});
 }

 /// Begin running tests concurrently.
@@ -240,10 +241,12 @@ impl TestRunner {
 Reply::Tick => {
 self.ticks_since_progress += 1;
 if self.ticks_since_progress == TIMEOUT_SLOW {
-println!("STALLED for {} seconds with {}/{} tests finished",
-self.ticks_since_progress,
-self.reported_tests,
-self.tests.len());
+println!(
+"STALLED for {} seconds with {}/{} tests finished",
+self.ticks_since_progress,
+self.reported_tests,
+self.tests.len()
+);
 for jobid in self.reported_tests..self.tests.len() {
 if self.tests[jobid].state == State::Running {
 println!("slow: {}", self.tests[jobid]);
@@ -251,8 +254,10 @@ impl TestRunner {
 }
 }
 if self.ticks_since_progress >= TIMEOUT_PANIC {
-panic!("worker threads stalled for {} seconds.",
-self.ticks_since_progress);
+panic!(
+"worker threads stalled for {} seconds.",
+self.ticks_since_progress
+);
 }
 }
 }
@@ -278,9 +283,9 @@ impl TestRunner {
 let mut times = self.tests
 .iter()
 .filter_map(|entry| match *entry {
 QueueEntry { state: State::Done(Ok(dur)), .. } => Some(dur),
 _ => None,
 })
 .collect::<Vec<_>>();

 // Get me some real data, kid.
@@ -304,12 +309,11 @@ impl TestRunner {
 return;
 }

-for t in self.tests
-.iter()
-.filter(|entry| match **entry {
-QueueEntry { state: State::Done(Ok(dur)), .. } => dur > cut,
-_ => false,
-}) {
+for t in self.tests.iter().filter(|entry| match **entry {
+QueueEntry { state: State::Done(Ok(dur)), .. } => dur > cut,
+_ => false,
+})
+{
 println!("slow: {}", t)
 }

@@ -76,10 +76,11 @@ pub fn run(path: &Path) -> TestResult {
 }

 // Given a slice of tests, generate a vector of (test, flags, isa) tuples.
-fn test_tuples<'a>(tests: &'a [Box<SubTest>],
-isa_spec: &'a IsaSpec,
-no_isa_flags: &'a Flags)
--> Result<Vec<(&'a SubTest, &'a Flags, Option<&'a TargetIsa>)>> {
+fn test_tuples<'a>(
+tests: &'a [Box<SubTest>],
+isa_spec: &'a IsaSpec,
+no_isa_flags: &'a Flags,
+) -> Result<Vec<(&'a SubTest, &'a Flags, Option<&'a TargetIsa>)>> {
 let mut out = Vec::new();
 for test in tests {
 if test.needs_isa() {
@@ -104,10 +105,11 @@ fn test_tuples<'a>(tests: &'a [Box<SubTest>],
 Ok(out)
 }

-fn run_one_test<'a>(tuple: (&'a SubTest, &'a Flags, Option<&'a TargetIsa>),
-func: Cow<Function>,
-context: &mut Context<'a>)
--> Result<()> {
+fn run_one_test<'a>(
+tuple: (&'a SubTest, &'a Flags, Option<&'a TargetIsa>),
+func: Cow<Function>,
+context: &mut Context<'a>,
+) -> Result<()> {
 let (test, flags, isa) = tuple;
 let name = format!("{}({})", test.name(), func.name);
 dbg!("Test: {} {}", name, isa.map(TargetIsa::name).unwrap_or("-"));
@@ -117,11 +119,13 @@ fn run_one_test<'a>(tuple: (&'a SubTest, &'a Flags, Option<&'a TargetIsa>),

 // Should we run the verifier before this test?
 if !context.verified && test.needs_verifier() {
-verify_function(&func, isa)
-.map_err(|e| pretty_verifier_error(&func, isa, e))?;
+verify_function(&func, isa).map_err(|e| {
+pretty_verifier_error(&func, isa, e)
+})?;
 context.verified = true;
 }

-test.run(func, context)
-.map_err(|e| format!("{}: {}", name, e))
+test.run(func, context).map_err(
+|e| format!("{}: {}", name, e),
+)
 }
@@ -39,13 +39,14 @@ impl SubTest for TestSimpleGVN {
 comp_ctx.func = func.into_owned();

 comp_ctx.flowgraph();
-comp_ctx
-.simple_gvn()
-.map_err(|e| pretty_error(&comp_ctx.func, context.isa, e))?;
+comp_ctx.simple_gvn().map_err(|e| {
+pretty_error(&comp_ctx.func, context.isa, e)
+})?;

 let mut text = String::new();
-write!(&mut text, "{}", &comp_ctx.func)
-.map_err(|e| e.to_string())?;
+write!(&mut text, "{}", &comp_ctx.func).map_err(
+|e| e.to_string(),
+)?;
 run_filecheck(&text, context)
 }
 }
@@ -66,25 +66,25 @@ pub trait SubTest {
 /// match 'inst10'.
 impl<'a> filecheck::VariableMap for Context<'a> {
 fn lookup(&self, varname: &str) -> Option<FCValue> {
-self.details
-.map
-.lookup_str(varname)
-.map(|e| FCValue::Regex(format!(r"\b{}\b", e).into()))
+self.details.map.lookup_str(varname).map(|e| {
+FCValue::Regex(format!(r"\b{}\b", e).into())
+})
 }
 }

 /// Run filecheck on `text`, using directives extracted from `context`.
 pub fn run_filecheck(text: &str, context: &Context) -> Result<()> {
 let checker = build_filechecker(context)?;
-if checker
-.check(&text, context)
-.map_err(|e| format!("filecheck: {}", e))? {
+if checker.check(&text, context).map_err(
+|e| format!("filecheck: {}", e),
+)?
+{
 Ok(())
 } else {
 // Filecheck mismatch. Emit an explanation as output.
-let (_, explain) = checker
-.explain(&text, context)
-.map_err(|e| format!("explain: {}", e))?;
+let (_, explain) = checker.explain(&text, context).map_err(
+|e| format!("explain: {}", e),
+)?;
 Err(format!("filecheck failed:\n{}{}", checker, explain))
 }
 }
@@ -94,14 +94,14 @@ pub fn build_filechecker(context: &Context) -> Result<Checker> {
 let mut builder = CheckerBuilder::new();
 // Preamble comments apply to all functions.
 for comment in context.preamble_comments {
-builder
-.directive(comment.text)
-.map_err(|e| format!("filecheck: {}", e))?;
+builder.directive(comment.text).map_err(|e| {
+format!("filecheck: {}", e)
+})?;
 }
 for comment in &context.details.comments {
-builder
-.directive(comment.text)
-.map_err(|e| format!("filecheck: {}", e))?;
+builder.directive(comment.text).map_err(|e| {
+format!("filecheck: {}", e)
+})?;
 }
 let checker = builder.finish();
 if checker.is_empty() {
@@ -65,9 +65,11 @@ impl SubTest for TestVerifier {
 if want_loc == got.location {
 Ok(())
 } else {
-Err(format!("correct error reported on {}, but wanted {}",
-got.location,
-want_loc))
+Err(format!(
+"correct error reported on {}, but wanted {}",
+got.location,
+want_loc
+))
 }
 }
 Some(_) => Err(format!("mismatching error: {}", got)),
@@ -91,10 +91,12 @@ impl<'a> Display for CFGPrinter<'a> {
 }

 fn print_cfg(filename: String) -> CommandResult {
-let buffer = read_to_string(&filename)
-.map_err(|e| format!("{}: {}", filename, e))?;
-let items = parse_functions(&buffer)
-.map_err(|e| format!("{}: {}", filename, e))?;
+let buffer = read_to_string(&filename).map_err(
+|e| format!("{}: {}", filename, e),
+)?;
+let items = parse_functions(&buffer).map_err(
+|e| format!("{}: {}", filename, e),
+)?;

 for (idx, func) in items.into_iter().enumerate() {
 if idx != 0 {
@@ -18,14 +18,14 @@ pub fn run(files: Vec<String>, verbose: bool) -> CommandResult {
 }

 let mut buffer = String::new();
-io::stdin()
-.read_to_string(&mut buffer)
-.map_err(|e| format!("stdin: {}", e))?;
+io::stdin().read_to_string(&mut buffer).map_err(|e| {
+format!("stdin: {}", e)
+})?;

 if verbose {
-let (success, explain) = checker
-.explain(&buffer, NO_VARIABLES)
-.map_err(|e| e.to_string())?;
+let (success, explain) = checker.explain(&buffer, NO_VARIABLES).map_err(
+|e| e.to_string(),
+)?;
 print!("{}", explain);
 if success {
 println!("OK");
@@ -33,25 +33,27 @@ pub fn run(files: Vec<String>, verbose: bool) -> CommandResult {
 } else {
 Err("Check failed".to_string())
 }
-} else if checker
-.check(&buffer, NO_VARIABLES)
-.map_err(|e| e.to_string())? {
+} else if checker.check(&buffer, NO_VARIABLES).map_err(
+|e| e.to_string(),
+)?
+{
 Ok(())
 } else {
-let (_, explain) = checker
-.explain(&buffer, NO_VARIABLES)
-.map_err(|e| e.to_string())?;
+let (_, explain) = checker.explain(&buffer, NO_VARIABLES).map_err(
+|e| e.to_string(),
+)?;
 print!("{}", explain);
 Err("Check failed".to_string())
 }
 }

 fn read_checkfile(filename: &str) -> Result<Checker, String> {
-let buffer = read_to_string(&filename)
-.map_err(|e| format!("{}: {}", filename, e))?;
+let buffer = read_to_string(&filename).map_err(
+|e| format!("{}: {}", filename, e),
+)?;
 let mut builder = CheckerBuilder::new();
-builder
-.text(&buffer)
-.map_err(|e| format!("{}: {}", filename, e))?;
+builder.text(&buffer).map_err(
+|e| format!("{}: {}", filename, e),
+)?;
 Ok(builder.finish())
 }
@@ -24,8 +24,10 @@ pub fn read_to_string<P: AsRef<Path>>(path: P) -> Result<String> {
 ///
 /// Return the comment text following the directive.
 pub fn match_directive<'a>(comment: &'a str, directive: &str) -> Option<&'a str> {
-assert!(directive.ends_with(':'),
-"Directive must include trailing colon");
+assert!(
+directive.ends_with(':'),
+"Directive must include trailing colon"
+);
 let text = comment.trim_left_matches(';').trim_left();
 if text.starts_with(directive) {
 Some(text[directive.len()..].trim())
@@ -35,10 +37,11 @@ pub fn match_directive<'a>(comment: &'a str, directive: &str) -> Option<&'a str>
 }

 /// Pretty-print a verifier error.
-pub fn pretty_verifier_error(func: &ir::Function,
-isa: Option<&TargetIsa>,
-err: verifier::Error)
--> String {
+pub fn pretty_verifier_error(
+func: &ir::Function,
+isa: Option<&TargetIsa>,
+err: verifier::Error,
+) -> String {
 let mut msg = err.to_string();
 match err.location {
 AnyEntity::Inst(inst) => {
@@ -51,19 +51,22 @@ fn read_wasm_file(path: PathBuf) -> Result<Vec<u8>, io::Error> {
 }


-pub fn run(files: Vec<String>,
-flag_verbose: bool,
-flag_optimize: bool,
-flag_check: bool)
--> Result<(), String> {
+pub fn run(
+files: Vec<String>,
+flag_verbose: bool,
+flag_optimize: bool,
+flag_check: bool,
+) -> Result<(), String> {
 for filename in files.iter() {
 let path = Path::new(&filename);
 let name = String::from(path.as_os_str().to_string_lossy());
-match handle_module(flag_verbose,
-flag_optimize,
-flag_check,
-path.to_path_buf(),
-name) {
+match handle_module(
+flag_verbose,
+flag_optimize,
+flag_check,
+path.to_path_buf(),
+name,
+) {
 Ok(()) => {}
 Err(message) => return Err(message),
 }
@@ -71,12 +74,13 @@ pub fn run(files: Vec<String>,
 Ok(())
 }

-fn handle_module(flag_verbose: bool,
-flag_optimize: bool,
-flag_check: bool,
-path: PathBuf,
-name: String)
--> Result<(), String> {
+fn handle_module(
+flag_verbose: bool,
+flag_optimize: bool,
+flag_check: bool,
+path: PathBuf,
+name: String,
+) -> Result<(), String> {
 let mut terminal = term::stdout().unwrap();
 terminal.fg(term::color::YELLOW).unwrap();
 vprint!(flag_verbose, "Handling: ");
@@ -109,10 +113,10 @@ fn handle_module(flag_verbose: bool,
 .arg(file_path.to_str().unwrap())
 .output()
 .or_else(|e| if let io::ErrorKind::NotFound = e.kind() {
 return Err(String::from("wast2wasm not found"));
 } else {
 return Err(String::from(e.description()));
 })
 .unwrap();
 match read_wasm_file(file_path) {
 Ok(data) => data,
@@ -221,17 +225,20 @@ fn handle_module(flag_verbose: bool,
 }

 /// Pretty-print a verifier error.
-pub fn pretty_verifier_error(func: &ir::Function,
-isa: Option<&TargetIsa>,
-err: verifier::Error)
--> String {
+pub fn pretty_verifier_error(
+func: &ir::Function,
+isa: Option<&TargetIsa>,
+err: verifier::Error,
+) -> String {
 let msg = err.to_string();
 let str1 = match err.location {
 AnyEntity::Inst(inst) => {
-format!("{}\n{}: {}\n\n",
-msg,
-inst,
-func.dfg.display_inst(inst, isa))
+format!(
+"{}\n{}: {}\n\n",
+msg,
+inst,
+func.dfg.display_inst(inst, isa)
+)
 }
 _ => String::from(format!("{}\n", msg)),
 };
@@ -26,7 +26,8 @@ fn test_reverse_postorder_traversal(function_source: &str, ebb_order: Vec<u32>)

 #[test]
 fn simple_traversal() {
-test_reverse_postorder_traversal("
+test_reverse_postorder_traversal(
+"
 function %test(i32) native {
 ebb0(v0: i32):
 brz v0, ebb1
@@ -50,12 +51,14 @@ fn simple_traversal() {
 trap
 }
 ",
-vec![0, 1, 3, 2, 4, 5]);
+vec![0, 1, 3, 2, 4, 5],
+);
 }

 #[test]
 fn loops_one() {
-test_reverse_postorder_traversal("
+test_reverse_postorder_traversal(
+"
 function %test(i32) native {
 ebb0(v0: i32):
 jump ebb1
@@ -68,12 +71,14 @@ fn loops_one() {
 return
 }
 ",
-vec![0, 1, 3, 2]);
+vec![0, 1, 3, 2],
+);
 }

 #[test]
 fn loops_two() {
-test_reverse_postorder_traversal("
+test_reverse_postorder_traversal(
+"
 function %test(i32) native {
 ebb0(v0: i32):
 brz v0, ebb1
@@ -93,12 +98,14 @@ fn loops_two() {
 return
 }
 ",
-vec![0, 1, 2, 4, 3, 5]);
+vec![0, 1, 2, 4, 3, 5],
+);
 }

 #[test]
 fn loops_three() {
-test_reverse_postorder_traversal("
+test_reverse_postorder_traversal(
+"
 function %test(i32) native {
 ebb0(v0: i32):
 brz v0, ebb1
@@ -123,12 +130,14 @@ fn loops_three() {
 return
 }
 ",
-vec![0, 1, 2, 4, 3, 6, 7, 5]);
+vec![0, 1, 2, 4, 3, 6, 7, 5],
+);
 }

 #[test]
 fn back_edge_one() {
-test_reverse_postorder_traversal("
+test_reverse_postorder_traversal(
+"
 function %test(i32) native {
 ebb0(v0: i32):
 brz v0, ebb1
@@ -146,5 +155,6 @@ fn back_edge_one() {
 trap
 }
 ",
-vec![0, 1, 3, 2, 4]);
+vec![0, 1, 3, 2, 4],
+);
 }
@@ -49,8 +49,10 @@ fn main() {
 // Make sure we rebuild is this build script changes.
 // I guess that won't happen if you have non-UTF8 bytes in your path names.
 // The `build.py` script prints out its own dependencies.
-println!("cargo:rerun-if-changed={}",
-crate_dir.join("build.rs").to_string_lossy());
+println!(
+"cargo:rerun-if-changed={}",
+crate_dir.join("build.rs").to_string_lossy()
+);

 // Scripts are in `$crate_dir/meta`.
 let meta_dir = crate_dir.join("meta");
@@ -130,9 +132,11 @@ fn isa_targets(cretonne_targets: Option<&str>, target_triple: &str) -> Result<Ve
 Isa::from_arch(target_triple.split('-').next().unwrap())
 .map(|isa| vec![isa])
 .ok_or_else(|| {
-format!("no supported isa found for target triple `{}`",
-target_triple)
-})
+format!(
+"no supported isa found for target triple `{}`",
+target_triple
+)
+})
 }
 Some(targets) => {
 let unknown_isa_targets = targets
@@ -143,7 +147,10 @@ fn isa_targets(cretonne_targets: Option<&str>, target_triple: &str) -> Result<Ve
 match (unknown_isa_targets.is_empty(), isa_targets.is_empty()) {
 (true, true) => Ok(Isa::all().to_vec()),
 (true, _) => Ok(isa_targets),
-(_, _) => Err(format!("unknown isa targets: `{}`", unknown_isa_targets.join(", "))),
+(_, _) => Err(format!(
+"unknown isa targets: `{}`",
+unknown_isa_targets.join(", ")
+)),
 }
 }
 None => Ok(Isa::all().to_vec()),
@@ -150,8 +150,10 @@ pub fn legalize_abi_value(have: Type, arg: &ArgumentType) -> ValueConversion {
 match have_bits.cmp(&arg_bits) {
 // We have fewer bits than the ABI argument.
 Ordering::Less => {
-assert!(have.is_int() && arg.value_type.is_int(),
-"Can only extend integer values");
+assert!(
+have.is_int() && arg.value_type.is_int(),
+"Can only extend integer values"
+);
 match arg.extension {
 ArgumentExtension::Uext => ValueConversion::Uext(arg.value_type),
 ArgumentExtension::Sext => ValueConversion::Sext(arg.value_type),
@@ -192,22 +194,34 @@ mod tests {
 fn legalize() {
 let mut arg = ArgumentType::new(types::I32);

-assert_eq!(legalize_abi_value(types::I64X2, &arg),
-ValueConversion::VectorSplit);
-assert_eq!(legalize_abi_value(types::I64, &arg),
-ValueConversion::IntSplit);
+assert_eq!(
+legalize_abi_value(types::I64X2, &arg),
+ValueConversion::VectorSplit
+);
+assert_eq!(
+legalize_abi_value(types::I64, &arg),
+ValueConversion::IntSplit
+);

 // Vector of integers is broken down, then sign-extended.
 arg.extension = ArgumentExtension::Sext;
-assert_eq!(legalize_abi_value(types::I16X4, &arg),
-ValueConversion::VectorSplit);
-assert_eq!(legalize_abi_value(types::I16.by(2).unwrap(), &arg),
-ValueConversion::VectorSplit);
-assert_eq!(legalize_abi_value(types::I16, &arg),
-ValueConversion::Sext(types::I32));
+assert_eq!(
+legalize_abi_value(types::I16X4, &arg),
+ValueConversion::VectorSplit
+);
+assert_eq!(
+legalize_abi_value(types::I16.by(2).unwrap(), &arg),
+ValueConversion::VectorSplit
+);
+assert_eq!(
+legalize_abi_value(types::I16, &arg),
+ValueConversion::Sext(types::I32)
+);

 // 64-bit float is split as an integer.
-assert_eq!(legalize_abi_value(types::F64, &arg),
-ValueConversion::IntBits);
+assert_eq!(
+legalize_abi_value(types::F64, &arg),
+ValueConversion::IntBits
+);
 }
 }
@@ -54,9 +54,11 @@ pub trait CodeSink {
 /// Report a bad encoding error.
 #[inline(never)]
 pub fn bad_encoding(func: &Function, inst: Inst) -> ! {
-panic!("Bad encoding {} for {}",
-func.encodings[inst],
-func.dfg.display_inst(inst, None));
+panic!(
+"Bad encoding {} for {}",
+func.encodings[inst],
+func.dfg.display_inst(inst, None)
+);
 }

 /// Emit a function to `sink`, given an instruction emitter function.
@@ -64,8 +66,9 @@ pub fn bad_encoding(func: &Function, inst: Inst) -> ! {
 /// This function is called from the `TargetIsa::emit_function()` implementations with the
 /// appropriate instruction emitter.
 pub fn emit_function<CS, EI>(func: &Function, emit_inst: EI, sink: &mut CS)
-where CS: CodeSink,
-EI: Fn(&Function, Inst, &mut RegDiversions, &mut CS)
+where
+CS: CodeSink,
+EI: Fn(&Function, Inst, &mut RegDiversions, &mut CS),
 {
 let mut divert = RegDiversions::new();
 for ebb in func.layout.ebbs() {
@@ -60,8 +60,10 @@ pub fn relax_branches(func: &mut Function, isa: &TargetIsa) -> Result<CodeOffset
 while let Some(ebb) = cur.next_ebb() {
 // Record the offset for `ebb` and make sure we iterate until offsets are stable.
 if cur.func.offsets[ebb] != offset {
-assert!(cur.func.offsets[ebb] < offset,
-"Code shrinking during relaxation");
+assert!(
+cur.func.offsets[ebb] < offset,
+"Code shrinking during relaxation"
+);
 cur.func.offsets[ebb] = offset;
 go_again = true;
 }
@@ -99,10 +101,11 @@ fn fallthroughs(func: &mut Function) {
 for (ebb, succ) in func.layout.ebbs().adjacent_pairs() {
 let term = func.layout.last_inst(ebb).expect("EBB has no terminator.");
 if let InstructionData::Jump {
 ref mut opcode,
 destination,
 ..
-} = func.dfg[term] {
+} = func.dfg[term]
+{
 match *opcode {
 Opcode::Fallthrough => {
 // Somebody used a fall-through instruction before the branch relaxation pass.
@@ -126,16 +129,19 @@ fn fallthroughs(func: &mut Function) {
 ///
 /// Return the size of the replacement instructions up to and including the location where `pos` is
 /// left.
-fn relax_branch(cur: &mut FuncCursor,
-offset: CodeOffset,
-dest_offset: CodeOffset,
-encinfo: &EncInfo)
--> CodeOffset {
+fn relax_branch(
+cur: &mut FuncCursor,
+offset: CodeOffset,
+dest_offset: CodeOffset,
+encinfo: &EncInfo,
+) -> CodeOffset {
 let inst = cur.current_inst().unwrap();
-dbg!("Relaxing [{}] {} for {:#x}-{:#x} range",
-encinfo.display(cur.func.encodings[inst]),
-cur.func.dfg.display_inst(inst, None),
-offset,
-dest_offset);
+dbg!(
+"Relaxing [{}] {} for {:#x}-{:#x} range",
+encinfo.display(cur.func.encodings[inst]),
+cur.func.dfg.display_inst(inst, None),
+offset,
+dest_offset
+);
 unimplemented!();
 }
@@ -14,27 +14,34 @@ use std::convert::{Into, From};
 pub struct BitSet<T>(pub T);

 impl<T> BitSet<T>
-where T: Into<u32> + From<u8> + BitOr<T, Output = T> + Shl<u8, Output = T> + Sub<T, Output=T> +
-Add<T, Output=T> + PartialEq + Copy
+where
+T: Into<u32>
++ From<u8>
++ BitOr<T, Output = T>
++ Shl<u8, Output = T>
++ Sub<T, Output = T>
++ Add<T, Output = T>
++ PartialEq
++ Copy,
 {
 /// Maximum number of bits supported by this BitSet instance
 pub fn bits() -> usize {
 size_of::<T>() * 8
 }

 /// Maximum number of bits supported by any bitset instance atm.
 pub fn max_bits() -> usize {
 size_of::<u32>() * 8
 }

 /// Check if this BitSet contains the number num
 pub fn contains(&self, num: u8) -> bool {
 assert!((num as usize) < Self::bits());
 assert!((num as usize) < Self::max_bits());
 self.0.into() & (1 << num) != 0
 }

 /// Return the smallest number contained in the bitset or None if empty
 pub fn min(&self) -> Option<u8> {
 if self.0.into() == 0 {
 None
@@ -43,7 +50,7 @@ impl<T> BitSet<T>
 }
 }

 /// Return the largest number contained in the bitset or None if empty
 pub fn max(&self) -> Option<u8> {
 if self.0.into() == 0 {
 None
@@ -53,17 +60,17 @@ impl<T> BitSet<T>
 }
 }

 /// Construct a BitSet with the half-open range [lo,hi) filled in
 pub fn from_range(lo: u8, hi: u8) -> BitSet<T> {
 assert!(lo <= hi);
 assert!((hi as usize) <= Self::bits());
-let one : T = T::from(1);
+let one: T = T::from(1);
 // I can't just do (one << hi) - one here as the shift may overflow
 let hi_rng = if hi >= 1 {
-(one << (hi-1)) + ((one << (hi-1)) - one)
+(one << (hi - 1)) + ((one << (hi - 1)) - one)
 } else {
 T::from(0)
 };

 let lo_rng = (one << lo) - one;

@@ -94,14 +101,15 @@ mod tests {
 assert!(!s2.contains(7));

 let s3 = BitSet::<u8>(2 | 4 | 64);
-assert!(!s3.contains(0) && !s3.contains(3) && !s3.contains(4) && !s3.contains(5) &&
-!s3.contains(7));
+assert!(!s3.contains(0) && !s3.contains(3) && !s3.contains(4));
+assert!(!s3.contains(5) && !s3.contains(7));
 assert!(s3.contains(1) && s3.contains(2) && s3.contains(6));

 let s4 = BitSet::<u16>(4 | 8 | 256 | 1024);
-assert!(!s4.contains(0) && !s4.contains(1) && !s4.contains(4) && !s4.contains(5) &&
-!s4.contains(6) && !s4.contains(7) &&
-!s4.contains(9) && !s4.contains(11));
+assert!(
+!s4.contains(0) && !s4.contains(1) && !s4.contains(4) && !s4.contains(5) &&
+!s4.contains(6) && !s4.contains(7) && !s4.contains(9) && !s4.contains(11)
+);
 assert!(s4.contains(2) && s4.contains(3) && s4.contains(8) && s4.contains(10));
 }

@@ -63,7 +63,8 @@ impl<K: Default, V: Default> NodePool<K, V> {
 }

 impl<K: Default, V: Default> BTree<K, V> {
-/// Search for `key` and return a `Cursor` that either points at `key` or the position where it would be inserted.
+/// Search for `key` and return a `Cursor` that either points at `key` or the position
+/// where it would be inserted.
 pub fn search(&mut self, key: K) -> Cursor<K, V> {
 unimplemented!()
 }
@@ -26,10 +26,11 @@ pub trait Table<K: Copy + Eq> {
 ///
 /// Returns `Ok(idx)` with the table index containing the found entry, or `Err(idx)` with the empty
 /// sentinel entry if no entry could be found.
-pub fn probe<K: Copy + Eq, T: Table<K> + ?Sized>(table: &T,
-key: K,
-hash: usize)
--> Result<usize, usize> {
+pub fn probe<K: Copy + Eq, T: Table<K> + ?Sized>(
+table: &T,
+key: K,
+hash: usize,
+) -> Result<usize, usize> {
 debug_assert!(table.len().is_power_of_two());
 let mask = table.len() - 1;

@@ -133,18 +133,24 @@ impl Context {
 /// Perform LICM on the function.
 pub fn licm(&mut self) -> CtonResult {
 self.ensure_domtree();
-do_licm(&mut self.func,
-&mut self.cfg,
-&mut self.domtree,
-&mut self.loop_analysis);
+do_licm(
+&mut self.func,
+&mut self.cfg,
+&mut self.domtree,
+&mut self.loop_analysis,
+);
 self.verify(None).map_err(Into::into)
 }

 /// Run the register allocator.
 pub fn regalloc(&mut self, isa: &TargetIsa) -> CtonResult {
 self.ensure_domtree();
-self.regalloc
-.run(isa, &mut self.func, &self.cfg, &self.domtree)
+self.regalloc.run(
+isa,
+&mut self.func,
+&self.cfg,
+&self.domtree,
+)
 }

 /// Insert prologue and epilogues after computing the stack frame layout.
@@ -146,17 +146,21 @@ impl<'c, 'f> ir::InstInserterBase<'c> for &'c mut EncCursor<'f> {
 &mut self.func.dfg
 }

-fn insert_built_inst(self,
-inst: ir::Inst,
-ctrl_typevar: ir::Type)
--> &'c mut ir::DataFlowGraph {
+fn insert_built_inst(
+self,
+inst: ir::Inst,
+ctrl_typevar: ir::Type,
+) -> &'c mut ir::DataFlowGraph {
 // Insert the instruction and remember the reference.
 self.insert_inst(inst);
 self.built_inst = Some(inst);

 // Assign an encoding.
-match self.isa
-.encode(&self.func.dfg, &self.func.dfg[inst], ctrl_typevar) {
+match self.isa.encode(
+&self.func.dfg,
+&self.func.dfg[inst],
+ctrl_typevar,
+) {
 Ok(e) => self.func.encodings[inst] = e,
 Err(_) => panic!("can't encode {}", self.display_inst(inst)),
 }
@@ -69,18 +69,17 @@ pub fn writeln_with_format_args(args: fmt::Arguments) -> io::Result<()> {
 /// Open the tracing file for the current thread.
 fn open_file() -> io::BufWriter<File> {
 let file = match thread::current().name() {
 None => File::create("cretonne.dbg"),
 Some(name) => {
 let mut path = "cretonne.dbg.".to_owned();
 for ch in name.chars() {
 if ch.is_ascii() && ch.is_alphanumeric() {
 path.push(ch);
-}
 }
-File::create(path)
 }
+File::create(path)
 }
-.expect("Can't open tracing file");
+}.expect("Can't open tracing file");
 io::BufWriter::new(file)
 }
@@ -99,10 +98,13 @@ macro_rules! dbg {
 }

 /// Helper for printing lists.
-pub struct DisplayList<'a, T>(pub &'a [T]) where T: 'a + fmt::Display;
+pub struct DisplayList<'a, T>(pub &'a [T])
+where
+T: 'a + fmt::Display;

 impl<'a, T> fmt::Display for DisplayList<'a, T>
-where T: 'a + fmt::Display
+where
+T: 'a + fmt::Display,
 {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 match self.0.split_first() {
@@ -85,13 +85,15 @@ impl DominatorTree {
|
|||||||
///
|
///
|
||||||
/// If `a` and `b` belong to the same EBB, compare their relative position in the EBB.
|
/// If `a` and `b` belong to the same EBB, compare their relative position in the EBB.
|
||||||
pub fn rpo_cmp<A, B>(&self, a: A, b: B, layout: &Layout) -> Ordering
|
pub fn rpo_cmp<A, B>(&self, a: A, b: B, layout: &Layout) -> Ordering
|
||||||
where A: Into<ExpandedProgramPoint>,
|
where
|
||||||
B: Into<ExpandedProgramPoint>
|
A: Into<ExpandedProgramPoint>,
|
||||||
|
B: Into<ExpandedProgramPoint>,
|
||||||
{
|
{
|
||||||
let a = a.into();
|
let a = a.into();
|
||||||
let b = b.into();
|
let b = b.into();
|
||||||
self.rpo_cmp_ebb(layout.pp_ebb(a), layout.pp_ebb(b))
|
self.rpo_cmp_ebb(layout.pp_ebb(a), layout.pp_ebb(b)).then(
|
||||||
.then(layout.cmp(a, b))
|
layout.cmp(a, b),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns `true` if `a` dominates `b`.
|
/// Returns `true` if `a` dominates `b`.
|
||||||
@@ -104,8 +106,9 @@ impl DominatorTree {
|
|||||||
///
|
///
|
||||||
/// An instruction is considered to dominate itself.
|
/// An instruction is considered to dominate itself.
|
||||||
pub fn dominates<A, B>(&self, a: A, b: B, layout: &Layout) -> bool
|
pub fn dominates<A, B>(&self, a: A, b: B, layout: &Layout) -> bool
|
||||||
where A: Into<ExpandedProgramPoint>,
|
where
|
||||||
B: Into<ExpandedProgramPoint>
|
A: Into<ExpandedProgramPoint>,
|
||||||
|
B: Into<ExpandedProgramPoint>,
|
||||||
{
|
{
|
||||||
let a = a.into();
|
let a = a.into();
|
||||||
let b = b.into();
|
let b = b.into();
|
||||||
@@ -126,12 +129,16 @@ impl DominatorTree {
|
|||||||
/// Find the last instruction in `a` that dominates `b`.
|
/// Find the last instruction in `a` that dominates `b`.
|
||||||
/// If no instructions in `a` dominate `b`, return `None`.
|
/// If no instructions in `a` dominate `b`, return `None`.
|
||||||
fn last_dominator<B>(&self, a: Ebb, b: B, layout: &Layout) -> Option<Inst>
|
fn last_dominator<B>(&self, a: Ebb, b: B, layout: &Layout) -> Option<Inst>
|
||||||
where B: Into<ExpandedProgramPoint>
|
where
|
||||||
|
B: Into<ExpandedProgramPoint>,
|
||||||
{
|
{
|
||||||
let (mut ebb_b, mut inst_b) = match b.into() {
|
let (mut ebb_b, mut inst_b) = match b.into() {
|
||||||
ExpandedProgramPoint::Ebb(ebb) => (ebb, None),
|
ExpandedProgramPoint::Ebb(ebb) => (ebb, None),
|
||||||
ExpandedProgramPoint::Inst(inst) => {
|
ExpandedProgramPoint::Inst(inst) => {
|
||||||
(layout.inst_ebb(inst).expect("Instruction not in layout."), Some(inst))
|
(
|
||||||
|
layout.inst_ebb(inst).expect("Instruction not in layout."),
|
||||||
|
Some(inst),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let rpo_a = self.nodes[a].rpo_number;
|
let rpo_a = self.nodes[a].rpo_number;
|
||||||
@@ -149,22 +156,29 @@ impl DominatorTree {
|
|||||||
/// Compute the common dominator of two basic blocks.
|
/// Compute the common dominator of two basic blocks.
|
||||||
///
|
///
|
||||||
/// Both basic blocks are assumed to be reachable.
|
/// Both basic blocks are assumed to be reachable.
|
||||||
pub fn common_dominator(&self,
|
pub fn common_dominator(
|
||||||
mut a: BasicBlock,
|
&self,
|
||||||
mut b: BasicBlock,
|
mut a: BasicBlock,
|
||||||
layout: &Layout)
|
mut b: BasicBlock,
|
||||||
-> BasicBlock {
|
layout: &Layout,
|
||||||
|
) -> BasicBlock {
|
||||||
loop {
|
loop {
|
||||||
match self.rpo_cmp_ebb(a.0, b.0) {
|
match self.rpo_cmp_ebb(a.0, b.0) {
|
||||||
Ordering::Less => {
|
Ordering::Less => {
|
||||||
// `a` comes before `b` in the RPO. Move `b` up.
|
// `a` comes before `b` in the RPO. Move `b` up.
|
||||||
let idom = self.nodes[b.0].idom.expect("Unreachable basic block?");
|
let idom = self.nodes[b.0].idom.expect("Unreachable basic block?");
|
||||||
b = (layout.inst_ebb(idom).expect("Dangling idom instruction"), idom);
|
b = (
|
||||||
|
layout.inst_ebb(idom).expect("Dangling idom instruction"),
|
||||||
|
idom,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
Ordering::Greater => {
|
Ordering::Greater => {
|
||||||
// `b` comes before `a` in the RPO. Move `a` up.
|
// `b` comes before `a` in the RPO. Move `a` up.
|
||||||
let idom = self.nodes[a.0].idom.expect("Unreachable basic block?");
|
let idom = self.nodes[a.0].idom.expect("Unreachable basic block?");
|
||||||
a = (layout.inst_ebb(idom).expect("Dangling idom instruction"), idom);
|
a = (
|
||||||
|
layout.inst_ebb(idom).expect("Dangling idom instruction"),
|
||||||
|
idom,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
Ordering::Equal => break,
|
Ordering::Equal => break,
|
||||||
}
|
}
|
||||||
@@ -327,15 +341,16 @@ impl DominatorTree {
|
|||||||
// Get an iterator with just the reachable, already visited predecessors to `ebb`.
|
// Get an iterator with just the reachable, already visited predecessors to `ebb`.
|
||||||
// Note that during the first pass, `rpo_number` is 1 for reachable blocks that haven't
|
// Note that during the first pass, `rpo_number` is 1 for reachable blocks that haven't
|
||||||
// been visited yet, 0 for unreachable blocks.
|
// been visited yet, 0 for unreachable blocks.
|
||||||
let mut reachable_preds = cfg.get_predecessors(ebb)
|
let mut reachable_preds = cfg.get_predecessors(ebb).iter().cloned().filter(
|
||||||
.iter()
|
|&(pred, _)| {
|
||||||
.cloned()
|
self.nodes[pred].rpo_number > 1
|
||||||
.filter(|&(pred, _)| self.nodes[pred].rpo_number > 1);
|
},
|
||||||
|
);
|
||||||
|
|
||||||
// The RPO must visit at least one predecessor before this node.
|
// The RPO must visit at least one predecessor before this node.
|
||||||
let mut idom = reachable_preds
|
let mut idom = reachable_preds.next().expect(
|
||||||
.next()
|
"EBB node must have one reachable predecessor",
|
||||||
.expect("EBB node must have one reachable predecessor");
|
);
|
||||||
|
|
||||||
for pred in reachable_preds {
|
for pred in reachable_preds {
|
||||||
idom = self.common_dominator(idom, pred, layout);
|
idom = self.common_dominator(idom, pred, layout);
|
||||||
@@ -383,10 +398,11 @@ impl DominatorTree {
|
|||||||
// forward in RPO numbers and backwards in the postorder list of EBBs, renumbering the Ebbs
|
// forward in RPO numbers and backwards in the postorder list of EBBs, renumbering the Ebbs
|
||||||
// until we find a gap
|
// until we find a gap
|
||||||
for (&current_ebb, current_rpo) in
|
for (&current_ebb, current_rpo) in
|
||||||
self.postorder[0..ebb_postorder_index]
|
self.postorder[0..ebb_postorder_index].iter().rev().zip(
|
||||||
.iter()
|
inserted_rpo_number +
|
||||||
.rev()
|
1..,
|
||||||
.zip(inserted_rpo_number + 1..) {
|
)
|
||||||
|
{
|
||||||
if self.nodes[current_ebb].rpo_number < current_rpo {
|
if self.nodes[current_ebb].rpo_number < current_rpo {
|
||||||
// There is no gap, we renumber
|
// There is no gap, we renumber
|
||||||
self.nodes[current_ebb].rpo_number = current_rpo;
|
self.nodes[current_ebb].rpo_number = current_rpo;
|
||||||
@@ -457,10 +473,14 @@ mod test {
|
|||||||
|
|
||||||
assert_eq!(dt.rpo_cmp(ebb3, ebb3, &cur.func.layout), Ordering::Equal);
|
assert_eq!(dt.rpo_cmp(ebb3, ebb3, &cur.func.layout), Ordering::Equal);
|
||||||
assert_eq!(dt.rpo_cmp(ebb3, ebb1, &cur.func.layout), Ordering::Less);
|
assert_eq!(dt.rpo_cmp(ebb3, ebb1, &cur.func.layout), Ordering::Less);
|
||||||
assert_eq!(dt.rpo_cmp(ebb3, jmp_ebb3_ebb1, &cur.func.layout),
|
assert_eq!(
|
||||||
Ordering::Less);
|
dt.rpo_cmp(ebb3, jmp_ebb3_ebb1, &cur.func.layout),
|
||||||
assert_eq!(dt.rpo_cmp(jmp_ebb3_ebb1, jmp_ebb1_ebb2, &cur.func.layout),
|
Ordering::Less
|
||||||
Ordering::Less);
|
);
|
||||||
|
assert_eq!(
|
||||||
|
dt.rpo_cmp(jmp_ebb3_ebb1, jmp_ebb1_ebb2, &cur.func.layout),
|
||||||
|
Ordering::Less
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(dt.cfg_postorder(), &[ebb2, ebb0, ebb1, ebb3]);
|
assert_eq!(dt.cfg_postorder(), &[ebb2, ebb0, ebb1, ebb3]);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -212,12 +212,13 @@ impl<T: EntityRef> ListPool<T> {
|
|||||||
/// Reallocate a block to a different size class.
|
/// Reallocate a block to a different size class.
|
||||||
///
|
///
|
||||||
/// Copy `elems_to_copy` elements from the old to the new block.
|
/// Copy `elems_to_copy` elements from the old to the new block.
|
||||||
fn realloc(&mut self,
|
fn realloc(
|
||||||
block: usize,
|
&mut self,
|
||||||
from_sclass: SizeClass,
|
block: usize,
|
||||||
to_sclass: SizeClass,
|
from_sclass: SizeClass,
|
||||||
elems_to_copy: usize)
|
to_sclass: SizeClass,
|
||||||
-> usize {
|
elems_to_copy: usize,
|
||||||
|
) -> usize {
|
||||||
assert!(elems_to_copy <= sclass_size(from_sclass));
|
assert!(elems_to_copy <= sclass_size(from_sclass));
|
||||||
assert!(elems_to_copy <= sclass_size(to_sclass));
|
assert!(elems_to_copy <= sclass_size(to_sclass));
|
||||||
let new_block = self.alloc(to_sclass);
|
let new_block = self.alloc(to_sclass);
|
||||||
@@ -384,7 +385,8 @@ impl<T: EntityRef> EntityList<T> {
|
|||||||
|
|
||||||
/// Appends multiple elements to the back of the list.
|
/// Appends multiple elements to the back of the list.
|
||||||
pub fn extend<I>(&mut self, elements: I, pool: &mut ListPool<T>)
|
pub fn extend<I>(&mut self, elements: I, pool: &mut ListPool<T>)
|
||||||
where I: IntoIterator<Item = T>
|
where
|
||||||
|
I: IntoIterator<Item = T>,
|
||||||
{
|
{
|
||||||
// TODO: use `size_hint()` to reduce reallocations.
|
// TODO: use `size_hint()` to reduce reallocations.
|
||||||
for x in elements {
|
for x in elements {
|
||||||
@@ -597,8 +599,10 @@ mod tests {
|
|||||||
|
|
||||||
list.extend([i1, i1, i2, i2, i3, i3, i4, i4].iter().cloned(), pool);
|
list.extend([i1, i1, i2, i2, i3, i3, i4, i4].iter().cloned(), pool);
|
||||||
assert_eq!(list.len(pool), 12);
|
assert_eq!(list.len(pool), 12);
|
||||||
assert_eq!(list.as_slice(pool),
|
assert_eq!(
|
||||||
&[i1, i2, i3, i4, i1, i1, i2, i2, i3, i3, i4, i4]);
|
list.as_slice(pool),
|
||||||
|
&[i1, i2, i3, i4, i1, i1, i2, i2, i3, i3, i4, i4]
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@@ -14,8 +14,9 @@ use std::ops::{Index, IndexMut};
|
|||||||
/// all keys have a default entry from the beginning.
|
/// all keys have a default entry from the beginning.
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct EntityMap<K, V>
|
pub struct EntityMap<K, V>
|
||||||
where K: EntityRef,
|
where
|
||||||
V: Clone
|
K: EntityRef,
|
||||||
|
V: Clone,
|
||||||
{
|
{
|
||||||
elems: Vec<V>,
|
elems: Vec<V>,
|
||||||
default: V,
|
default: V,
|
||||||
@@ -24,12 +25,14 @@ pub struct EntityMap<K, V>
|
|||||||
|
|
||||||
/// Shared `EntityMap` implementation for all value types.
|
/// Shared `EntityMap` implementation for all value types.
|
||||||
impl<K, V> EntityMap<K, V>
|
impl<K, V> EntityMap<K, V>
|
||||||
where K: EntityRef,
|
where
|
||||||
V: Clone
|
K: EntityRef,
|
||||||
|
V: Clone,
|
||||||
{
|
{
|
||||||
/// Create a new empty map.
|
/// Create a new empty map.
|
||||||
pub fn new() -> Self
|
pub fn new() -> Self
|
||||||
where V: Default
|
where
|
||||||
|
V: Default,
|
||||||
{
|
{
|
||||||
EntityMap {
|
EntityMap {
|
||||||
elems: Vec::new(),
|
elems: Vec::new(),
|
||||||
@@ -68,8 +71,9 @@ impl<K, V> EntityMap<K, V>
|
|||||||
///
|
///
|
||||||
/// All keys are permitted. Untouched entries have the default value.
|
/// All keys are permitted. Untouched entries have the default value.
|
||||||
impl<K, V> Index<K> for EntityMap<K, V>
|
impl<K, V> Index<K> for EntityMap<K, V>
|
||||||
where K: EntityRef,
|
where
|
||||||
V: Clone
|
K: EntityRef,
|
||||||
|
V: Clone,
|
||||||
{
|
{
|
||||||
type Output = V;
|
type Output = V;
|
||||||
|
|
||||||
@@ -82,8 +86,9 @@ impl<K, V> Index<K> for EntityMap<K, V>
|
|||||||
///
|
///
|
||||||
/// The map grows as needed to accommodate new keys.
|
/// The map grows as needed to accommodate new keys.
|
||||||
impl<K, V> IndexMut<K> for EntityMap<K, V>
|
impl<K, V> IndexMut<K> for EntityMap<K, V>
|
||||||
where K: EntityRef,
|
where
|
||||||
V: Clone
|
K: EntityRef,
|
||||||
|
V: Clone,
|
||||||
{
|
{
|
||||||
fn index_mut(&mut self, k: K) -> &mut V {
|
fn index_mut(&mut self, k: K) -> &mut V {
|
||||||
let i = k.index();
|
let i = k.index();
|
||||||
|
|||||||
@@ -14,14 +14,16 @@ use std::ops::{Index, IndexMut};
|
|||||||
/// conflicting references will be created. Using unknown keys for indexing will cause a panic.
|
/// conflicting references will be created. Using unknown keys for indexing will cause a panic.
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct PrimaryMap<K, V>
|
pub struct PrimaryMap<K, V>
|
||||||
where K: EntityRef
|
where
|
||||||
|
K: EntityRef,
|
||||||
{
|
{
|
||||||
elems: Vec<V>,
|
elems: Vec<V>,
|
||||||
unused: PhantomData<K>,
|
unused: PhantomData<K>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<K, V> PrimaryMap<K, V>
|
impl<K, V> PrimaryMap<K, V>
|
||||||
where K: EntityRef
|
where
|
||||||
|
K: EntityRef,
|
||||||
{
|
{
|
||||||
/// Create a new empty map.
|
/// Create a new empty map.
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
@@ -77,7 +79,8 @@ impl<K, V> PrimaryMap<K, V>
|
|||||||
/// Immutable indexing into an `PrimaryMap`.
|
/// Immutable indexing into an `PrimaryMap`.
|
||||||
/// The indexed value must be in the map.
|
/// The indexed value must be in the map.
|
||||||
impl<K, V> Index<K> for PrimaryMap<K, V>
|
impl<K, V> Index<K> for PrimaryMap<K, V>
|
||||||
where K: EntityRef
|
where
|
||||||
|
K: EntityRef,
|
||||||
{
|
{
|
||||||
type Output = V;
|
type Output = V;
|
||||||
|
|
||||||
@@ -88,7 +91,8 @@ impl<K, V> Index<K> for PrimaryMap<K, V>
|
|||||||
|
|
||||||
/// Mutable indexing into an `PrimaryMap`.
|
/// Mutable indexing into an `PrimaryMap`.
|
||||||
impl<K, V> IndexMut<K> for PrimaryMap<K, V>
|
impl<K, V> IndexMut<K> for PrimaryMap<K, V>
|
||||||
where K: EntityRef
|
where
|
||||||
|
K: EntityRef,
|
||||||
{
|
{
|
||||||
fn index_mut(&mut self, k: K) -> &mut V {
|
fn index_mut(&mut self, k: K) -> &mut V {
|
||||||
&mut self.elems[k.index()]
|
&mut self.elems[k.index()]
|
||||||
|
|||||||
@@ -51,16 +51,18 @@ pub trait SparseMapValue<K> {
|
|||||||
/// - `SparseMap` requires the values to implement `SparseMapValue<K>` which means that they must
|
/// - `SparseMap` requires the values to implement `SparseMapValue<K>` which means that they must
|
||||||
/// contain their own key.
|
/// contain their own key.
|
||||||
pub struct SparseMap<K, V>
|
pub struct SparseMap<K, V>
|
||||||
where K: EntityRef,
|
where
|
||||||
V: SparseMapValue<K>
|
K: EntityRef,
|
||||||
|
V: SparseMapValue<K>,
|
||||||
{
|
{
|
||||||
sparse: EntityMap<K, u32>,
|
sparse: EntityMap<K, u32>,
|
||||||
dense: Vec<V>,
|
dense: Vec<V>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<K, V> SparseMap<K, V>
|
impl<K, V> SparseMap<K, V>
|
||||||
where K: EntityRef,
|
where
|
||||||
V: SparseMapValue<K>
|
K: EntityRef,
|
||||||
|
V: SparseMapValue<K>,
|
||||||
{
|
{
|
||||||
/// Create a new empty mapping.
|
/// Create a new empty mapping.
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
@@ -191,8 +193,9 @@ impl<K, V> SparseMap<K, V>
|
|||||||
|
|
||||||
/// Iterating over the elements of a set.
|
/// Iterating over the elements of a set.
|
||||||
impl<'a, K, V> IntoIterator for &'a SparseMap<K, V>
|
impl<'a, K, V> IntoIterator for &'a SparseMap<K, V>
|
||||||
where K: EntityRef,
|
where
|
||||||
V: SparseMapValue<K>
|
K: EntityRef,
|
||||||
|
V: SparseMapValue<K>,
|
||||||
{
|
{
|
||||||
type Item = &'a V;
|
type Item = &'a V;
|
||||||
type IntoIter = slice::Iter<'a, V>;
|
type IntoIter = slice::Iter<'a, V>;
|
||||||
@@ -204,7 +207,8 @@ impl<'a, K, V> IntoIterator for &'a SparseMap<K, V>
|
|||||||
|
|
||||||
/// Any `EntityRef` can be used as a sparse map value representing itself.
|
/// Any `EntityRef` can be used as a sparse map value representing itself.
|
||||||
impl<T> SparseMapValue<T> for T
|
impl<T> SparseMapValue<T> for T
|
||||||
where T: EntityRef
|
where
|
||||||
|
T: EntityRef,
|
||||||
{
|
{
|
||||||
fn key(&self) -> T {
|
fn key(&self) -> T {
|
||||||
*self
|
*self
|
||||||
@@ -290,8 +294,10 @@ mod tests {
|
|||||||
assert_eq!(map.insert(Obj(i0, "baz")), None);
|
assert_eq!(map.insert(Obj(i0, "baz")), None);
|
||||||
|
|
||||||
// Iteration order = insertion order when nothing has been removed yet.
|
// Iteration order = insertion order when nothing has been removed yet.
|
||||||
assert_eq!(map.values().map(|obj| obj.1).collect::<Vec<_>>(),
|
assert_eq!(
|
||||||
["foo", "bar", "baz"]);
|
map.values().map(|obj| obj.1).collect::<Vec<_>>(),
|
||||||
|
["foo", "bar", "baz"]
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(map.len(), 3);
|
assert_eq!(map.len(), 3);
|
||||||
assert_eq!(map.get(i0), Some(&Obj(i0, "baz")));
|
assert_eq!(map.get(i0), Some(&Obj(i0, "baz")));
|
||||||
|
|||||||
@@ -89,7 +89,8 @@ impl<'f, IIB: InstInserterBase<'f>> InsertBuilder<'f, IIB> {
|
|||||||
///
|
///
|
||||||
/// The `reuse` argument is expected to be an array of `Option<Value>`.
|
/// The `reuse` argument is expected to be an array of `Option<Value>`.
|
||||||
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
|
pub fn with_results<Array>(self, reuse: Array) -> InsertReuseBuilder<'f, IIB, Array>
|
||||||
where Array: AsRef<[Option<Value>]>
|
where
|
||||||
|
Array: AsRef<[Option<Value>]>,
|
||||||
{
|
{
|
||||||
InsertReuseBuilder {
|
InsertReuseBuilder {
|
||||||
inserter: self.inserter,
|
inserter: self.inserter,
|
||||||
@@ -134,8 +135,9 @@ impl<'f, IIB: InstInserterBase<'f>> InstBuilderBase<'f> for InsertBuilder<'f, II
|
|||||||
|
|
||||||
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
|
/// Builder that inserts a new instruction like `InsertBuilder`, but reusing result values.
|
||||||
pub struct InsertReuseBuilder<'f, IIB, Array>
|
pub struct InsertReuseBuilder<'f, IIB, Array>
|
||||||
where IIB: InstInserterBase<'f>,
|
where
|
||||||
Array: AsRef<[Option<Value>]>
|
IIB: InstInserterBase<'f>,
|
||||||
|
Array: AsRef<[Option<Value>]>,
|
||||||
{
|
{
|
||||||
inserter: IIB,
|
inserter: IIB,
|
||||||
reuse: Array,
|
reuse: Array,
|
||||||
|
|||||||
@@ -89,17 +89,17 @@ impl Display for IntCC {
|
|||||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||||
use self::IntCC::*;
|
use self::IntCC::*;
|
||||||
f.write_str(match *self {
|
f.write_str(match *self {
|
||||||
Equal => "eq",
|
Equal => "eq",
|
||||||
NotEqual => "ne",
|
NotEqual => "ne",
|
||||||
SignedGreaterThan => "sgt",
|
SignedGreaterThan => "sgt",
|
||||||
SignedGreaterThanOrEqual => "sge",
|
SignedGreaterThanOrEqual => "sge",
|
||||||
SignedLessThan => "slt",
|
SignedLessThan => "slt",
|
||||||
SignedLessThanOrEqual => "sle",
|
SignedLessThanOrEqual => "sle",
|
||||||
UnsignedGreaterThan => "ugt",
|
UnsignedGreaterThan => "ugt",
|
||||||
UnsignedGreaterThanOrEqual => "uge",
|
UnsignedGreaterThanOrEqual => "uge",
|
||||||
UnsignedLessThan => "ult",
|
UnsignedLessThan => "ult",
|
||||||
UnsignedLessThanOrEqual => "ule",
|
UnsignedLessThanOrEqual => "ule",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -220,21 +220,21 @@ impl Display for FloatCC {
|
|||||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||||
use self::FloatCC::*;
|
use self::FloatCC::*;
|
||||||
f.write_str(match *self {
|
f.write_str(match *self {
|
||||||
Ordered => "ord",
|
Ordered => "ord",
|
||||||
Unordered => "uno",
|
Unordered => "uno",
|
||||||
Equal => "eq",
|
Equal => "eq",
|
||||||
NotEqual => "ne",
|
NotEqual => "ne",
|
||||||
OrderedNotEqual => "one",
|
OrderedNotEqual => "one",
|
||||||
UnorderedOrEqual => "ueq",
|
UnorderedOrEqual => "ueq",
|
||||||
LessThan => "lt",
|
LessThan => "lt",
|
||||||
LessThanOrEqual => "le",
|
LessThanOrEqual => "le",
|
||||||
GreaterThan => "gt",
|
GreaterThan => "gt",
|
||||||
GreaterThanOrEqual => "ge",
|
GreaterThanOrEqual => "ge",
|
||||||
UnorderedOrLessThan => "ult",
|
UnorderedOrLessThan => "ult",
|
||||||
UnorderedOrLessThanOrEqual => "ule",
|
UnorderedOrLessThanOrEqual => "ule",
|
||||||
UnorderedOrGreaterThan => "ugt",
|
UnorderedOrGreaterThan => "ugt",
|
||||||
UnorderedOrGreaterThanOrEqual => "uge",
|
UnorderedOrGreaterThanOrEqual => "uge",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -267,16 +267,18 @@ impl FromStr for FloatCC {
|
|||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
static INT_ALL: [IntCC; 10] = [IntCC::Equal,
|
static INT_ALL: [IntCC; 10] = [
|
||||||
IntCC::NotEqual,
|
IntCC::Equal,
|
||||||
IntCC::SignedLessThan,
|
IntCC::NotEqual,
|
||||||
IntCC::SignedGreaterThanOrEqual,
|
IntCC::SignedLessThan,
|
||||||
IntCC::SignedGreaterThan,
|
IntCC::SignedGreaterThanOrEqual,
|
||||||
IntCC::SignedLessThanOrEqual,
|
IntCC::SignedGreaterThan,
|
||||||
IntCC::UnsignedLessThan,
|
IntCC::SignedLessThanOrEqual,
|
||||||
IntCC::UnsignedGreaterThanOrEqual,
|
IntCC::UnsignedLessThan,
|
||||||
IntCC::UnsignedGreaterThan,
|
IntCC::UnsignedGreaterThanOrEqual,
|
||||||
IntCC::UnsignedLessThanOrEqual];
|
IntCC::UnsignedGreaterThan,
|
||||||
|
IntCC::UnsignedLessThanOrEqual,
|
||||||
|
];
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn int_inverse() {
|
fn int_inverse() {
|
||||||
@@ -306,20 +308,22 @@ mod tests {
|
|||||||
assert_eq!("bogus".parse::<IntCC>(), Err(()));
|
assert_eq!("bogus".parse::<IntCC>(), Err(()));
|
||||||
}
|
}
|
||||||
|
|
||||||
static FLOAT_ALL: [FloatCC; 14] = [FloatCC::Ordered,
|
static FLOAT_ALL: [FloatCC; 14] = [
|
||||||
FloatCC::Unordered,
|
FloatCC::Ordered,
|
||||||
FloatCC::Equal,
|
FloatCC::Unordered,
|
||||||
FloatCC::NotEqual,
|
FloatCC::Equal,
|
||||||
FloatCC::OrderedNotEqual,
|
FloatCC::NotEqual,
|
||||||
FloatCC::UnorderedOrEqual,
|
FloatCC::OrderedNotEqual,
|
||||||
FloatCC::LessThan,
|
FloatCC::UnorderedOrEqual,
|
||||||
FloatCC::LessThanOrEqual,
|
FloatCC::LessThan,
|
||||||
FloatCC::GreaterThan,
|
FloatCC::LessThanOrEqual,
|
||||||
FloatCC::GreaterThanOrEqual,
|
FloatCC::GreaterThan,
|
||||||
FloatCC::UnorderedOrLessThan,
|
FloatCC::GreaterThanOrEqual,
|
||||||
FloatCC::UnorderedOrLessThanOrEqual,
|
FloatCC::UnorderedOrLessThan,
|
||||||
FloatCC::UnorderedOrGreaterThan,
|
FloatCC::UnorderedOrLessThanOrEqual,
|
||||||
FloatCC::UnorderedOrGreaterThanOrEqual];
|
FloatCC::UnorderedOrGreaterThan,
|
||||||
|
FloatCC::UnorderedOrGreaterThanOrEqual,
|
||||||
|
];
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn float_inverse() {
|
fn float_inverse() {
|
||||||
|
|||||||
@@ -153,17 +153,21 @@ impl DataFlowGraph {
|
|||||||
pub fn value_def(&self, v: Value) -> ValueDef {
|
pub fn value_def(&self, v: Value) -> ValueDef {
|
||||||
match self.values[v] {
|
match self.values[v] {
|
||||||
ValueData::Inst { inst, num, .. } => {
|
ValueData::Inst { inst, num, .. } => {
|
||||||
assert_eq!(Some(v),
|
assert_eq!(
|
||||||
self.results[inst].get(num as usize, &self.value_lists),
|
Some(v),
|
||||||
"Dangling result value {}: {}",
|
self.results[inst].get(num as usize, &self.value_lists),
|
||||||
v,
|
"Dangling result value {}: {}",
|
||||||
self.display_inst(inst, None));
|
v,
|
||||||
|
self.display_inst(inst, None)
|
||||||
|
);
|
||||||
ValueDef::Res(inst, num as usize)
|
ValueDef::Res(inst, num as usize)
|
||||||
}
|
}
|
||||||
ValueData::Arg { ebb, num, .. } => {
|
ValueData::Arg { ebb, num, .. } => {
|
||||||
assert_eq!(Some(v),
|
assert_eq!(
|
||||||
self.ebbs[ebb].args.get(num as usize, &self.value_lists),
|
Some(v),
|
||||||
"Dangling EBB argument value");
|
self.ebbs[ebb].args.get(num as usize, &self.value_lists),
|
||||||
|
"Dangling EBB argument value"
|
||||||
|
);
|
||||||
ValueDef::Arg(ebb, num as usize)
|
ValueDef::Arg(ebb, num as usize)
|
||||||
}
|
}
|
||||||
ValueData::Alias { original, .. } => {
|
ValueData::Alias { original, .. } => {
|
||||||
@@ -247,19 +251,23 @@ impl DataFlowGraph {
|
|||||||
// Try to create short alias chains by finding the original source value.
|
// Try to create short alias chains by finding the original source value.
|
||||||
// This also avoids the creation of loops.
|
// This also avoids the creation of loops.
|
||||||
let original = self.resolve_aliases(src);
|
let original = self.resolve_aliases(src);
|
||||||
assert_ne!(dest,
|
assert_ne!(
|
||||||
original,
|
dest,
|
||||||
"Aliasing {} to {} would create a loop",
|
original,
|
||||||
dest,
|
"Aliasing {} to {} would create a loop",
|
||||||
src);
|
dest,
|
||||||
|
src
|
||||||
|
);
|
||||||
let ty = self.value_type(original);
|
let ty = self.value_type(original);
|
||||||
assert_eq!(self.value_type(dest),
|
assert_eq!(
|
||||||
ty,
|
self.value_type(dest),
|
||||||
"Aliasing {} to {} would change its type {} to {}",
|
ty,
|
||||||
dest,
|
"Aliasing {} to {} would change its type {} to {}",
|
||||||
src,
|
dest,
|
||||||
self.value_type(dest),
|
src,
|
||||||
ty);
|
self.value_type(dest),
|
||||||
|
ty
|
||||||
|
);
|
||||||
|
|
||||||
self.values[dest] = ValueData::Alias { ty, original };
|
self.values[dest] = ValueData::Alias { ty, original };
|
||||||
}
|
}
|
||||||
@@ -274,29 +282,36 @@ impl DataFlowGraph {
|
|||||||
/// cleared, so it likely needs to be removed from the graph.
|
/// cleared, so it likely needs to be removed from the graph.
|
||||||
///
|
///
|
||||||
pub fn replace_with_aliases(&mut self, dest_inst: Inst, src_inst: Inst) {
|
pub fn replace_with_aliases(&mut self, dest_inst: Inst, src_inst: Inst) {
|
||||||
debug_assert_ne!(dest_inst,
|
debug_assert_ne!(
|
||||||
src_inst,
|
dest_inst,
|
||||||
"Replacing {} with itself would create a loop",
|
src_inst,
|
||||||
dest_inst);
|
"Replacing {} with itself would create a loop",
|
||||||
debug_assert_eq!(self.results[dest_inst].len(&self.value_lists),
|
dest_inst
|
||||||
self.results[src_inst].len(&self.value_lists),
|
);
|
||||||
"Replacing {} with {} would produce a different number of results.",
|
debug_assert_eq!(
|
||||||
dest_inst,
|
self.results[dest_inst].len(&self.value_lists),
|
||||||
src_inst);
|
self.results[src_inst].len(&self.value_lists),
|
||||||
|
"Replacing {} with {} would produce a different number of results.",
|
||||||
|
dest_inst,
|
||||||
|
src_inst
|
||||||
|
);
|
||||||
|
|
||||||
for (&dest, &src) in self.results[dest_inst]
|
for (&dest, &src) in self.results[dest_inst]
|
||||||
.as_slice(&self.value_lists)
|
.as_slice(&self.value_lists)
|
||||||
.iter()
|
.iter()
|
||||||
.zip(self.results[src_inst].as_slice(&self.value_lists)) {
|
.zip(self.results[src_inst].as_slice(&self.value_lists))
|
||||||
|
{
|
||||||
let original = src;
|
let original = src;
|
||||||
let ty = self.value_type(original);
|
let ty = self.value_type(original);
|
||||||
assert_eq!(self.value_type(dest),
|
assert_eq!(
|
||||||
ty,
|
self.value_type(dest),
|
||||||
"Aliasing {} to {} would change its type {} to {}",
|
ty,
|
||||||
dest,
|
"Aliasing {} to {} would change its type {} to {}",
|
||||||
src,
|
dest,
|
||||||
self.value_type(dest),
|
src,
|
||||||
ty);
|
self.value_type(dest),
|
||||||
|
ty
|
||||||
|
);
|
||||||
|
|
||||||
self.values[dest] = ValueData::Alias { ty, original };
|
self.values[dest] = ValueData::Alias { ty, original };
|
||||||
}
|
}
|
||||||
@@ -371,10 +386,11 @@ impl DataFlowGraph {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns an object that displays `inst`.
|
/// Returns an object that displays `inst`.
|
||||||
pub fn display_inst<'a, I: Into<Option<&'a TargetIsa>>>(&'a self,
|
pub fn display_inst<'a, I: Into<Option<&'a TargetIsa>>>(
|
||||||
inst: Inst,
|
&'a self,
|
||||||
isa: I)
|
inst: Inst,
|
||||||
-> DisplayInst<'a> {
|
isa: I,
|
||||||
|
) -> DisplayInst<'a> {
|
||||||
DisplayInst(self, isa.into(), inst)
|
DisplayInst(self, isa.into(), inst)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -433,12 +449,14 @@ impl DataFlowGraph {
|
|||||||
/// Create a new set of result values for `inst` using `ctrl_typevar` to determine the result
|
/// Create a new set of result values for `inst` using `ctrl_typevar` to determine the result
|
||||||
/// types. Any values provided by `reuse` will be reused. When `reuse` is exhausted or when it
|
/// types. Any values provided by `reuse` will be reused. When `reuse` is exhausted or when it
|
||||||
/// produces `None`, a new value is created.
|
/// produces `None`, a new value is created.
|
||||||
pub fn make_inst_results_reusing<I>(&mut self,
|
pub fn make_inst_results_reusing<I>(
|
||||||
inst: Inst,
|
&mut self,
|
||||||
ctrl_typevar: Type,
|
inst: Inst,
|
||||||
reuse: I)
|
ctrl_typevar: Type,
|
||||||
-> usize
|
reuse: I,
|
||||||
where I: Iterator<Item = Option<Value>>
|
) -> usize
|
||||||
|
where
|
||||||
|
I: Iterator<Item = Option<Value>>,
|
||||||
{
|
{
|
||||||
let mut reuse = reuse.fuse();
|
let mut reuse = reuse.fuse();
|
||||||
let constraints = self.insts[inst].opcode().constraints();
|
let constraints = self.insts[inst].opcode().constraints();
|
||||||
@@ -478,9 +496,10 @@ impl DataFlowGraph {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Create an `InsertBuilder` that will insert an instruction at the cursor's current position.
|
/// Create an `InsertBuilder` that will insert an instruction at the cursor's current position.
|
||||||
pub fn ins<'c, 'fc: 'c, 'fd>(&'fd mut self,
|
pub fn ins<'c, 'fc: 'c, 'fd>(
|
||||||
at: &'c mut Cursor<'fc>)
|
&'fd mut self,
|
||||||
-> InsertBuilder<'fd, LayoutCursorInserter<'c, 'fc, 'fd>> {
|
at: &'c mut Cursor<'fc>,
|
||||||
|
) -> InsertBuilder<'fd, LayoutCursorInserter<'c, 'fc, 'fd>> {
|
||||||
InsertBuilder::new(LayoutCursorInserter::new(at, self))
|
InsertBuilder::new(LayoutCursorInserter::new(at, self))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -537,20 +556,24 @@ impl DataFlowGraph {
|
|||||||
_ => panic!("{} is not an instruction result value", old_value),
|
_ => panic!("{} is not an instruction result value", old_value),
|
||||||
};
|
};
|
||||||
let new_value = self.make_value(ValueData::Inst {
|
let new_value = self.make_value(ValueData::Inst {
|
||||||
ty: new_type,
|
ty: new_type,
|
||||||
num,
|
num,
|
||||||
inst,
|
inst,
|
||||||
});
|
});
|
||||||
let num = num as usize;
|
let num = num as usize;
|
||||||
let attached = mem::replace(self.results[inst]
|
let attached = mem::replace(
|
||||||
.get_mut(num, &mut self.value_lists)
|
self.results[inst]
|
||||||
.expect("Replacing detached result"),
|
.get_mut(num, &mut self.value_lists)
|
||||||
new_value);
|
.expect("Replacing detached result"),
|
||||||
assert_eq!(attached,
|
new_value,
|
||||||
old_value,
|
);
|
||||||
"{} wasn't detached from {}",
|
assert_eq!(
|
||||||
old_value,
|
attached,
|
||||||
self.display_inst(inst, None));
|
old_value,
|
||||||
|
"{} wasn't detached from {}",
|
||||||
|
old_value,
|
||||||
|
self.display_inst(inst, None)
|
||||||
|
);
|
||||||
new_value
|
new_value
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -560,19 +583,19 @@ impl DataFlowGraph {
|
|||||||
let num = self.results[inst].push(res, &mut self.value_lists);
|
let num = self.results[inst].push(res, &mut self.value_lists);
|
||||||
assert!(num <= u16::MAX as usize, "Too many result values");
|
assert!(num <= u16::MAX as usize, "Too many result values");
|
||||||
self.make_value(ValueData::Inst {
|
self.make_value(ValueData::Inst {
|
||||||
ty,
|
ty,
|
||||||
inst,
|
inst,
|
||||||
num: num as u16,
|
num: num as u16,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Append a new value argument to an instruction.
|
/// Append a new value argument to an instruction.
|
||||||
///
|
///
|
||||||
/// Panics if the instruction doesn't support arguments.
|
/// Panics if the instruction doesn't support arguments.
|
||||||
pub fn append_inst_arg(&mut self, inst: Inst, new_arg: Value) {
|
pub fn append_inst_arg(&mut self, inst: Inst, new_arg: Value) {
|
||||||
let mut branch_values = self.insts[inst]
|
let mut branch_values = self.insts[inst].take_value_list().expect(
|
||||||
.take_value_list()
|
"the instruction doesn't have value arguments",
|
||||||
.expect("the instruction doesn't have value arguments");
|
);
|
||||||
branch_values.push(new_arg, &mut self.value_lists);
|
branch_values.push(new_arg, &mut self.value_lists);
|
||||||
self.insts[inst].put_value_list(branch_values)
|
self.insts[inst].put_value_list(branch_values)
|
||||||
}
|
}
|
||||||
@@ -581,9 +604,9 @@ impl DataFlowGraph {
|
|||||||
///
|
///
|
||||||
/// This function panics if the instruction doesn't have any result.
|
/// This function panics if the instruction doesn't have any result.
|
||||||
pub fn first_result(&self, inst: Inst) -> Value {
|
pub fn first_result(&self, inst: Inst) -> Value {
|
||||||
self.results[inst]
|
self.results[inst].first(&self.value_lists).expect(
|
||||||
.first(&self.value_lists)
|
"Instruction has no results",
|
||||||
.expect("Instruction has no results")
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Test if `inst` has any result values currently.
|
/// Test if `inst` has any result values currently.
|
||||||
@@ -613,11 +636,12 @@ impl DataFlowGraph {
|
|||||||
/// called first.
|
/// called first.
|
||||||
///
|
///
|
||||||
/// Returns `None` if asked about a result index that is too large.
|
/// Returns `None` if asked about a result index that is too large.
|
||||||
pub fn compute_result_type(&self,
|
pub fn compute_result_type(
|
||||||
inst: Inst,
|
&self,
|
||||||
result_idx: usize,
|
inst: Inst,
|
||||||
ctrl_typevar: Type)
|
result_idx: usize,
|
||||||
-> Option<Type> {
|
ctrl_typevar: Type,
|
||||||
|
) -> Option<Type> {
|
||||||
let constraints = self.insts[inst].opcode().constraints();
|
let constraints = self.insts[inst].opcode().constraints();
|
||||||
let fixed_results = constraints.fixed_results();
|
let fixed_results = constraints.fixed_results();
|
||||||
|
|
||||||
@@ -626,13 +650,12 @@ impl DataFlowGraph {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Not a fixed result, try to extract a return type from the call signature.
|
// Not a fixed result, try to extract a return type from the call signature.
|
||||||
self.call_signature(inst)
|
self.call_signature(inst).and_then(|sigref| {
|
||||||
.and_then(|sigref| {
|
self.signatures[sigref]
|
||||||
self.signatures[sigref]
|
.return_types
|
||||||
.return_types
|
.get(result_idx - fixed_results)
|
||||||
.get(result_idx - fixed_results)
|
.map(|&arg| arg.value_type)
|
||||||
.map(|&arg| arg.value_type)
|
})
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the controlling type variable, or `VOID` if `inst` isn't polymorphic.
|
/// Get the controlling type variable, or `VOID` if `inst` isn't polymorphic.
|
||||||
@@ -644,8 +667,9 @@ impl DataFlowGraph {
|
|||||||
} else if constraints.requires_typevar_operand() {
|
} else if constraints.requires_typevar_operand() {
|
||||||
// Not all instruction formats have a designated operand, but in that case
|
// Not all instruction formats have a designated operand, but in that case
|
||||||
// `requires_typevar_operand()` should never be true.
|
// `requires_typevar_operand()` should never be true.
|
||||||
self.value_type(self[inst].typevar_operand(&self.value_lists)
|
self.value_type(self[inst].typevar_operand(&self.value_lists).expect(
|
||||||
.expect("Instruction format doesn't have a designated operand, bad opcode."))
|
"Instruction format doesn't have a designated operand, bad opcode.",
|
||||||
|
))
|
||||||
} else {
|
} else {
|
||||||
self.value_type(self.first_result(inst))
|
self.value_type(self.first_result(inst))
|
||||||
}
|
}
|
||||||
@@ -691,10 +715,10 @@ impl DataFlowGraph {
|
|||||||
let num = self.ebbs[ebb].args.push(arg, &mut self.value_lists);
|
let num = self.ebbs[ebb].args.push(arg, &mut self.value_lists);
|
||||||
assert!(num <= u16::MAX as usize, "Too many arguments to EBB");
|
assert!(num <= u16::MAX as usize, "Too many arguments to EBB");
|
||||||
self.make_value(ValueData::Arg {
|
self.make_value(ValueData::Arg {
|
||||||
ty,
|
ty,
|
||||||
num: num as u16,
|
num: num as u16,
|
||||||
ebb,
|
ebb,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Removes `val` from `ebb`'s arguments by swapping it with the last argument of `ebb`.
|
/// Removes `val` from `ebb`'s arguments by swapping it with the last argument of `ebb`.
|
||||||
@@ -712,9 +736,10 @@ impl DataFlowGraph {
|
|||||||
} else {
|
} else {
|
||||||
panic!("{} must be an EBB argument", val);
|
panic!("{} must be an EBB argument", val);
|
||||||
};
|
};
|
||||||
self.ebbs[ebb]
|
self.ebbs[ebb].args.swap_remove(
|
||||||
.args
|
num as usize,
|
||||||
.swap_remove(num as usize, &mut self.value_lists);
|
&mut self.value_lists,
|
||||||
|
);
|
||||||
if let Some(last_arg_val) = self.ebbs[ebb].args.get(num as usize, &self.value_lists) {
|
if let Some(last_arg_val) = self.ebbs[ebb].args.get(num as usize, &self.value_lists) {
|
||||||
// We update the position of the old last arg.
|
// We update the position of the old last arg.
|
||||||
if let ValueData::Arg { num: ref mut old_num, .. } = self.values[last_arg_val] {
|
if let ValueData::Arg { num: ref mut old_num, .. } = self.values[last_arg_val] {
|
||||||
@@ -734,23 +759,26 @@ impl DataFlowGraph {
|
|||||||
} else {
|
} else {
|
||||||
panic!("{} must be an EBB argument", val);
|
panic!("{} must be an EBB argument", val);
|
||||||
};
|
};
|
||||||
self.ebbs[ebb]
|
self.ebbs[ebb].args.remove(
|
||||||
.args
|
num as usize,
|
||||||
.remove(num as usize, &mut self.value_lists);
|
&mut self.value_lists,
|
||||||
|
);
|
||||||
for index in num..(self.ebb_args(ebb).len() as u16) {
|
for index in num..(self.ebb_args(ebb).len() as u16) {
|
||||||
match self.values[self.ebbs[ebb]
|
match self.values[self.ebbs[ebb]
|
||||||
.args
|
.args
|
||||||
.get(index as usize, &self.value_lists)
|
.get(index as usize, &self.value_lists)
|
||||||
.unwrap()] {
|
.unwrap()] {
|
||||||
ValueData::Arg { ref mut num, .. } => {
|
ValueData::Arg { ref mut num, .. } => {
|
||||||
*num -= 1;
|
*num -= 1;
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
panic!("{} must be an EBB argument",
|
panic!(
|
||||||
self.ebbs[ebb]
|
"{} must be an EBB argument",
|
||||||
.args
|
self.ebbs[ebb]
|
||||||
.get(index as usize, &self.value_lists)
|
.args
|
||||||
.unwrap())
|
.get(index as usize, &self.value_lists)
|
||||||
|
.unwrap()
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -791,10 +819,10 @@ impl DataFlowGraph {
|
|||||||
panic!("{} must be an EBB argument", old_arg);
|
panic!("{} must be an EBB argument", old_arg);
|
||||||
};
|
};
|
||||||
let new_arg = self.make_value(ValueData::Arg {
|
let new_arg = self.make_value(ValueData::Arg {
|
||||||
ty: new_type,
|
ty: new_type,
|
||||||
num,
|
num,
|
||||||
ebb,
|
ebb,
|
||||||
});
|
});
|
||||||
|
|
||||||
self.ebbs[ebb].args.as_mut_slice(&mut self.value_lists)[num as usize] = new_arg;
|
self.ebbs[ebb].args.as_mut_slice(&mut self.value_lists)[num as usize] = new_arg;
|
||||||
new_arg
|
new_arg
|
||||||
|
|||||||
@@ -218,7 +218,9 @@ mod tests {
|
|||||||
use std::mem;
|
use std::mem;
|
||||||
use packed_option::PackedOption;
|
use packed_option::PackedOption;
|
||||||
// This is the whole point of `PackedOption`.
|
// This is the whole point of `PackedOption`.
|
||||||
assert_eq!(mem::size_of::<Value>(),
|
assert_eq!(
|
||||||
mem::size_of::<PackedOption<Value>>());
|
mem::size_of::<Value>(),
|
||||||
|
mem::size_of::<PackedOption<Value>>()
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -55,11 +55,11 @@ impl Signature {
|
|||||||
let bytes = self.argument_types
|
let bytes = self.argument_types
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|arg| match arg.location {
|
.filter_map(|arg| match arg.location {
|
||||||
ArgumentLoc::Stack(offset) if offset >= 0 => {
|
ArgumentLoc::Stack(offset) if offset >= 0 => {
|
||||||
Some(offset as u32 + arg.value_type.bytes())
|
Some(offset as u32 + arg.value_type.bytes())
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
})
|
})
|
||||||
.fold(0, cmp::max);
|
.fold(0, cmp::max);
|
||||||
self.argument_bytes = Some(bytes);
|
self.argument_bytes = Some(bytes);
|
||||||
}
|
}
|
||||||
@@ -73,10 +73,11 @@ impl Signature {
|
|||||||
/// Wrapper type capable of displaying a `Signature` with correct register names.
|
/// Wrapper type capable of displaying a `Signature` with correct register names.
|
||||||
pub struct DisplaySignature<'a>(&'a Signature, Option<&'a RegInfo>);
|
pub struct DisplaySignature<'a>(&'a Signature, Option<&'a RegInfo>);
|
||||||
|
|
||||||
fn write_list(f: &mut fmt::Formatter,
|
fn write_list(
|
||||||
args: &[ArgumentType],
|
f: &mut fmt::Formatter,
|
||||||
regs: Option<&RegInfo>)
|
args: &[ArgumentType],
|
||||||
-> fmt::Result {
|
regs: Option<&RegInfo>,
|
||||||
|
) -> fmt::Result {
|
||||||
match args.split_first() {
|
match args.split_first() {
|
||||||
None => {}
|
None => {}
|
||||||
Some((first, rest)) => {
|
Some((first, rest)) => {
|
||||||
@@ -310,9 +311,9 @@ impl fmt::Display for CallConv {
|
|||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
use self::CallConv::*;
|
use self::CallConv::*;
|
||||||
f.write_str(match *self {
|
f.write_str(match *self {
|
||||||
Native => "native",
|
Native => "native",
|
||||||
SpiderWASM => "spiderwasm",
|
SpiderWASM => "spiderwasm",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -346,12 +347,14 @@ mod tests {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn argument_purpose() {
|
fn argument_purpose() {
|
||||||
let all_purpose = [ArgumentPurpose::Normal,
|
let all_purpose = [
|
||||||
ArgumentPurpose::StructReturn,
|
ArgumentPurpose::Normal,
|
||||||
ArgumentPurpose::Link,
|
ArgumentPurpose::StructReturn,
|
||||||
ArgumentPurpose::FramePointer,
|
ArgumentPurpose::Link,
|
||||||
ArgumentPurpose::CalleeSaved,
|
ArgumentPurpose::FramePointer,
|
||||||
ArgumentPurpose::VMContext];
|
ArgumentPurpose::CalleeSaved,
|
||||||
|
ArgumentPurpose::VMContext,
|
||||||
|
];
|
||||||
for (&e, &n) in all_purpose.iter().zip(PURPOSE_NAMES.iter()) {
|
for (&e, &n) in all_purpose.iter().zip(PURPOSE_NAMES.iter()) {
|
||||||
assert_eq!(e.to_string(), n);
|
assert_eq!(e.to_string(), n);
|
||||||
assert_eq!(Ok(e), n.parse());
|
assert_eq!(Ok(e), n.parse());
|
||||||
@@ -373,8 +376,9 @@ mod tests {
|
|||||||
assert_eq!(sig.to_string(), "(i32) spiderwasm");
|
assert_eq!(sig.to_string(), "(i32) spiderwasm");
|
||||||
sig.return_types.push(ArgumentType::new(F32));
|
sig.return_types.push(ArgumentType::new(F32));
|
||||||
assert_eq!(sig.to_string(), "(i32) -> f32 spiderwasm");
|
assert_eq!(sig.to_string(), "(i32) -> f32 spiderwasm");
|
||||||
sig.argument_types
|
sig.argument_types.push(
|
||||||
.push(ArgumentType::new(I32.by(4).unwrap()));
|
ArgumentType::new(I32.by(4).unwrap()),
|
||||||
|
);
|
||||||
assert_eq!(sig.to_string(), "(i32, i32x4) -> f32 spiderwasm");
|
assert_eq!(sig.to_string(), "(i32, i32x4) -> f32 spiderwasm");
|
||||||
sig.return_types.push(ArgumentType::new(B8));
|
sig.return_types.push(ArgumentType::new(B8));
|
||||||
assert_eq!(sig.to_string(), "(i32, i32x4) -> f32, b8 spiderwasm");
|
assert_eq!(sig.to_string(), "(i32, i32x4) -> f32, b8 spiderwasm");
|
||||||
@@ -391,7 +395,9 @@ mod tests {
|
|||||||
assert_eq!(sig.argument_bytes, Some(28));
|
assert_eq!(sig.argument_bytes, Some(28));
|
||||||
|
|
||||||
// Writing ABI-annotated signatures.
|
// Writing ABI-annotated signatures.
|
||||||
assert_eq!(sig.to_string(),
|
assert_eq!(
|
||||||
"(i32 [24], i32x4 [8]) -> f32, b8 spiderwasm");
|
sig.to_string(),
|
||||||
|
"(i32 [24], i32x4 [8]) -> f32, b8 spiderwasm"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -30,7 +30,8 @@ impl FunctionName {
|
|||||||
/// assert_eq!(name.to_string(), "#0a0908");
|
/// assert_eq!(name.to_string(), "#0a0908");
|
||||||
/// ```
|
/// ```
|
||||||
pub fn new<T>(v: T) -> FunctionName
|
pub fn new<T>(v: T) -> FunctionName
|
||||||
where T: Into<Vec<u8>>
|
where
|
||||||
|
T: Into<Vec<u8>>,
|
||||||
{
|
{
|
||||||
let vec = v.into();
|
let vec = v.into();
|
||||||
if vec.len() <= NAME_LENGTH_THRESHOLD {
|
if vec.len() <= NAME_LENGTH_THRESHOLD {
|
||||||
@@ -39,9 +40,9 @@ impl FunctionName {
|
|||||||
bytes[i] = byte;
|
bytes[i] = byte;
|
||||||
}
|
}
|
||||||
FunctionName(NameRepr::Short {
|
FunctionName(NameRepr::Short {
|
||||||
length: vec.len() as u8,
|
length: vec.len() as u8,
|
||||||
bytes: bytes,
|
bytes: bytes,
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
FunctionName(NameRepr::Long(vec))
|
FunctionName(NameRepr::Long(vec))
|
||||||
}
|
}
|
||||||
@@ -114,11 +115,17 @@ mod tests {
|
|||||||
assert_eq!(FunctionName::new("x").to_string(), "%x");
|
assert_eq!(FunctionName::new("x").to_string(), "%x");
|
||||||
assert_eq!(FunctionName::new("x_1").to_string(), "%x_1");
|
assert_eq!(FunctionName::new("x_1").to_string(), "%x_1");
|
||||||
assert_eq!(FunctionName::new(" ").to_string(), "#20");
|
assert_eq!(FunctionName::new(" ").to_string(), "#20");
|
||||||
assert_eq!(FunctionName::new("кретон").to_string(),
|
assert_eq!(
|
||||||
"#d0bad180d0b5d182d0bed0bd");
|
FunctionName::new("кретон").to_string(),
|
||||||
assert_eq!(FunctionName::new("印花棉布").to_string(),
|
"#d0bad180d0b5d182d0bed0bd"
|
||||||
"#e58db0e88ab1e6a389e5b883");
|
);
|
||||||
assert_eq!(FunctionName::new(vec![0, 1, 2, 3, 4, 5]).to_string(),
|
assert_eq!(
|
||||||
"#000102030405");
|
FunctionName::new("印花棉布").to_string(),
|
||||||
|
"#e58db0e88ab1e6a389e5b883"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
FunctionName::new(vec![0, 1, 2, 3, 4, 5]).to_string(),
|
||||||
|
"#000102030405"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -51,9 +51,9 @@ pub enum HeapStyle {
|
|||||||
impl fmt::Display for HeapData {
|
impl fmt::Display for HeapData {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
f.write_str(match self.style {
|
f.write_str(match self.style {
|
||||||
HeapStyle::Dynamic { .. } => "dynamic",
|
HeapStyle::Dynamic { .. } => "dynamic",
|
||||||
HeapStyle::Static { .. } => "static",
|
HeapStyle::Static { .. } => "static",
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
match self.base {
|
match self.base {
|
||||||
HeapBase::ReservedReg => write!(f, " reserved_reg")?,
|
HeapBase::ReservedReg => write!(f, " reserved_reg")?,
|
||||||
|
|||||||
@@ -192,10 +192,10 @@ impl FromStr for Uimm32 {
|
|||||||
// Parse a decimal or hexadecimal `Uimm32`, formatted as above.
|
// Parse a decimal or hexadecimal `Uimm32`, formatted as above.
|
||||||
fn from_str(s: &str) -> Result<Uimm32, &'static str> {
|
fn from_str(s: &str) -> Result<Uimm32, &'static str> {
|
||||||
parse_i64(s).and_then(|x| if 0 <= x && x <= u32::MAX as i64 {
|
parse_i64(s).and_then(|x| if 0 <= x && x <= u32::MAX as i64 {
|
||||||
Ok(Uimm32(x as u32))
|
Ok(Uimm32(x as u32))
|
||||||
} else {
|
} else {
|
||||||
Err("Uimm32 out of range")
|
Err("Uimm32 out of range")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -260,10 +260,10 @@ impl FromStr for Offset32 {
|
|||||||
return Err("Offset must begin with sign");
|
return Err("Offset must begin with sign");
|
||||||
}
|
}
|
||||||
parse_i64(s).and_then(|x| if i32::MIN as i64 <= x && x <= i32::MAX as i64 {
|
parse_i64(s).and_then(|x| if i32::MIN as i64 <= x && x <= i32::MAX as i64 {
|
||||||
Ok(Offset32::new(x as i32))
|
Ok(Offset32::new(x as i32))
|
||||||
} else {
|
} else {
|
||||||
Err("Offset out of range")
|
Err("Offset out of range")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -325,10 +325,10 @@ impl FromStr for Uoffset32 {
|
|||||||
return Err("Unsigned offset must begin with '+' sign");
|
return Err("Unsigned offset must begin with '+' sign");
|
||||||
}
|
}
|
||||||
parse_i64(s).and_then(|x| if 0 <= x && x <= u32::MAX as i64 {
|
parse_i64(s).and_then(|x| if 0 <= x && x <= u32::MAX as i64 {
|
||||||
Ok(Uoffset32::new(x as u32))
|
Ok(Uoffset32::new(x as u32))
|
||||||
} else {
|
} else {
|
||||||
Err("Offset out of range")
|
Err("Offset out of range")
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -458,20 +458,20 @@ fn parse_float(s: &str, w: u8, t: u8) -> Result<u64, &'static str> {
|
|||||||
if s2.starts_with("NaN:0x") {
|
if s2.starts_with("NaN:0x") {
|
||||||
// Quiet NaN with payload.
|
// Quiet NaN with payload.
|
||||||
return match u64::from_str_radix(&s2[6..], 16) {
|
return match u64::from_str_radix(&s2[6..], 16) {
|
||||||
Ok(payload) if payload < quiet_bit => {
|
Ok(payload) if payload < quiet_bit => {
|
||||||
Ok(sign_bit | max_e_bits | quiet_bit | payload)
|
Ok(sign_bit | max_e_bits | quiet_bit | payload)
|
||||||
}
|
}
|
||||||
_ => Err("Invalid NaN payload"),
|
_ => Err("Invalid NaN payload"),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
if s2.starts_with("sNaN:0x") {
|
if s2.starts_with("sNaN:0x") {
|
||||||
// Signaling NaN with payload.
|
// Signaling NaN with payload.
|
||||||
return match u64::from_str_radix(&s2[7..], 16) {
|
return match u64::from_str_radix(&s2[7..], 16) {
|
||||||
Ok(payload) if 0 < payload && payload < quiet_bit => {
|
Ok(payload) if 0 < payload && payload < quiet_bit => {
|
||||||
Ok(sign_bit | max_e_bits | payload)
|
Ok(sign_bit | max_e_bits | payload)
|
||||||
}
|
}
|
||||||
_ => Err("Invalid sNaN payload"),
|
_ => Err("Invalid sNaN payload"),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
return Err("Float must be hexadecimal");
|
return Err("Float must be hexadecimal");
|
||||||
@@ -662,7 +662,8 @@ mod tests {
|
|||||||
|
|
||||||
// Verify that `text` can be parsed as a `T` into a value that displays as `want`.
|
// Verify that `text` can be parsed as a `T` into a value that displays as `want`.
|
||||||
fn parse_ok<T: FromStr + Display>(text: &str, want: &str)
|
fn parse_ok<T: FromStr + Display>(text: &str, want: &str)
|
||||||
where <T as FromStr>::Err: Display
|
where
|
||||||
|
<T as FromStr>::Err: Display,
|
||||||
{
|
{
|
||||||
match text.parse::<T>() {
|
match text.parse::<T>() {
|
||||||
Err(s) => panic!("\"{}\".parse() error: {}", text, s),
|
Err(s) => panic!("\"{}\".parse() error: {}", text, s),
|
||||||
@@ -672,7 +673,8 @@ mod tests {
|
|||||||
|
|
||||||
// Verify that `text` fails to parse as `T` with the error `msg`.
|
// Verify that `text` fails to parse as `T` with the error `msg`.
|
||||||
fn parse_err<T: FromStr + Display>(text: &str, msg: &str)
|
fn parse_err<T: FromStr + Display>(text: &str, msg: &str)
|
||||||
where <T as FromStr>::Err: Display
|
where
|
||||||
|
<T as FromStr>::Err: Display,
|
||||||
{
|
{
|
||||||
match text.parse::<T>() {
|
match text.parse::<T>() {
|
||||||
Err(s) => assert_eq!(s.to_string(), msg),
|
Err(s) => assert_eq!(s.to_string(), msg),
|
||||||
@@ -781,18 +783,26 @@ mod tests {
|
|||||||
assert_eq!(Ieee32::with_float(1.0).to_string(), "0x1.000000p0");
|
assert_eq!(Ieee32::with_float(1.0).to_string(), "0x1.000000p0");
|
||||||
assert_eq!(Ieee32::with_float(1.5).to_string(), "0x1.800000p0");
|
assert_eq!(Ieee32::with_float(1.5).to_string(), "0x1.800000p0");
|
||||||
assert_eq!(Ieee32::with_float(0.5).to_string(), "0x1.000000p-1");
|
assert_eq!(Ieee32::with_float(0.5).to_string(), "0x1.000000p-1");
|
||||||
assert_eq!(Ieee32::with_float(f32::EPSILON).to_string(),
|
assert_eq!(
|
||||||
"0x1.000000p-23");
|
Ieee32::with_float(f32::EPSILON).to_string(),
|
||||||
|
"0x1.000000p-23"
|
||||||
|
);
|
||||||
assert_eq!(Ieee32::with_float(f32::MIN).to_string(), "-0x1.fffffep127");
|
assert_eq!(Ieee32::with_float(f32::MIN).to_string(), "-0x1.fffffep127");
|
||||||
assert_eq!(Ieee32::with_float(f32::MAX).to_string(), "0x1.fffffep127");
|
assert_eq!(Ieee32::with_float(f32::MAX).to_string(), "0x1.fffffep127");
|
||||||
// Smallest positive normal number.
|
// Smallest positive normal number.
|
||||||
assert_eq!(Ieee32::with_float(f32::MIN_POSITIVE).to_string(),
|
assert_eq!(
|
||||||
"0x1.000000p-126");
|
Ieee32::with_float(f32::MIN_POSITIVE).to_string(),
|
||||||
|
"0x1.000000p-126"
|
||||||
|
);
|
||||||
// Subnormals.
|
// Subnormals.
|
||||||
assert_eq!(Ieee32::with_float(f32::MIN_POSITIVE / 2.0).to_string(),
|
assert_eq!(
|
||||||
"0x0.800000p-126");
|
Ieee32::with_float(f32::MIN_POSITIVE / 2.0).to_string(),
|
||||||
assert_eq!(Ieee32::with_float(f32::MIN_POSITIVE * f32::EPSILON).to_string(),
|
"0x0.800000p-126"
|
||||||
"0x0.000002p-126");
|
);
|
||||||
|
assert_eq!(
|
||||||
|
Ieee32::with_float(f32::MIN_POSITIVE * f32::EPSILON).to_string(),
|
||||||
|
"0x0.000002p-126"
|
||||||
|
);
|
||||||
assert_eq!(Ieee32::with_float(f32::INFINITY).to_string(), "+Inf");
|
assert_eq!(Ieee32::with_float(f32::INFINITY).to_string(), "+Inf");
|
||||||
assert_eq!(Ieee32::with_float(f32::NEG_INFINITY).to_string(), "-Inf");
|
assert_eq!(Ieee32::with_float(f32::NEG_INFINITY).to_string(), "-Inf");
|
||||||
assert_eq!(Ieee32::with_float(f32::NAN).to_string(), "+NaN");
|
assert_eq!(Ieee32::with_float(f32::NAN).to_string(), "+NaN");
|
||||||
@@ -883,32 +893,48 @@ mod tests {
|
|||||||
assert_eq!(Ieee64::with_float(1.0).to_string(), "0x1.0000000000000p0");
|
assert_eq!(Ieee64::with_float(1.0).to_string(), "0x1.0000000000000p0");
|
||||||
assert_eq!(Ieee64::with_float(1.5).to_string(), "0x1.8000000000000p0");
|
assert_eq!(Ieee64::with_float(1.5).to_string(), "0x1.8000000000000p0");
|
||||||
assert_eq!(Ieee64::with_float(0.5).to_string(), "0x1.0000000000000p-1");
|
assert_eq!(Ieee64::with_float(0.5).to_string(), "0x1.0000000000000p-1");
|
||||||
assert_eq!(Ieee64::with_float(f64::EPSILON).to_string(),
|
assert_eq!(
|
||||||
"0x1.0000000000000p-52");
|
Ieee64::with_float(f64::EPSILON).to_string(),
|
||||||
assert_eq!(Ieee64::with_float(f64::MIN).to_string(),
|
"0x1.0000000000000p-52"
|
||||||
"-0x1.fffffffffffffp1023");
|
);
|
||||||
assert_eq!(Ieee64::with_float(f64::MAX).to_string(),
|
assert_eq!(
|
||||||
"0x1.fffffffffffffp1023");
|
Ieee64::with_float(f64::MIN).to_string(),
|
||||||
|
"-0x1.fffffffffffffp1023"
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
Ieee64::with_float(f64::MAX).to_string(),
|
||||||
|
"0x1.fffffffffffffp1023"
|
||||||
|
);
|
||||||
// Smallest positive normal number.
|
// Smallest positive normal number.
|
||||||
assert_eq!(Ieee64::with_float(f64::MIN_POSITIVE).to_string(),
|
assert_eq!(
|
||||||
"0x1.0000000000000p-1022");
|
Ieee64::with_float(f64::MIN_POSITIVE).to_string(),
|
||||||
|
"0x1.0000000000000p-1022"
|
||||||
|
);
|
||||||
// Subnormals.
|
// Subnormals.
|
||||||
assert_eq!(Ieee64::with_float(f64::MIN_POSITIVE / 2.0).to_string(),
|
assert_eq!(
|
||||||
"0x0.8000000000000p-1022");
|
Ieee64::with_float(f64::MIN_POSITIVE / 2.0).to_string(),
|
||||||
assert_eq!(Ieee64::with_float(f64::MIN_POSITIVE * f64::EPSILON).to_string(),
|
"0x0.8000000000000p-1022"
|
||||||
"0x0.0000000000001p-1022");
|
);
|
||||||
|
assert_eq!(
|
||||||
|
Ieee64::with_float(f64::MIN_POSITIVE * f64::EPSILON).to_string(),
|
||||||
|
"0x0.0000000000001p-1022"
|
||||||
|
);
|
||||||
assert_eq!(Ieee64::with_float(f64::INFINITY).to_string(), "+Inf");
|
assert_eq!(Ieee64::with_float(f64::INFINITY).to_string(), "+Inf");
|
||||||
assert_eq!(Ieee64::with_float(f64::NEG_INFINITY).to_string(), "-Inf");
|
assert_eq!(Ieee64::with_float(f64::NEG_INFINITY).to_string(), "-Inf");
|
||||||
assert_eq!(Ieee64::with_float(f64::NAN).to_string(), "+NaN");
|
assert_eq!(Ieee64::with_float(f64::NAN).to_string(), "+NaN");
|
||||||
assert_eq!(Ieee64::with_float(-f64::NAN).to_string(), "-NaN");
|
assert_eq!(Ieee64::with_float(-f64::NAN).to_string(), "-NaN");
|
||||||
// Construct some qNaNs with payloads.
|
// Construct some qNaNs with payloads.
|
||||||
assert_eq!(Ieee64(0x7ff8000000000001).to_string(), "+NaN:0x1");
|
assert_eq!(Ieee64(0x7ff8000000000001).to_string(), "+NaN:0x1");
|
||||||
assert_eq!(Ieee64(0x7ffc000000000001).to_string(),
|
assert_eq!(
|
||||||
"+NaN:0x4000000000001");
|
Ieee64(0x7ffc000000000001).to_string(),
|
||||||
|
"+NaN:0x4000000000001"
|
||||||
|
);
|
||||||
// Signaling NaNs.
|
// Signaling NaNs.
|
||||||
assert_eq!(Ieee64(0x7ff0000000000001).to_string(), "+sNaN:0x1");
|
assert_eq!(Ieee64(0x7ff0000000000001).to_string(), "+sNaN:0x1");
|
||||||
assert_eq!(Ieee64(0x7ff4000000000001).to_string(),
|
assert_eq!(
|
||||||
"+sNaN:0x4000000000001");
|
Ieee64(0x7ff4000000000001).to_string(),
|
||||||
|
"+sNaN:0x4000000000001"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@@ -481,7 +481,8 @@ impl OpcodeConstraints {
|
|||||||
pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
|
pub fn result_type(self, n: usize, ctrl_type: Type) -> Type {
|
assert!(n < self.fixed_results(), "Invalid result index");
|
assert!(n < self.fixed_results(), "Invalid result index");
|
if let ResolvedConstraint::Bound(t) =
|
if let ResolvedConstraint::Bound(t) =
|
OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type) {
|
OPERAND_CONSTRAINTS[self.constraint_offset() + n].resolve(ctrl_type)
|
|
{
|
t
|
t
|
} else {
|
} else {
|
panic!("Result constraints can't be free");
|
panic!("Result constraints can't be free");
|
@@ -494,8 +495,10 @@ impl OpcodeConstraints {
|
|||||||
/// Unlike results, it is possible for some input values to vary freely within a specific
|
/// Unlike results, it is possible for some input values to vary freely within a specific
|
||||||
/// `ValueTypeSet`. This is represented with the `ArgumentConstraint::Free` variant.
|
/// `ValueTypeSet`. This is represented with the `ArgumentConstraint::Free` variant.
|
||||||
pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
|
pub fn value_argument_constraint(self, n: usize, ctrl_type: Type) -> ResolvedConstraint {
|
||||||
assert!(n < self.fixed_value_arguments(),
|
assert!(
|
||||||
"Invalid value argument index");
|
n < self.fixed_value_arguments(),
|
||||||
|
"Invalid value argument index"
|
||||||
|
);
|
||||||
let offset = self.constraint_offset() + self.fixed_results();
|
let offset = self.constraint_offset() + self.fixed_results();
|
||||||
OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
|
OPERAND_CONSTRAINTS[offset + n].resolve(ctrl_type)
|
||||||
}
|
}
|
||||||
@@ -613,14 +616,14 @@ impl OperandConstraint {
|
|||||||
AsBool => Bound(ctrl_type.as_bool()),
|
AsBool => Bound(ctrl_type.as_bool()),
|
||||||
HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
|
HalfWidth => Bound(ctrl_type.half_width().expect("invalid type for half_width")),
|
||||||
DoubleWidth => {
|
DoubleWidth => {
|
||||||
Bound(ctrl_type
|
Bound(ctrl_type.double_width().expect(
|
||||||
.double_width()
|
"invalid type for double_width",
|
||||||
.expect("invalid type for double_width"))
|
))
|
||||||
}
|
}
|
||||||
HalfVector => {
|
HalfVector => {
|
||||||
Bound(ctrl_type
|
Bound(ctrl_type.half_vector().expect(
|
||||||
.half_vector()
|
"invalid type for half_vector",
|
||||||
.expect("invalid type for half_vector"))
|
))
|
||||||
}
|
}
|
||||||
DoubleVector => Bound(ctrl_type.by(2).expect("invalid type for double_vector")),
|
DoubleVector => Bound(ctrl_type.by(2).expect("invalid type for double_vector")),
|
||||||
}
|
}
|
||||||
@@ -688,10 +691,14 @@ mod tests {
|
|||||||
assert_eq!(a.fixed_value_arguments(), 2);
|
assert_eq!(a.fixed_value_arguments(), 2);
|
||||||
assert_eq!(a.result_type(0, types::I32), types::I32);
|
assert_eq!(a.result_type(0, types::I32), types::I32);
|
||||||
assert_eq!(a.result_type(0, types::I8), types::I8);
|
assert_eq!(a.result_type(0, types::I8), types::I8);
|
||||||
assert_eq!(a.value_argument_constraint(0, types::I32),
|
assert_eq!(
|
||||||
ResolvedConstraint::Bound(types::I32));
|
a.value_argument_constraint(0, types::I32),
|
||||||
assert_eq!(a.value_argument_constraint(1, types::I32),
|
ResolvedConstraint::Bound(types::I32)
|
||||||
ResolvedConstraint::Bound(types::I32));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
a.value_argument_constraint(1, types::I32),
|
||||||
|
ResolvedConstraint::Bound(types::I32)
|
||||||
|
);
|
||||||
|
|
||||||
let b = Opcode::Bitcast.constraints();
|
let b = Opcode::Bitcast.constraints();
|
||||||
assert!(!b.use_typevar_operand());
|
assert!(!b.use_typevar_operand());
|
||||||
|
|||||||
@@ -71,9 +71,9 @@ impl JumpTableData {
|
|||||||
|
|
||||||
/// Checks if any of the entries branch to `ebb`.
|
/// Checks if any of the entries branch to `ebb`.
|
pub fn branches_to(&self, ebb: Ebb) -> bool {
|
pub fn branches_to(&self, ebb: Ebb) -> bool {
|
self.table
|
self.table.iter().any(|target_ebb| {
|
.iter()
|
target_ebb.expand() == Some(ebb)
|
.any(|target_ebb| target_ebb.expand() == Some(ebb))
|
})
|
}
|
}
|
|
|
||||||
/// Access the whole table as a mutable slice.
|
/// Access the whole table as a mutable slice.
|
||||||
@@ -148,8 +148,10 @@ mod tests {
|
|||||||
jt.set_entry(0, e2);
|
jt.set_entry(0, e2);
|
||||||
jt.set_entry(10, e1);
|
jt.set_entry(10, e1);
|
||||||
|
|
||||||
assert_eq!(jt.to_string(),
|
assert_eq!(
|
||||||
"jump_table ebb2, 0, 0, 0, 0, 0, 0, 0, 0, 0, ebb1");
|
jt.to_string(),
|
||||||
|
"jump_table ebb2, 0, 0, 0, 0, 0, 0, 0, 0, 0, ebb1"
|
||||||
|
);
|
||||||
|
|
||||||
let v: Vec<(usize, Ebb)> = jt.entries().collect();
|
let v: Vec<(usize, Ebb)> = jt.entries().collect();
|
||||||
assert_eq!(v, [(0, e2), (10, e1)]);
|
assert_eq!(v, [(0, e2), (10, e1)]);
|
||||||
|
|||||||
@@ -96,8 +96,9 @@ fn test_midpoint() {
|
|||||||
|
|
||||||
impl ProgramOrder for Layout {
|
impl ProgramOrder for Layout {
|
||||||
fn cmp<A, B>(&self, a: A, b: B) -> cmp::Ordering
|
fn cmp<A, B>(&self, a: A, b: B) -> cmp::Ordering
|
||||||
where A: Into<ExpandedProgramPoint>,
|
where
|
||||||
B: Into<ExpandedProgramPoint>
|
A: Into<ExpandedProgramPoint>,
|
||||||
|
B: Into<ExpandedProgramPoint>,
|
||||||
{
|
{
|
||||||
let a_seq = self.seq(a);
|
let a_seq = self.seq(a);
|
||||||
let b_seq = self.seq(b);
|
let b_seq = self.seq(b);
|
||||||
@@ -166,8 +167,9 @@ impl Layout {
|
|||||||
/// Assign a valid sequence number to `inst` such that the numbers are still monotonic. This may
|
/// Assign a valid sequence number to `inst` such that the numbers are still monotonic. This may
|
||||||
/// require renumbering.
|
/// require renumbering.
|
||||||
fn assign_inst_seq(&mut self, inst: Inst) {
|
fn assign_inst_seq(&mut self, inst: Inst) {
|
||||||
let ebb = self.inst_ebb(inst)
|
let ebb = self.inst_ebb(inst).expect(
|
||||||
.expect("inst must be inserted before assigning an seq");
|
"inst must be inserted before assigning an seq",
|
||||||
|
);
|
||||||
|
|
||||||
// Get the sequence number immediately before `inst`.
|
// Get the sequence number immediately before `inst`.
|
||||||
let prev_seq = match self.insts[inst].prev.expand() {
|
let prev_seq = match self.insts[inst].prev.expand() {
|
||||||
@@ -283,8 +285,10 @@ impl Layout {
|
|||||||
|
|
||||||
/// Insert `ebb` as the last EBB in the layout.
|
/// Insert `ebb` as the last EBB in the layout.
|
||||||
pub fn append_ebb(&mut self, ebb: Ebb) {
|
pub fn append_ebb(&mut self, ebb: Ebb) {
|
||||||
assert!(!self.is_ebb_inserted(ebb),
|
assert!(
|
||||||
"Cannot append EBB that is already in the layout");
|
!self.is_ebb_inserted(ebb),
|
||||||
|
"Cannot append EBB that is already in the layout"
|
||||||
|
);
|
||||||
{
|
{
|
||||||
let node = &mut self.ebbs[ebb];
|
let node = &mut self.ebbs[ebb];
|
||||||
assert!(node.first_inst.is_none() && node.last_inst.is_none());
|
assert!(node.first_inst.is_none() && node.last_inst.is_none());
|
||||||
@@ -302,10 +306,14 @@ impl Layout {
|
|||||||
|
|
||||||
/// Insert `ebb` in the layout before the existing EBB `before`.
|
/// Insert `ebb` in the layout before the existing EBB `before`.
|
||||||
pub fn insert_ebb(&mut self, ebb: Ebb, before: Ebb) {
|
pub fn insert_ebb(&mut self, ebb: Ebb, before: Ebb) {
|
||||||
assert!(!self.is_ebb_inserted(ebb),
|
assert!(
|
||||||
"Cannot insert EBB that is already in the layout");
|
!self.is_ebb_inserted(ebb),
|
||||||
assert!(self.is_ebb_inserted(before),
|
"Cannot insert EBB that is already in the layout"
|
||||||
"EBB Insertion point not in the layout");
|
);
|
||||||
|
assert!(
|
||||||
|
self.is_ebb_inserted(before),
|
||||||
|
"EBB Insertion point not in the layout"
|
||||||
|
);
|
||||||
let after = self.ebbs[before].prev;
|
let after = self.ebbs[before].prev;
|
||||||
{
|
{
|
||||||
let node = &mut self.ebbs[ebb];
|
let node = &mut self.ebbs[ebb];
|
||||||
@@ -322,10 +330,14 @@ impl Layout {
|
|||||||
|
|
||||||
/// Insert `ebb` in the layout *after* the existing EBB `after`.
|
/// Insert `ebb` in the layout *after* the existing EBB `after`.
|
||||||
pub fn insert_ebb_after(&mut self, ebb: Ebb, after: Ebb) {
|
pub fn insert_ebb_after(&mut self, ebb: Ebb, after: Ebb) {
|
||||||
assert!(!self.is_ebb_inserted(ebb),
|
assert!(
|
||||||
"Cannot insert EBB that is already in the layout");
|
!self.is_ebb_inserted(ebb),
|
||||||
assert!(self.is_ebb_inserted(after),
|
"Cannot insert EBB that is already in the layout"
|
||||||
"EBB Insertion point not in the layout");
|
);
|
||||||
|
assert!(
|
||||||
|
self.is_ebb_inserted(after),
|
||||||
|
"EBB Insertion point not in the layout"
|
||||||
|
);
|
||||||
let before = self.ebbs[after].next;
|
let before = self.ebbs[after].next;
|
||||||
{
|
{
|
||||||
let node = &mut self.ebbs[ebb];
|
let node = &mut self.ebbs[ebb];
|
||||||
@@ -411,7 +423,8 @@ impl Layout {
|
|||||||
|
|
||||||
/// Get the EBB containing the program point `pp`. Panic if `pp` is not in the layout.
|
/// Get the EBB containing the program point `pp`. Panic if `pp` is not in the layout.
|
||||||
pub fn pp_ebb<PP>(&self, pp: PP) -> Ebb
|
pub fn pp_ebb<PP>(&self, pp: PP) -> Ebb
|
||||||
where PP: Into<ExpandedProgramPoint>
|
where
|
||||||
|
PP: Into<ExpandedProgramPoint>,
|
||||||
{
|
{
|
||||||
match pp.into() {
|
match pp.into() {
|
||||||
ExpandedProgramPoint::Ebb(ebb) => ebb,
|
ExpandedProgramPoint::Ebb(ebb) => ebb,
|
||||||
@@ -424,8 +437,10 @@ impl Layout {
|
|||||||
/// Append `inst` to the end of `ebb`.
|
/// Append `inst` to the end of `ebb`.
|
||||||
pub fn append_inst(&mut self, inst: Inst, ebb: Ebb) {
|
pub fn append_inst(&mut self, inst: Inst, ebb: Ebb) {
|
||||||
assert_eq!(self.inst_ebb(inst), None);
|
assert_eq!(self.inst_ebb(inst), None);
|
||||||
assert!(self.is_ebb_inserted(ebb),
|
assert!(
|
||||||
"Cannot append instructions to EBB not in layout");
|
self.is_ebb_inserted(ebb),
|
||||||
|
"Cannot append instructions to EBB not in layout"
|
||||||
|
);
|
||||||
{
|
{
|
||||||
let ebb_node = &mut self.ebbs[ebb];
|
let ebb_node = &mut self.ebbs[ebb];
|
||||||
{
|
{
|
||||||
@@ -457,8 +472,9 @@ impl Layout {
|
|||||||
/// Insert `inst` before the instruction `before` in the same EBB.
|
/// Insert `inst` before the instruction `before` in the same EBB.
|
||||||
pub fn insert_inst(&mut self, inst: Inst, before: Inst) {
|
pub fn insert_inst(&mut self, inst: Inst, before: Inst) {
|
||||||
assert_eq!(self.inst_ebb(inst), None);
|
assert_eq!(self.inst_ebb(inst), None);
|
||||||
let ebb = self.inst_ebb(before)
|
let ebb = self.inst_ebb(before).expect(
|
||||||
.expect("Instruction before insertion point not in the layout");
|
"Instruction before insertion point not in the layout",
|
||||||
|
);
|
||||||
let after = self.insts[before].prev;
|
let after = self.insts[before].prev;
|
||||||
{
|
{
|
||||||
let inst_node = &mut self.insts[inst];
|
let inst_node = &mut self.insts[inst];
|
||||||
@@ -531,8 +547,9 @@ impl Layout {
|
|||||||
/// i4
|
/// i4
|
||||||
/// ```
|
/// ```
|
||||||
pub fn split_ebb(&mut self, new_ebb: Ebb, before: Inst) {
|
pub fn split_ebb(&mut self, new_ebb: Ebb, before: Inst) {
|
||||||
let old_ebb = self.inst_ebb(before)
|
let old_ebb = self.inst_ebb(before).expect(
|
||||||
.expect("The `before` instruction must be in the layout");
|
"The `before` instruction must be in the layout",
|
||||||
|
);
|
||||||
assert!(!self.is_ebb_inserted(new_ebb));
|
assert!(!self.is_ebb_inserted(new_ebb));
|
||||||
|
|
||||||
// Insert new_ebb after old_ebb.
|
// Insert new_ebb after old_ebb.
|
||||||
@@ -683,7 +700,8 @@ pub trait CursorBase {
|
|||||||
/// }
|
/// }
|
||||||
/// ```
|
/// ```
|
||||||
fn at_inst(mut self, inst: Inst) -> Self
|
fn at_inst(mut self, inst: Inst) -> Self
|
||||||
where Self: Sized
|
where
|
||||||
|
Self: Sized,
|
||||||
{
|
{
|
||||||
self.goto_inst(inst);
|
self.goto_inst(inst);
|
||||||
self
|
self
|
||||||
@@ -703,7 +721,8 @@ pub trait CursorBase {
|
|||||||
/// }
|
/// }
|
||||||
/// ```
|
/// ```
|
||||||
fn at_first_inst(mut self, ebb: Ebb) -> Self
|
fn at_first_inst(mut self, ebb: Ebb) -> Self
|
||||||
where Self: Sized
|
where
|
||||||
|
Self: Sized,
|
||||||
{
|
{
|
||||||
self.goto_first_inst(ebb);
|
self.goto_first_inst(ebb);
|
||||||
self
|
self
|
||||||
@@ -783,9 +802,9 @@ pub trait CursorBase {
|
|||||||
self.layout().first_ebb
|
self.layout().first_ebb
|
||||||
};
|
};
|
||||||
self.set_position(match next {
|
self.set_position(match next {
|
||||||
Some(ebb) => CursorPosition::Before(ebb),
|
Some(ebb) => CursorPosition::Before(ebb),
|
||||||
None => CursorPosition::Nowhere,
|
None => CursorPosition::Nowhere,
|
||||||
});
|
});
|
||||||
next
|
next
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -816,9 +835,9 @@ pub trait CursorBase {
|
|||||||
self.layout().last_ebb
|
self.layout().last_ebb
|
||||||
};
|
};
|
||||||
self.set_position(match prev {
|
self.set_position(match prev {
|
||||||
Some(ebb) => CursorPosition::After(ebb),
|
Some(ebb) => CursorPosition::After(ebb),
|
||||||
None => CursorPosition::Nowhere,
|
None => CursorPosition::Nowhere,
|
||||||
});
|
});
|
||||||
prev
|
prev
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -872,9 +891,9 @@ pub trait CursorBase {
|
|||||||
self.set_position(At(next));
|
self.set_position(At(next));
|
||||||
Some(next)
|
Some(next)
|
||||||
} else {
|
} else {
|
||||||
let pos = After(self.layout()
|
let pos = After(self.layout().inst_ebb(inst).expect(
|
||||||
.inst_ebb(inst)
|
"current instruction removed?",
|
||||||
.expect("current instruction removed?"));
|
));
|
||||||
self.set_position(pos);
|
self.set_position(pos);
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
@@ -925,9 +944,9 @@ pub trait CursorBase {
|
|||||||
self.set_position(At(prev));
|
self.set_position(At(prev));
|
||||||
Some(prev)
|
Some(prev)
|
||||||
} else {
|
} else {
|
||||||
let pos = Before(self.layout()
|
let pos = Before(self.layout().inst_ebb(inst).expect(
|
||||||
.inst_ebb(inst)
|
"current instruction removed?",
|
||||||
.expect("current instruction removed?"));
|
));
|
||||||
self.set_position(pos);
|
self.set_position(pos);
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
@@ -1057,9 +1076,10 @@ pub struct LayoutCursorInserter<'c, 'fc: 'c, 'fd> {
|
|||||||
|
|
||||||
impl<'c, 'fc: 'c, 'fd> LayoutCursorInserter<'c, 'fc, 'fd> {
|
impl<'c, 'fc: 'c, 'fd> LayoutCursorInserter<'c, 'fc, 'fd> {
|
||||||
/// Create a new inserter. Don't use this, use `dfg.ins(pos)`.
|
/// Create a new inserter. Don't use this, use `dfg.ins(pos)`.
|
||||||
pub fn new(pos: &'c mut Cursor<'fc>,
|
pub fn new(
|
||||||
dfg: &'fd mut DataFlowGraph)
|
pos: &'c mut Cursor<'fc>,
|
||||||
-> LayoutCursorInserter<'c, 'fc, 'fd> {
|
dfg: &'fd mut DataFlowGraph,
|
||||||
|
) -> LayoutCursorInserter<'c, 'fc, 'fd> {
|
||||||
LayoutCursorInserter { pos, dfg }
|
LayoutCursorInserter { pos, dfg }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -123,8 +123,9 @@ pub trait ProgramOrder {
|
|||||||
/// directly. Depending on the implementation, there is a good chance performance will be
|
/// directly. Depending on the implementation, there is a good chance performance will be
|
||||||
/// improved for those cases where the type of either argument is known statically.
|
/// improved for those cases where the type of either argument is known statically.
|
||||||
fn cmp<A, B>(&self, a: A, b: B) -> cmp::Ordering
|
fn cmp<A, B>(&self, a: A, b: B) -> cmp::Ordering
|
||||||
where A: Into<ExpandedProgramPoint>,
|
where
|
||||||
B: Into<ExpandedProgramPoint>;
|
A: Into<ExpandedProgramPoint>,
|
||||||
|
B: Into<ExpandedProgramPoint>;
|
||||||
|
|
||||||
/// Is the range from `inst` to `ebb` just the gap between consecutive EBBs?
|
/// Is the range from `inst` to `ebb` just the gap between consecutive EBBs?
|
||||||
///
|
///
|
||||||
|
|||||||
@@ -65,11 +65,11 @@ impl fmt::Display for StackSlotKind {
|
|||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
use self::StackSlotKind::*;
|
use self::StackSlotKind::*;
|
||||||
f.write_str(match *self {
|
f.write_str(match *self {
|
||||||
Local => "local",
|
Local => "local",
|
||||||
SpillSlot => "spill_slot",
|
SpillSlot => "spill_slot",
|
||||||
IncomingArg => "incoming_arg",
|
IncomingArg => "incoming_arg",
|
||||||
OutgoingArg => "outgoing_arg",
|
OutgoingArg => "outgoing_arg",
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -228,9 +228,9 @@ impl StackSlots {
|
|||||||
let size = ty.bytes();
|
let size = ty.bytes();
|
||||||
|
|
||||||
// Look for an existing outgoing stack slot with the same offset and size.
|
// Look for an existing outgoing stack slot with the same offset and size.
|
||||||
let inspos = match self.outgoing
|
let inspos = match self.outgoing.binary_search_by_key(&(offset, size), |&ss| {
|
||||||
.binary_search_by_key(&(offset, size),
|
(self[ss].offset, self[ss].size)
|
||||||
|&ss| (self[ss].offset, self[ss].size)) {
|
}) {
|
||||||
Ok(idx) => return self.outgoing[idx],
|
Ok(idx) => return self.outgoing[idx],
|
||||||
Err(idx) => idx,
|
Err(idx) => idx,
|
||||||
};
|
};
|
||||||
@@ -255,10 +255,14 @@ mod tests {
|
|||||||
fn stack_slot() {
|
fn stack_slot() {
|
||||||
let mut func = Function::new();
|
let mut func = Function::new();
|
||||||
|
|
||||||
let ss0 = func.stack_slots
|
let ss0 = func.stack_slots.push(StackSlotData::new(
|
||||||
.push(StackSlotData::new(StackSlotKind::IncomingArg, 4));
|
StackSlotKind::IncomingArg,
|
||||||
let ss1 = func.stack_slots
|
4,
|
||||||
.push(StackSlotData::new(StackSlotKind::SpillSlot, 8));
|
));
|
||||||
|
let ss1 = func.stack_slots.push(StackSlotData::new(
|
||||||
|
StackSlotKind::SpillSlot,
|
||||||
|
8,
|
||||||
|
));
|
||||||
assert_eq!(ss0.to_string(), "ss0");
|
assert_eq!(ss0.to_string(), "ss0");
|
||||||
assert_eq!(ss1.to_string(), "ss1");
|
assert_eq!(ss1.to_string(), "ss1");
|
||||||
|
|
||||||
|
|||||||
@@ -7,9 +7,11 @@ use settings as shared_settings;
|
|||||||
use super::registers::{S, D, Q, GPR};
|
use super::registers::{S, D, Q, GPR};
|
||||||
|
|
||||||
/// Legalize `sig`.
|
/// Legalize `sig`.
|
||||||
pub fn legalize_signature(_sig: &mut ir::Signature,
|
pub fn legalize_signature(
|
||||||
_flags: &shared_settings::Flags,
|
_sig: &mut ir::Signature,
|
||||||
_current: bool) {
|
_flags: &shared_settings::Flags,
|
||||||
|
_current: bool,
|
||||||
|
) {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -29,19 +29,20 @@ pub fn isa_builder() -> IsaBuilder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn isa_constructor(shared_flags: shared_settings::Flags,
|
fn isa_constructor(
|
||||||
builder: &shared_settings::Builder)
|
shared_flags: shared_settings::Flags,
|
||||||
-> Box<TargetIsa> {
|
builder: &shared_settings::Builder,
|
||||||
|
) -> Box<TargetIsa> {
|
||||||
let level1 = if shared_flags.is_compressed() {
|
let level1 = if shared_flags.is_compressed() {
|
||||||
&enc_tables::LEVEL1_T32[..]
|
&enc_tables::LEVEL1_T32[..]
|
||||||
} else {
|
} else {
|
||||||
&enc_tables::LEVEL1_A32[..]
|
&enc_tables::LEVEL1_A32[..]
|
||||||
};
|
};
|
||||||
Box::new(Isa {
|
Box::new(Isa {
|
||||||
isa_flags: settings::Flags::new(&shared_flags, builder),
|
isa_flags: settings::Flags::new(&shared_flags, builder),
|
||||||
shared_flags,
|
shared_flags,
|
||||||
cpumode: level1,
|
cpumode: level1,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TargetIsa for Isa {
|
impl TargetIsa for Isa {
|
||||||
@@ -61,21 +62,24 @@ impl TargetIsa for Isa {
|
|||||||
enc_tables::INFO.clone()
|
enc_tables::INFO.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legal_encodings<'a>(&'a self,
|
fn legal_encodings<'a>(
|
||||||
dfg: &'a ir::DataFlowGraph,
|
&'a self,
|
||||||
inst: &'a ir::InstructionData,
|
dfg: &'a ir::DataFlowGraph,
|
||||||
ctrl_typevar: ir::Type)
|
inst: &'a ir::InstructionData,
|
||||||
-> Encodings<'a> {
|
ctrl_typevar: ir::Type,
|
||||||
lookup_enclist(ctrl_typevar,
|
) -> Encodings<'a> {
|
||||||
inst,
|
lookup_enclist(
|
||||||
dfg,
|
ctrl_typevar,
|
||||||
self.cpumode,
|
inst,
|
||||||
&enc_tables::LEVEL2[..],
|
dfg,
|
||||||
&enc_tables::ENCLISTS[..],
|
self.cpumode,
|
||||||
&enc_tables::LEGALIZE_ACTIONS[..],
|
&enc_tables::LEVEL2[..],
|
||||||
&enc_tables::RECIPE_PREDICATES[..],
|
&enc_tables::ENCLISTS[..],
|
||||||
&enc_tables::INST_PREDICATES[..],
|
&enc_tables::LEGALIZE_ACTIONS[..],
|
||||||
self.isa_flags.predicate_view())
|
&enc_tables::RECIPE_PREDICATES[..],
|
||||||
|
&enc_tables::INST_PREDICATES[..],
|
||||||
|
self.isa_flags.predicate_view(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
||||||
@@ -90,11 +94,13 @@ impl TargetIsa for Isa {
|
|||||||
abi::allocatable_registers(func)
|
abi::allocatable_registers(func)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emit_inst(&self,
|
fn emit_inst(
|
||||||
func: &ir::Function,
|
&self,
|
||||||
inst: ir::Inst,
|
func: &ir::Function,
|
||||||
divert: &mut regalloc::RegDiversions,
|
inst: ir::Inst,
|
||||||
sink: &mut CodeSink) {
|
divert: &mut regalloc::RegDiversions,
|
||||||
|
sink: &mut CodeSink,
|
||||||
|
) {
|
||||||
binemit::emit_inst(func, inst, divert, sink)
|
binemit::emit_inst(func, inst, divert, sink)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -7,9 +7,11 @@ use settings as shared_settings;
|
|||||||
use super::registers::{GPR, FPR};
|
use super::registers::{GPR, FPR};
|
||||||
|
|
||||||
/// Legalize `sig`.
|
/// Legalize `sig`.
|
||||||
pub fn legalize_signature(_sig: &mut ir::Signature,
|
pub fn legalize_signature(
|
||||||
_flags: &shared_settings::Flags,
|
_sig: &mut ir::Signature,
|
||||||
_current: bool) {
|
_flags: &shared_settings::Flags,
|
||||||
|
_current: bool,
|
||||||
|
) {
|
||||||
unimplemented!()
|
unimplemented!()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -28,13 +28,14 @@ pub fn isa_builder() -> IsaBuilder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn isa_constructor(shared_flags: shared_settings::Flags,
|
fn isa_constructor(
|
||||||
builder: &shared_settings::Builder)
|
shared_flags: shared_settings::Flags,
|
||||||
-> Box<TargetIsa> {
|
builder: &shared_settings::Builder,
|
||||||
|
) -> Box<TargetIsa> {
|
||||||
Box::new(Isa {
|
Box::new(Isa {
|
||||||
isa_flags: settings::Flags::new(&shared_flags, builder),
|
isa_flags: settings::Flags::new(&shared_flags, builder),
|
||||||
shared_flags,
|
shared_flags,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TargetIsa for Isa {
|
impl TargetIsa for Isa {
|
||||||
@@ -54,21 +55,24 @@ impl TargetIsa for Isa {
|
|||||||
enc_tables::INFO.clone()
|
enc_tables::INFO.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legal_encodings<'a>(&'a self,
|
fn legal_encodings<'a>(
|
||||||
dfg: &'a ir::DataFlowGraph,
|
&'a self,
|
||||||
inst: &'a ir::InstructionData,
|
dfg: &'a ir::DataFlowGraph,
|
||||||
ctrl_typevar: ir::Type)
|
inst: &'a ir::InstructionData,
|
||||||
-> Encodings<'a> {
|
ctrl_typevar: ir::Type,
|
||||||
lookup_enclist(ctrl_typevar,
|
) -> Encodings<'a> {
|
||||||
inst,
|
lookup_enclist(
|
||||||
dfg,
|
ctrl_typevar,
|
||||||
&enc_tables::LEVEL1_A64[..],
|
inst,
|
||||||
&enc_tables::LEVEL2[..],
|
dfg,
|
||||||
&enc_tables::ENCLISTS[..],
|
&enc_tables::LEVEL1_A64[..],
|
||||||
&enc_tables::LEGALIZE_ACTIONS[..],
|
&enc_tables::LEVEL2[..],
|
||||||
&enc_tables::RECIPE_PREDICATES[..],
|
&enc_tables::ENCLISTS[..],
|
||||||
&enc_tables::INST_PREDICATES[..],
|
&enc_tables::LEGALIZE_ACTIONS[..],
|
||||||
self.isa_flags.predicate_view())
|
&enc_tables::RECIPE_PREDICATES[..],
|
||||||
|
&enc_tables::INST_PREDICATES[..],
|
||||||
|
self.isa_flags.predicate_view(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
||||||
@@ -83,11 +87,13 @@ impl TargetIsa for Isa {
|
|||||||
abi::allocatable_registers(func)
|
abi::allocatable_registers(func)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emit_inst(&self,
|
fn emit_inst(
|
||||||
func: &ir::Function,
|
&self,
|
||||||
inst: ir::Inst,
|
func: &ir::Function,
|
||||||
divert: &mut regalloc::RegDiversions,
|
inst: ir::Inst,
|
||||||
sink: &mut CodeSink) {
|
divert: &mut regalloc::RegDiversions,
|
||||||
|
sink: &mut CodeSink,
|
||||||
|
) {
|
||||||
binemit::emit_inst(func, inst, divert, sink)
|
binemit::emit_inst(func, inst, divert, sink)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -103,19 +103,21 @@ impl<OffT: Into<u32> + Copy> Table<Opcode> for [Level2Entry<OffT>] {
|
|||||||
/// list.
|
/// list.
|
||||||
///
|
///
|
||||||
/// Returns an iterator that produces legal encodings for `inst`.
|
/// Returns an iterator that produces legal encodings for `inst`.
|
||||||
pub fn lookup_enclist<'a, OffT1, OffT2>(ctrl_typevar: Type,
|
pub fn lookup_enclist<'a, OffT1, OffT2>(
|
||||||
inst: &'a InstructionData,
|
ctrl_typevar: Type,
|
||||||
dfg: &'a DataFlowGraph,
|
inst: &'a InstructionData,
|
||||||
level1_table: &'static [Level1Entry<OffT1>],
|
dfg: &'a DataFlowGraph,
|
||||||
level2_table: &'static [Level2Entry<OffT2>],
|
level1_table: &'static [Level1Entry<OffT1>],
|
||||||
enclist: &'static [EncListEntry],
|
level2_table: &'static [Level2Entry<OffT2>],
|
||||||
legalize_actions: &'static [Legalize],
|
enclist: &'static [EncListEntry],
|
||||||
recipe_preds: &'static [RecipePredicate],
|
legalize_actions: &'static [Legalize],
|
||||||
inst_preds: &'static [InstPredicate],
|
recipe_preds: &'static [RecipePredicate],
|
||||||
isa_preds: PredicateView<'a>)
|
inst_preds: &'static [InstPredicate],
|
||||||
-> Encodings<'a>
|
isa_preds: PredicateView<'a>,
|
||||||
where OffT1: Into<u32> + Copy,
|
) -> Encodings<'a>
|
||||||
OffT2: Into<u32> + Copy
|
where
|
||||||
|
OffT1: Into<u32> + Copy,
|
||||||
|
OffT2: Into<u32> + Copy,
|
||||||
{
|
{
|
||||||
let (offset, legalize) = match probe(level1_table, ctrl_typevar, ctrl_typevar.index()) {
|
let (offset, legalize) = match probe(level1_table, ctrl_typevar, ctrl_typevar.index()) {
|
||||||
Err(l1idx) => {
|
Err(l1idx) => {
|
||||||
@@ -144,15 +146,17 @@ pub fn lookup_enclist<'a, OffT1, OffT2>(ctrl_typevar: Type,
|
|||||||
|
|
||||||
// Now we have an offset into `enclist` that is `!0` when no encoding list could be found.
|
// Now we have an offset into `enclist` that is `!0` when no encoding list could be found.
|
||||||
// The default legalization code is always valid.
|
// The default legalization code is always valid.
|
||||||
Encodings::new(offset,
|
Encodings::new(
|
||||||
legalize,
|
offset,
|
||||||
inst,
|
legalize,
|
||||||
dfg,
|
inst,
|
||||||
enclist,
|
dfg,
|
||||||
legalize_actions,
|
enclist,
|
||||||
recipe_preds,
|
legalize_actions,
|
||||||
inst_preds,
|
recipe_preds,
|
||||||
isa_preds)
|
inst_preds,
|
||||||
|
isa_preds,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Encoding list entry.
|
/// Encoding list entry.
|
||||||
@@ -187,16 +191,17 @@ impl<'a> Encodings<'a> {
|
|||||||
/// This iterator provides a search for encodings that apply to the given instruction. The
|
/// This iterator provides a search for encodings that apply to the given instruction. The
|
/// encoding lists are laid out such that the first call to `next` returns a valid entry in the list
|
/// encoding lists are laid out such that the first call to `next` returns a valid entry in the list
|
||||||
/// or `None`.
|
/// or `None`.
|
||||||
pub fn new(offset: usize,
|
pub fn new(
|
||||||
legalize: LegalizeCode,
|
offset: usize,
|
||||||
inst: &'a InstructionData,
|
legalize: LegalizeCode,
|
||||||
dfg: &'a DataFlowGraph,
|
inst: &'a InstructionData,
|
||||||
enclist: &'static [EncListEntry],
|
dfg: &'a DataFlowGraph,
|
||||||
legalize_actions: &'static [Legalize],
|
enclist: &'static [EncListEntry],
|
||||||
recipe_preds: &'static [RecipePredicate],
|
legalize_actions: &'static [Legalize],
|
||||||
inst_preds: &'static [InstPredicate],
|
recipe_preds: &'static [RecipePredicate],
|
||||||
isa_preds: PredicateView<'a>)
|
inst_preds: &'static [InstPredicate],
|
||||||
-> Self {
|
isa_preds: PredicateView<'a>,
|
||||||
|
) -> Self {
|
||||||
Encodings {
|
Encodings {
|
||||||
offset,
|
offset,
|
||||||
inst,
|
inst,
|
||||||
|
|||||||
@@ -66,10 +66,12 @@ pub struct DisplayEncoding {
|
|||||||
impl fmt::Display for DisplayEncoding {
|
impl fmt::Display for DisplayEncoding {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
if self.encoding.is_legal() {
|
if self.encoding.is_legal() {
|
||||||
write!(f,
|
write!(
|
||||||
"{}#{:02x}",
|
f,
|
||||||
self.recipe_names[self.encoding.recipe()],
|
"{}#{:02x}",
|
||||||
self.encoding.bits)
|
self.recipe_names[self.encoding.recipe()],
|
||||||
|
self.encoding.bits
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
write!(f, "-")
|
write!(f, "-")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -87,9 +87,7 @@ impl ArgAssigner for Args {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Legalize `sig`.
|
/// Legalize `sig`.
|
||||||
pub fn legalize_signature(sig: &mut ir::Signature,
|
pub fn legalize_signature(sig: &mut ir::Signature, flags: &shared_settings::Flags, _current: bool) {
|
||||||
flags: &shared_settings::Flags,
|
|
||||||
_current: bool) {
|
|
||||||
let bits = if flags.is_64bit() { 64 } else { 32 };
|
let bits = if flags.is_64bit() { 64 } else { 32 };
|
||||||
|
|
||||||
let mut args = Args::new(bits, &ARG_GPRS, 8);
|
let mut args = Args::new(bits, &ARG_GPRS, 8);
|
||||||
@@ -105,9 +103,10 @@ pub fn regclass_for_abi_type(ty: ir::Type) -> RegClass {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Get the set of allocatable registers for `func`.
|
/// Get the set of allocatable registers for `func`.
|
||||||
pub fn allocatable_registers(_func: &ir::Function,
|
pub fn allocatable_registers(
|
||||||
flags: &shared_settings::Flags)
|
_func: &ir::Function,
|
||||||
-> AllocatableSet {
|
flags: &shared_settings::Flags,
|
||||||
|
) -> AllocatableSet {
|
||||||
let mut regs = AllocatableSet::new();
|
let mut regs = AllocatableSet::new();
|
||||||
regs.take(GPR, RU::rsp as RegUnit);
|
regs.take(GPR, RU::rsp as RegUnit);
|
||||||
regs.take(GPR, RU::rbp as RegUnit);
|
regs.take(GPR, RU::rbp as RegUnit);
|
||||||
|
|||||||
@@ -29,19 +29,20 @@ pub fn isa_builder() -> IsaBuilder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn isa_constructor(shared_flags: shared_settings::Flags,
|
fn isa_constructor(
|
||||||
builder: &shared_settings::Builder)
|
shared_flags: shared_settings::Flags,
|
||||||
-> Box<TargetIsa> {
|
builder: &shared_settings::Builder,
|
||||||
|
) -> Box<TargetIsa> {
|
||||||
let level1 = if shared_flags.is_64bit() {
|
let level1 = if shared_flags.is_64bit() {
|
||||||
&enc_tables::LEVEL1_I64[..]
|
&enc_tables::LEVEL1_I64[..]
|
||||||
} else {
|
} else {
|
||||||
&enc_tables::LEVEL1_I32[..]
|
&enc_tables::LEVEL1_I32[..]
|
||||||
};
|
};
|
||||||
Box::new(Isa {
|
Box::new(Isa {
|
||||||
isa_flags: settings::Flags::new(&shared_flags, builder),
|
isa_flags: settings::Flags::new(&shared_flags, builder),
|
||||||
shared_flags,
|
shared_flags,
|
||||||
cpumode: level1,
|
cpumode: level1,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TargetIsa for Isa {
|
impl TargetIsa for Isa {
|
||||||
@@ -61,21 +62,24 @@ impl TargetIsa for Isa {
|
|||||||
enc_tables::INFO.clone()
|
enc_tables::INFO.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legal_encodings<'a>(&'a self,
|
fn legal_encodings<'a>(
|
||||||
dfg: &'a ir::DataFlowGraph,
|
&'a self,
|
||||||
inst: &'a ir::InstructionData,
|
dfg: &'a ir::DataFlowGraph,
|
||||||
ctrl_typevar: ir::Type)
|
inst: &'a ir::InstructionData,
|
||||||
-> Encodings<'a> {
|
ctrl_typevar: ir::Type,
|
||||||
lookup_enclist(ctrl_typevar,
|
) -> Encodings<'a> {
|
||||||
inst,
|
lookup_enclist(
|
||||||
dfg,
|
ctrl_typevar,
|
||||||
self.cpumode,
|
inst,
|
||||||
&enc_tables::LEVEL2[..],
|
dfg,
|
||||||
&enc_tables::ENCLISTS[..],
|
self.cpumode,
|
||||||
&enc_tables::LEGALIZE_ACTIONS[..],
|
&enc_tables::LEVEL2[..],
|
||||||
&enc_tables::RECIPE_PREDICATES[..],
|
&enc_tables::ENCLISTS[..],
|
||||||
&enc_tables::INST_PREDICATES[..],
|
&enc_tables::LEGALIZE_ACTIONS[..],
|
||||||
self.isa_flags.predicate_view())
|
&enc_tables::RECIPE_PREDICATES[..],
|
||||||
|
&enc_tables::INST_PREDICATES[..],
|
||||||
|
self.isa_flags.predicate_view(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
||||||
@@ -90,11 +94,13 @@ impl TargetIsa for Isa {
|
|||||||
abi::allocatable_registers(func, &self.shared_flags)
|
abi::allocatable_registers(func, &self.shared_flags)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emit_inst(&self,
|
fn emit_inst(
|
||||||
func: &ir::Function,
|
&self,
|
||||||
inst: ir::Inst,
|
func: &ir::Function,
|
||||||
divert: &mut regalloc::RegDiversions,
|
inst: ir::Inst,
|
||||||
sink: &mut CodeSink) {
|
divert: &mut regalloc::RegDiversions,
|
||||||
|
sink: &mut CodeSink,
|
||||||
|
) {
|
||||||
binemit::emit_inst(func, inst, divert, sink)
|
binemit::emit_inst(func, inst, divert, sink)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -155,11 +155,12 @@ pub trait TargetIsa {
|
|||||||
fn register_info(&self) -> RegInfo;
|
fn register_info(&self) -> RegInfo;
|
||||||
|
|
||||||
/// Returns an iterator over legal encodings for the instruction.
|
/// Returns an iterator over legal encodings for the instruction.
|
||||||
fn legal_encodings<'a>(&'a self,
|
fn legal_encodings<'a>(
|
||||||
dfg: &'a ir::DataFlowGraph,
|
&'a self,
|
||||||
inst: &'a ir::InstructionData,
|
dfg: &'a ir::DataFlowGraph,
|
||||||
ctrl_typevar: ir::Type)
|
inst: &'a ir::InstructionData,
|
||||||
-> Encodings<'a>;
|
ctrl_typevar: ir::Type,
|
||||||
|
) -> Encodings<'a>;
|
||||||
|
|
||||||
/// Encode an instruction after determining it is legal.
|
/// Encode an instruction after determining it is legal.
|
||||||
///
|
///
|
||||||
@@ -167,11 +168,12 @@ pub trait TargetIsa {
|
|||||||
/// Otherwise, return `Legalize` action.
|
/// Otherwise, return `Legalize` action.
|
||||||
///
|
///
|
||||||
/// This is also the main entry point for determining if an instruction is legal.
|
/// This is also the main entry point for determining if an instruction is legal.
|
||||||
fn encode(&self,
|
fn encode(
|
||||||
dfg: &ir::DataFlowGraph,
|
&self,
|
||||||
inst: &ir::InstructionData,
|
dfg: &ir::DataFlowGraph,
|
||||||
ctrl_typevar: ir::Type)
|
inst: &ir::InstructionData,
|
||||||
-> Result<Encoding, Legalize> {
|
ctrl_typevar: ir::Type,
|
||||||
|
) -> Result<Encoding, Legalize> {
|
||||||
let mut iter = self.legal_encodings(dfg, inst, ctrl_typevar);
|
let mut iter = self.legal_encodings(dfg, inst, ctrl_typevar);
|
||||||
iter.next().ok_or_else(|| iter.legalize().into())
|
iter.next().ok_or_else(|| iter.legalize().into())
|
||||||
}
|
}
|
||||||
@@ -244,11 +246,13 @@ pub trait TargetIsa {
|
|||||||
///
|
///
|
||||||
/// Note that this will call `put*` methods on the trait object via its vtable which is not the
|
/// Note that this will call `put*` methods on the trait object via its vtable which is not the
|
||||||
/// fastest way of emitting code.
|
/// fastest way of emitting code.
|
||||||
fn emit_inst(&self,
|
fn emit_inst(
|
||||||
func: &ir::Function,
|
&self,
|
||||||
inst: ir::Inst,
|
func: &ir::Function,
|
||||||
divert: &mut regalloc::RegDiversions,
|
inst: ir::Inst,
|
||||||
sink: &mut binemit::CodeSink);
|
divert: &mut regalloc::RegDiversions,
|
||||||
|
sink: &mut binemit::CodeSink,
|
||||||
|
);
|
||||||
|
|
||||||
/// Emit a whole function into memory.
|
/// Emit a whole function into memory.
|
||||||
///
|
///
|
||||||
|
|||||||
@@ -74,24 +74,23 @@ impl RegBank {
|
|||||||
/// Try to parse a regunit name. The name is not expected to begin with `%`.
|
/// Try to parse a regunit name. The name is not expected to begin with `%`.
|
||||||
fn parse_regunit(&self, name: &str) -> Option<RegUnit> {
|
fn parse_regunit(&self, name: &str) -> Option<RegUnit> {
|
||||||
match self.names.iter().position(|&x| x == name) {
|
match self.names.iter().position(|&x| x == name) {
|
||||||
Some(offset) => {
|
Some(offset) => {
|
||||||
// This is one of the special-cased names.
|
// This is one of the special-cased names.
|
||||||
Some(offset as RegUnit)
|
Some(offset as RegUnit)
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
// Try a regular prefixed name.
|
// Try a regular prefixed name.
|
||||||
if name.starts_with(self.prefix) {
|
if name.starts_with(self.prefix) {
|
||||||
name[self.prefix.len()..].parse().ok()
|
name[self.prefix.len()..].parse().ok()
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.and_then(|offset| if offset < self.units {
|
}.and_then(|offset| if offset < self.units {
|
||||||
Some(offset + self.first_unit)
|
Some(offset + self.first_unit)
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write `regunit` to `w`, assuming that it belongs to this bank.
|
/// Write `regunit` to `w`, assuming that it belongs to this bank.
|
||||||
|
|||||||
@@ -86,10 +86,12 @@ impl ArgAssigner for Args {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Legalize `sig` for RISC-V.
|
/// Legalize `sig` for RISC-V.
|
||||||
pub fn legalize_signature(sig: &mut ir::Signature,
|
pub fn legalize_signature(
|
||||||
flags: &shared_settings::Flags,
|
sig: &mut ir::Signature,
|
||||||
isa_flags: &settings::Flags,
|
flags: &shared_settings::Flags,
|
||||||
current: bool) {
|
isa_flags: &settings::Flags,
|
||||||
|
current: bool,
|
||||||
|
) {
|
||||||
let bits = if flags.is_64bit() { 64 } else { 32 };
|
let bits = if flags.is_64bit() { 64 } else { 32 };
|
||||||
|
|
||||||
let mut args = Args::new(bits, isa_flags.enable_e());
|
let mut args = Args::new(bits, isa_flags.enable_e());
|
||||||
|
|||||||
@@ -29,11 +29,7 @@ impl Into<Reloc> for RelocKind {
|
|||||||
/// 25 20 15 12 7 0
|
/// 25 20 15 12 7 0
|
||||||
///
|
///
|
||||||
/// Encoding bits: `opcode[6:2] | (funct3 << 5) | (funct7 << 8)`.
|
/// Encoding bits: `opcode[6:2] | (funct3 << 5) | (funct7 << 8)`.
|
||||||
fn put_r<CS: CodeSink + ?Sized>(bits: u16,
|
fn put_r<CS: CodeSink + ?Sized>(bits: u16, rs1: RegUnit, rs2: RegUnit, rd: RegUnit, sink: &mut CS) {
|
||||||
rs1: RegUnit,
|
|
||||||
rs2: RegUnit,
|
|
||||||
rd: RegUnit,
|
|
||||||
sink: &mut CS) {
|
|
||||||
let bits = bits as u32;
|
let bits = bits as u32;
|
||||||
let opcode5 = bits & 0x1f;
|
let opcode5 = bits & 0x1f;
|
||||||
let funct3 = (bits >> 5) & 0x7;
|
let funct3 = (bits >> 5) & 0x7;
|
||||||
@@ -63,11 +59,13 @@ fn put_r<CS: CodeSink + ?Sized>(bits: u16,
|
|||||||
/// Both funct7 and shamt contribute to bit 25. In RV64, shamt uses it for shifts > 31.
|
/// Both funct7 and shamt contribute to bit 25. In RV64, shamt uses it for shifts > 31.
|
||||||
///
|
///
|
||||||
/// Encoding bits: `opcode[6:2] | (funct3 << 5) | (funct7 << 8)`.
|
/// Encoding bits: `opcode[6:2] | (funct3 << 5) | (funct7 << 8)`.
|
||||||
fn put_rshamt<CS: CodeSink + ?Sized>(bits: u16,
|
fn put_rshamt<CS: CodeSink + ?Sized>(
|
||||||
rs1: RegUnit,
|
bits: u16,
|
||||||
shamt: i64,
|
rs1: RegUnit,
|
||||||
rd: RegUnit,
|
shamt: i64,
|
||||||
sink: &mut CS) {
|
rd: RegUnit,
|
||||||
|
sink: &mut CS,
|
||||||
|
) {
|
||||||
let bits = bits as u32;
|
let bits = bits as u32;
|
||||||
let opcode5 = bits & 0x1f;
|
let opcode5 = bits & 0x1f;
|
||||||
let funct3 = (bits >> 5) & 0x7;
|
let funct3 = (bits >> 5) & 0x7;
|
||||||
|
|||||||
@@ -29,19 +29,20 @@ pub fn isa_builder() -> IsaBuilder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn isa_constructor(shared_flags: shared_settings::Flags,
|
fn isa_constructor(
|
||||||
builder: &shared_settings::Builder)
|
shared_flags: shared_settings::Flags,
|
||||||
-> Box<TargetIsa> {
|
builder: &shared_settings::Builder,
|
||||||
|
) -> Box<TargetIsa> {
|
||||||
let level1 = if shared_flags.is_64bit() {
|
let level1 = if shared_flags.is_64bit() {
|
||||||
&enc_tables::LEVEL1_RV64[..]
|
&enc_tables::LEVEL1_RV64[..]
|
||||||
} else {
|
} else {
|
||||||
&enc_tables::LEVEL1_RV32[..]
|
&enc_tables::LEVEL1_RV32[..]
|
||||||
};
|
};
|
||||||
Box::new(Isa {
|
Box::new(Isa {
|
||||||
isa_flags: settings::Flags::new(&shared_flags, builder),
|
isa_flags: settings::Flags::new(&shared_flags, builder),
|
||||||
shared_flags,
|
shared_flags,
|
||||||
cpumode: level1,
|
cpumode: level1,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TargetIsa for Isa {
|
impl TargetIsa for Isa {
|
||||||
@@ -61,21 +62,24 @@ impl TargetIsa for Isa {
|
|||||||
enc_tables::INFO.clone()
|
enc_tables::INFO.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legal_encodings<'a>(&'a self,
|
fn legal_encodings<'a>(
|
||||||
dfg: &'a ir::DataFlowGraph,
|
&'a self,
|
||||||
inst: &'a ir::InstructionData,
|
dfg: &'a ir::DataFlowGraph,
|
||||||
ctrl_typevar: ir::Type)
|
inst: &'a ir::InstructionData,
|
||||||
-> Encodings<'a> {
|
ctrl_typevar: ir::Type,
|
||||||
lookup_enclist(ctrl_typevar,
|
) -> Encodings<'a> {
|
||||||
inst,
|
lookup_enclist(
|
||||||
dfg,
|
ctrl_typevar,
|
||||||
self.cpumode,
|
inst,
|
||||||
&enc_tables::LEVEL2[..],
|
dfg,
|
||||||
&enc_tables::ENCLISTS[..],
|
self.cpumode,
|
||||||
&enc_tables::LEGALIZE_ACTIONS[..],
|
&enc_tables::LEVEL2[..],
|
||||||
&enc_tables::RECIPE_PREDICATES[..],
|
&enc_tables::ENCLISTS[..],
|
||||||
&enc_tables::INST_PREDICATES[..],
|
&enc_tables::LEGALIZE_ACTIONS[..],
|
||||||
self.isa_flags.predicate_view())
|
&enc_tables::RECIPE_PREDICATES[..],
|
||||||
|
&enc_tables::INST_PREDICATES[..],
|
||||||
|
self.isa_flags.predicate_view(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
fn legalize_signature(&self, sig: &mut ir::Signature, current: bool) {
|
||||||
@@ -90,11 +94,13 @@ impl TargetIsa for Isa {
|
|||||||
abi::allocatable_registers(func, &self.isa_flags)
|
abi::allocatable_registers(func, &self.isa_flags)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn emit_inst(&self,
|
fn emit_inst(
|
||||||
func: &ir::Function,
|
&self,
|
||||||
inst: ir::Inst,
|
func: &ir::Function,
|
||||||
divert: &mut regalloc::RegDiversions,
|
inst: ir::Inst,
|
||||||
sink: &mut CodeSink) {
|
divert: &mut regalloc::RegDiversions,
|
||||||
|
sink: &mut CodeSink,
|
||||||
|
) {
|
||||||
binemit::emit_inst(func, inst, divert, sink)
|
binemit::emit_inst(func, inst, divert, sink)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -18,14 +18,16 @@ mod tests {
|
|||||||
let shared = settings::Flags::new(&settings::builder());
|
let shared = settings::Flags::new(&settings::builder());
|
||||||
let b = builder();
|
let b = builder();
|
||||||
let f = Flags::new(&shared, &b);
|
let f = Flags::new(&shared, &b);
|
||||||
assert_eq!(f.to_string(),
|
assert_eq!(
|
"[riscv]\n\
|
f.to_string(),
|
|
"[riscv]\n\
|
supports_m = false\n\
|
supports_m = false\n\
|
supports_a = false\n\
|
supports_a = false\n\
|
supports_f = false\n\
|
supports_f = false\n\
|
supports_d = false\n\
|
supports_d = false\n\
|
enable_m = true\n\
|
enable_m = true\n\
|
enable_e = false\n");
|
enable_e = false\n"
|
|
);
|
// Predicates are not part of the Display output.
|
// Predicates are not part of the Display output.
|
||||||
assert_eq!(f.full_float(), false);
|
assert_eq!(f.full_float(), false);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,40 +4,45 @@
|
|||||||
pub trait IteratorExtras: Iterator {
|
pub trait IteratorExtras: Iterator {
|
||||||
/// Create an iterator that produces adjacent pairs of elements from the iterator.
|
/// Create an iterator that produces adjacent pairs of elements from the iterator.
|
||||||
fn adjacent_pairs(mut self) -> AdjacentPairs<Self>
|
fn adjacent_pairs(mut self) -> AdjacentPairs<Self>
|
||||||
where Self: Sized,
|
where
|
||||||
Self::Item: Clone
|
Self: Sized,
|
||||||
|
Self::Item: Clone,
|
||||||
{
|
{
|
||||||
let elem = self.next();
|
let elem = self.next();
|
||||||
AdjacentPairs { iter: self, elem }
|
AdjacentPairs { iter: self, elem }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T> IteratorExtras for T where T: Iterator {}
|
impl<T> IteratorExtras for T
|
||||||
|
where
|
||||||
|
T: Iterator,
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
/// Adjacent pairs iterator returned by `adjacent_pairs()`.
|
/// Adjacent pairs iterator returned by `adjacent_pairs()`.
|
||||||
///
|
///
|
||||||
/// This wraps another iterator and produces a sequence of adjacent pairs of elements.
|
/// This wraps another iterator and produces a sequence of adjacent pairs of elements.
|
||||||
pub struct AdjacentPairs<I>
|
pub struct AdjacentPairs<I>
|
||||||
where I: Iterator,
|
where
|
||||||
I::Item: Clone
|
I: Iterator,
|
||||||
|
I::Item: Clone,
|
||||||
{
|
{
|
||||||
iter: I,
|
iter: I,
|
||||||
elem: Option<I::Item>,
|
elem: Option<I::Item>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<I> Iterator for AdjacentPairs<I>
|
impl<I> Iterator for AdjacentPairs<I>
|
||||||
where I: Iterator,
|
where
|
||||||
I::Item: Clone
|
I: Iterator,
|
||||||
|
I::Item: Clone,
|
||||||
{
|
{
|
||||||
type Item = (I::Item, I::Item);
|
type Item = (I::Item, I::Item);
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
self.elem
|
self.elem.take().and_then(|e| {
|
||||||
.take()
|
self.elem = self.iter.next();
|
||||||
.and_then(|e| {
|
self.elem.clone().map(|n| (e, n))
|
||||||
self.elem = self.iter.next();
|
})
|
||||||
self.elem.clone().map(|n| (e, n))
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -47,33 +52,45 @@ mod tests {
|
|||||||
fn adjpairs() {
|
fn adjpairs() {
|
||||||
use super::IteratorExtras;
|
use super::IteratorExtras;
|
||||||
|
|
||||||
assert_eq!([1, 2, 3, 4]
|
assert_eq!(
|
||||||
.iter()
|
[1, 2, 3, 4]
|
||||||
.cloned()
|
.iter()
|
||||||
.adjacent_pairs()
|
.cloned()
|
||||||
.collect::<Vec<_>>(),
|
.adjacent_pairs()
|
||||||
vec![(1, 2), (2, 3), (3, 4)]);
|
.collect::<Vec<_>>(),
|
||||||
assert_eq!([2, 3, 4]
|
vec![(1, 2), (2, 3), (3, 4)]
|
||||||
.iter()
|
);
|
||||||
.cloned()
|
assert_eq!(
|
||||||
.adjacent_pairs()
|
[2, 3, 4]
|
||||||
.collect::<Vec<_>>(),
|
.iter()
|
||||||
vec![(2, 3), (3, 4)]);
|
.cloned()
|
||||||
assert_eq!([2, 3, 4]
|
.adjacent_pairs()
|
||||||
.iter()
|
.collect::<Vec<_>>(),
|
||||||
.cloned()
|
vec![(2, 3), (3, 4)]
|
||||||
.adjacent_pairs()
|
);
|
||||||
.collect::<Vec<_>>(),
|
assert_eq!(
|
||||||
vec![(2, 3), (3, 4)]);
|
[2, 3, 4]
|
||||||
assert_eq!([3, 4].iter().cloned().adjacent_pairs().collect::<Vec<_>>(),
|
.iter()
|
||||||
vec![(3, 4)]);
|
.cloned()
|
||||||
assert_eq!([4].iter().cloned().adjacent_pairs().collect::<Vec<_>>(),
|
.adjacent_pairs()
|
||||||
vec![]);
|
.collect::<Vec<_>>(),
|
||||||
assert_eq!([]
|
vec![(2, 3), (3, 4)]
|
||||||
.iter()
|
);
|
||||||
.cloned()
|
assert_eq!(
|
||||||
.adjacent_pairs()
|
[3, 4].iter().cloned().adjacent_pairs().collect::<Vec<_>>(),
|
||||||
.collect::<Vec<(i32, i32)>>(),
|
vec![(3, 4)]
|
||||||
vec![]);
|
);
|
||||||
|
assert_eq!(
|
||||||
|
[4].iter().cloned().adjacent_pairs().collect::<Vec<_>>(),
|
||||||
|
vec![]
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
[]
|
||||||
|
.iter()
|
||||||
|
.cloned()
|
||||||
|
.adjacent_pairs()
|
||||||
|
.collect::<Vec<(i32, i32)>>(),
|
||||||
|
vec![]
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -98,9 +98,11 @@ fn legalize_entry_arguments(func: &mut Function, entry: Ebb) {
|
|||||||
// Compute the value we want for `arg` from the legalized ABI arguments.
|
// Compute the value we want for `arg` from the legalized ABI arguments.
|
||||||
let mut get_arg = |dfg: &mut DataFlowGraph, ty| {
|
let mut get_arg = |dfg: &mut DataFlowGraph, ty| {
|
||||||
let abi_type = abi_types[abi_arg];
|
let abi_type = abi_types[abi_arg];
|
||||||
assert_eq!(abi_type.purpose,
|
assert_eq!(
|
||||||
ArgumentPurpose::Normal,
|
abi_type.purpose,
|
||||||
"Can't legalize special-purpose argument");
|
ArgumentPurpose::Normal,
|
||||||
|
"Can't legalize special-purpose argument"
|
||||||
|
);
|
||||||
if ty == abi_type.value_type {
|
if ty == abi_type.value_type {
|
||||||
abi_arg += 1;
|
abi_arg += 1;
|
||||||
Ok(dfg.append_ebb_arg(entry, ty))
|
Ok(dfg.append_ebb_arg(entry, ty))
|
||||||
@@ -159,14 +161,17 @@ fn legalize_entry_arguments(func: &mut Function, entry: Ebb) {
|
|||||||
/// This function is very similar to the `legalize_entry_arguments` function above.
|
/// This function is very similar to the `legalize_entry_arguments` function above.
|
||||||
///
|
///
|
||||||
/// Returns the possibly new instruction representing the call.
|
/// Returns the possibly new instruction representing the call.
|
||||||
fn legalize_inst_results<ResType>(dfg: &mut DataFlowGraph,
|
fn legalize_inst_results<ResType>(
|
||||||
pos: &mut Cursor,
|
dfg: &mut DataFlowGraph,
|
||||||
mut get_abi_type: ResType)
|
pos: &mut Cursor,
|
||||||
-> Inst
|
mut get_abi_type: ResType,
|
||||||
where ResType: FnMut(&DataFlowGraph, usize) -> ArgumentType
|
) -> Inst
|
||||||
|
where
|
||||||
|
ResType: FnMut(&DataFlowGraph, usize) -> ArgumentType,
|
||||||
{
|
{
|
||||||
let call = pos.current_inst()
|
let call = pos.current_inst().expect(
|
||||||
.expect("Cursor must point to a call instruction");
|
"Cursor must point to a call instruction",
|
||||||
|
);
|
||||||
|
|
||||||
// We theoretically allow for call instructions that return a number of fixed results before
|
// We theoretically allow for call instructions that return a number of fixed results before
|
||||||
// the call return values. In practice, it doesn't happen.
|
// the call return values. In practice, it doesn't happen.
|
||||||
@@ -216,13 +221,15 @@ fn legalize_inst_results<ResType>(dfg: &mut DataFlowGraph,
|
|||||||
/// - `Err(arg_type)` if further conversions are needed from the ABI argument `arg_type`.
|
/// - `Err(arg_type)` if further conversions are needed from the ABI argument `arg_type`.
|
||||||
///
|
///
|
||||||
/// If the `into_result` value is provided, the converted result will be written into that value.
|
/// If the `into_result` value is provided, the converted result will be written into that value.
|
||||||
fn convert_from_abi<GetArg>(dfg: &mut DataFlowGraph,
|
fn convert_from_abi<GetArg>(
|
||||||
pos: &mut Cursor,
|
dfg: &mut DataFlowGraph,
|
||||||
ty: Type,
|
pos: &mut Cursor,
|
||||||
into_result: Option<Value>,
|
ty: Type,
|
||||||
get_arg: &mut GetArg)
|
into_result: Option<Value>,
|
||||||
-> Value
|
get_arg: &mut GetArg,
|
||||||
where GetArg: FnMut(&mut DataFlowGraph, Type) -> Result<Value, ArgumentType>
|
) -> Value
|
||||||
|
where
|
||||||
|
GetArg: FnMut(&mut DataFlowGraph, Type) -> Result<Value, ArgumentType>,
|
||||||
{
|
{
|
||||||
// Terminate the recursion when we get the desired type.
|
// Terminate the recursion when we get the desired type.
|
||||||
let arg_type = match get_arg(dfg, ty) {
|
let arg_type = match get_arg(dfg, ty) {
|
||||||
@@ -246,11 +253,13 @@ fn convert_from_abi<GetArg>(dfg: &mut DataFlowGraph,
|
|||||||
let abi_ty = ty.half_width().expect("Invalid type for conversion");
|
let abi_ty = ty.half_width().expect("Invalid type for conversion");
|
||||||
let lo = convert_from_abi(dfg, pos, abi_ty, None, get_arg);
|
let lo = convert_from_abi(dfg, pos, abi_ty, None, get_arg);
|
||||||
let hi = convert_from_abi(dfg, pos, abi_ty, None, get_arg);
|
let hi = convert_from_abi(dfg, pos, abi_ty, None, get_arg);
|
||||||
dbg!("intsplit {}: {}, {}: {}",
|
dbg!(
|
||||||
lo,
|
"intsplit {}: {}, {}: {}",
|
||||||
dfg.value_type(lo),
|
lo,
|
||||||
hi,
|
dfg.value_type(lo),
|
||||||
dfg.value_type(hi));
|
hi,
|
||||||
|
dfg.value_type(hi)
|
||||||
|
);
|
||||||
dfg.ins(pos).with_results([into_result]).iconcat(lo, hi)
|
dfg.ins(pos).with_results([into_result]).iconcat(lo, hi)
|
||||||
}
|
}
|
||||||
// Construct a `ty` by concatenating two halves of a vector.
|
// Construct a `ty` by concatenating two halves of a vector.
|
||||||
@@ -296,12 +305,14 @@ fn convert_from_abi<GetArg>(dfg: &mut DataFlowGraph,
|
|||||||
/// 2. If the suggested argument doesn't have the right value type, don't change anything, but
|
/// 2. If the suggested argument doesn't have the right value type, don't change anything, but
|
||||||
/// return the `Err(ArgumentType)` that is needed.
|
/// return the `Err(ArgumentType)` that is needed.
|
||||||
///
|
///
|
||||||
fn convert_to_abi<PutArg>(dfg: &mut DataFlowGraph,
|
fn convert_to_abi<PutArg>(
|
||||||
cfg: &ControlFlowGraph,
|
dfg: &mut DataFlowGraph,
|
||||||
pos: &mut Cursor,
|
cfg: &ControlFlowGraph,
|
||||||
value: Value,
|
pos: &mut Cursor,
|
||||||
put_arg: &mut PutArg)
|
value: Value,
|
||||||
where PutArg: FnMut(&mut DataFlowGraph, Value) -> Result<(), ArgumentType>
|
put_arg: &mut PutArg,
|
||||||
|
) where
|
||||||
|
PutArg: FnMut(&mut DataFlowGraph, Value) -> Result<(), ArgumentType>,
|
||||||
{
|
{
|
||||||
// Start by invoking the closure to either terminate the recursion or get the argument type
|
// Start by invoking the closure to either terminate the recursion or get the argument type
|
||||||
// we're trying to match.
|
// we're trying to match.
|
||||||
@@ -360,7 +371,8 @@ fn check_call_signature(dfg: &DataFlowGraph, inst: Inst) -> Result<(), SigRef> {
|
|||||||
let sig = &dfg.signatures[sig_ref];
|
let sig = &dfg.signatures[sig_ref];
|
||||||
|
|
||||||
if check_arg_types(dfg, args, &sig.argument_types[..]) &&
|
if check_arg_types(dfg, args, &sig.argument_types[..]) &&
|
||||||
check_arg_types(dfg, dfg.inst_results(inst), &sig.return_types[..]) {
|
check_arg_types(dfg, dfg.inst_results(inst), &sig.return_types[..])
|
||||||
|
{
|
||||||
// All types check out.
|
// All types check out.
|
||||||
Ok(())
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
@@ -380,20 +392,23 @@ fn check_return_signature(dfg: &DataFlowGraph, inst: Inst, sig: &Signature) -> b
|
|||||||
/// - `get_abi_type` is a closure that can provide the desired `ArgumentType` for a given ABI
|
/// - `get_abi_type` is a closure that can provide the desired `ArgumentType` for a given ABI
|
||||||
/// argument number in `0..abi_args`.
|
/// argument number in `0..abi_args`.
|
||||||
///
|
///
|
||||||
fn legalize_inst_arguments<ArgType>(dfg: &mut DataFlowGraph,
|
fn legalize_inst_arguments<ArgType>(
|
||||||
cfg: &ControlFlowGraph,
|
dfg: &mut DataFlowGraph,
|
||||||
pos: &mut Cursor,
|
cfg: &ControlFlowGraph,
|
||||||
abi_args: usize,
|
pos: &mut Cursor,
|
||||||
mut get_abi_type: ArgType)
|
abi_args: usize,
|
||||||
where ArgType: FnMut(&DataFlowGraph, usize) -> ArgumentType
|
mut get_abi_type: ArgType,
|
||||||
|
) where
|
||||||
|
ArgType: FnMut(&DataFlowGraph, usize) -> ArgumentType,
|
||||||
{
|
{
|
||||||
let inst = pos.current_inst()
|
let inst = pos.current_inst().expect(
|
||||||
.expect("Cursor must point to a call instruction");
|
"Cursor must point to a call instruction",
|
||||||
|
);
|
||||||
|
|
||||||
// Lift the value list out of the call instruction so we modify it.
|
// Lift the value list out of the call instruction so we modify it.
|
||||||
let mut vlist = dfg[inst]
|
let mut vlist = dfg[inst].take_value_list().expect(
|
||||||
.take_value_list()
|
"Call must have a value list",
|
||||||
.expect("Call must have a value list");
|
);
|
||||||
|
|
||||||
// The value list contains all arguments to the instruction, including the callee on an
|
// The value list contains all arguments to the instruction, including the callee on an
|
||||||
// indirect call which isn't part of the call arguments that must match the ABI signature.
|
// indirect call which isn't part of the call arguments that must match the ABI signature.
|
||||||
@@ -474,23 +489,23 @@ pub fn handle_call_abi(mut inst: Inst, func: &mut Function, cfg: &ControlFlowGra
 
     // OK, we need to fix the call arguments to match the ABI signature.
     let abi_args = dfg.signatures[sig_ref].argument_types.len();
-    legalize_inst_arguments(dfg,
-                            cfg,
-                            pos,
-                            abi_args,
-                            |dfg, abi_arg| dfg.signatures[sig_ref].argument_types[abi_arg]);
+    legalize_inst_arguments(dfg, cfg, pos, abi_args, |dfg, abi_arg| {
+        dfg.signatures[sig_ref].argument_types[abi_arg]
+    });
 
     if !dfg.signatures[sig_ref].return_types.is_empty() {
-        inst = legalize_inst_results(dfg,
-                                     pos,
-                                     |dfg, abi_res| dfg.signatures[sig_ref].return_types[abi_res]);
+        inst = legalize_inst_results(dfg, pos, |dfg, abi_res| {
+            dfg.signatures[sig_ref].return_types[abi_res]
+        });
     }
 
-    debug_assert!(check_call_signature(dfg, inst).is_ok(),
-                  "Signature still wrong: {}, {}{}",
-                  dfg.display_inst(inst, None),
-                  sig_ref,
-                  dfg.signatures[sig_ref]);
+    debug_assert!(
+        check_call_signature(dfg, inst).is_ok(),
+        "Signature still wrong: {}, {}{}",
+        dfg.display_inst(inst, None),
+        sig_ref,
+        dfg.signatures[sig_ref]
+    );
 
     // Go back and insert spills for any stack arguments.
     pos.goto_inst(inst);
@@ -519,27 +534,30 @@ pub fn handle_return_abi(inst: Inst, func: &mut Function, cfg: &ControlFlowGraph
         .iter()
         .rev()
         .take_while(|&rt| {
-                        rt.purpose == ArgumentPurpose::Link ||
-                        rt.purpose == ArgumentPurpose::StructReturn ||
-                        rt.purpose == ArgumentPurpose::VMContext
-                    })
+            rt.purpose == ArgumentPurpose::Link || rt.purpose == ArgumentPurpose::StructReturn ||
+                rt.purpose == ArgumentPurpose::VMContext
+        })
         .count();
 
     let abi_args = sig.return_types.len() - special_args;
-    legalize_inst_arguments(dfg,
-                            cfg,
-                            pos,
-                            abi_args,
-                            |_, abi_arg| sig.return_types[abi_arg]);
+    legalize_inst_arguments(
+        dfg,
+        cfg,
+        pos,
+        abi_args,
+        |_, abi_arg| sig.return_types[abi_arg],
+    );
     assert_eq!(dfg.inst_variable_args(inst).len(), abi_args);
 
     // Append special return arguments for any `sret`, `link`, and `vmctx` return values added to
     // the legalized signature. These values should simply be propagated from the entry block
     // arguments.
     if special_args > 0 {
-        dbg!("Adding {} special-purpose arguments to {}",
-             special_args,
-             dfg.display_inst(inst, None));
+        dbg!(
+            "Adding {} special-purpose arguments to {}",
+            special_args,
+            dfg.display_inst(inst, None)
+        );
         let mut vlist = dfg[inst].take_value_list().unwrap();
         for arg in &sig.return_types[abi_args..] {
             match arg.purpose {
@@ -579,10 +599,10 @@ pub fn handle_return_abi(inst: Inst, func: &mut Function, cfg: &ControlFlowGraph
 /// Values that are passed into the function on the stack must be assigned to an `IncomingArg`
 /// stack slot already during legalization.
 fn spill_entry_arguments(func: &mut Function, entry: Ebb) {
-    for (abi, &arg) in func.signature
-            .argument_types
-            .iter()
-            .zip(func.dfg.ebb_args(entry)) {
+    for (abi, &arg) in func.signature.argument_types.iter().zip(
+        func.dfg.ebb_args(entry),
+    )
+    {
         if let ArgumentLoc::Stack(offset) = abi.location {
             let ss = func.stack_slots.make_incoming_arg(abi.value_type, offset);
             func.locations[arg] = ValueLoc::Stack(ss);
@@ -598,15 +618,18 @@ fn spill_entry_arguments(func: &mut Function, entry: Ebb) {
 /// TODO: The outgoing stack slots can be written a bit earlier, as long as there are no branches
 /// or calls between writing the stack slots and the call instruction. Writing the slots earlier
 /// could help reduce register pressure before the call.
-fn spill_call_arguments(dfg: &mut DataFlowGraph,
-                        locations: &mut ValueLocations,
-                        stack_slots: &mut StackSlots,
-                        pos: &mut Cursor)
-                        -> bool {
-    let inst = pos.current_inst()
-        .expect("Cursor must point to a call instruction");
-    let sig_ref = dfg.call_signature(inst)
-        .expect("Call instruction expected.");
+fn spill_call_arguments(
+    dfg: &mut DataFlowGraph,
+    locations: &mut ValueLocations,
+    stack_slots: &mut StackSlots,
+    pos: &mut Cursor,
+) -> bool {
+    let inst = pos.current_inst().expect(
+        "Cursor must point to a call instruction",
+    );
+    let sig_ref = dfg.call_signature(inst).expect(
+        "Call instruction expected.",
+    );
 
     // Start by building a list of stack slots and arguments to be replaced.
     // This requires borrowing `dfg`, so we can't change anything.
@@ -35,12 +35,14 @@ pub fn expand_heap_addr(inst: ir::Inst, func: &mut ir::Function, _cfg: &mut Cont
 }
 
 /// Expand a `heap_addr` for a dynamic heap.
-fn dynamic_addr(inst: ir::Inst,
-                heap: ir::Heap,
-                offset: ir::Value,
-                size: u32,
-                bound_gv: ir::GlobalVar,
-                func: &mut ir::Function) {
+fn dynamic_addr(
+    inst: ir::Inst,
+    heap: ir::Heap,
+    offset: ir::Value,
+    size: u32,
+    bound_gv: ir::GlobalVar,
+    func: &mut ir::Function,
+) {
     let size = size as i64;
     let offset_ty = func.dfg.value_type(offset);
     let addr_ty = func.dfg.value_type(func.dfg.first_result(inst));
@@ -54,21 +56,30 @@ fn dynamic_addr(inst: ir::Inst,
     let oob;
     if size == 1 {
         // `offset > bound - 1` is the same as `offset >= bound`.
-        oob = pos.ins()
-            .icmp(IntCC::UnsignedGreaterThanOrEqual, offset, bound);
+        oob = pos.ins().icmp(
+            IntCC::UnsignedGreaterThanOrEqual,
+            offset,
+            bound,
+        );
     } else if size <= min_size {
         // We know that bound >= min_size, so here we can compare `offset > bound - size` without
        // wrapping.
         let adj_bound = pos.ins().iadd_imm(bound, -size);
-        oob = pos.ins()
-            .icmp(IntCC::UnsignedGreaterThan, offset, adj_bound);
+        oob = pos.ins().icmp(
+            IntCC::UnsignedGreaterThan,
+            offset,
+            adj_bound,
+        );
     } else {
         // We need an overflow check for the adjusted offset.
         let size_val = pos.ins().iconst(offset_ty, size);
         let (adj_offset, overflow) = pos.ins().iadd_cout(offset, size_val);
         pos.ins().trapnz(overflow);
-        oob = pos.ins()
-            .icmp(IntCC::UnsignedGreaterThan, adj_offset, bound);
+        oob = pos.ins().icmp(
+            IntCC::UnsignedGreaterThan,
+            adj_offset,
+            bound,
+        );
     }
     pos.ins().trapnz(oob);
 
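As an aside, the three branches above all implement the same bounds check. A minimal self-contained sketch of that arithmetic in plain Rust (not part of this commit, and not Cretonne IR):

// Sketch of the bounds-check arithmetic behind `dynamic_addr`: an access of
// `size` bytes at `offset` is out of bounds iff `offset + size > bound`.
// When `size == 1` this reduces to `offset >= bound`, and when `size <= bound`
// is guaranteed it can be rewritten as `offset > bound - size` without wrapping.
fn oob(offset: u64, size: u64, bound: u64) -> bool {
    match offset.checked_add(size) {
        Some(end) => end > bound,
        None => true, // the adjusted offset overflowed: certainly out of bounds
    }
}

fn main() {
    assert!(!oob(0, 1, 16));
    assert!(oob(16, 1, 16)); // size == 1 case: same as `offset >= bound`
    assert_eq!(oob(10, 4, 16), 10 > 16 - 4); // size <= bound: `offset > bound - size`
}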
@@ -76,12 +87,14 @@ fn dynamic_addr(inst: ir::Inst,
 }
 
 /// Expand a `heap_addr` for a static heap.
-fn static_addr(inst: ir::Inst,
-               heap: ir::Heap,
-               offset: ir::Value,
-               size: u32,
-               bound: i64,
-               func: &mut ir::Function) {
+fn static_addr(
+    inst: ir::Inst,
+    heap: ir::Heap,
+    offset: ir::Value,
+    size: u32,
+    bound: i64,
+    func: &mut ir::Function,
+) {
     let size = size as i64;
     let offset_ty = func.dfg.value_type(offset);
     let addr_ty = func.dfg.value_type(func.dfg.first_result(inst));
@@ -104,11 +117,17 @@ fn static_addr(inst: ir::Inst,
     let oob = if limit & 1 == 1 {
         // Prefer testing `offset >= limit - 1` when limit is odd because an even number is
         // likely to be a convenient constant on ARM and other RISC architectures.
-        pos.ins()
-            .icmp_imm(IntCC::UnsignedGreaterThanOrEqual, offset, limit - 1)
+        pos.ins().icmp_imm(
+            IntCC::UnsignedGreaterThanOrEqual,
+            offset,
+            limit - 1,
+        )
     } else {
-        pos.ins()
-            .icmp_imm(IntCC::UnsignedGreaterThan, offset, limit)
+        pos.ins().icmp_imm(
+            IntCC::UnsignedGreaterThan,
+            offset,
+            limit,
+        )
     };
     pos.ins().trapnz(oob);
 }
@@ -119,12 +138,14 @@ fn static_addr(inst: ir::Inst,
 /// Emit code for the base address computation of a `heap_addr` instruction.
 ///
 ///
-fn offset_addr(inst: ir::Inst,
-               heap: ir::Heap,
-               addr_ty: ir::Type,
-               mut offset: ir::Value,
-               offset_ty: ir::Type,
-               func: &mut ir::Function) {
+fn offset_addr(
+    inst: ir::Inst,
+    heap: ir::Heap,
+    addr_ty: ir::Type,
+    mut offset: ir::Value,
+    offset_ty: ir::Type,
+    func: &mut ir::Function,
+) {
     let mut pos = FuncCursor::new(func).at_inst(inst);
 
     // Convert `offset` to `addr_ty`.
@@ -66,9 +66,11 @@ pub fn legalize_function(func: &mut ir::Function, cfg: &mut ControlFlowGraph, is
                 split::simplify_branch_arguments(&mut pos.func.dfg, inst);
             }
 
-            match isa.encode(&pos.func.dfg,
-                             &pos.func.dfg[inst],
-                             pos.func.dfg.ctrl_typevar(inst)) {
+            match isa.encode(
+                &pos.func.dfg,
+                &pos.func.dfg[inst],
+                pos.func.dfg.ctrl_typevar(inst),
+            ) {
                 Ok(encoding) => pos.func.encodings[inst] = encoding,
                 Err(action) => {
                     // We should transform the instruction into legal equivalents.
@@ -71,21 +71,23 @@ use std::iter;
 
 /// Split `value` into two values using the `isplit` semantics. Do this by reusing existing values
 /// if possible.
-pub fn isplit(dfg: &mut DataFlowGraph,
-              cfg: &ControlFlowGraph,
-              pos: &mut Cursor,
-              value: Value)
-              -> (Value, Value) {
+pub fn isplit(
+    dfg: &mut DataFlowGraph,
+    cfg: &ControlFlowGraph,
+    pos: &mut Cursor,
+    value: Value,
+) -> (Value, Value) {
     split_any(dfg, cfg, pos, value, Opcode::Iconcat)
 }
 
 /// Split `value` into halves using the `vsplit` semantics. Do this by reusing existing values if
 /// possible.
-pub fn vsplit(dfg: &mut DataFlowGraph,
-              cfg: &ControlFlowGraph,
-              pos: &mut Cursor,
-              value: Value)
-              -> (Value, Value) {
+pub fn vsplit(
+    dfg: &mut DataFlowGraph,
+    cfg: &ControlFlowGraph,
+    pos: &mut Cursor,
+    value: Value,
+) -> (Value, Value) {
     split_any(dfg, cfg, pos, value, Opcode::Vconcat)
 }
 
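For readers unfamiliar with the `isplit`/`iconcat` pair this file legalizes with: a rough illustration in plain Rust of splitting a wide integer into low and high halves and concatenating them back. This is an assumption about the intended semantics for illustration, not code from this commit:

// Rough illustration of `isplit`/`iconcat` semantics on a concrete type:
// split a 64-bit value into its low and high 32-bit halves, then rebuild it.
fn isplit(x: u64) -> (u32, u32) {
    (x as u32, (x >> 32) as u32) // (lo, hi)
}

fn iconcat(lo: u32, hi: u32) -> u64 {
    (lo as u64) | ((hi as u64) << 32)
}

fn main() {
    let x = 0x0123_4567_89ab_cdefu64;
    let (lo, hi) = isplit(x);
    assert_eq!((lo, hi), (0x89ab_cdef, 0x0123_4567));
    assert_eq!(iconcat(lo, hi), x);
}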
@@ -107,12 +109,13 @@ struct Repair {
 }
 
 /// Generic version of `isplit` and `vsplit` controlled by the `concat` opcode.
-fn split_any(dfg: &mut DataFlowGraph,
-             cfg: &ControlFlowGraph,
-             pos: &mut Cursor,
-             value: Value,
-             concat: Opcode)
-             -> (Value, Value) {
+fn split_any(
+    dfg: &mut DataFlowGraph,
+    cfg: &ControlFlowGraph,
+    pos: &mut Cursor,
+    value: Value,
+    concat: Opcode,
+) -> (Value, Value) {
     let saved_pos = pos.position();
     let mut repairs = Vec::new();
     let result = split_value(dfg, pos, value, concat, &mut repairs);
@@ -121,17 +124,20 @@ fn split_any(dfg: &mut DataFlowGraph,
     while let Some(repair) = repairs.pop() {
         for &(_, inst) in cfg.get_predecessors(repair.ebb) {
             let branch_opc = dfg[inst].opcode();
-            assert!(branch_opc.is_branch(),
-                    "Predecessor not a branch: {}",
-                    dfg.display_inst(inst, None));
+            assert!(
+                branch_opc.is_branch(),
+                "Predecessor not a branch: {}",
+                dfg.display_inst(inst, None)
+            );
             let fixed_args = branch_opc.constraints().fixed_value_arguments();
-            let mut args = dfg[inst]
-                .take_value_list()
-                .expect("Branches must have value lists.");
+            let mut args = dfg[inst].take_value_list().expect(
+                "Branches must have value lists.",
+            );
             let num_args = args.len(&dfg.value_lists);
             // Get the old value passed to the EBB argument we're repairing.
-            let old_arg = args.get(fixed_args + repair.num, &dfg.value_lists)
-                .expect("Too few branch arguments");
+            let old_arg = args.get(fixed_args + repair.num, &dfg.value_lists).expect(
+                "Too few branch arguments",
+            );
 
             // It's possible that the CFG's predecessor list has duplicates. Detect them here.
             if dfg.value_type(old_arg) == repair.split_type {
@@ -145,19 +151,21 @@ fn split_any(dfg: &mut DataFlowGraph,
 
             // The `lo` part replaces the original argument.
             *args.get_mut(fixed_args + repair.num, &mut dfg.value_lists)
                 .unwrap() = lo;
 
             // The `hi` part goes at the end. Since multiple repairs may have been scheduled to the
             // same EBB, there could be multiple arguments missing.
             if num_args > fixed_args + repair.hi_num {
                 *args.get_mut(fixed_args + repair.hi_num, &mut dfg.value_lists)
                     .unwrap() = hi;
             } else {
                 // We need to append one or more arguments. If we're adding more than one argument,
                 // there must be pending repairs on the stack that will fill in the correct values
                 // instead of `hi`.
-                args.extend(iter::repeat(hi).take(1 + fixed_args + repair.hi_num - num_args),
-                            &mut dfg.value_lists);
+                args.extend(
+                    iter::repeat(hi).take(1 + fixed_args + repair.hi_num - num_args),
+                    &mut dfg.value_lists,
+                );
             }
 
             // Put the value list back after manipulating it.
@@ -175,12 +183,13 @@ fn split_any(dfg: &mut DataFlowGraph,
 /// instruction.
 ///
 /// Return the two new values representing the parts of `value`.
-fn split_value(dfg: &mut DataFlowGraph,
-               pos: &mut Cursor,
-               value: Value,
-               concat: Opcode,
-               repairs: &mut Vec<Repair>)
-               -> (Value, Value) {
+fn split_value(
+    dfg: &mut DataFlowGraph,
+    pos: &mut Cursor,
+    value: Value,
+    concat: Opcode,
+    repairs: &mut Vec<Repair>,
+) -> (Value, Value) {
     let value = dfg.resolve_copies(value);
     let mut reuse = None;
 
@@ -228,9 +237,12 @@ fn split_value(dfg: &mut DataFlowGraph,
                 // need to insert a split instruction before returning.
                 pos.goto_top(ebb);
                 pos.next_inst();
-                dfg.ins(pos)
-                    .with_result(value)
-                    .Binary(concat, split_type, lo, hi);
+                dfg.ins(pos).with_result(value).Binary(
+                    concat,
+                    split_type,
+                    lo,
+                    hi,
+                );
 
                 // Finally, splitting the EBB argument is not enough. We also have to repair all
                 // of the predecessor instructions that branch here.
@@ -254,19 +266,21 @@ fn split_value(dfg: &mut DataFlowGraph,
 }
 
 // Add a repair entry to the work list.
-fn add_repair(concat: Opcode,
-              split_type: Type,
-              ebb: Ebb,
-              num: usize,
-              hi_num: usize,
-              repairs: &mut Vec<Repair>) {
+fn add_repair(
+    concat: Opcode,
+    split_type: Type,
+    ebb: Ebb,
+    num: usize,
+    hi_num: usize,
+    repairs: &mut Vec<Repair>,
+) {
     repairs.push(Repair {
         concat,
         split_type,
         ebb,
         num,
         hi_num,
     });
 }
 
 /// Strip concat-split chains. Return a simpler way of computing the same value.
@@ -10,10 +10,12 @@ use loop_analysis::{Loop, LoopAnalysis};
 /// Performs the LICM pass by detecting loops within the CFG and moving
 /// loop-invariant instructions out of them.
 /// Changes the CFG and domtree in-place during the operation.
-pub fn do_licm(func: &mut Function,
-               cfg: &mut ControlFlowGraph,
-               domtree: &mut DominatorTree,
-               loop_analysis: &mut LoopAnalysis) {
+pub fn do_licm(
+    func: &mut Function,
+    cfg: &mut ControlFlowGraph,
+    domtree: &mut DominatorTree,
+    loop_analysis: &mut LoopAnalysis,
+) {
     loop_analysis.compute(func, cfg, domtree);
     for lp in loop_analysis.loops() {
         // For each loop that we want to optimize we determine the set of loop-invariant
@@ -53,11 +55,12 @@ pub fn do_licm(func: &mut Function,
 
 // Insert a pre-header before the header, modifying the function layout and CFG to reflect it.
 // A jump instruction to the header is placed at the end of the pre-header.
-fn create_pre_header(header: Ebb,
-                     func: &mut Function,
-                     cfg: &mut ControlFlowGraph,
-                     domtree: &DominatorTree)
-                     -> Ebb {
+fn create_pre_header(
+    header: Ebb,
+    func: &mut Function,
+    cfg: &mut ControlFlowGraph,
+    domtree: &DominatorTree,
+) -> Ebb {
     let pool = &mut ListPool::<Value>::new();
     let header_args_values: Vec<Value> = func.dfg.ebb_args(header).into_iter().cloned().collect();
     let header_args_types: Vec<Type> = header_args_values
@@ -82,9 +85,10 @@ fn create_pre_header(header: Ebb,
         // Inserts the pre-header at the right place in the layout.
         pos.insert_ebb(pre_header);
         pos.next_inst();
-        func.dfg
-            .ins(&mut pos)
-            .jump(header, pre_header_args_value.as_slice(pool));
+        func.dfg.ins(&mut pos).jump(
+            header,
+            pre_header_args_value.as_slice(pool),
+        );
     }
     pre_header
 }
@@ -94,11 +98,12 @@ fn create_pre_header(header: Ebb,
 // A loop header has a pre-header if there is only one predecessor that the header doesn't
 // dominate.
 // Returns the pre-header Ebb and the instruction jumping to the header.
-fn has_pre_header(layout: &Layout,
-                  cfg: &ControlFlowGraph,
-                  domtree: &DominatorTree,
-                  header: Ebb)
-                  -> Option<(Ebb, Inst)> {
+fn has_pre_header(
+    layout: &Layout,
+    cfg: &ControlFlowGraph,
+    domtree: &DominatorTree,
+    header: Ebb,
+) -> Option<(Ebb, Inst)> {
     let mut result = None;
     let mut found = false;
     for &(pred_ebb, last_inst) in cfg.get_predecessors(header) {
@@ -129,11 +134,12 @@ fn change_branch_jump_destination(inst: Inst, new_ebb: Ebb, func: &mut Function)
 // Traverses a loop in reverse post-order from a header EBB and identify loop-invariant
 // instructions. These loop-invariant instructions are then removed from the code and returned
 // (in reverse post-order) for later use.
-fn remove_loop_invariant_instructions(lp: Loop,
-                                      func: &mut Function,
-                                      cfg: &ControlFlowGraph,
-                                      loop_analysis: &LoopAnalysis)
-                                      -> Vec<Inst> {
+fn remove_loop_invariant_instructions(
+    lp: Loop,
+    func: &mut Function,
+    cfg: &ControlFlowGraph,
+    loop_analysis: &LoopAnalysis,
+) -> Vec<Inst> {
     let mut loop_values: HashSet<Value> = HashSet::new();
     let mut invariant_inst: Vec<Inst> = Vec::new();
     let mut pos = Cursor::new(&mut func.layout);
@@ -146,10 +152,10 @@ fn remove_loop_invariant_instructions(lp: Loop,
         pos.goto_top(*ebb);
         while let Some(inst) = pos.next_inst() {
             if func.dfg.has_results(inst) &&
-               func.dfg
-                   .inst_args(inst)
-                   .into_iter()
-                   .all(|arg| !loop_values.contains(arg)) {
+                func.dfg.inst_args(inst).into_iter().all(|arg| {
+                    !loop_values.contains(arg)
+                })
+            {
                 // If all the instruction's argument are defined outside the loop
                 // then this instruction is loop-invariant
                 invariant_inst.push(inst);
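The LICM pass touched above hoists instructions whose arguments are all defined outside the loop into a pre-header. A schematic before/after in plain Rust rather than Cretonne IR, purely for illustration:

// Schematic loop-invariant code motion: `a * b` does not depend on the loop,
// so it can be computed once in a "pre-header" instead of on every iteration.
fn before(a: i32, b: i32, xs: &[i32]) -> i32 {
    let mut sum = 0;
    for &x in xs {
        let inv = a * b; // recomputed every iteration
        sum += x + inv;
    }
    sum
}

fn after(a: i32, b: i32, xs: &[i32]) -> i32 {
    let inv = a * b; // hoisted: computed once before the loop
    let mut sum = 0;
    for &x in xs {
        sum += x + inv;
    }
    sum
}

fn main() {
    let xs = [1, 2, 3];
    assert_eq!(before(2, 5, &xs), after(2, 5, &xs));
}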
@@ -105,10 +105,12 @@ impl LoopAnalysis {
 
     // Traverses the CFG in reverse postorder and create a loop object for every EBB having a
     // back edge.
-    fn find_loop_headers(&mut self,
-                         cfg: &ControlFlowGraph,
-                         domtree: &DominatorTree,
-                         layout: &Layout) {
+    fn find_loop_headers(
+        &mut self,
+        cfg: &ControlFlowGraph,
+        domtree: &DominatorTree,
+        layout: &Layout,
+    ) {
         // We traverse the CFG in reverse postorder
         for &ebb in domtree.cfg_postorder().iter().rev() {
             for &(_, pred_inst) in cfg.get_predecessors(ebb) {
@@ -127,10 +129,12 @@ impl LoopAnalysis {
     // Intended to be called after `find_loop_headers`. For each detected loop header,
     // discovers all the ebb belonging to the loop and its inner loops. After a call to this
     // function, the loop tree is fully constructed.
-    fn discover_loop_blocks(&mut self,
-                            cfg: &ControlFlowGraph,
-                            domtree: &DominatorTree,
-                            layout: &Layout) {
+    fn discover_loop_blocks(
+        &mut self,
+        cfg: &ControlFlowGraph,
+        domtree: &DominatorTree,
+        layout: &Layout,
+    ) {
         let mut stack: Vec<Ebb> = Vec::new();
         // We handle each loop header in reverse order, corresponding to a pesudo postorder
         // traversal of the graph.
@@ -38,7 +38,8 @@ impl<T: ReservedValue> PackedOption<T> {
 
     /// Maps a `PackedOption<T>` to `Option<U>` by applying a function to a contained value.
     pub fn map<U, F>(self, f: F) -> Option<U>
-        where F: FnOnce(T) -> U
+    where
+        F: FnOnce(T) -> U,
     {
         self.expand().map(f)
     }
@@ -69,8 +70,10 @@ impl<T: ReservedValue> Default for PackedOption<T> {
 impl<T: ReservedValue> From<T> for PackedOption<T> {
     /// Convert `t` into a packed `Some(x)`.
     fn from(t: T) -> PackedOption<T> {
-        debug_assert!(t != T::reserved_value(),
-                      "Can't make a PackedOption from the reserved value.");
+        debug_assert!(
+            t != T::reserved_value(),
+            "Can't make a PackedOption from the reserved value."
+        );
         PackedOption(t)
     }
 }
@@ -92,7 +95,8 @@ impl<T: ReservedValue> Into<Option<T>> for PackedOption<T> {
 }
 
 impl<T> fmt::Debug for PackedOption<T>
-    where T: ReservedValue + fmt::Debug
+where
+    T: ReservedValue + fmt::Debug,
 {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         if self.is_none() {
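For context on the type being reformatted here: `PackedOption<T>` stores an `Option`-like value with no extra space by reserving one sentinel value of `T` to mean "none". A minimal stand-alone sketch of the idea, not the crate's actual implementation:

// Minimal sketch of a reserved-value option: one sentinel value of T encodes "none".
trait ReservedValue: Copy + PartialEq {
    fn reserved_value() -> Self;
}

impl ReservedValue for u32 {
    fn reserved_value() -> Self {
        u32::MAX
    }
}

#[derive(Copy, Clone)]
struct PackedOption<T: ReservedValue>(T);

impl<T: ReservedValue> PackedOption<T> {
    fn none() -> Self {
        PackedOption(T::reserved_value())
    }
    fn some(t: T) -> Self {
        debug_assert!(t != T::reserved_value());
        PackedOption(t)
    }
    fn expand(self) -> Option<T> {
        if self.0 == T::reserved_value() { None } else { Some(self.0) }
    }
    fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> {
        self.expand().map(f)
    }
}

fn main() {
    assert_eq!(PackedOption::some(7u32).map(|x| x + 1), Some(8));
    assert_eq!(PackedOption::<u32>::none().expand(), None);
}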
@@ -7,7 +7,8 @@
 ///
 /// Returns the number of elements where `p(t)` is true.
 pub fn partition_slice<'a, T: 'a, F>(s: &'a mut [T], mut p: F) -> usize
-    where F: FnMut(&T) -> bool
+where
+    F: FnMut(&T) -> bool,
 {
     // Count the length of the prefix where `p` returns true.
     let mut count = match s.iter().position(|t| !p(t)) {
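A small usage sketch of a partition-style helper with the signature shown above. This is an illustrative implementation for the example only, not the crate's:

// Illustrative partition: move elements satisfying `p` to the front and return
// how many elements satisfy `p`.
fn partition_slice<T, F>(s: &mut [T], mut p: F) -> usize
where
    F: FnMut(&T) -> bool,
{
    let mut count = 0;
    for i in 0..s.len() {
        if p(&s[i]) {
            s.swap(count, i);
            count += 1;
        }
    }
    count
}

fn main() {
    let mut v = [3, 8, 1, 6, 5];
    let n = partition_slice(&mut v, |&x| x % 2 == 0);
    assert_eq!(n, 2);
    assert!(v[..n].iter().all(|&x| x % 2 == 0));
    assert!(v[n..].iter().all(|&x| x % 2 != 0));
}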
@@ -91,7 +91,8 @@ impl Affinity {
             // If the preferred register class is a subclass of the constraint, there's no need
             // to change anything.
             if constraint.kind != ConstraintKind::Stack &&
-               !constraint.regclass.has_subclass(rc) {
+                !constraint.regclass.has_subclass(rc)
+            {
                 // If the register classes don't overlap, `intersect` returns `None`, and we
                 // just keep our previous affinity.
                 if let Some(subclass) = constraint.regclass.intersect(reg_info.rc(rc)) {
@@ -86,10 +86,9 @@ impl AllocatableSet {
     ///
     /// This assumes that unused bits are 1.
     pub fn interferes_with(&self, other: &AllocatableSet) -> bool {
-        self.avail
-            .iter()
-            .zip(&other.avail)
-            .any(|(&x, &y)| (x | y) != !0)
+        self.avail.iter().zip(&other.avail).any(
+            |(&x, &y)| (x | y) != !0,
+        )
     }
 
     /// Intersect this set of allocatable registers with `other`. This has the effect of removing
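The `(x | y) != !0` test above only works because unused bits are 1 in both masks. A tiny worked example, under the assumption that a 1 bit means the register unit is still available:

// Worked example of the availability-mask interference test: with unused bits
// set to 1, `(x | y) != !0` is true exactly when some register unit is
// unavailable (0) in both sets, i.e. the two allocations interfere.
fn interferes(x: u32, y: u32) -> bool {
    (x | y) != !0
}

fn main() {
    let a = !0b0001u32; // register unit 0 taken in `a`
    let b = !0b0010u32; // register unit 1 taken in `b`
    assert!(!interferes(a, b)); // disjoint units: no interference
    assert!(interferes(a, a)); // unit 0 taken in both: interference
}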
@@ -132,14 +132,15 @@ impl DomForest {
     ///
     /// If the merge succeeds, returns `Ok(())`. The merged sequence can be extracted with
     /// `swap()`.
-    pub fn try_merge(&mut self,
-                     va: &[Value],
-                     vb: &[Value],
-                     dfg: &DataFlowGraph,
-                     layout: &Layout,
-                     domtree: &DominatorTree,
-                     liveness: &Liveness)
-                     -> Result<(), (Value, Value)> {
+    pub fn try_merge(
+        &mut self,
+        va: &[Value],
+        vb: &[Value],
+        dfg: &DataFlowGraph,
+        layout: &Layout,
+        domtree: &DominatorTree,
+        liveness: &Liveness,
+    ) -> Result<(), (Value, Value)> {
         self.stack.clear();
         self.values.clear();
         self.values.reserve(va.len() + vb.len());
@@ -154,16 +155,16 @@ impl DomForest {
         for node in merged {
             if let Some(parent) = self.push_node(node, layout, domtree) {
                 // Check if `parent` live range contains `node.def`.
-                let lr = liveness
-                    .get(parent)
-                    .expect("No live range for parent value");
+                let lr = liveness.get(parent).expect(
+                    "No live range for parent value",
+                );
                 if lr.overlaps_def(node.def, layout.pp_ebb(node.def), layout) {
                     // Interference detected. Get the `(a, b)` order right in the error.
                     return Err(if node.set == 0 {
                         (node.value, parent)
                     } else {
                         (parent, node.value)
                     });
                 }
             }
         }
@@ -177,8 +178,9 @@ impl DomForest {
 /// Given two ordered sequences of nodes, yield an ordered sequence containing all of them.
 /// Duplicates are removed.
 struct MergedNodes<'a, IA, IB>
-    where IA: Iterator<Item = Node>,
-          IB: Iterator<Item = Node>
+where
+    IA: Iterator<Item = Node>,
+    IB: Iterator<Item = Node>,
 {
     a: Peekable<IA>,
     b: Peekable<IB>,
@@ -187,8 +189,9 @@ struct MergedNodes<'a, IA, IB>
 }
 
 impl<'a, IA, IB> Iterator for MergedNodes<'a, IA, IB>
-    where IA: Iterator<Item = Node>,
-          IB: Iterator<Item = Node>
+where
+    IA: Iterator<Item = Node>,
+    IB: Iterator<Item = Node>,
 {
     type Item = Node;
 
@@ -198,9 +201,12 @@ impl<'a, IA, IB> Iterator for MergedNodes<'a, IA, IB>
                 // If the two values are defined at the same point, compare value numbers instead
                 // this is going to cause an interference conflict unless its actually the same
                 // value appearing in both streams.
-                self.domtree
-                    .rpo_cmp(a.def, b.def, self.layout)
-                    .then(Ord::cmp(&a.value, &b.value))
+                self.domtree.rpo_cmp(a.def, b.def, self.layout).then(
+                    Ord::cmp(
+                        &a.value,
+                        &b.value,
+                    ),
+                )
             }
             (Some(_), None) => Ordering::Less,
             (None, Some(_)) => Ordering::Greater,
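A generic sketch of the ordered-merge-with-deduplication pattern that `MergedNodes` implements, simplified to plain integers; this is not the iterator above, only an illustration of the same shape:

// Simplified ordered merge of two sorted streams with duplicates removed.
use std::cmp::Ordering;
use std::iter::Peekable;

struct Merged<A: Iterator<Item = u32>, B: Iterator<Item = u32>> {
    a: Peekable<A>,
    b: Peekable<B>,
}

impl<A: Iterator<Item = u32>, B: Iterator<Item = u32>> Iterator for Merged<A, B> {
    type Item = u32;
    fn next(&mut self) -> Option<u32> {
        let ord = match (self.a.peek(), self.b.peek()) {
            (Some(x), Some(y)) => x.cmp(y),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => return None,
        };
        match ord {
            Ordering::Less => self.a.next(),
            Ordering::Greater => self.b.next(),
            Ordering::Equal => {
                self.b.next(); // drop the duplicate
                self.a.next()
            }
        }
    }
}

fn main() {
    let m = Merged {
        a: vec![1, 3, 5].into_iter().peekable(),
        b: vec![1, 2, 5].into_iter().peekable(),
    };
    assert_eq!(m.collect::<Vec<_>>(), vec![1, 2, 3, 5]);
}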
@@ -256,13 +262,15 @@ impl Coalescing {
     }
 
     /// Convert `func` to conventional SSA form and build virtual registers in the process.
-    pub fn conventional_ssa(&mut self,
-                            isa: &TargetIsa,
-                            func: &mut Function,
-                            cfg: &ControlFlowGraph,
-                            domtree: &DominatorTree,
-                            liveness: &mut Liveness,
-                            virtregs: &mut VirtRegs) {
+    pub fn conventional_ssa(
+        &mut self,
+        isa: &TargetIsa,
+        func: &mut Function,
+        cfg: &ControlFlowGraph,
+        domtree: &DominatorTree,
+        liveness: &mut Liveness,
+        virtregs: &mut VirtRegs,
+    ) {
         dbg!("Coalescing for:\n{}", func.display(isa));
         let mut context = Context {
             isa,
@@ -329,9 +337,11 @@ impl<'a> Context<'a> {
             //
             // Try to catch infinite splitting loops. The values created by splitting should never
             // have irreconcilable interferences.
-            assert!(!self.split_values.contains(&bad_value),
-                    "{} was already isolated",
-                    bad_value);
+            assert!(
+                !self.split_values.contains(&bad_value),
+                "{} was already isolated",
+                bad_value
+            );
             let split_len = self.split_values.len();
 
             // The bad value can be both the successor value and a predecessor value at the same
@@ -349,18 +359,22 @@ impl<'a> Context<'a> {
             }
 
             // Second loop check.
-            assert_ne!(split_len,
-                       self.split_values.len(),
-                       "Couldn't isolate {}",
-                       bad_value);
+            assert_ne!(
+                split_len,
+                self.split_values.len(),
+                "Couldn't isolate {}",
+                bad_value
+            );
         }
 
         let vreg = self.virtregs.unify(self.values);
-        dbg!("Coalesced {} arg {} into {} = {}",
-             ebb,
-             argnum,
-             vreg,
-             DisplayList(self.virtregs.values(vreg)));
+        dbg!(
+            "Coalesced {} arg {} into {} = {}",
+            ebb,
+            argnum,
+            vreg,
+            DisplayList(self.virtregs.values(vreg))
+        );
     }
 
     /// Reset `self.values` to just the set of split values.
@@ -369,21 +383,21 @@ impl<'a> Context<'a> {
         self.values.extend_from_slice(self.split_values);
         let domtree = &self.domtree;
         let func = &self.func;
-        self.values
-            .sort_by(|&a, &b| {
-                         domtree.rpo_cmp(func.dfg.value_def(a), func.dfg.value_def(b), &func.layout)
-                     });
+        self.values.sort_by(|&a, &b| {
+            domtree.rpo_cmp(func.dfg.value_def(a), func.dfg.value_def(b), &func.layout)
+        });
     }
 
     /// Try coalescing predecessors with `succ_val`.
     ///
     /// Returns a value from a congruence class that needs to be split before starting over, or
     /// `None` if everything was successfully coalesced into `self.values`.
-    fn try_coalesce(&mut self,
-                    argnum: usize,
-                    succ_val: Value,
-                    preds: &[BasicBlock])
-                    -> Option<Value> {
+    fn try_coalesce(
+        &mut self,
+        argnum: usize,
+        succ_val: Value,
+        preds: &[BasicBlock],
+    ) -> Option<Value> {
         // Initialize the value list with the split values. These are guaranteed to be
         // interference free, and anything that interferes with them must be split away.
         self.reset_values();
@@ -397,19 +411,22 @@ impl<'a> Context<'a> {
 
         for &(pred_ebb, pred_inst) in preds {
            let pred_val = self.func.dfg.inst_variable_args(pred_inst)[argnum];
-            dbg!("Checking {}: {}: {}",
-                 pred_val,
-                 pred_ebb,
-                 self.func.dfg.display_inst(pred_inst, self.isa));
+            dbg!(
+                "Checking {}: {}: {}",
+                pred_val,
+                pred_ebb,
+                self.func.dfg.display_inst(pred_inst, self.isa)
+            );
 
             // Never coalesce incoming function arguments on the stack. These arguments are
             // pre-spilled, and the rest of the virtual register would be forced to spill to the
             // `incoming_arg` stack slot too.
             if let ValueDef::Arg(def_ebb, def_num) = self.func.dfg.value_def(pred_val) {
                 if Some(def_ebb) == self.func.layout.entry_block() &&
                     self.func.signature.argument_types[def_num]
                         .location
-                        .is_stack() {
+                        .is_stack()
+                {
                     dbg!("Isolating incoming stack parameter {}", pred_val);
                     let new_val = self.split_pred(pred_inst, pred_ebb, argnum, pred_val);
                     assert!(self.add_class(new_val).is_ok());
@@ -424,9 +441,10 @@ impl<'a> Context<'a> {
                 //
                 // Check if the `a` live range is fundamentally incompatible with `pred_inst`.
                 if self.liveness
                     .get(a)
                     .expect("No live range for interfering value")
-                    .reaches_use(pred_inst, pred_ebb, &self.func.layout) {
+                    .reaches_use(pred_inst, pred_ebb, &self.func.layout)
+                {
                     // Splitting at `pred_inst` wouldn't resolve the interference, so we need to
                     // start over.
                     return Some(a);
@@ -435,8 +453,10 @@ impl<'a> Context<'a> {
                 // The local conflict could be avoided by splitting at this predecessor, so try
                 // that. This split is not necessarily required, but it allows us to make progress.
                 let new_val = self.split_pred(pred_inst, pred_ebb, argnum, pred_val);
-                assert!(self.add_class(new_val).is_ok(),
-                        "Splitting didn't resolve conflict.");
+                assert!(
+                    self.add_class(new_val).is_ok(),
+                    "Splitting didn't resolve conflict."
+                );
             }
         }
 
@@ -447,42 +467,52 @@ impl<'a> Context<'a> {
     ///
     /// Leave `self.values` unchanged on failure.
     fn add_class(&mut self, value: Value) -> Result<(), (Value, Value)> {
-        self.forest
-            .try_merge(&self.values,
-                       self.virtregs.congruence_class(&value),
-                       &self.func.dfg,
-                       &self.func.layout,
-                       self.domtree,
-                       self.liveness)?;
+        self.forest.try_merge(
+            &self.values,
+            self.virtregs.congruence_class(&value),
+            &self.func.dfg,
+            &self.func.layout,
+            self.domtree,
+            self.liveness,
+        )?;
         self.forest.swap(&mut self.values);
         Ok(())
     }
 
     /// Split the congruence class for the `argnum` argument to `pred_inst` by inserting a copy.
-    fn split_pred(&mut self,
-                  pred_inst: Inst,
-                  pred_ebb: Ebb,
-                  argnum: usize,
-                  pred_val: Value)
-                  -> Value {
+    fn split_pred(
+        &mut self,
+        pred_inst: Inst,
+        pred_ebb: Ebb,
+        argnum: usize,
+        pred_val: Value,
+    ) -> Value {
         let mut pos = EncCursor::new(self.func, self.isa).at_inst(pred_inst);
         let copy = pos.ins().copy(pred_val);
         let inst = pos.built_inst();
 
-        dbg!("Inserted {}, before {}: {}",
-             pos.display_inst(inst),
-             pred_ebb,
-             pos.display_inst(pred_inst));
+        dbg!(
+            "Inserted {}, before {}: {}",
+            pos.display_inst(inst),
+            pred_ebb,
+            pos.display_inst(pred_inst)
+        );
 
         // Create a live range for the new value.
-        let affinity = Affinity::new(&self.encinfo
-                                          .operand_constraints(pos.func.encodings[inst])
-                                          .expect("Bad copy encoding")
-                                          .outs
-                                          [0]);
+        let affinity = Affinity::new(
+            &self.encinfo
+                .operand_constraints(pos.func.encodings[inst])
+                .expect("Bad copy encoding")
+                .outs
+                [0],
+        );
         self.liveness.create_dead(copy, inst, affinity);
-        self.liveness
-            .extend_locally(copy, pred_ebb, pred_inst, &pos.func.layout);
+        self.liveness.extend_locally(
+            copy,
+            pred_ebb,
+            pred_inst,
+            &pos.func.layout,
+        );
 
         pos.func.dfg.inst_variable_args_mut(pred_inst)[argnum] = copy;
         self.split_values.push(copy);
@@ -500,21 +530,29 @@ impl<'a> Context<'a> {
         let inst = pos.built_inst();
         self.liveness.move_def_locally(succ_val, inst);
 
-        dbg!("Inserted {}, following {}({}: {})",
-             pos.display_inst(inst),
-             ebb,
-             new_val,
-             ty);
+        dbg!(
+            "Inserted {}, following {}({}: {})",
+            pos.display_inst(inst),
+            ebb,
+            new_val,
+            ty
+        );
 
         // Create a live range for the new value.
-        let affinity = Affinity::new(&self.encinfo
-                                          .operand_constraints(pos.func.encodings[inst])
-                                          .expect("Bad copy encoding")
-                                          .outs
-                                          [0]);
+        let affinity = Affinity::new(
+            &self.encinfo
+                .operand_constraints(pos.func.encodings[inst])
+                .expect("Bad copy encoding")
+                .outs
+                [0],
+        );
         self.liveness.create_dead(new_val, ebb, affinity);
-        self.liveness
-            .extend_locally(new_val, ebb, inst, &pos.func.layout);
+        self.liveness.extend_locally(
+            new_val,
+            ebb,
+            inst,
+            &pos.func.layout,
+        );
 
         self.split_values.push(new_val);
         new_val
@@ -105,12 +105,14 @@ impl Coloring {
     }
 
     /// Run the coloring algorithm over `func`.
-    pub fn run(&mut self,
-               isa: &TargetIsa,
-               func: &mut Function,
-               domtree: &DominatorTree,
-               liveness: &mut Liveness,
-               tracker: &mut LiveValueTracker) {
+    pub fn run(
+        &mut self,
+        isa: &TargetIsa,
+        func: &mut Function,
+        domtree: &DominatorTree,
+        liveness: &mut Liveness,
+        tracker: &mut LiveValueTracker,
+    ) {
         dbg!("Coloring for:\n{}", func.display(isa));
         let mut ctx = Context {
             isa,
@@ -150,15 +152,17 @@ impl<'a> Context<'a> {
         pos.goto_top(ebb);
         while let Some(inst) = pos.next_inst() {
             if let Some(constraints) = self.encinfo.operand_constraints(func.encodings[inst]) {
-                self.visit_inst(inst,
-                                constraints,
-                                &mut pos,
-                                &mut func.dfg,
-                                tracker,
-                                &mut regs,
-                                &mut func.locations,
-                                &mut func.encodings,
-                                &func.signature);
+                self.visit_inst(
+                    inst,
+                    constraints,
+                    &mut pos,
+                    &mut func.dfg,
+                    tracker,
+                    &mut regs,
+                    &mut func.locations,
+                    &mut func.encodings,
+                    &func.signature,
+                );
             } else {
                 let (_throughs, kills) = tracker.process_ghost(inst);
                 self.process_ghost_kills(kills, &mut regs, &func.locations);
@@ -170,11 +174,12 @@ impl<'a> Context<'a> {
     /// Visit the `ebb` header.
     ///
     /// Initialize the set of live registers and color the arguments to `ebb`.
-    fn visit_ebb_header(&self,
-                        ebb: Ebb,
-                        func: &mut Function,
-                        tracker: &mut LiveValueTracker)
-                        -> AllocatableSet {
+    fn visit_ebb_header(
+        &self,
+        ebb: Ebb,
+        func: &mut Function,
+        tracker: &mut LiveValueTracker,
+    ) -> AllocatableSet {
         // Reposition the live value tracker and deal with the EBB arguments.
         tracker.ebb_top(ebb, &func.dfg, self.liveness, &func.layout, self.domtree);
 
@@ -204,10 +209,12 @@ impl<'a> Context<'a> {
                 .get(value)
                 .expect("No live range for live-in")
                 .affinity;
-            dbg!("Live-in: {}:{} in {}",
-                 value,
-                 affinity.display(&self.reginfo),
-                 func.locations[value].display(&self.reginfo));
+            dbg!(
+                "Live-in: {}:{} in {}",
+                value,
+                affinity.display(&self.reginfo),
+                func.locations[value].display(&self.reginfo)
+            );
             if let Affinity::Reg(rci) = affinity {
                 let rc = self.reginfo.rc(rci);
                 let loc = func.locations[value];
@@ -230,11 +237,12 @@ impl<'a> Context<'a> {
     /// function signature.
     ///
     /// Return the set of remaining allocatable registers after filtering out the dead arguments.
-    fn color_entry_args(&self,
-                        sig: &Signature,
-                        args: &[LiveValue],
-                        locations: &mut ValueLocations)
-                        -> AllocatableSet {
+    fn color_entry_args(
+        &self,
+        sig: &Signature,
+        args: &[LiveValue],
+        locations: &mut ValueLocations,
+    ) -> AllocatableSet {
         assert_eq!(sig.argument_types.len(), args.len());
 
         let mut regs = self.usable_regs.clone();
@@ -250,10 +258,12 @@ impl<'a> Context<'a> {
                     locations[lv.value] = ValueLoc::Reg(reg);
                 } else {
                     // This should have been fixed by the reload pass.
-                    panic!("Entry arg {} has {} affinity, but ABI {}",
-                           lv.value,
-                           lv.affinity.display(&self.reginfo),
-                           abi.display(&self.reginfo));
+                    panic!(
+                        "Entry arg {} has {} affinity, but ABI {}",
+                        lv.value,
+                        lv.affinity.display(&self.reginfo),
+                        abi.display(&self.reginfo)
+                    );
                 }
 
             }
@@ -273,19 +283,23 @@ impl<'a> Context<'a> {
     ///
     /// Update `regs` to reflect the allocated registers after `inst`, including removing any dead
     /// or killed values from the set.
-    fn visit_inst(&mut self,
-                  inst: Inst,
-                  constraints: &RecipeConstraints,
-                  pos: &mut Cursor,
-                  dfg: &mut DataFlowGraph,
-                  tracker: &mut LiveValueTracker,
-                  regs: &mut AllocatableSet,
-                  locations: &mut ValueLocations,
-                  encodings: &mut InstEncodings,
-                  func_signature: &Signature) {
-        dbg!("Coloring {}\n {}",
-             dfg.display_inst(inst, self.isa),
-             regs.display(&self.reginfo));
+    fn visit_inst(
+        &mut self,
+        inst: Inst,
+        constraints: &RecipeConstraints,
+        pos: &mut Cursor,
+        dfg: &mut DataFlowGraph,
+        tracker: &mut LiveValueTracker,
+        regs: &mut AllocatableSet,
+        locations: &mut ValueLocations,
+        encodings: &mut InstEncodings,
+        func_signature: &Signature,
+    ) {
+        dbg!(
+            "Coloring {}\n {}",
+            dfg.display_inst(inst, self.isa),
+            regs.display(&self.reginfo)
+        );
 
         // EBB whose arguments should be colored to match the current branch instruction's
         // arguments.
@@ -310,10 +324,12 @@ impl<'a> Context<'a> {
             } else {
                 // This is a multi-way branch like `br_table`. We only support arguments on
                 // single-destination branches.
-                assert_eq!(dfg.inst_variable_args(inst).len(),
-                           0,
-                           "Can't handle EBB arguments: {}",
-                           dfg.display_inst(inst, self.isa));
+                assert_eq!(
+                    dfg.inst_variable_args(inst).len(),
+                    0,
+                    "Can't handle EBB arguments: {}",
+                    dfg.display_inst(inst, self.isa)
+                );
                 self.undivert_regs(|lr| !lr.is_local());
             }
         }
@@ -329,10 +345,11 @@ impl<'a> Context<'a> {
         // Get rid of the killed values.
         for lv in kills {
             if let Affinity::Reg(rci) = lv.affinity {
-                self.solver
-                    .add_kill(lv.value,
-                              self.reginfo.rc(rci),
-                              self.divert.reg(lv.value, locations));
+                self.solver.add_kill(
+                    lv.value,
+                    self.reginfo.rc(rci),
+                    self.divert.reg(lv.value, locations),
+                );
             }
         }
 
@@ -350,9 +367,9 @@ impl<'a> Context<'a> {
 
         // Finally, we've fully programmed the constraint solver.
         // We expect a quick solution in most cases.
-        let mut output_regs = self.solver
-            .quick_solve()
-            .unwrap_or_else(|_| self.iterate_solution());
+        let mut output_regs = self.solver.quick_solve().unwrap_or_else(
+            |_| self.iterate_solution(),
+        );
 
 
         // The solution and/or fixed input constraints may require us to shuffle the set of live
@@ -399,30 +416,42 @@ impl<'a> Context<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Program the input-side constraints for `inst` into the constraint solver.
|
/// Program the input-side constraints for `inst` into the constraint solver.
|
||||||
fn program_input_constraints(&mut self,
|
fn program_input_constraints(
|
||||||
inst: Inst,
|
&mut self,
|
||||||
constraints: &[OperandConstraint],
|
inst: Inst,
|
||||||
dfg: &DataFlowGraph,
|
constraints: &[OperandConstraint],
|
||||||
locations: &ValueLocations) {
|
dfg: &DataFlowGraph,
|
||||||
for (op, &value) in constraints
|
locations: &ValueLocations,
|
||||||
.iter()
|
) {
|
||||||
.zip(dfg.inst_args(inst))
|
for (op, &value) in constraints.iter().zip(dfg.inst_args(inst)).filter(
|
||||||
.filter(|&(op, _)| op.kind != ConstraintKind::Stack) {
|
|&(op, _)| {
|
||||||
|
op.kind != ConstraintKind::Stack
|
||||||
|
},
|
||||||
|
)
|
||||||
|
{
|
||||||
// Reload pass is supposed to ensure that all arguments to register operands are
|
// Reload pass is supposed to ensure that all arguments to register operands are
|
||||||
// already in a register.
|
// already in a register.
|
||||||
let cur_reg = self.divert.reg(value, locations);
|
let cur_reg = self.divert.reg(value, locations);
|
||||||
match op.kind {
|
match op.kind {
|
||||||
ConstraintKind::FixedReg(regunit) => {
|
ConstraintKind::FixedReg(regunit) => {
|
||||||
if regunit != cur_reg {
|
if regunit != cur_reg {
|
||||||
self.solver
|
self.solver.reassign_in(
|
||||||
.reassign_in(value, op.regclass, cur_reg, regunit);
|
value,
|
||||||
|
op.regclass,
|
||||||
|
cur_reg,
|
||||||
|
regunit,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ConstraintKind::Reg |
|
ConstraintKind::Reg |
|
||||||
ConstraintKind::Tied(_) => {
|
ConstraintKind::Tied(_) => {
|
||||||
if !op.regclass.contains(cur_reg) {
|
if !op.regclass.contains(cur_reg) {
|
||||||
self.solver
|
self.solver.add_var(
|
||||||
.add_var(value, op.regclass, cur_reg, &self.reginfo);
|
value,
|
||||||
|
op.regclass,
|
||||||
|
cur_reg,
|
||||||
|
&self.reginfo,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ConstraintKind::Stack => unreachable!(),
|
ConstraintKind::Stack => unreachable!(),
|
||||||
@@ -433,18 +462,21 @@ impl<'a> Context<'a> {
|
|||||||
/// Program the input-side ABI constraints for `inst` into the constraint solver.
|
/// Program the input-side ABI constraints for `inst` into the constraint solver.
|
||||||
///
|
///
|
||||||
/// ABI constraints are the fixed register assignments used for calls and returns.
|
/// ABI constraints are the fixed register assignments used for calls and returns.
|
||||||
fn program_input_abi(&mut self,
|
fn program_input_abi(
|
||||||
inst: Inst,
|
&mut self,
|
||||||
abi_types: &[ArgumentType],
|
inst: Inst,
|
||||||
dfg: &DataFlowGraph,
|
abi_types: &[ArgumentType],
|
||||||
locations: &ValueLocations) {
|
dfg: &DataFlowGraph,
|
||||||
|
locations: &ValueLocations,
|
||||||
|
) {
|
||||||
for (abi, &value) in abi_types.iter().zip(dfg.inst_variable_args(inst)) {
|
for (abi, &value) in abi_types.iter().zip(dfg.inst_variable_args(inst)) {
|
||||||
if let ArgumentLoc::Reg(reg) = abi.location {
|
if let ArgumentLoc::Reg(reg) = abi.location {
|
||||||
if let Affinity::Reg(rci) =
|
if let Affinity::Reg(rci) =
|
||||||
self.liveness
|
self.liveness
|
||||||
.get(value)
|
.get(value)
|
||||||
.expect("ABI register must have live range")
|
.expect("ABI register must have live range")
|
||||||
.affinity {
|
.affinity
|
||||||
|
{
|
||||||
let rc = self.reginfo.rc(rci);
|
let rc = self.reginfo.rc(rci);
|
||||||
let cur_reg = self.divert.reg(value, locations);
|
let cur_reg = self.divert.reg(value, locations);
|
||||||
self.solver.reassign_in(value, rc, cur_reg, reg);
|
self.solver.reassign_in(value, rc, cur_reg, reg);
|
||||||
@@ -464,13 +496,14 @@ impl<'a> Context<'a> {
|
|||||||
///
|
///
|
||||||
/// Returns true if this is the first time a branch to `dest` is seen, so the `dest` argument
|
/// Returns true if this is the first time a branch to `dest` is seen, so the `dest` argument
|
||||||
/// values should be colored after `shuffle_inputs`.
|
/// values should be colored after `shuffle_inputs`.
|
||||||
fn program_ebb_arguments(&mut self,
|
fn program_ebb_arguments(
|
||||||
inst: Inst,
|
&mut self,
|
||||||
dest: Ebb,
|
inst: Inst,
|
||||||
dfg: &DataFlowGraph,
|
dest: Ebb,
|
||||||
layout: &Layout,
|
dfg: &DataFlowGraph,
|
||||||
locations: &ValueLocations)
|
layout: &Layout,
|
||||||
-> bool {
|
locations: &ValueLocations,
|
||||||
|
) -> bool {
|
||||||
// Find diverted registers that are live-in to `dest` and reassign them to their global
|
// Find diverted registers that are live-in to `dest` and reassign them to their global
|
||||||
// home.
|
// home.
|
||||||
//
|
//
|
||||||
@@ -523,11 +556,13 @@ impl<'a> Context<'a> {
|
|||||||
/// register state.
|
/// register state.
|
||||||
///
|
///
|
||||||
/// This function is only called when `program_ebb_arguments()` returned `true`.
|
/// This function is only called when `program_ebb_arguments()` returned `true`.
|
||||||
fn color_ebb_arguments(&mut self,
|
fn color_ebb_arguments(
|
||||||
inst: Inst,
|
&mut self,
|
||||||
dest: Ebb,
|
inst: Inst,
|
||||||
dfg: &DataFlowGraph,
|
dest: Ebb,
|
||||||
locations: &mut ValueLocations) {
|
dfg: &DataFlowGraph,
|
||||||
|
locations: &mut ValueLocations,
|
||||||
|
) {
|
||||||
let br_args = dfg.inst_variable_args(inst);
|
let br_args = dfg.inst_variable_args(inst);
|
||||||
let dest_args = dfg.ebb_args(dest);
|
let dest_args = dfg.ebb_args(dest);
|
||||||
assert_eq!(br_args.len(), dest_args.len());
|
assert_eq!(br_args.len(), dest_args.len());
|
||||||
@@ -549,20 +584,23 @@ impl<'a> Context<'a> {
|
|||||||
/// Find all diverted registers where `pred` returns `true` and undo their diversion so they
|
/// Find all diverted registers where `pred` returns `true` and undo their diversion so they
|
||||||
/// are reallocated to their global register assignments.
|
/// are reallocated to their global register assignments.
|
||||||
fn undivert_regs<Pred>(&mut self, mut pred: Pred)
|
fn undivert_regs<Pred>(&mut self, mut pred: Pred)
|
||||||
where Pred: FnMut(&LiveRange) -> bool
|
where
|
||||||
|
Pred: FnMut(&LiveRange) -> bool,
|
||||||
{
|
{
|
||||||
for rdiv in self.divert.all() {
|
for rdiv in self.divert.all() {
|
||||||
let lr = self.liveness
|
let lr = self.liveness.get(rdiv.value).expect(
|
||||||
.get(rdiv.value)
|
"Missing live range for diverted register",
|
||||||
.expect("Missing live range for diverted register");
|
);
|
||||||
if pred(lr) {
|
if pred(lr) {
|
||||||
if let Affinity::Reg(rci) = lr.affinity {
|
if let Affinity::Reg(rci) = lr.affinity {
|
||||||
let rc = self.reginfo.rc(rci);
|
let rc = self.reginfo.rc(rci);
|
||||||
self.solver.reassign_in(rdiv.value, rc, rdiv.to, rdiv.from);
|
self.solver.reassign_in(rdiv.value, rc, rdiv.to, rdiv.from);
|
||||||
} else {
|
} else {
|
||||||
panic!("Diverted register {} with {} affinity",
|
panic!(
|
||||||
rdiv.value,
|
"Diverted register {} with {} affinity",
|
||||||
lr.affinity.display(&self.reginfo));
|
rdiv.value,
|
||||||
|
lr.affinity.display(&self.reginfo)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -570,9 +608,7 @@ impl<'a> Context<'a> {
|
|||||||
|
|
||||||
// Find existing live values that conflict with the fixed input register constraints programmed
|
// Find existing live values that conflict with the fixed input register constraints programmed
|
||||||
// into the constraint solver. Convert them to solver variables so they can be diverted.
|
// into the constraint solver. Convert them to solver variables so they can be diverted.
|
||||||
fn divert_fixed_input_conflicts(&mut self,
|
fn divert_fixed_input_conflicts(&mut self, live: &[LiveValue], locations: &mut ValueLocations) {
|
||||||
live: &[LiveValue],
|
|
||||||
locations: &mut ValueLocations) {
|
|
||||||
for lv in live {
|
for lv in live {
|
||||||
if let Affinity::Reg(rci) = lv.affinity {
|
if let Affinity::Reg(rci) = lv.affinity {
|
||||||
let rc = self.reginfo.rc(rci);
|
let rc = self.reginfo.rc(rci);
|
||||||
@@ -587,11 +623,13 @@ impl<'a> Context<'a> {
|
|||||||
/// Program any fixed-register output constraints into the solver. This may also detect
|
/// Program any fixed-register output constraints into the solver. This may also detect
|
||||||
/// conflicts between live-through registers and fixed output registers. These live-through
|
/// conflicts between live-through registers and fixed output registers. These live-through
|
||||||
/// values need to be turned into solver variables so they can be reassigned.
|
/// values need to be turned into solver variables so they can be reassigned.
|
||||||
fn program_fixed_outputs(&mut self,
|
fn program_fixed_outputs(
|
||||||
constraints: &[OperandConstraint],
|
&mut self,
|
||||||
defs: &[LiveValue],
|
constraints: &[OperandConstraint],
|
||||||
throughs: &[LiveValue],
|
defs: &[LiveValue],
|
||||||
locations: &mut ValueLocations) {
|
throughs: &[LiveValue],
|
||||||
|
locations: &mut ValueLocations,
|
||||||
|
) {
|
||||||
for (op, lv) in constraints.iter().zip(defs) {
|
for (op, lv) in constraints.iter().zip(defs) {
|
||||||
if let ConstraintKind::FixedReg(reg) = op.kind {
|
if let ConstraintKind::FixedReg(reg) = op.kind {
|
||||||
self.add_fixed_output(lv.value, op.regclass, reg, throughs, locations);
|
self.add_fixed_output(lv.value, op.regclass, reg, throughs, locations);
|
||||||
@@ -602,11 +640,13 @@ impl<'a> Context<'a> {
|
|||||||
/// Program the output-side ABI constraints for `inst` into the constraint solver.
|
/// Program the output-side ABI constraints for `inst` into the constraint solver.
|
||||||
///
|
///
|
||||||
/// That means return values for a call instruction.
|
/// That means return values for a call instruction.
|
||||||
fn program_output_abi(&mut self,
|
fn program_output_abi(
|
||||||
abi_types: &[ArgumentType],
|
&mut self,
|
||||||
defs: &[LiveValue],
|
abi_types: &[ArgumentType],
|
||||||
throughs: &[LiveValue],
|
defs: &[LiveValue],
|
||||||
locations: &mut ValueLocations) {
|
throughs: &[LiveValue],
|
||||||
|
locations: &mut ValueLocations,
|
||||||
|
) {
|
||||||
// It's technically possible for a call instruction to have fixed results before the
|
// It's technically possible for a call instruction to have fixed results before the
|
||||||
// variable list of results, but we have no known instances of that.
|
// variable list of results, but we have no known instances of that.
|
||||||
// Just assume all results are variable return values.
|
// Just assume all results are variable return values.
|
||||||
@@ -624,12 +664,14 @@ impl<'a> Context<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Add a single fixed output value to the solver.
|
/// Add a single fixed output value to the solver.
|
||||||
fn add_fixed_output(&mut self,
|
fn add_fixed_output(
|
||||||
value: Value,
|
&mut self,
|
||||||
rc: RegClass,
|
value: Value,
|
||||||
reg: RegUnit,
|
rc: RegClass,
|
||||||
throughs: &[LiveValue],
|
reg: RegUnit,
|
||||||
locations: &mut ValueLocations) {
|
throughs: &[LiveValue],
|
||||||
|
locations: &mut ValueLocations,
|
||||||
|
) {
|
||||||
if !self.solver.add_fixed_output(rc, reg) {
|
if !self.solver.add_fixed_output(rc, reg) {
|
||||||
// The fixed output conflicts with some of the live-through registers.
|
// The fixed output conflicts with some of the live-through registers.
|
||||||
for lv in throughs {
|
for lv in throughs {
|
||||||
@@ -656,12 +698,14 @@ impl<'a> Context<'a> {
|
|||||||
/// Program the output-side constraints for `inst` into the constraint solver.
|
/// Program the output-side constraints for `inst` into the constraint solver.
|
||||||
///
|
///
|
||||||
/// It is assumed that all fixed outputs have already been handled.
|
/// It is assumed that all fixed outputs have already been handled.
|
||||||
fn program_output_constraints(&mut self,
|
fn program_output_constraints(
|
||||||
inst: Inst,
|
&mut self,
|
||||||
constraints: &[OperandConstraint],
|
inst: Inst,
|
||||||
defs: &[LiveValue],
|
constraints: &[OperandConstraint],
|
||||||
dfg: &mut DataFlowGraph,
|
defs: &[LiveValue],
|
||||||
locations: &mut ValueLocations) {
|
dfg: &mut DataFlowGraph,
|
||||||
|
locations: &mut ValueLocations,
|
||||||
|
) {
|
||||||
for (op, lv) in constraints.iter().zip(defs) {
|
for (op, lv) in constraints.iter().zip(defs) {
|
||||||
match op.kind {
|
match op.kind {
|
||||||
ConstraintKind::FixedReg(_) |
|
ConstraintKind::FixedReg(_) |
|
||||||
@@ -673,8 +717,11 @@ impl<'a> Context<'a> {
|
|||||||
// Find the input operand we're tied to.
|
// Find the input operand we're tied to.
|
||||||
// The solver doesn't care about the output value.
|
// The solver doesn't care about the output value.
|
||||||
let arg = dfg.inst_args(inst)[num as usize];
|
let arg = dfg.inst_args(inst)[num as usize];
|
||||||
self.solver
|
self.solver.add_tied_input(
|
||||||
.add_tied_input(arg, op.regclass, self.divert.reg(arg, locations));
|
arg,
|
||||||
|
op.regclass,
|
||||||
|
self.divert.reg(arg, locations),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -695,11 +742,13 @@ impl<'a> Context<'a> {
|
|||||||
/// before.
|
/// before.
|
||||||
///
|
///
|
||||||
/// The solver needs to be reminded of the available registers before any moves are inserted.
|
/// The solver needs to be reminded of the available registers before any moves are inserted.
|
||||||
fn shuffle_inputs(&mut self,
|
fn shuffle_inputs(
|
||||||
pos: &mut Cursor,
|
&mut self,
|
||||||
dfg: &mut DataFlowGraph,
|
pos: &mut Cursor,
|
||||||
regs: &mut AllocatableSet,
|
dfg: &mut DataFlowGraph,
|
||||||
encodings: &mut InstEncodings) {
|
regs: &mut AllocatableSet,
|
||||||
|
encodings: &mut InstEncodings,
|
||||||
|
) {
|
||||||
self.solver.schedule_moves(regs);
|
self.solver.schedule_moves(regs);
|
||||||
|
|
||||||
for m in self.solver.moves() {
|
for m in self.solver.moves() {
|
||||||
@@ -729,10 +778,12 @@ impl<'a> Context<'a> {
|
|||||||
/// Process kills on a ghost instruction.
|
/// Process kills on a ghost instruction.
|
||||||
/// - Forget diversions.
|
/// - Forget diversions.
|
||||||
/// - Free killed registers.
|
/// - Free killed registers.
|
||||||
fn process_ghost_kills(&mut self,
|
fn process_ghost_kills(
|
||||||
kills: &[LiveValue],
|
&mut self,
|
||||||
regs: &mut AllocatableSet,
|
kills: &[LiveValue],
|
||||||
locations: &ValueLocations) {
|
regs: &mut AllocatableSet,
|
||||||
|
locations: &ValueLocations,
|
||||||
|
) {
|
||||||
for lv in kills {
|
for lv in kills {
|
||||||
if let Affinity::Reg(rci) = lv.affinity {
|
if let Affinity::Reg(rci) = lv.affinity {
|
||||||
let rc = self.reginfo.rc(rci);
|
let rc = self.reginfo.rc(rci);
|
||||||
|
|||||||
@@ -53,12 +53,13 @@ impl Context {
     ///
     /// After register allocation, all values in `func` have been assigned to a register or stack
     /// location that is consistent with instruction encoding constraints.
-    pub fn run(&mut self,
-               isa: &TargetIsa,
-               func: &mut Function,
-               cfg: &ControlFlowGraph,
-               domtree: &DominatorTree)
-               -> CtonResult {
+    pub fn run(
+        &mut self,
+        isa: &TargetIsa,
+        func: &mut Function,
+        cfg: &ControlFlowGraph,
+        domtree: &DominatorTree,
+    ) -> CtonResult {
         // `Liveness` and `Coloring` are self-clearing.
         self.virtregs.clear();
 
@@ -74,13 +75,14 @@ impl Context {
         }
 
         // Pass: Coalesce and create conventional SSA form.
-        self.coalescing
-            .conventional_ssa(isa,
-                              func,
-                              cfg,
-                              domtree,
-                              &mut self.liveness,
-                              &mut self.virtregs);
+        self.coalescing.conventional_ssa(
+            isa,
+            func,
+            cfg,
+            domtree,
+            &mut self.liveness,
+            &mut self.virtregs,
+        );
 
         if isa.flags().enable_verifier() {
             verify_context(func, cfg, domtree, Some(isa))?;
@@ -90,14 +92,15 @@ impl Context {
 
 
         // Pass: Spilling.
-        self.spilling
-            .run(isa,
-                 func,
-                 domtree,
-                 &mut self.liveness,
-                 &self.virtregs,
-                 &mut self.topo,
-                 &mut self.tracker);
+        self.spilling.run(
+            isa,
+            func,
+            domtree,
+            &mut self.liveness,
+            &self.virtregs,
+            &mut self.topo,
+            &mut self.tracker,
+        );
 
         if isa.flags().enable_verifier() {
             verify_context(func, cfg, domtree, Some(isa))?;
@@ -106,13 +109,14 @@ impl Context {
         }
 
         // Pass: Reload.
-        self.reload
-            .run(isa,
-                 func,
-                 domtree,
-                 &mut self.liveness,
-                 &mut self.topo,
-                 &mut self.tracker);
+        self.reload.run(
+            isa,
+            func,
+            domtree,
+            &mut self.liveness,
+            &mut self.topo,
+            &mut self.tracker,
+        );
 
         if isa.flags().enable_verifier() {
             verify_context(func, cfg, domtree, Some(isa))?;
@@ -121,8 +125,13 @@ impl Context {
         }
 
         // Pass: Coloring.
-        self.coloring
-            .run(isa, func, domtree, &mut self.liveness, &mut self.tracker);
+        self.coloring.run(
+            isa,
+            func,
+            domtree,
+            &mut self.liveness,
+            &mut self.tracker,
+        );
 
         if isa.flags().enable_verifier() {
             verify_context(func, cfg, domtree, Some(isa))?;
@@ -93,10 +93,11 @@ impl RegDiversions {
     ///
     /// Returns the `to` register of the removed diversion.
     pub fn remove(&mut self, value: Value) -> Option<RegUnit> {
-        self.current
-            .iter()
-            .position(|d| d.value == value)
-            .map(|i| self.current.swap_remove(i).to)
+        self.current.iter().position(|d| d.value == value).map(
+            |i| {
+                self.current.swap_remove(i).to
+            },
+        )
     }
 }
 
@@ -113,12 +114,14 @@ mod tests {
         let v2 = Value::new(2);
 
         divs.regmove(v1, 10, 12);
-        assert_eq!(divs.diversion(v1),
-                   Some(&Diversion {
-                       value: v1,
-                       from: 10,
-                       to: 12,
-                   }));
+        assert_eq!(
+            divs.diversion(v1),
+            Some(&Diversion {
+                value: v1,
+                from: 10,
+                to: 12,
+            })
+        );
         assert_eq!(divs.diversion(v2), None);
 
         divs.regmove(v1, 12, 11);
@@ -74,14 +74,13 @@ impl LiveValueVec {
 
     /// Add a new live value to `values`. Copy some properties from `lr`.
     fn push(&mut self, value: Value, endpoint: Inst, lr: &LiveRange) {
-        self.values
-            .push(LiveValue {
-                value,
-                endpoint,
-                affinity: lr.affinity,
-                is_local: lr.is_local(),
-                is_dead: lr.is_dead(),
-            });
+        self.values.push(LiveValue {
+            value,
+            endpoint,
+            affinity: lr.affinity,
+            is_local: lr.is_local(),
+            is_dead: lr.is_dead(),
+        });
     }
 
     /// Remove all elements.
@@ -157,13 +156,14 @@ impl LiveValueTracker {
     /// from the immediate dominator. The second slice is the set of `ebb` arguments that are live.
     ///
     /// Dead arguments with no uses are included in `args`. Call `drop_dead_args()` to remove them.
-    pub fn ebb_top(&mut self,
-                   ebb: Ebb,
-                   dfg: &DataFlowGraph,
-                   liveness: &Liveness,
-                   layout: &Layout,
-                   domtree: &DominatorTree)
-                   -> (&[LiveValue], &[LiveValue]) {
+    pub fn ebb_top(
+        &mut self,
+        ebb: Ebb,
+        dfg: &DataFlowGraph,
+        liveness: &Liveness,
+        layout: &Layout,
+        domtree: &DominatorTree,
+    ) -> (&[LiveValue], &[LiveValue]) {
         // Start over, compute the set of live values at the top of the EBB from two sources:
         //
         // 1. Values that were live before `ebb`'s immediate dominator, filtered for those that are
@@ -179,14 +179,14 @@ impl LiveValueTracker {
             // If the immediate dominator exits, we must have a stored list for it. This is a
             // requirement to the order EBBs are visited: All dominators must have been processed
             // before the current EBB.
-            let idom_live_list = self.idom_sets
-                .get(&idom)
-                .expect("No stored live set for dominator");
+            let idom_live_list = self.idom_sets.get(&idom).expect(
+                "No stored live set for dominator",
+            );
             // Get just the values that are live-in to `ebb`.
             for &value in idom_live_list.as_slice(&self.idom_pool) {
-                let lr = liveness
-                    .get(value)
-                    .expect("Immediate dominator value has no live range");
+                let lr = liveness.get(value).expect(
+                    "Immediate dominator value has no live range",
+                );
 
                 // Check if this value is live-in here.
                 if let Some(endpoint) = lr.livein_local_end(ebb, layout) {
@@ -198,9 +198,9 @@ impl LiveValueTracker {
         // Now add all the live arguments to `ebb`.
         let first_arg = self.live.values.len();
         for &value in dfg.ebb_args(ebb) {
-            let lr = liveness
-                .get(value)
-                .expect("EBB argument value has no live range");
+            let lr = liveness.get(value).expect(
+                "EBB argument value has no live range",
+            );
             assert_eq!(lr.def(), ebb.into());
             match lr.def_local_end().into() {
                 ExpandedProgramPoint::Inst(endpoint) => {
@@ -209,13 +209,18 @@ impl LiveValueTracker {
                 ExpandedProgramPoint::Ebb(local_ebb) => {
                     // This is a dead EBB argument which is not even live into the first
                     // instruction in the EBB.
-                    assert_eq!(local_ebb,
-                               ebb,
-                               "EBB argument live range ends at wrong EBB header");
+                    assert_eq!(
+                        local_ebb,
+                        ebb,
+                        "EBB argument live range ends at wrong EBB header"
+                    );
                     // Give this value a fake endpoint that is the first instruction in the EBB.
                     // We expect it to be removed by calling `drop_dead_args()`.
-                    self.live
-                        .push(value, layout.first_inst(ebb).expect("Empty EBB"), lr);
+                    self.live.push(
+                        value,
+                        layout.first_inst(ebb).expect("Empty EBB"),
+                        lr,
+                    );
                 }
             }
         }
@@ -241,11 +246,12 @@ impl LiveValueTracker {
     ///
     /// The `drop_dead()` method must be called next to actually remove the dead values from the
     /// tracked set after the two returned slices are no longer needed.
-    pub fn process_inst(&mut self,
-                        inst: Inst,
-                        dfg: &DataFlowGraph,
-                        liveness: &Liveness)
-                        -> (&[LiveValue], &[LiveValue], &[LiveValue]) {
+    pub fn process_inst(
+        &mut self,
+        inst: Inst,
+        dfg: &DataFlowGraph,
+        liveness: &Liveness,
+    ) -> (&[LiveValue], &[LiveValue], &[LiveValue]) {
         // Save a copy of the live values before any branches or jumps that could be somebody's
         // immediate dominator.
         match dfg[inst].analyze_branch(&dfg.value_lists) {
@@ -272,9 +278,11 @@ impl LiveValueTracker {
             }
         }
 
-        (&self.live.values[0..first_kill],
-         &self.live.values[first_kill..first_def],
-         &self.live.values[first_def..])
+        (
+            &self.live.values[0..first_kill],
+            &self.live.values[first_kill..first_def],
+            &self.live.values[first_def..],
+        )
     }
 
     /// Prepare to move past a ghost instruction.
@@ -310,7 +318,8 @@ impl LiveValueTracker {
     /// Any values where `f` returns true are spilled and will be treated as if their affinity was
     /// `Stack`.
     pub fn process_spills<F>(&mut self, mut f: F)
-        where F: FnMut(Value) -> bool
+    where
+        F: FnMut(Value) -> bool,
     {
         for lv in &mut self.live.values {
             if f(lv.value) {
@@ -324,12 +333,10 @@ impl LiveValueTracker {
         let values = self.live.values.iter().map(|lv| lv.value);
         let pool = &mut self.idom_pool;
         // If there already is a set saved for `idom`, just keep it.
-        self.idom_sets
-            .entry(idom)
-            .or_insert_with(|| {
-                let mut list = ValueList::default();
-                list.extend(values, pool);
-                list
-            });
+        self.idom_sets.entry(idom).or_insert_with(|| {
+            let mut list = ValueList::default();
+            list.extend(values, pool);
+            list
+        });
     }
 }
@@ -190,12 +190,13 @@ type LiveRangeSet = SparseMap<Value, LiveRange>;
 
 /// Get a mutable reference to the live range for `value`.
 /// Create it if necessary.
-fn get_or_create<'a>(lrset: &'a mut LiveRangeSet,
-                     value: Value,
-                     isa: &TargetIsa,
-                     func: &Function,
-                     enc_info: &EncInfo)
-                     -> &'a mut LiveRange {
+fn get_or_create<'a>(
+    lrset: &'a mut LiveRangeSet,
+    value: Value,
+    isa: &TargetIsa,
+    func: &Function,
+    enc_info: &EncInfo,
+) -> &'a mut LiveRange {
     // It would be better to use `get_mut()` here, but that leads to borrow checker fighting
     // which can probably only be resolved by non-lexical lifetimes.
     // https://github.com/rust-lang/rfcs/issues/811
@@ -233,12 +234,14 @@ fn get_or_create<'a>(lrset: &'a mut LiveRangeSet,
 }
 
 /// Extend the live range for `value` so it reaches `to` which must live in `ebb`.
-fn extend_to_use(lr: &mut LiveRange,
-                 ebb: Ebb,
-                 to: Inst,
-                 worklist: &mut Vec<Ebb>,
-                 func: &Function,
-                 cfg: &ControlFlowGraph) {
+fn extend_to_use(
+    lr: &mut LiveRange,
+    ebb: Ebb,
+    to: Inst,
+    worklist: &mut Vec<Ebb>,
+    func: &Function,
+    cfg: &ControlFlowGraph,
+) {
     // This is our scratch working space, and we'll leave it empty when we return.
     assert!(worklist.is_empty());
 
@@ -309,10 +312,12 @@ impl Liveness {
     ///
     /// This asserts that `value` does not have an existing live range.
     pub fn create_dead<PP>(&mut self, value: Value, def: PP, affinity: Affinity)
-        where PP: Into<ProgramPoint>
+    where
+        PP: Into<ProgramPoint>,
     {
-        let old = self.ranges
-            .insert(LiveRange::new(value, def.into(), affinity));
+        let old = self.ranges.insert(
+            LiveRange::new(value, def.into(), affinity),
+        );
         assert!(old.is_none(), "{} already has a live range", value);
     }
 
@@ -320,7 +325,8 @@ impl Liveness {
     ///
     /// The old and new def points must be in the same EBB, and before the end of the live range.
     pub fn move_def_locally<PP>(&mut self, value: Value, def: PP)
-        where PP: Into<ProgramPoint>
+    where
+        PP: Into<ProgramPoint>,
     {
         let mut lr = self.ranges.get_mut(value).expect("Value has no live range");
         lr.move_def_locally(def.into());
@@ -331,12 +337,13 @@ impl Liveness {
     /// It is assumed the `value` is already live before `user` in `ebb`.
     ///
     /// Returns a mutable reference to the value's affinity in case that also needs to be updated.
-    pub fn extend_locally(&mut self,
-                          value: Value,
-                          ebb: Ebb,
-                          user: Inst,
-                          layout: &Layout)
-                          -> &mut Affinity {
+    pub fn extend_locally(
+        &mut self,
+        value: Value,
+        ebb: Ebb,
+        user: Inst,
+        layout: &Layout,
+    ) -> &mut Affinity {
         debug_assert_eq!(Some(ebb), layout.inst_ebb(user));
         let mut lr = self.ranges.get_mut(value).expect("Value has no live range");
         let livein = lr.extend_in_ebb(ebb, user, layout);
@@ -401,7 +408,8 @@ impl Liveness {
                 if let Some(constraint) = operand_constraints.next() {
                     lr.affinity.merge(constraint, &reg_info);
                 } else if lr.affinity.is_none() && encoding.is_legal() &&
-                           !func.dfg[inst].opcode().is_branch() {
+                           !func.dfg[inst].opcode().is_branch()
+                {
                     // This is a real encoded instruction using a value that doesn't yet have a
                     // concrete affinity. Most likely a call argument or a return value. Give
                     // the value a register affinity matching the ABI type.
@@ -224,13 +224,13 @@ impl LiveRange {
         self.liveins
             .binary_search_by(|intv| order.cmp(intv.begin, ebb))
             .or_else(|n| {
                 // The interval at `n-1` may cover `ebb`.
                 if n > 0 && order.cmp(self.liveins[n - 1].end, ebb) == Ordering::Greater {
                     Ok(n - 1)
                 } else {
                     Err(n)
                 }
             })
     }
 
     /// Extend the local interval for `ebb` so it reaches `to` which must belong to `ebb`.
@@ -250,11 +250,14 @@ impl LiveRange {
         // We're assuming here that `to` never precedes `def_begin` in the same EBB, but we can't
         // check it without a method for getting `to`'s EBB.
         if order.cmp(ebb, self.def_end) != Ordering::Greater &&
-           order.cmp(to, self.def_begin) != Ordering::Less {
+            order.cmp(to, self.def_begin) != Ordering::Less
+        {
             let to_pp = to.into();
-            assert_ne!(to_pp,
-                       self.def_begin,
-                       "Can't use value in the defining instruction.");
+            assert_ne!(
+                to_pp,
+                self.def_begin,
+                "Can't use value in the defining instruction."
+            );
             if order.cmp(to, self.def_end) == Ordering::Greater {
                 self.def_end = to_pp;
             }
@@ -288,8 +291,10 @@ impl LiveRange {
             let prev = n.checked_sub(1).and_then(|i| self.liveins.get(i));
             let next = self.liveins.get(n);
 
-            (prev.map_or(false, |prev| order.is_ebb_gap(prev.end, ebb)),
-             next.map_or(false, |next| order.is_ebb_gap(to, next.begin)))
+            (
+                prev.map_or(false, |prev| order.is_ebb_gap(prev.end, ebb)),
+                next.map_or(false, |next| order.is_ebb_gap(to, next.begin)),
+            )
         };
 
         match (coalesce_prev, coalesce_next) {
@@ -309,12 +314,13 @@ impl LiveRange {
             }
             // Cannot coalesce; insert new interval
             (false, false) => {
-                self.liveins
-                    .insert(n,
-                            Interval {
-                                begin: ebb,
-                                end: to,
-                            });
+                self.liveins.insert(
+                    n,
+                    Interval {
+                        begin: ebb,
+                        end: to,
+                    },
+                );
             }
         }
 
@@ -372,9 +378,9 @@ impl LiveRange {
     /// answer, but it is also possible that an even later program point is returned. So don't
     /// depend on the returned `Inst` to belong to `ebb`.
     pub fn livein_local_end<PO: ProgramOrder>(&self, ebb: Ebb, order: &PO) -> Option<Inst> {
-        self.find_ebb_interval(ebb, order)
-            .ok()
-            .map(|n| self.liveins[n].end)
+        self.find_ebb_interval(ebb, order).ok().map(|n| {
+            self.liveins[n].end
+        })
     }
 
     /// Get all the live-in intervals.
@@ -384,11 +390,13 @@ impl LiveRange {
 
     /// Check if this live range overlaps a definition in `ebb`.
     pub fn overlaps_def<PO>(&self, def: ExpandedProgramPoint, ebb: Ebb, order: &PO) -> bool
-        where PO: ProgramOrder
+    where
+        PO: ProgramOrder,
     {
         // Check for an overlap with the local range.
         if order.cmp(def, self.def_begin) != Ordering::Less &&
-           order.cmp(def, self.def_end) == Ordering::Less {
+            order.cmp(def, self.def_end) == Ordering::Less
+        {
             return true;
         }
 
@@ -401,11 +409,13 @@ impl LiveRange {
 
     /// Check if this live range reaches a use at `user` in `ebb`.
     pub fn reaches_use<PO>(&self, user: Inst, ebb: Ebb, order: &PO) -> bool
-        where PO: ProgramOrder
+    where
+        PO: ProgramOrder,
     {
         // Check for an overlap with the local range.
         if order.cmp(user, self.def_begin) == Ordering::Greater &&
-           order.cmp(user, self.def_end) != Ordering::Greater {
+            order.cmp(user, self.def_end) != Ordering::Greater
+        {
             return true;
         }
 
@@ -418,7 +428,8 @@ impl LiveRange {
 
     /// Check if this live range is killed at `user` in `ebb`.
     pub fn killed_at<PO>(&self, user: Inst, ebb: Ebb, order: &PO) -> bool
-        where PO: ProgramOrder
+    where
+        PO: ProgramOrder,
     {
         self.def_local_end() == user.into() || self.livein_local_end(ebb, order) == Some(user)
     }
@@ -447,8 +458,9 @@ mod tests {
 
     impl ProgramOrder for ProgOrder {
         fn cmp<A, B>(&self, a: A, b: B) -> Ordering
-            where A: Into<ExpandedProgramPoint>,
-                  B: Into<ExpandedProgramPoint>
+        where
+            A: Into<ExpandedProgramPoint>,
+            B: Into<ExpandedProgramPoint>,
         {
             fn idx(pp: ExpandedProgramPoint) -> usize {
                 match pp {
@@ -505,9 +517,11 @@ mod tests {
                 assert_eq!(self.cmp(e, li.begin), Ordering::Less);
             }
 
-            assert!(self.cmp(lr.def_end, li.begin) == Ordering::Less ||
-                    self.cmp(lr.def_begin, li.end) == Ordering::Greater,
-                    "Interval can't overlap the def EBB");
+            assert!(
+                self.cmp(lr.def_end, li.begin) == Ordering::Less ||
+                    self.cmp(lr.def_begin, li.end) == Ordering::Greater,
+                "Interval can't overlap the def EBB"
+            );
 
             // Save for next round.
             prev_end = Some(li.end);
@@ -103,10 +103,10 @@ impl Pressure {
         }
 
         // Compute per-class limits from `usable`.
-        for (toprc, rc) in p.toprc
-            .iter_mut()
-            .take_while(|t| t.num_toprcs > 0)
-            .zip(reginfo.classes) {
+        for (toprc, rc) in p.toprc.iter_mut().take_while(|t| t.num_toprcs > 0).zip(
+            reginfo.classes,
+        )
+        {
             toprc.limit = usable.iter(rc).len() as u32;
             toprc.width = rc.width;
         }
@@ -54,13 +54,15 @@ impl Reload {
     }
 
     /// Run the reload algorithm over `func`.
-    pub fn run(&mut self,
-               isa: &TargetIsa,
-               func: &mut Function,
-               domtree: &DominatorTree,
-               liveness: &mut Liveness,
-               topo: &mut TopoOrder,
-               tracker: &mut LiveValueTracker) {
+    pub fn run(
+        &mut self,
+        isa: &TargetIsa,
+        func: &mut Function,
+        domtree: &DominatorTree,
+        liveness: &mut Liveness,
+        topo: &mut TopoOrder,
+        tracker: &mut LiveValueTracker,
+    ) {
         dbg!("Reload for:\n{}", func.display(isa));
         let mut ctx = Context {
             cur: EncCursor::new(func, isa),
@@ -125,11 +127,13 @@ impl<'a> Context<'a> {
 
     /// Process the EBB parameters. Move to the next instruction in the EBB to be processed
     fn visit_ebb_header(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
-        let (liveins, args) = tracker.ebb_top(ebb,
-                                              &self.cur.func.dfg,
-                                              self.liveness,
-                                              &self.cur.func.layout,
-                                              self.domtree);
+        let (liveins, args) = tracker.ebb_top(
+            ebb,
+            &self.cur.func.dfg,
+            self.liveness,
+            &self.cur.func.layout,
+            self.domtree,
+        );
 
         if self.cur.func.layout.entry_block() == Some(ebb) {
             assert_eq!(liveins.len(), 0);
@@ -172,15 +176,17 @@ impl<'a> Context<'a> {
 
     /// Process the instruction pointed to by `pos`, and advance the cursor to the next instruction
     /// that needs processing.
-    fn visit_inst(&mut self,
-                  ebb: Ebb,
-                  inst: Inst,
-                  encoding: Encoding,
-                  tracker: &mut LiveValueTracker) {
+    fn visit_inst(
+        &mut self,
+        ebb: Ebb,
+        inst: Inst,
+        encoding: Encoding,
+        tracker: &mut LiveValueTracker,
+    ) {
         // Get the operand constraints for `inst` that we are trying to satisfy.
-        let constraints = self.encinfo
-            .operand_constraints(encoding)
-            .expect("Missing instruction encoding");
+        let constraints = self.encinfo.operand_constraints(encoding).expect(
+            "Missing instruction encoding",
+        );
 
         // Identify reload candidates.
         assert!(self.candidates.is_empty());
@@ -195,17 +201,20 @@ impl<'a> Context<'a> {
             let reg = self.cur.ins().fill(cand.value);
             let fill = self.cur.built_inst();
 
-            self.reloads
-                .insert(ReloadedValue {
-                    stack: cand.value,
-                    reg: reg,
-                });
+            self.reloads.insert(ReloadedValue {
+                stack: cand.value,
+                reg: reg,
+            });
 
             // Create a live range for the new reload.
             let affinity = Affinity::Reg(cand.regclass.into());
             self.liveness.create_dead(reg, fill, affinity);
-            self.liveness
-                .extend_locally(reg, ebb, inst, &self.cur.func.layout);
+            self.liveness.extend_locally(
+                reg,
+                ebb,
+                inst,
+                &self.cur.func.layout,
+            );
         }
 
         // Rewrite arguments.
@@ -218,8 +227,8 @@ impl<'a> Context<'a> {
         // TODO: Reuse reloads for future instructions.
         self.reloads.clear();
 
-        let (_throughs, _kills, defs) = tracker
-            .process_inst(inst, &self.cur.func.dfg, self.liveness);
+        let (_throughs, _kills, defs) =
+            tracker.process_inst(inst, &self.cur.func.dfg, self.liveness);
 
         // Advance to the next instruction so we can insert any spills after the instruction.
         self.cur.next_inst();
@@ -255,11 +264,10 @@ impl<'a> Context<'a> {
         for (op, &arg) in constraints.ins.iter().zip(args) {
             if op.kind != ConstraintKind::Stack {
                 if self.liveness[arg].affinity.is_stack() {
-                    self.candidates
-                        .push(ReloadCandidate {
-                            value: arg,
-                            regclass: op.regclass,
-                        })
+                    self.candidates.push(ReloadCandidate {
+                        value: arg,
+                        regclass: op.regclass,
+                    })
                 }
             }
         }
@@ -272,17 +280,21 @@ impl<'a> Context<'a> {
 
         // Handle ABI arguments.
         if let Some(sig) = self.cur.func.dfg.call_signature(inst) {
-            handle_abi_args(self.candidates,
-                            &self.cur.func.dfg.signatures[sig].argument_types,
-                            var_args,
-                            self.cur.isa,
-                            self.liveness);
+            handle_abi_args(
+                self.candidates,
+                &self.cur.func.dfg.signatures[sig].argument_types,
+                var_args,
+                self.cur.isa,
+                self.liveness,
+            );
         } else if self.cur.func.dfg[inst].opcode().is_return() {
-            handle_abi_args(self.candidates,
-                            &self.cur.func.signature.return_types,
-                            var_args,
-                            self.cur.isa,
-                            self.liveness);
+            handle_abi_args(
+                self.candidates,
+                &self.cur.func.signature.return_types,
+                var_args,
+                self.cur.isa,
+                self.liveness,
+            );
         }
     }
 
@@ -297,27 +309,33 @@ impl<'a> Context<'a> {
 
             // Update live ranges.
             self.liveness.move_def_locally(stack, inst);
-            self.liveness
-                .extend_locally(reg, ebb, inst, &self.cur.func.layout);
+            self.liveness.extend_locally(
+                reg,
+                ebb,
+                inst,
+                &self.cur.func.layout,
+            );
         }
     }
 
 /// Find reload candidates in the instruction's ABI variable arguments. This handles both
 /// return values and call arguments.
-fn handle_abi_args(candidates: &mut Vec<ReloadCandidate>,
-                   abi_types: &[ArgumentType],
-                   var_args: &[Value],
-                   isa: &TargetIsa,
-                   liveness: &Liveness) {
+fn handle_abi_args(
+    candidates: &mut Vec<ReloadCandidate>,
+    abi_types: &[ArgumentType],
+    var_args: &[Value],
+    isa: &TargetIsa,
+    liveness: &Liveness,
+) {
     assert_eq!(abi_types.len(), var_args.len());
     for (abi, &arg) in abi_types.iter().zip(var_args) {
         if abi.location.is_reg() {
            let lv = liveness.get(arg).expect("Missing live range for ABI arg");
            if lv.affinity.is_stack() {
                candidates.push(ReloadCandidate {
                    value: arg,
                    regclass: isa.regclass_for_abi_type(abi.value_type),
                });
            }
        }
    }
@@ -231,12 +231,14 @@ impl SparseMapValue<Value> for Assignment {
|
|||||||
|
|
||||||
impl fmt::Display for Assignment {
|
impl fmt::Display for Assignment {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
write!(f,
|
write!(
|
||||||
"{}:{}(%{} -> %{})",
|
f,
|
||||||
self.value,
|
"{}:{}(%{} -> %{})",
|
||||||
self.rc,
|
self.value,
|
||||||
self.from,
|
self.rc,
|
||||||
self.to)
|
self.from,
|
||||||
|
self.to
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -244,7 +246,7 @@ impl fmt::Display for Assignment {
|
|||||||
impl PartialEq for Assignment {
|
impl PartialEq for Assignment {
|
||||||
fn eq(&self, other: &Assignment) -> bool {
|
fn eq(&self, other: &Assignment) -> bool {
|
||||||
self.value == other.value && self.from == other.from && self.to == other.to &&
|
self.value == other.value && self.from == other.from && self.to == other.to &&
|
||||||
self.rc.index == other.rc.index
|
self.rc.index == other.rc.index
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -363,22 +365,23 @@ impl Solver {
|
|||||||
dbg!("-> converting variable {} to a fixed constraint", v);
|
dbg!("-> converting variable {} to a fixed constraint", v);
|
||||||
// The spiller is responsible for ensuring that all constraints on the uses of a
|
// The spiller is responsible for ensuring that all constraints on the uses of a
|
||||||
// value are compatible.
|
// value are compatible.
|
||||||
assert!(v.constraint.contains(to),
|
assert!(
|
||||||
"Incompatible constraints for {}",
|
v.constraint.contains(to),
|
||||||
value);
|
"Incompatible constraints for {}",
|
||||||
|
value
|
||||||
|
);
|
||||||
} else {
|
} else {
|
||||||
panic!("Invalid from register for fixed {} constraint", value);
|
panic!("Invalid from register for fixed {} constraint", value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
self.regs_in.free(rc, from);
|
self.regs_in.free(rc, from);
|
||||||
self.regs_out.take(rc, to);
|
self.regs_out.take(rc, to);
|
||||||
self.assignments
|
self.assignments.insert(Assignment {
|
||||||
.insert(Assignment {
|
value,
|
||||||
value,
|
rc,
|
||||||
rc,
|
from,
|
||||||
from,
|
to,
|
||||||
to,
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add a variable representing an input side value with an existing register assignment.
|
/// Add a variable representing an input side value with an existing register assignment.
|
||||||
@@ -388,18 +391,22 @@ impl Solver {
|
|||||||
///
|
///
|
||||||
/// It is assumed initially that the value is also live on the output side of the instruction.
|
/// It is assumed initially that the value is also live on the output side of the instruction.
|
||||||
/// This can be changed by calling to `add_kill()`.
|
/// This can be changed by calling to `add_kill()`.
|
||||||
pub fn add_var(&mut self,
|
pub fn add_var(
|
||||||
value: Value,
|
&mut self,
|
||||||
constraint: RegClass,
|
value: Value,
|
||||||
from: RegUnit,
|
constraint: RegClass,
|
||||||
reginfo: &RegInfo) {
|
from: RegUnit,
|
||||||
|
reginfo: &RegInfo,
|
||||||
|
) {
|
||||||
// Check for existing entries for this value.
|
// Check for existing entries for this value.
|
||||||
if self.regs_in.is_avail(constraint, from) {
|
if self.regs_in.is_avail(constraint, from) {
|
||||||
dbg!("add_var({}:{}, from={}/%{}) for existing entry",
|
dbg!(
|
||||||
value,
|
"add_var({}:{}, from={}/%{}) for existing entry",
|
||||||
constraint,
|
value,
|
||||||
reginfo.display_regunit(from),
|
constraint,
|
||||||
from);
|
reginfo.display_regunit(from),
|
||||||
|
from
|
||||||
|
);
|
||||||
|
|
||||||
// There could be an existing variable entry.
|
// There could be an existing variable entry.
|
||||||
if let Some(v) = self.vars.iter_mut().find(|v| v.value == value) {
|
if let Some(v) = self.vars.iter_mut().find(|v| v.value == value) {
|
||||||
@@ -419,9 +426,11 @@ impl Solver {
|
|||||||
// No variable, then it must be a fixed reassignment.
|
// No variable, then it must be a fixed reassignment.
|
||||||
if let Some(a) = self.assignments.get(value) {
|
if let Some(a) = self.assignments.get(value) {
|
||||||
dbg!("-> already fixed assignment {}", a);
|
dbg!("-> already fixed assignment {}", a);
|
||||||
assert!(constraint.contains(a.to),
|
assert!(
|
||||||
"Incompatible constraints for {}",
|
constraint.contains(a.to),
|
||||||
value);
|
"Incompatible constraints for {}",
|
||||||
|
value
|
||||||
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -430,12 +439,14 @@ impl Solver {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let new_var = Variable::new_live(value, constraint, from);
|
let new_var = Variable::new_live(value, constraint, from);
|
||||||
dbg!("add_var({}:{}, from={}/%{}) new entry: {}",
|
dbg!(
|
||||||
value,
|
"add_var({}:{}, from={}/%{}) new entry: {}",
|
||||||
constraint,
|
value,
|
||||||
reginfo.display_regunit(from),
|
constraint,
|
||||||
from,
|
reginfo.display_regunit(from),
|
||||||
new_var);
|
from,
|
||||||
|
new_var
|
||||||
|
);
|
||||||
|
|
||||||
self.regs_in.free(constraint, from);
|
self.regs_in.free(constraint, from);
|
||||||
if self.inputs_done {
|
if self.inputs_done {
|
||||||
@@ -623,23 +634,20 @@ impl Solver {
|
|||||||
// Collect moves from the chosen solution for all non-define variables.
|
// Collect moves from the chosen solution for all non-define variables.
|
||||||
for v in &self.vars {
|
for v in &self.vars {
|
||||||
if let Some(from) = v.from {
|
if let Some(from) = v.from {
|
||||||
self.moves
|
self.moves.push(Assignment {
|
||||||
.push(Assignment {
|
value: v.value,
|
||||||
value: v.value,
|
from,
|
||||||
from,
|
to: v.solution,
|
||||||
to: v.solution,
|
rc: v.constraint,
|
||||||
rc: v.constraint,
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert all of the fixed register assignments into moves, but omit the ones that are
|
// Convert all of the fixed register assignments into moves, but omit the ones that are
|
||||||
// already in the right register.
|
// already in the right register.
|
||||||
self.moves
|
self.moves.extend(self.assignments.values().cloned().filter(
|
||||||
.extend(self.assignments
|
|v| v.from != v.to,
|
||||||
.values()
|
));
|
||||||
.cloned()
|
|
||||||
.filter(|v| v.from != v.to));
|
|
||||||
|
|
||||||
dbg!("collect_moves: {}", DisplayList(self.moves.as_slice()));
|
dbg!("collect_moves: {}", DisplayList(self.moves.as_slice()));
|
||||||
}
|
}
|
||||||
@@ -661,9 +669,10 @@ impl Solver {
|
|||||||
let mut i = 0;
|
let mut i = 0;
|
||||||
while i < self.moves.len() {
|
while i < self.moves.len() {
|
||||||
// Find the first move that can be executed now.
|
// Find the first move that can be executed now.
|
||||||
if let Some(j) = self.moves[i..]
|
if let Some(j) = self.moves[i..].iter().position(
|
||||||
.iter()
|
|m| avail.is_avail(m.rc, m.to),
|
||||||
.position(|m| avail.is_avail(m.rc, m.to)) {
|
)
|
||||||
|
{
|
||||||
// This move can be executed now.
|
// This move can be executed now.
|
||||||
self.moves.swap(i, i + j);
|
self.moves.swap(i, i + j);
|
||||||
let m = &self.moves[i];
|
let m = &self.moves[i];
|
||||||
@@ -709,17 +718,16 @@ impl Solver {
 // Append a fixup move so we end up in the right place. This move will be scheduled
 // later. That's ok because it is the single remaining move of `m.value` after the
 // next iteration.
-self.moves
-.push(Assignment {
-value: m.value,
-rc: m.rc,
-from: reg,
-to: m.to,
-});
-// TODO: What if allocating an extra register is not enough to break a cycle? This
-// can happen when there are registers of different widths in a cycle. For ARM, we
-// may have to move two S-registers out of the way before we can resolve a cycle
-// involving a D-register.
+self.moves.push(Assignment {
+value: m.value,
+rc: m.rc,
+from: reg,
+to: m.to,
+});
+// TODO: What if allocating an extra register is not enough to break a cycle? This
+// can happen when there are registers of different widths in a cycle. For ARM, we
+// may have to move two S-registers out of the way before we can resolve a cycle
+// involving a D-register.
 } else {
 panic!("Not enough registers in {} to schedule moves", m.rc);
 }
@@ -738,9 +746,11 @@ impl Solver {
 impl fmt::Display for Solver {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 writeln!(f, "Solver {{ inputs_done: {},", self.inputs_done)?;
-writeln!(f,
-" assignments: {}",
-DisplayList(self.assignments.as_slice()))?;
+writeln!(
+f,
+" assignments: {}",
+DisplayList(self.assignments.as_slice())
+)?;
 writeln!(f, " vars: {}", DisplayList(self.vars.as_slice()))?;
 writeln!(f, " moves: {}", DisplayList(self.moves.as_slice()))?;
 writeln!(f, "}}")
@@ -817,8 +827,10 @@ mod tests {
 solver.inputs_done();
 assert!(solver.quick_solve().is_ok());
 assert_eq!(solver.schedule_moves(&regs), 0);
-assert_eq!(solver.moves(),
-&[mov(v11, gpr, r1, r2), mov(v10, gpr, r0, r1)]);
+assert_eq!(
+solver.moves(),
+&[mov(v11, gpr, r1, r2), mov(v10, gpr, r0, r1)]
+);

 // Swap r0 and r1 in three moves using r2 as a scratch.
 solver.reset(&regs);
@@ -827,10 +839,14 @@ mod tests {
 solver.inputs_done();
 assert!(solver.quick_solve().is_ok());
 assert_eq!(solver.schedule_moves(&regs), 0);
-assert_eq!(solver.moves(),
-&[mov(v10, gpr, r0, r2),
-mov(v11, gpr, r1, r0),
-mov(v10, gpr, r2, r1)]);
+assert_eq!(
+solver.moves(),
+&[
+mov(v10, gpr, r0, r2),
+mov(v11, gpr, r1, r0),
+mov(v10, gpr, r2, r1),
+]
+);
 }

 #[test]
@@ -862,11 +878,15 @@ mod tests {
 solver.inputs_done();
 assert!(solver.quick_solve().is_ok());
 assert_eq!(solver.schedule_moves(&regs), 0);
-assert_eq!(solver.moves(),
-&[mov(v10, d, d0, d2),
-mov(v11, s, s2, s0),
-mov(v12, s, s3, s1),
-mov(v10, d, d2, d1)]);
+assert_eq!(
+solver.moves(),
+&[
+mov(v10, d, d0, d2),
+mov(v11, s, s2, s0),
+mov(v12, s, s3, s1),
+mov(v10, d, d2, d1),
+]
+);

 // Same problem in the other direction: Swap (s0, s1) <-> d1.
 //
@@ -879,10 +899,14 @@ mod tests {
 solver.inputs_done();
 assert!(solver.quick_solve().is_ok());
 assert_eq!(solver.schedule_moves(&regs), 0);
-assert_eq!(solver.moves(),
-&[mov(v10, d, d1, d2),
-mov(v12, s, s1, s3),
-mov(v11, s, s0, s2),
-mov(v10, d, d2, d0)]);
+assert_eq!(
+solver.moves(),
+&[
+mov(v10, d, d1, d2),
+mov(v12, s, s1, s3),
+mov(v11, s, s0, s2),
+mov(v10, d, d2, d0),
+]
+);
 }
 }
@@ -71,14 +71,16 @@ impl Spilling {
 }

 /// Run the spilling algorithm over `func`.
-pub fn run(&mut self,
-isa: &TargetIsa,
-func: &mut Function,
-domtree: &DominatorTree,
-liveness: &mut Liveness,
-virtregs: &VirtRegs,
-topo: &mut TopoOrder,
-tracker: &mut LiveValueTracker) {
+pub fn run(
+&mut self,
+isa: &TargetIsa,
+func: &mut Function,
+domtree: &DominatorTree,
+liveness: &mut Liveness,
+virtregs: &VirtRegs,
+topo: &mut TopoOrder,
+tracker: &mut LiveValueTracker,
+) {
 dbg!("Spilling for:\n{}", func.display(isa));
 let reginfo = isa.register_info();
 let usable_regs = isa.allocatable_registers(func);
@@ -114,8 +116,10 @@ impl<'a> Context<'a> {
|
|||||||
|
|
||||||
while let Some(inst) = self.cur.next_inst() {
|
while let Some(inst) = self.cur.next_inst() {
|
||||||
if let Some(constraints) =
|
if let Some(constraints) =
|
||||||
self.encinfo
|
self.encinfo.operand_constraints(
|
||||||
.operand_constraints(self.cur.func.encodings[inst]) {
|
self.cur.func.encodings[inst],
|
||||||
|
)
|
||||||
|
{
|
||||||
self.visit_inst(inst, ebb, constraints, tracker);
|
self.visit_inst(inst, ebb, constraints, tracker);
|
||||||
} else {
|
} else {
|
||||||
let (_throughs, kills) = tracker.process_ghost(inst);
|
let (_throughs, kills) = tracker.process_ghost(inst);
|
||||||
@@ -150,11 +154,13 @@ impl<'a> Context<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn visit_ebb_header(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
|
fn visit_ebb_header(&mut self, ebb: Ebb, tracker: &mut LiveValueTracker) {
|
||||||
let (liveins, args) = tracker.ebb_top(ebb,
|
let (liveins, args) = tracker.ebb_top(
|
||||||
&self.cur.func.dfg,
|
ebb,
|
||||||
self.liveness,
|
&self.cur.func.dfg,
|
||||||
&self.cur.func.layout,
|
self.liveness,
|
||||||
self.domtree);
|
&self.cur.func.layout,
|
||||||
|
self.domtree,
|
||||||
|
);
|
||||||
|
|
||||||
// Count the live-in registers. These should already fit in registers; they did at the
|
// Count the live-in registers. These should already fit in registers; they did at the
|
||||||
// dominator.
|
// dominator.
|
||||||
@@ -167,16 +173,20 @@ impl<'a> Context<'a> {
|
|||||||
if let Affinity::Reg(rci) = lv.affinity {
|
if let Affinity::Reg(rci) = lv.affinity {
|
||||||
let rc = self.reginfo.rc(rci);
|
let rc = self.reginfo.rc(rci);
|
||||||
'try_take: while let Err(mask) = self.pressure.take_transient(rc) {
|
'try_take: while let Err(mask) = self.pressure.take_transient(rc) {
|
||||||
dbg!("Need {} reg for EBB argument {} from {} live-ins",
|
dbg!(
|
||||||
rc,
|
"Need {} reg for EBB argument {} from {} live-ins",
|
||||||
lv.value,
|
rc,
|
||||||
liveins.len());
|
lv.value,
|
||||||
|
liveins.len()
|
||||||
|
);
|
||||||
match self.spill_candidate(mask, liveins) {
|
match self.spill_candidate(mask, liveins) {
|
||||||
Some(cand) => {
|
Some(cand) => {
|
||||||
dbg!("Spilling live-in {} to make room for {} EBB argument {}",
|
dbg!(
|
||||||
cand,
|
"Spilling live-in {} to make room for {} EBB argument {}",
|
||||||
rc,
|
cand,
|
||||||
lv.value);
|
rc,
|
||||||
|
lv.value
|
||||||
|
);
|
||||||
self.spill_reg(cand);
|
self.spill_reg(cand);
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
@@ -199,11 +209,13 @@ impl<'a> Context<'a> {
|
|||||||
self.pressure.preserve_transient();
|
self.pressure.preserve_transient();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn visit_inst(&mut self,
|
fn visit_inst(
|
||||||
inst: Inst,
|
&mut self,
|
||||||
ebb: Ebb,
|
inst: Inst,
|
||||||
constraints: &RecipeConstraints,
|
ebb: Ebb,
|
||||||
tracker: &mut LiveValueTracker) {
|
constraints: &RecipeConstraints,
|
||||||
|
tracker: &mut LiveValueTracker,
|
||||||
|
) {
|
||||||
dbg!("Inst {}, {}", self.cur.display_inst(inst), self.pressure);
|
dbg!("Inst {}, {}", self.cur.display_inst(inst), self.pressure);
|
||||||
debug_assert_eq!(self.cur.current_inst(), Some(inst));
|
debug_assert_eq!(self.cur.current_inst(), Some(inst));
|
||||||
debug_assert_eq!(self.cur.current_ebb(), Some(ebb));
|
debug_assert_eq!(self.cur.current_ebb(), Some(ebb));
|
||||||
@@ -250,9 +262,11 @@ impl<'a> Context<'a> {
|
|||||||
match self.spill_candidate(mask, throughs) {
|
match self.spill_candidate(mask, throughs) {
|
||||||
Some(cand) => self.spill_reg(cand),
|
Some(cand) => self.spill_reg(cand),
|
||||||
None => {
|
None => {
|
||||||
panic!("Ran out of {} registers for {}",
|
panic!(
|
||||||
op.regclass,
|
"Ran out of {} registers for {}",
|
||||||
self.cur.display_inst(inst))
|
op.regclass,
|
||||||
|
self.cur.display_inst(inst)
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -313,12 +327,16 @@ impl<'a> Context<'a> {
|
|||||||
.argument_types
|
.argument_types
|
||||||
.iter()
|
.iter()
|
||||||
.zip(args)
|
.zip(args)
|
||||||
.enumerate() {
|
.enumerate()
|
||||||
|
{
|
||||||
if abi.location.is_reg() {
|
if abi.location.is_reg() {
|
||||||
let (rci, spilled) = match self.liveness[arg].affinity {
|
let (rci, spilled) = match self.liveness[arg].affinity {
|
||||||
Affinity::Reg(rci) => (rci, false),
|
Affinity::Reg(rci) => (rci, false),
|
||||||
Affinity::Stack => {
|
Affinity::Stack => {
|
||||||
(self.cur.isa.regclass_for_abi_type(abi.value_type).into(), true)
|
(
|
||||||
|
self.cur.isa.regclass_for_abi_type(abi.value_type).into(),
|
||||||
|
true,
|
||||||
|
)
|
||||||
}
|
}
|
||||||
Affinity::None => panic!("Missing affinity for {}", arg),
|
Affinity::None => panic!("Missing affinity for {}", arg),
|
||||||
};
|
};
|
||||||
@@ -373,17 +391,19 @@ impl<'a> Context<'a> {
|
|||||||
// Spill a live register that is *not* used by the current instruction.
|
// Spill a live register that is *not* used by the current instruction.
|
||||||
// Spilling a use wouldn't help.
|
// Spilling a use wouldn't help.
|
||||||
match {
|
match {
|
||||||
let args = self.cur.func.dfg.inst_args(inst);
|
let args = self.cur.func.dfg.inst_args(inst);
|
||||||
self.spill_candidate(mask,
|
self.spill_candidate(
|
||||||
tracker.live().iter().filter(|lv| {
|
mask,
|
||||||
!args.contains(&lv.value)
|
tracker.live().iter().filter(|lv| !args.contains(&lv.value)),
|
||||||
}))
|
)
|
||||||
} {
|
} {
|
||||||
Some(cand) => self.spill_reg(cand),
|
Some(cand) => self.spill_reg(cand),
|
||||||
None => {
|
None => {
|
||||||
panic!("Ran out of {} registers when inserting copy before {}",
|
panic!(
|
||||||
rc,
|
"Ran out of {} registers when inserting copy before {}",
|
||||||
self.cur.display_inst(inst))
|
rc,
|
||||||
|
self.cur.display_inst(inst)
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -395,7 +415,8 @@ impl<'a> Context<'a> {
|
|||||||
|
|
||||||
// Find a spill candidate from `candidates` whose top-level register class is in `mask`.
|
// Find a spill candidate from `candidates` whose top-level register class is in `mask`.
|
||||||
fn spill_candidate<'ii, II>(&self, mask: RegClassMask, candidates: II) -> Option<Value>
|
fn spill_candidate<'ii, II>(&self, mask: RegClassMask, candidates: II) -> Option<Value>
|
||||||
where II: IntoIterator<Item = &'ii LiveValue>
|
where
|
||||||
|
II: IntoIterator<Item = &'ii LiveValue>,
|
||||||
{
|
{
|
||||||
// Find the best viable spill candidate.
|
// Find the best viable spill candidate.
|
||||||
//
|
//
|
||||||
@@ -420,12 +441,13 @@ impl<'a> Context<'a> {
|
|||||||
None
|
None
|
||||||
})
|
})
|
||||||
.min_by(|&a, &b| {
|
.min_by(|&a, &b| {
|
||||||
// Find the minimum candidate according to the RPO of their defs.
|
// Find the minimum candidate according to the RPO of their defs.
|
||||||
self.domtree
|
self.domtree.rpo_cmp(
|
||||||
.rpo_cmp(self.cur.func.dfg.value_def(a),
|
self.cur.func.dfg.value_def(a),
|
||||||
self.cur.func.dfg.value_def(b),
|
self.cur.func.dfg.value_def(b),
|
||||||
&self.cur.func.layout)
|
&self.cur.func.layout,
|
||||||
})
|
)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Spill `value` immediately by
|
/// Spill `value` immediately by
|
||||||
@@ -447,10 +469,9 @@ impl<'a> Context<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Assign a spill slot for the whole virtual register.
|
// Assign a spill slot for the whole virtual register.
|
||||||
let ss = self.cur
|
let ss = self.cur.func.stack_slots.make_spill_slot(
|
||||||
.func
|
self.cur.func.dfg.value_type(value),
|
||||||
.stack_slots
|
);
|
||||||
.make_spill_slot(self.cur.func.dfg.value_type(value));
|
|
||||||
for &v in self.virtregs.congruence_class(&value) {
|
for &v in self.virtregs.congruence_class(&value) {
|
||||||
self.liveness.spill(v);
|
self.liveness.spill(v);
|
||||||
self.cur.func.locations[v] = ValueLoc::Stack(ss);
|
self.cur.func.locations[v] = ValueLoc::Stack(ss);
|
||||||
@@ -481,11 +502,12 @@ impl<'a> Context<'a> {
|
|||||||
|
|
||||||
// Update live ranges.
|
// Update live ranges.
|
||||||
self.liveness.create_dead(copy, inst, Affinity::Reg(rci));
|
self.liveness.create_dead(copy, inst, Affinity::Reg(rci));
|
||||||
self.liveness
|
self.liveness.extend_locally(
|
||||||
.extend_locally(copy,
|
copy,
|
||||||
self.cur.func.layout.pp_ebb(inst),
|
self.cur.func.layout.pp_ebb(inst),
|
||||||
self.cur.current_inst().expect("must be at an instruction"),
|
self.cur.current_inst().expect("must be at an instruction"),
|
||||||
&self.cur.func.layout);
|
&self.cur.func.layout,
|
||||||
|
);
|
||||||
|
|
||||||
copy
|
copy
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -81,11 +81,12 @@ impl VirtRegs {
|
|||||||
/// If `value` belongs to a virtual register, the congruence class is the values of the virtual
|
/// If `value` belongs to a virtual register, the congruence class is the values of the virtual
|
||||||
/// register. Otherwise it is just the value itself.
|
/// register. Otherwise it is just the value itself.
|
||||||
pub fn congruence_class<'a, 'b>(&'a self, value: &'b Value) -> &'b [Value]
|
pub fn congruence_class<'a, 'b>(&'a self, value: &'b Value) -> &'b [Value]
|
||||||
where 'a: 'b
|
where
|
||||||
|
'a: 'b,
|
||||||
{
|
{
|
||||||
self.get(*value)
|
self.get(*value).map(|vr| self.values(vr)).unwrap_or(
|
||||||
.map(|vr| self.values(vr))
|
ref_slice(value),
|
||||||
.unwrap_or(ref_slice(value))
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Check if `a` and `b` belong to the same congruence class.
|
/// Check if `a` and `b` belong to the same congruence class.
|
||||||
@@ -126,9 +127,11 @@ impl VirtRegs {
|
|||||||
.min()
|
.min()
|
||||||
.unwrap_or_else(|| self.vregs.push(Default::default()));
|
.unwrap_or_else(|| self.vregs.push(Default::default()));
|
||||||
|
|
||||||
assert_eq!(values.len(),
|
assert_eq!(
|
||||||
singletons + cleared,
|
values.len(),
|
||||||
"Can't unify partial virtual registers");
|
singletons + cleared,
|
||||||
|
"Can't unify partial virtual registers"
|
||||||
|
);
|
||||||
|
|
||||||
self.vregs[vreg].extend(values.iter().cloned(), &mut self.pool);
|
self.vregs[vreg].extend(values.iter().cloned(), &mut self.pool);
|
||||||
for &v in values {
|
for &v in values {
|
||||||
|
|||||||
@@ -137,8 +137,8 @@ impl Configurable for Builder {
 self.bytes[offset] = value.parse().map_err(|_| Error::BadValue)?;
 }
 Detail::Enum { last, enumerators } => {
-self.bytes[offset] = parse_enum_value(value,
-self.template.enums(last, enumerators))?;
+self.bytes[offset] =
+parse_enum_value(value, self.template.enums(last, enumerators))?;
 }
 Detail::Preset => return Err(Error::BadName),
 }
@@ -218,11 +218,12 @@ pub mod detail {
|
|||||||
|
|
||||||
/// Format a setting value as a TOML string. This is mostly for use by the generated
|
/// Format a setting value as a TOML string. This is mostly for use by the generated
|
||||||
/// `Display` implementation.
|
/// `Display` implementation.
|
||||||
pub fn format_toml_value(&self,
|
pub fn format_toml_value(
|
||||||
detail: Detail,
|
&self,
|
||||||
byte: u8,
|
detail: Detail,
|
||||||
f: &mut fmt::Formatter)
|
byte: u8,
|
||||||
-> fmt::Result {
|
f: &mut fmt::Formatter,
|
||||||
|
) -> fmt::Result {
|
||||||
match detail {
|
match detail {
|
||||||
Detail::Bool { bit } => write!(f, "{}", (byte & (1 << bit)) != 0),
|
Detail::Bool { bit } => write!(f, "{}", (byte & (1 << bit)) != 0),
|
||||||
Detail::Num => write!(f, "{}", byte),
|
Detail::Num => write!(f, "{}", byte),
|
||||||
@@ -312,15 +313,17 @@ mod tests {
|
|||||||
fn display_default() {
|
fn display_default() {
|
||||||
let b = builder();
|
let b = builder();
|
||||||
let f = Flags::new(&b);
|
let f = Flags::new(&b);
|
||||||
assert_eq!(f.to_string(),
|
assert_eq!(
|
||||||
"[shared]\n\
|
f.to_string(),
|
||||||
|
"[shared]\n\
|
||||||
opt_level = \"default\"\n\
|
opt_level = \"default\"\n\
|
||||||
enable_verifier = false\n\
|
enable_verifier = false\n\
|
||||||
is_64bit = false\n\
|
is_64bit = false\n\
|
||||||
is_compressed = false\n\
|
is_compressed = false\n\
|
||||||
enable_float = true\n\
|
enable_float = true\n\
|
||||||
enable_simd = true\n\
|
enable_simd = true\n\
|
||||||
enable_atomics = true\n");
|
enable_atomics = true\n"
|
||||||
|
);
|
||||||
assert_eq!(f.opt_level(), super::OptLevel::Default);
|
assert_eq!(f.opt_level(), super::OptLevel::Default);
|
||||||
assert_eq!(f.enable_simd(), true);
|
assert_eq!(f.enable_simd(), true);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ use std::collections::HashMap;
|
|||||||
/// Test whether the given opcode is unsafe to even consider for GVN.
|
/// Test whether the given opcode is unsafe to even consider for GVN.
|
||||||
fn trivially_unsafe_for_gvn(opcode: Opcode) -> bool {
|
fn trivially_unsafe_for_gvn(opcode: Opcode) -> bool {
|
||||||
opcode.is_call() || opcode.is_branch() || opcode.is_terminator() || opcode.is_return() ||
|
opcode.is_call() || opcode.is_branch() || opcode.is_terminator() || opcode.is_return() ||
|
||||||
opcode.can_trap() || opcode.other_side_effects()
|
opcode.can_trap() || opcode.other_side_effects()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Perform simple GVN on `func`.
|
/// Perform simple GVN on `func`.
|
||||||
|
|||||||
@@ -51,9 +51,9 @@ pub fn layout_stack(frame: &mut StackSlots, alignment: StackSize) -> Result<Stac
|
|||||||
incoming_min = min(incoming_min, slot.offset);
|
incoming_min = min(incoming_min, slot.offset);
|
||||||
}
|
}
|
||||||
StackSlotKind::OutgoingArg => {
|
StackSlotKind::OutgoingArg => {
|
||||||
let offset = slot.offset
|
let offset = slot.offset.checked_add(slot.size as StackOffset).ok_or(
|
||||||
.checked_add(slot.size as StackOffset)
|
CtonError::ImplLimitExceeded,
|
||||||
.ok_or(CtonError::ImplLimitExceeded)?;
|
)?;
|
||||||
outgoing_max = max(outgoing_max, offset);
|
outgoing_max = max(outgoing_max, offset);
|
||||||
}
|
}
|
||||||
StackSlotKind::SpillSlot | StackSlotKind::Local => {
|
StackSlotKind::SpillSlot | StackSlotKind::Local => {
|
||||||
@@ -82,9 +82,9 @@ pub fn layout_stack(frame: &mut StackSlots, alignment: StackSize) -> Result<Stac
|
|||||||
_ => continue,
|
_ => continue,
|
||||||
}
|
}
|
||||||
|
|
||||||
offset = offset
|
offset = offset.checked_sub(slot.size as StackOffset).ok_or(
|
||||||
.checked_sub(slot.size as StackOffset)
|
CtonError::ImplLimitExceeded,
|
||||||
.ok_or(CtonError::ImplLimitExceeded)?;
|
)?;
|
||||||
|
|
||||||
// Aligning the negative offset can never cause overflow. We're only clearing bits.
|
// Aligning the negative offset can never cause overflow. We're only clearing bits.
|
||||||
offset &= -(min_align as StackOffset);
|
offset &= -(min_align as StackOffset);
|
||||||
@@ -96,9 +96,9 @@ pub fn layout_stack(frame: &mut StackSlots, alignment: StackSize) -> Result<Stac
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Finally, make room for the outgoing arguments.
|
// Finally, make room for the outgoing arguments.
|
||||||
offset = offset
|
offset = offset.checked_sub(outgoing_max).ok_or(
|
||||||
.checked_sub(outgoing_max)
|
CtonError::ImplLimitExceeded,
|
||||||
.ok_or(CtonError::ImplLimitExceeded)?;
|
)?;
|
||||||
offset &= -(alignment as StackOffset);
|
offset &= -(alignment as StackOffset);
|
||||||
|
|
||||||
let frame_size = (offset as StackSize).wrapping_neg();
|
let frame_size = (offset as StackSize).wrapping_neg();
|
||||||
|
|||||||
@@ -38,7 +38,8 @@ impl TopoOrder {
|
|||||||
/// Reset and initialize with a preferred sequence of EBBs. The resulting topological order is
|
/// Reset and initialize with a preferred sequence of EBBs. The resulting topological order is
|
||||||
/// guaranteed to contain all of the EBBs in `preferred` as well as any dominators.
|
/// guaranteed to contain all of the EBBs in `preferred` as well as any dominators.
|
||||||
pub fn reset<Ebbs>(&mut self, preferred: Ebbs)
|
pub fn reset<Ebbs>(&mut self, preferred: Ebbs)
|
||||||
where Ebbs: IntoIterator<Item = Ebb>
|
where
|
||||||
|
Ebbs: IntoIterator<Item = Ebb>,
|
||||||
{
|
{
|
||||||
self.preferred.clear();
|
self.preferred.clear();
|
||||||
self.preferred.extend(preferred);
|
self.preferred.extend(preferred);
|
||||||
|
|||||||
@@ -22,12 +22,13 @@ use verifier::Result;
|
|||||||
/// - The values in a virtual register are ordered according to the dominator tree's `rpo_cmp()`.
|
/// - The values in a virtual register are ordered according to the dominator tree's `rpo_cmp()`.
|
||||||
///
|
///
|
||||||
/// We don't verify that virtual registers are minimal. Minimal CSSA is not required.
|
/// We don't verify that virtual registers are minimal. Minimal CSSA is not required.
|
||||||
pub fn verify_cssa(func: &Function,
|
pub fn verify_cssa(
|
||||||
cfg: &ControlFlowGraph,
|
func: &Function,
|
||||||
domtree: &DominatorTree,
|
cfg: &ControlFlowGraph,
|
||||||
liveness: &Liveness,
|
domtree: &DominatorTree,
|
||||||
virtregs: &VirtRegs)
|
liveness: &Liveness,
|
||||||
-> Result {
|
virtregs: &VirtRegs,
|
||||||
|
) -> Result {
|
||||||
let verifier = CssaVerifier {
|
let verifier = CssaVerifier {
|
||||||
func,
|
func,
|
||||||
cfg,
|
cfg,
|
||||||
@@ -77,10 +78,12 @@ impl<'a> CssaVerifier<'a> {
|
|||||||
return err!(val, "Value in {} has same def as {}", vreg, prev_val);
|
return err!(val, "Value in {} has same def as {}", vreg, prev_val);
|
||||||
}
|
}
|
||||||
Ordering::Greater => {
|
Ordering::Greater => {
|
||||||
return err!(val,
|
return err!(
|
||||||
"Value in {} in wrong order relative to {}",
|
val,
|
||||||
vreg,
|
"Value in {} in wrong order relative to {}",
|
||||||
prev_val);
|
vreg,
|
||||||
|
prev_val
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -102,16 +105,20 @@ impl<'a> CssaVerifier<'a> {
|
|||||||
for &(_, pred) in self.cfg.get_predecessors(ebb) {
|
for &(_, pred) in self.cfg.get_predecessors(ebb) {
|
||||||
let pred_args = self.func.dfg.inst_variable_args(pred);
|
let pred_args = self.func.dfg.inst_variable_args(pred);
|
||||||
// This should have been caught by an earlier verifier pass.
|
// This should have been caught by an earlier verifier pass.
|
||||||
assert_eq!(ebb_args.len(),
|
assert_eq!(
|
||||||
pred_args.len(),
|
ebb_args.len(),
|
||||||
"Wrong arguments on branch.");
|
pred_args.len(),
|
||||||
|
"Wrong arguments on branch."
|
||||||
|
);
|
||||||
|
|
||||||
for (&ebb_arg, &pred_arg) in ebb_args.iter().zip(pred_args) {
|
for (&ebb_arg, &pred_arg) in ebb_args.iter().zip(pred_args) {
|
||||||
if !self.virtregs.same_class(ebb_arg, pred_arg) {
|
if !self.virtregs.same_class(ebb_arg, pred_arg) {
|
||||||
return err!(pred,
|
return err!(
|
||||||
"{} and {} must be in the same virtual register",
|
pred,
|
||||||
ebb_arg,
|
"{} and {} must be in the same virtual register",
|
||||||
pred_arg);
|
ebb_arg,
|
||||||
|
pred_arg
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,11 +21,12 @@ use verifier::Result;
|
|||||||
///
|
///
|
||||||
/// We don't verify that live ranges are minimal. This would require recomputing live ranges for
|
/// We don't verify that live ranges are minimal. This would require recomputing live ranges for
|
||||||
/// all values.
|
/// all values.
|
||||||
pub fn verify_liveness(isa: &TargetIsa,
|
pub fn verify_liveness(
|
||||||
func: &Function,
|
isa: &TargetIsa,
|
||||||
cfg: &ControlFlowGraph,
|
func: &Function,
|
||||||
liveness: &Liveness)
|
cfg: &ControlFlowGraph,
|
||||||
-> Result {
|
liveness: &Liveness,
|
||||||
|
) -> Result {
|
||||||
let verifier = LivenessVerifier {
|
let verifier = LivenessVerifier {
|
||||||
isa,
|
isa,
|
||||||
func,
|
func,
|
||||||
@@ -76,18 +77,22 @@ impl<'a> LivenessVerifier<'a> {
|
|||||||
if encoding.is_legal() {
|
if encoding.is_legal() {
|
||||||
// A legal instruction is not allowed to define ghost values.
|
// A legal instruction is not allowed to define ghost values.
|
||||||
if lr.affinity.is_none() {
|
if lr.affinity.is_none() {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"{} is a ghost value defined by a real [{}] instruction",
|
inst,
|
||||||
val,
|
"{} is a ghost value defined by a real [{}] instruction",
|
||||||
self.isa.encoding_info().display(encoding));
|
val,
|
||||||
|
self.isa.encoding_info().display(encoding)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// A non-encoded instruction can only define ghost values.
|
// A non-encoded instruction can only define ghost values.
|
||||||
if !lr.affinity.is_none() {
|
if !lr.affinity.is_none() {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"{} is a real {} value defined by a ghost instruction",
|
inst,
|
||||||
val,
|
"{} is a real {} value defined by a ghost instruction",
|
||||||
lr.affinity.display(&self.isa.register_info()));
|
val,
|
||||||
|
lr.affinity.display(&self.isa.register_info())
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -108,10 +113,12 @@ impl<'a> LivenessVerifier<'a> {
|
|||||||
// A branch argument can be a ghost value if the corresponding destination
|
// A branch argument can be a ghost value if the corresponding destination
|
||||||
// EBB argument is a ghost value.
|
// EBB argument is a ghost value.
|
||||||
if lr.affinity.is_none() && !self.is_ghost_branch_argument(inst, idx) {
|
if lr.affinity.is_none() && !self.is_ghost_branch_argument(inst, idx) {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"{} is a ghost value used by a real [{}] instruction",
|
inst,
|
||||||
val,
|
"{} is a ghost value used by a real [{}] instruction",
|
||||||
self.isa.encoding_info().display(encoding));
|
val,
|
||||||
|
self.isa.encoding_info().display(encoding)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -126,7 +133,8 @@ impl<'a> LivenessVerifier<'a> {
|
|||||||
|
|
||||||
// Check if `inst` is in the def range, not including the def itself.
|
// Check if `inst` is in the def range, not including the def itself.
|
||||||
if l.cmp(lr.def(), inst) == Ordering::Less &&
|
if l.cmp(lr.def(), inst) == Ordering::Less &&
|
||||||
l.cmp(inst, lr.def_local_end()) != Ordering::Greater {
|
l.cmp(inst, lr.def_local_end()) != Ordering::Greater
|
||||||
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -205,11 +213,13 @@ impl<'a> LivenessVerifier<'a> {
|
|||||||
let end_ebb = match l.inst_ebb(livein.end) {
|
let end_ebb = match l.inst_ebb(livein.end) {
|
||||||
Some(e) => e,
|
Some(e) => e,
|
||||||
None => {
|
None => {
|
||||||
return err!(loc,
|
return err!(
|
||||||
"{} livein for {} ends at {} which is not in the layout",
|
loc,
|
||||||
val,
|
"{} livein for {} ends at {} which is not in the layout",
|
||||||
ebb,
|
val,
|
||||||
livein.end)
|
ebb,
|
||||||
|
livein.end
|
||||||
|
)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -218,10 +228,12 @@ impl<'a> LivenessVerifier<'a> {
|
|||||||
// If `val` is live-in at `ebb`, it must be live at all the predecessors.
|
// If `val` is live-in at `ebb`, it must be live at all the predecessors.
|
||||||
for &(_, pred) in self.cfg.get_predecessors(ebb) {
|
for &(_, pred) in self.cfg.get_predecessors(ebb) {
|
||||||
if !self.live_at_use(lr, pred) {
|
if !self.live_at_use(lr, pred) {
|
||||||
return err!(pred,
|
return err!(
|
||||||
"{} is live in to {} but not live at predecessor",
|
pred,
|
||||||
val,
|
"{} is live in to {} but not live at predecessor",
|
||||||
ebb);
|
val,
|
||||||
|
ebb
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -127,11 +127,12 @@ pub fn verify_function(func: &Function, isa: Option<&TargetIsa>) -> Result {
|
|||||||
|
|
||||||
/// Verify `func` after checking the integrity of associated context data structures `cfg` and
|
/// Verify `func` after checking the integrity of associated context data structures `cfg` and
|
||||||
/// `domtree`.
|
/// `domtree`.
|
||||||
pub fn verify_context(func: &Function,
|
pub fn verify_context(
|
||||||
cfg: &ControlFlowGraph,
|
func: &Function,
|
||||||
domtree: &DominatorTree,
|
cfg: &ControlFlowGraph,
|
||||||
isa: Option<&TargetIsa>)
|
domtree: &DominatorTree,
|
||||||
-> Result {
|
isa: Option<&TargetIsa>,
|
||||||
|
) -> Result {
|
||||||
let verifier = Verifier::new(func, isa);
|
let verifier = Verifier::new(func, isa);
|
||||||
verifier.cfg_integrity(cfg)?;
|
verifier.cfg_integrity(cfg)?;
|
||||||
if domtree.is_valid() {
|
if domtree.is_valid() {
|
||||||
@@ -187,9 +188,11 @@ impl<'a> Verifier<'a> {
|
|||||||
|
|
||||||
if is_terminator && !is_last_inst {
|
if is_terminator && !is_last_inst {
|
||||||
// Terminating instructions only occur at the end of blocks.
|
// Terminating instructions only occur at the end of blocks.
|
||||||
return err!(inst,
|
return err!(
|
||||||
"a terminator instruction was encountered before the end of {}",
|
inst,
|
||||||
ebb);
|
"a terminator instruction was encountered before the end of {}",
|
||||||
|
ebb
|
||||||
|
);
|
||||||
}
|
}
|
||||||
if is_last_inst && !is_terminator {
|
if is_last_inst && !is_terminator {
|
||||||
return err!(ebb, "block does not end in a terminator instruction!");
|
return err!(ebb, "block does not end in a terminator instruction!");
|
||||||
@@ -237,10 +240,12 @@ impl<'a> Verifier<'a> {
|
|||||||
// All result values for multi-valued instructions are created
|
// All result values for multi-valued instructions are created
|
||||||
let got_results = dfg.inst_results(inst).len();
|
let got_results = dfg.inst_results(inst).len();
|
||||||
if got_results != total_results {
|
if got_results != total_results {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"expected {} result values, found {}",
|
inst,
|
||||||
total_results,
|
"expected {} result values, found {}",
|
||||||
got_results);
|
total_results,
|
||||||
|
got_results
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
self.verify_entity_references(inst)
|
self.verify_entity_references(inst)
|
||||||
@@ -407,22 +412,30 @@ impl<'a> Verifier<'a> {
|
|||||||
ValueDef::Res(def_inst, _) => {
|
ValueDef::Res(def_inst, _) => {
|
||||||
// Value is defined by an instruction that exists.
|
// Value is defined by an instruction that exists.
|
||||||
if !dfg.inst_is_valid(def_inst) {
|
if !dfg.inst_is_valid(def_inst) {
|
||||||
return err!(loc_inst,
|
return err!(
|
||||||
"{} is defined by invalid instruction {}",
|
loc_inst,
|
||||||
v,
|
"{} is defined by invalid instruction {}",
|
||||||
def_inst);
|
v,
|
||||||
|
def_inst
|
||||||
|
);
|
||||||
}
|
}
|
||||||
// Defining instruction is inserted in an EBB.
|
// Defining instruction is inserted in an EBB.
|
||||||
if self.func.layout.inst_ebb(def_inst) == None {
|
if self.func.layout.inst_ebb(def_inst) == None {
|
||||||
return err!(loc_inst,
|
return err!(
|
||||||
"{} is defined by {} which has no EBB",
|
loc_inst,
|
||||||
v,
|
"{} is defined by {} which has no EBB",
|
||||||
def_inst);
|
v,
|
||||||
|
def_inst
|
||||||
|
);
|
||||||
}
|
}
|
||||||
// Defining instruction dominates the instruction that uses the value.
|
// Defining instruction dominates the instruction that uses the value.
|
||||||
if self.domtree.is_reachable(self.func.layout.pp_ebb(loc_inst)) &&
|
if self.domtree.is_reachable(self.func.layout.pp_ebb(loc_inst)) &&
|
||||||
!self.domtree
|
!self.domtree.dominates(
|
||||||
.dominates(def_inst, loc_inst, &self.func.layout) {
|
def_inst,
|
||||||
|
loc_inst,
|
||||||
|
&self.func.layout,
|
||||||
|
)
|
||||||
|
{
|
||||||
return err!(loc_inst, "uses value from non-dominating {}", def_inst);
|
return err!(loc_inst, "uses value from non-dominating {}", def_inst);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -433,14 +446,17 @@ impl<'a> Verifier<'a> {
|
|||||||
}
|
}
|
||||||
// Defining EBB is inserted in the layout
|
// Defining EBB is inserted in the layout
|
||||||
if !self.func.layout.is_ebb_inserted(ebb) {
|
if !self.func.layout.is_ebb_inserted(ebb) {
|
||||||
return err!(loc_inst,
|
return err!(
|
||||||
"{} is defined by {} which is not in the layout",
|
loc_inst,
|
||||||
v,
|
"{} is defined by {} which is not in the layout",
|
||||||
ebb);
|
v,
|
||||||
|
ebb
|
||||||
|
);
|
||||||
}
|
}
|
||||||
// The defining EBB dominates the instruction using this value.
|
// The defining EBB dominates the instruction using this value.
|
||||||
if self.domtree.is_reachable(ebb) &&
|
if self.domtree.is_reachable(ebb) &&
|
||||||
!self.domtree.dominates(ebb, loc_inst, &self.func.layout) {
|
!self.domtree.dominates(ebb, loc_inst, &self.func.layout)
|
||||||
|
{
|
||||||
return err!(loc_inst, "uses value arg from non-dominating {}", ebb);
|
return err!(loc_inst, "uses value arg from non-dominating {}", ebb);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -456,39 +472,48 @@ impl<'a> Verifier<'a> {
|
|||||||
let expected = domtree.idom(ebb);
|
let expected = domtree.idom(ebb);
|
||||||
let got = self.domtree.idom(ebb);
|
let got = self.domtree.idom(ebb);
|
||||||
if got != expected {
|
if got != expected {
|
||||||
return err!(ebb,
|
return err!(
|
||||||
"invalid domtree, expected idom({}) = {:?}, got {:?}",
|
ebb,
|
||||||
ebb,
|
"invalid domtree, expected idom({}) = {:?}, got {:?}",
|
||||||
expected,
|
ebb,
|
||||||
got);
|
expected,
|
||||||
|
got
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// We also verify if the postorder defined by `DominatorTree` is sane
|
// We also verify if the postorder defined by `DominatorTree` is sane
|
||||||
if self.domtree.cfg_postorder().len() != domtree.cfg_postorder().len() {
|
if self.domtree.cfg_postorder().len() != domtree.cfg_postorder().len() {
|
||||||
return err!(AnyEntity::Function,
|
return err!(
|
||||||
"incorrect number of Ebbs in postorder traversal");
|
AnyEntity::Function,
|
||||||
|
"incorrect number of Ebbs in postorder traversal"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
for (index, (&true_ebb, &test_ebb)) in
|
for (index, (&true_ebb, &test_ebb)) in
|
||||||
self.domtree
|
self.domtree
|
||||||
.cfg_postorder()
|
.cfg_postorder()
|
||||||
.iter()
|
.iter()
|
||||||
.zip(domtree.cfg_postorder().iter())
|
.zip(domtree.cfg_postorder().iter())
|
||||||
.enumerate() {
|
.enumerate()
|
||||||
|
{
|
||||||
if true_ebb != test_ebb {
|
if true_ebb != test_ebb {
|
||||||
return err!(test_ebb,
|
return err!(
|
||||||
"invalid domtree, postorder ebb number {} should be {}, got {}",
|
test_ebb,
|
||||||
index,
|
"invalid domtree, postorder ebb number {} should be {}, got {}",
|
||||||
true_ebb,
|
index,
|
||||||
test_ebb);
|
true_ebb,
|
||||||
|
test_ebb
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// We verify rpo_cmp on pairs of adjacent ebbs in the postorder
|
// We verify rpo_cmp on pairs of adjacent ebbs in the postorder
|
||||||
for (&prev_ebb, &next_ebb) in self.domtree.cfg_postorder().iter().adjacent_pairs() {
|
for (&prev_ebb, &next_ebb) in self.domtree.cfg_postorder().iter().adjacent_pairs() {
|
||||||
if domtree.rpo_cmp(prev_ebb, next_ebb, &self.func.layout) != Ordering::Greater {
|
if domtree.rpo_cmp(prev_ebb, next_ebb, &self.func.layout) != Ordering::Greater {
|
||||||
return err!(next_ebb,
|
return err!(
|
||||||
"invalid domtree, rpo_cmp does not says {} is greater than {}",
|
next_ebb,
|
||||||
prev_ebb,
|
"invalid domtree, rpo_cmp does not says {} is greater than {}",
|
||||||
next_ebb);
|
prev_ebb,
|
||||||
|
next_ebb
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -506,11 +531,13 @@ impl<'a> Verifier<'a> {
|
|||||||
for (i, &arg) in self.func.dfg.ebb_args(ebb).iter().enumerate() {
|
for (i, &arg) in self.func.dfg.ebb_args(ebb).iter().enumerate() {
|
||||||
let arg_type = self.func.dfg.value_type(arg);
|
let arg_type = self.func.dfg.value_type(arg);
|
||||||
if arg_type != expected_types[i].value_type {
|
if arg_type != expected_types[i].value_type {
|
||||||
return err!(ebb,
|
return err!(
|
||||||
"entry block argument {} expected to have type {}, got {}",
|
ebb,
|
||||||
i,
|
"entry block argument {} expected to have type {}, got {}",
|
||||||
expected_types[i],
|
i,
|
||||||
arg_type);
|
expected_types[i],
|
||||||
|
arg_type
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -551,12 +578,14 @@ impl<'a> Verifier<'a> {
|
|||||||
let expected_type = self.func.dfg.compute_result_type(inst, i, ctrl_type);
|
let expected_type = self.func.dfg.compute_result_type(inst, i, ctrl_type);
|
||||||
if let Some(expected_type) = expected_type {
|
if let Some(expected_type) = expected_type {
|
||||||
if result_type != expected_type {
|
if result_type != expected_type {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"expected result {} ({}) to have type {}, found {}",
|
inst,
|
||||||
i,
|
"expected result {} ({}) to have type {}, found {}",
|
||||||
result,
|
i,
|
||||||
expected_type,
|
result,
|
||||||
result_type);
|
expected_type,
|
||||||
|
result_type
|
||||||
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
return err!(inst, "has more result values than expected");
|
return err!(inst, "has more result values than expected");
|
||||||
@@ -579,22 +608,26 @@ impl<'a> Verifier<'a> {
|
|||||||
match constraints.value_argument_constraint(i, ctrl_type) {
|
match constraints.value_argument_constraint(i, ctrl_type) {
|
||||||
ResolvedConstraint::Bound(expected_type) => {
|
ResolvedConstraint::Bound(expected_type) => {
|
||||||
if arg_type != expected_type {
|
if arg_type != expected_type {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"arg {} ({}) has type {}, expected {}",
|
inst,
|
||||||
i,
|
"arg {} ({}) has type {}, expected {}",
|
||||||
arg,
|
i,
|
||||||
arg_type,
|
arg,
|
||||||
expected_type);
|
arg_type,
|
||||||
|
expected_type
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ResolvedConstraint::Free(type_set) => {
|
ResolvedConstraint::Free(type_set) => {
|
||||||
if !type_set.contains(arg_type) {
|
if !type_set.contains(arg_type) {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"arg {} ({}) with type {} failed to satisfy type set {:?}",
|
inst,
|
||||||
i,
|
"arg {} ({}) with type {} failed to satisfy type set {:?}",
|
||||||
arg,
|
i,
|
||||||
arg_type,
|
arg,
|
||||||
type_set);
|
arg_type,
|
||||||
|
type_set
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -605,21 +638,21 @@ impl<'a> Verifier<'a> {
|
|||||||
fn typecheck_variable_args(&self, inst: Inst) -> Result {
|
fn typecheck_variable_args(&self, inst: Inst) -> Result {
|
||||||
match self.func.dfg[inst].analyze_branch(&self.func.dfg.value_lists) {
|
match self.func.dfg[inst].analyze_branch(&self.func.dfg.value_lists) {
|
||||||
BranchInfo::SingleDest(ebb, _) => {
|
BranchInfo::SingleDest(ebb, _) => {
|
||||||
let iter = self.func
|
let iter = self.func.dfg.ebb_args(ebb).iter().map(|&v| {
|
||||||
.dfg
|
self.func.dfg.value_type(v)
|
||||||
.ebb_args(ebb)
|
});
|
||||||
.iter()
|
|
||||||
.map(|&v| self.func.dfg.value_type(v));
|
|
||||||
self.typecheck_variable_args_iterator(inst, iter)?;
|
self.typecheck_variable_args_iterator(inst, iter)?;
|
||||||
}
|
}
|
||||||
BranchInfo::Table(table) => {
|
BranchInfo::Table(table) => {
|
||||||
for (_, ebb) in self.func.jump_tables[table].entries() {
|
for (_, ebb) in self.func.jump_tables[table].entries() {
|
||||||
let arg_count = self.func.dfg.num_ebb_args(ebb);
|
let arg_count = self.func.dfg.num_ebb_args(ebb);
|
||||||
if arg_count != 0 {
|
if arg_count != 0 {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"takes no arguments, but had target {} with {} arguments",
|
inst,
|
||||||
ebb,
|
"takes no arguments, but had target {} with {} arguments",
|
||||||
arg_count);
|
ebb,
|
||||||
|
arg_count
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -649,10 +682,11 @@ impl<'a> Verifier<'a> {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn typecheck_variable_args_iterator<I: Iterator<Item = Type>>(&self,
|
fn typecheck_variable_args_iterator<I: Iterator<Item = Type>>(
|
||||||
inst: Inst,
|
&self,
|
||||||
iter: I)
|
inst: Inst,
|
||||||
-> Result {
|
iter: I,
|
||||||
|
) -> Result {
|
||||||
let variable_args = self.func.dfg.inst_variable_args(inst);
|
let variable_args = self.func.dfg.inst_variable_args(inst);
|
||||||
let mut i = 0;
|
let mut i = 0;
|
||||||
|
|
||||||
@@ -665,20 +699,24 @@ impl<'a> Verifier<'a> {
|
|||||||
let arg = variable_args[i];
|
let arg = variable_args[i];
|
||||||
let arg_type = self.func.dfg.value_type(arg);
|
let arg_type = self.func.dfg.value_type(arg);
|
||||||
if expected_type != arg_type {
|
if expected_type != arg_type {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"arg {} ({}) has type {}, expected {}",
|
inst,
|
||||||
i,
|
"arg {} ({}) has type {}, expected {}",
|
||||||
variable_args[i],
|
i,
|
||||||
arg_type,
|
variable_args[i],
|
||||||
expected_type);
|
arg_type,
|
||||||
|
expected_type
|
||||||
|
);
|
||||||
}
|
}
|
||||||
i += 1;
|
i += 1;
|
||||||
}
|
}
|
||||||
if i != variable_args.len() {
|
if i != variable_args.len() {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"mismatched argument count, got {}, expected {}",
|
inst,
|
||||||
variable_args.len(),
|
"mismatched argument count, got {}, expected {}",
|
||||||
i);
|
variable_args.len(),
|
||||||
|
i
|
||||||
|
);
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -707,34 +745,42 @@ impl<'a> Verifier<'a> {
|
|||||||
self.verify_stack_slot(inst, ss)?;
|
self.verify_stack_slot(inst, ss)?;
|
||||||
let slot = &self.func.stack_slots[ss];
|
let slot = &self.func.stack_slots[ss];
|
||||||
if slot.kind != StackSlotKind::OutgoingArg {
|
if slot.kind != StackSlotKind::OutgoingArg {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"Outgoing stack argument {} in wrong stack slot: {} = {}",
|
inst,
|
||||||
arg,
|
"Outgoing stack argument {} in wrong stack slot: {} = {}",
|
||||||
ss,
|
arg,
|
||||||
slot);
|
ss,
|
||||||
|
slot
|
||||||
|
);
|
||||||
}
|
}
|
||||||
if slot.offset != offset {
|
if slot.offset != offset {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"Outgoing stack argument {} should have offset {}: {} = {}",
|
inst,
|
||||||
arg,
|
"Outgoing stack argument {} should have offset {}: {} = {}",
|
||||||
offset,
|
arg,
|
||||||
ss,
|
offset,
|
||||||
slot);
|
ss,
|
||||||
|
slot
|
||||||
|
);
|
||||||
}
|
}
|
||||||
if slot.size != abi.value_type.bytes() {
|
if slot.size != abi.value_type.bytes() {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"Outgoing stack argument {} wrong size for {}: {} = {}",
|
inst,
|
||||||
arg,
|
"Outgoing stack argument {} wrong size for {}: {} = {}",
|
||||||
abi.value_type,
|
arg,
|
||||||
ss,
|
abi.value_type,
|
||||||
slot);
|
ss,
|
||||||
|
slot
|
||||||
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let reginfo = self.isa.map(|i| i.register_info());
|
let reginfo = self.isa.map(|i| i.register_info());
|
||||||
return err!(inst,
|
return err!(
|
||||||
"Outgoing stack argument {} in wrong location: {}",
|
inst,
|
||||||
arg,
|
"Outgoing stack argument {} in wrong location: {}",
|
||||||
arg_loc.display(reginfo.as_ref()));
|
arg,
|
||||||
|
arg_loc.display(reginfo.as_ref())
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -751,12 +797,14 @@ impl<'a> Verifier<'a> {
|
|||||||
for (i, (&arg, &expected_type)) in args.iter().zip(expected_types).enumerate() {
|
for (i, (&arg, &expected_type)) in args.iter().zip(expected_types).enumerate() {
|
||||||
let arg_type = self.func.dfg.value_type(arg);
|
let arg_type = self.func.dfg.value_type(arg);
|
||||||
if arg_type != expected_type.value_type {
|
if arg_type != expected_type.value_type {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"arg {} ({}) has type {}, must match function signature of {}",
|
inst,
|
||||||
i,
|
"arg {} ({}) has type {}, must match function signature of {}",
|
||||||
arg,
|
i,
|
||||||
arg_type,
|
arg,
|
||||||
expected_type);
|
arg_type,
|
||||||
|
expected_type
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -775,9 +823,11 @@ impl<'a> Verifier<'a> {
|
|||||||
|
|
||||||
let missing_succs: Vec<Ebb> = expected_succs.difference(&got_succs).cloned().collect();
|
let missing_succs: Vec<Ebb> = expected_succs.difference(&got_succs).cloned().collect();
|
||||||
if !missing_succs.is_empty() {
|
if !missing_succs.is_empty() {
|
||||||
return err!(ebb,
|
return err!(
|
||||||
"cfg lacked the following successor(s) {:?}",
|
ebb,
|
||||||
missing_succs);
|
"cfg lacked the following successor(s) {:?}",
|
||||||
|
missing_succs
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let excess_succs: Vec<Ebb> = got_succs.difference(&expected_succs).cloned().collect();
|
let excess_succs: Vec<Ebb> = got_succs.difference(&expected_succs).cloned().collect();
|
||||||
@@ -790,9 +840,11 @@ impl<'a> Verifier<'a> {
|
|||||||
|
|
||||||
let missing_preds: Vec<Inst> = expected_preds.difference(&got_preds).cloned().collect();
|
let missing_preds: Vec<Inst> = expected_preds.difference(&got_preds).cloned().collect();
|
||||||
if !missing_preds.is_empty() {
|
if !missing_preds.is_empty() {
|
||||||
return err!(ebb,
|
return err!(
|
||||||
"cfg lacked the following predecessor(s) {:?}",
|
ebb,
|
||||||
missing_preds);
|
"cfg lacked the following predecessor(s) {:?}",
|
||||||
|
missing_preds
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let excess_preds: Vec<Inst> = got_preds.difference(&expected_preds).cloned().collect();
|
let excess_preds: Vec<Inst> = got_preds.difference(&expected_preds).cloned().collect();
|
||||||
@@ -826,23 +878,28 @@ impl<'a> Verifier<'a> {
|
|||||||
|
|
||||||
let encoding = self.func.encodings[inst];
|
let encoding = self.func.encodings[inst];
|
||||||
if encoding.is_legal() {
|
if encoding.is_legal() {
|
||||||
let verify_encoding =
|
let verify_encoding = isa.encode(
|
||||||
isa.encode(&self.func.dfg,
|
&self.func.dfg,
|
||||||
&self.func.dfg[inst],
|
&self.func.dfg[inst],
|
||||||
self.func.dfg.ctrl_typevar(inst));
|
self.func.dfg.ctrl_typevar(inst),
|
||||||
|
);
|
||||||
match verify_encoding {
|
match verify_encoding {
|
||||||
Ok(verify_encoding) => {
|
Ok(verify_encoding) => {
|
||||||
if verify_encoding != encoding {
|
if verify_encoding != encoding {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"Instruction re-encoding {} doesn't match {}",
|
inst,
|
||||||
isa.encoding_info().display(verify_encoding),
|
"Instruction re-encoding {} doesn't match {}",
|
||||||
isa.encoding_info().display(encoding));
|
isa.encoding_info().display(verify_encoding),
|
||||||
|
isa.encoding_info().display(encoding)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
return err!(inst,
|
return err!(
|
||||||
"Instruction failed to re-encode {}",
|
inst,
|
||||||
isa.encoding_info().display(encoding))
|
"Instruction failed to re-encode {}",
|
||||||
|
isa.encoding_info().display(encoding)
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return Ok(());
|
return Ok(());
|
||||||
@@ -932,9 +989,9 @@ mod tests {
|
|||||||
let mut func = Function::new();
|
let mut func = Function::new();
|
||||||
let ebb0 = func.dfg.make_ebb();
|
let ebb0 = func.dfg.make_ebb();
|
||||||
func.layout.append_ebb(ebb0);
|
func.layout.append_ebb(ebb0);
|
||||||
let nullary_with_bad_opcode =
|
let nullary_with_bad_opcode = func.dfg.make_inst(
|
||||||
func.dfg
|
InstructionData::Nullary { opcode: Opcode::Jump },
|
||||||
.make_inst(InstructionData::Nullary { opcode: Opcode::Jump });
|
);
|
||||||
func.layout.append_inst(nullary_with_bad_opcode, ebb0);
|
func.layout.append_inst(nullary_with_bad_opcode, ebb0);
|
||||||
let verifier = Verifier::new(&func, None);
|
let verifier = Verifier::new(&func, None);
|
||||||
assert_err_with_msg!(verifier.run(), "instruction format");
|
assert_err_with_msg!(verifier.run(), "instruction format");
|
||||||
|
|||||||
@@ -38,10 +38,11 @@ fn write_spec(w: &mut Write, func: &Function, regs: Option<&RegInfo>) -> Result
|
|||||||
write!(w, "function {}{}", func.name, func.signature.display(regs))
|
write!(w, "function {}{}", func.name, func.signature.display(regs))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_preamble(w: &mut Write,
|
fn write_preamble(
|
||||||
func: &Function,
|
w: &mut Write,
|
||||||
regs: Option<&RegInfo>)
|
func: &Function,
|
||||||
-> result::Result<bool, Error> {
|
regs: Option<&RegInfo>,
|
||||||
|
) -> result::Result<bool, Error> {
|
||||||
let mut any = false;
|
let mut any = false;
|
||||||
|
|
||||||
for ss in func.stack_slots.keys() {
|
for ss in func.stack_slots.keys() {
|
||||||
@@ -63,10 +64,12 @@ fn write_preamble(w: &mut Write,
|
|||||||
// signatures.
|
// signatures.
|
||||||
for sig in func.dfg.signatures.keys() {
|
for sig in func.dfg.signatures.keys() {
|
||||||
any = true;
|
any = true;
|
||||||
writeln!(w,
|
writeln!(
|
||||||
" {} = {}",
|
w,
|
||||||
sig,
|
" {} = {}",
|
||||||
func.dfg.signatures[sig].display(regs))?;
|
sig,
|
||||||
|
func.dfg.signatures[sig].display(regs)
|
||||||
|
)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
for fnref in func.dfg.ext_funcs.keys() {
|
for fnref in func.dfg.ext_funcs.keys() {
|
||||||
@@ -163,8 +166,10 @@ fn type_suffix(func: &Function, inst: Inst) -> Option<Type> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let rtype = func.dfg.ctrl_typevar(inst);
|
let rtype = func.dfg.ctrl_typevar(inst);
|
||||||
assert!(!rtype.is_void(),
|
assert!(
|
||||||
"Polymorphic instruction must produce a result");
|
!rtype.is_void(),
|
||||||
|
"Polymorphic instruction must produce a result"
|
||||||
|
);
|
||||||
Some(rtype)
|
Some(rtype)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -179,11 +184,12 @@ fn write_value_aliases(w: &mut Write, func: &Function, inst: Inst, indent: usize
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write_instruction(w: &mut Write,
|
fn write_instruction(
|
||||||
func: &Function,
|
w: &mut Write,
|
||||||
isa: Option<&TargetIsa>,
|
func: &Function,
|
||||||
inst: Inst)
|
isa: Option<&TargetIsa>,
|
||||||
-> Result {
|
inst: Inst,
|
||||||
|
) -> Result {
|
||||||
// Indent all instructions to col 24 if any encodings are present.
|
// Indent all instructions to col 24 if any encodings are present.
|
||||||
let indent = if func.encodings.is_empty() { 4 } else { 24 };
|
let indent = if func.encodings.is_empty() { 4 } else { 24 };
|
||||||
|
|
||||||
@@ -240,11 +246,12 @@ fn write_instruction(w: &mut Write,
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Write the operands of `inst` to `w` with a prepended space.
|
/// Write the operands of `inst` to `w` with a prepended space.
|
||||||
pub fn write_operands(w: &mut Write,
|
pub fn write_operands(
|
||||||
dfg: &DataFlowGraph,
|
w: &mut Write,
|
||||||
isa: Option<&TargetIsa>,
|
dfg: &DataFlowGraph,
|
||||||
inst: Inst)
|
isa: Option<&TargetIsa>,
|
||||||
-> Result {
|
inst: Inst,
|
||||||
|
) -> Result {
|
||||||
let pool = &dfg.value_lists;
|
let pool = &dfg.value_lists;
|
||||||
use ir::instructions::InstructionData::*;
|
use ir::instructions::InstructionData::*;
|
||||||
match dfg[inst] {
|
match dfg[inst] {
|
||||||
@@ -278,10 +285,12 @@ pub fn write_operands(w: &mut Write,
|
|||||||
if args.is_empty() {
|
if args.is_empty() {
|
||||||
write!(w, " {}", destination)
|
write!(w, " {}", destination)
|
||||||
} else {
|
} else {
|
||||||
write!(w,
|
write!(
|
||||||
" {}({})",
|
w,
|
||||||
destination,
|
" {}({})",
|
||||||
DisplayValues(args.as_slice(pool)))
|
destination,
|
||||||
|
DisplayValues(args.as_slice(pool))
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Branch {
|
Branch {
|
||||||
@@ -315,11 +324,13 @@ pub fn write_operands(w: &mut Write,
|
|||||||
}
|
}
|
||||||
IndirectCall { sig_ref, ref args, .. } => {
|
IndirectCall { sig_ref, ref args, .. } => {
|
||||||
let args = args.as_slice(pool);
|
let args = args.as_slice(pool);
|
||||||
write!(w,
|
write!(
|
||||||
" {}, {}({})",
|
w,
|
||||||
sig_ref,
|
" {}, {}({})",
|
||||||
args[0],
|
sig_ref,
|
||||||
DisplayValues(&args[1..]))
|
args[0],
|
||||||
|
DisplayValues(&args[1..])
|
||||||
|
)
|
||||||
}
|
}
|
||||||
StackLoad { stack_slot, offset, .. } => write!(w, " {}{}", stack_slot, offset),
|
StackLoad { stack_slot, offset, .. } => write!(w, " {}{}", stack_slot, offset),
|
||||||
StackStore {
|
StackStore {
|
||||||
@@ -341,11 +352,13 @@ pub fn write_operands(w: &mut Write,
|
|||||||
RegMove { arg, src, dst, .. } => {
|
RegMove { arg, src, dst, .. } => {
|
||||||
if let Some(isa) = isa {
|
if let Some(isa) = isa {
|
||||||
let regs = isa.register_info();
|
let regs = isa.register_info();
|
||||||
write!(w,
|
write!(
|
||||||
" {}, {} -> {}",
|
w,
|
||||||
arg,
|
" {}, {} -> {}",
|
||||||
regs.display_regunit(src),
|
arg,
|
||||||
regs.display_regunit(dst))
|
regs.display_regunit(src),
|
||||||
|
regs.display_regunit(dst)
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
write!(w, " {}, %{} -> %{}", arg, src, dst)
|
write!(w, " {}, %{} -> %{}", arg, src, dst)
|
||||||
}
|
}
|
||||||
@@ -382,22 +395,31 @@ mod tests {
|
|||||||
f.name = FunctionName::new("foo");
|
f.name = FunctionName::new("foo");
|
||||||
assert_eq!(f.to_string(), "function %foo() native {\n}\n");
|
assert_eq!(f.to_string(), "function %foo() native {\n}\n");
|
||||||
|
|
||||||
f.stack_slots
|
f.stack_slots.push(
|
||||||
.push(StackSlotData::new(StackSlotKind::Local, 4));
|
StackSlotData::new(StackSlotKind::Local, 4),
|
||||||
assert_eq!(f.to_string(),
|
);
|
||||||
"function %foo() native {\n ss0 = local 4\n}\n");
|
assert_eq!(
|
||||||
|
f.to_string(),
|
||||||
|
"function %foo() native {\n ss0 = local 4\n}\n"
|
||||||
|
);
|
||||||
|
|
||||||
let ebb = f.dfg.make_ebb();
|
let ebb = f.dfg.make_ebb();
|
||||||
f.layout.append_ebb(ebb);
|
f.layout.append_ebb(ebb);
|
||||||
assert_eq!(f.to_string(),
|
assert_eq!(
|
||||||
"function %foo() native {\n ss0 = local 4\n\nebb0:\n}\n");
|
f.to_string(),
|
||||||
|
"function %foo() native {\n ss0 = local 4\n\nebb0:\n}\n"
|
||||||
|
);
|
||||||
|
|
||||||
f.dfg.append_ebb_arg(ebb, types::I8);
|
f.dfg.append_ebb_arg(ebb, types::I8);
|
||||||
assert_eq!(f.to_string(),
|
assert_eq!(
|
||||||
"function %foo() native {\n ss0 = local 4\n\nebb0(v0: i8):\n}\n");
|
f.to_string(),
|
||||||
|
"function %foo() native {\n ss0 = local 4\n\nebb0(v0: i8):\n}\n"
|
||||||
|
);
|
||||||
|
|
||||||
f.dfg.append_ebb_arg(ebb, types::F32.by(4).unwrap());
|
f.dfg.append_ebb_arg(ebb, types::F32.by(4).unwrap());
|
||||||
assert_eq!(f.to_string(),
|
assert_eq!(
|
||||||
"function %foo() native {\n ss0 = local 4\n\nebb0(v0: i8, v1: f32x4):\n}\n");
|
f.to_string(),
|
||||||
|
"function %foo() native {\n ss0 = local 4\n\nebb0(v0: i8, v1: f32x4):\n}\n"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -47,9 +47,11 @@ impl Directive {
|
|||||||
"unordered" => Ok(Directive::Unordered(pat)),
|
"unordered" => Ok(Directive::Unordered(pat)),
|
||||||
"not" => {
|
"not" => {
|
||||||
if !pat.defs().is_empty() {
|
if !pat.defs().is_empty() {
|
||||||
let msg = format!("can't define variables '$({}=...' in not: {}",
|
let msg = format!(
|
||||||
pat.defs()[0],
|
"can't define variables '$({}=...' in not: {}",
|
||||||
rest);
|
pat.defs()[0],
|
||||||
|
rest
|
||||||
|
);
|
||||||
Err(Error::DuplicateDef(msg))
|
Err(Error::DuplicateDef(msg))
|
||||||
} else {
|
} else {
|
||||||
Ok(Directive::Not(pat))
|
Ok(Directive::Not(pat))
|
||||||
@@ -63,16 +65,23 @@ impl Directive {
|
|||||||
fn regex(rest: &str) -> Result<Directive> {
|
fn regex(rest: &str) -> Result<Directive> {
|
||||||
let varlen = varname_prefix(rest);
|
let varlen = varname_prefix(rest);
|
||||||
if varlen == 0 {
|
if varlen == 0 {
|
||||||
return Err(Error::Syntax(format!("invalid variable name in regex: {}", rest)));
|
return Err(Error::Syntax(
|
||||||
|
format!("invalid variable name in regex: {}", rest),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
let var = rest[0..varlen].to_string();
|
let var = rest[0..varlen].to_string();
|
||||||
if !rest[varlen..].starts_with('=') {
|
if !rest[varlen..].starts_with('=') {
|
||||||
return Err(Error::Syntax(format!("expected '=' after variable '{}' in regex: {}",
|
return Err(Error::Syntax(format!(
|
||||||
var,
|
"expected '=' after variable '{}' in regex: {}",
|
||||||
rest)));
|
var,
|
||||||
|
rest
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
// Ignore trailing white space in the regex, including CR.
|
// Ignore trailing white space in the regex, including CR.
|
||||||
Ok(Directive::Regex(var, rest[varlen + 1..].trim_right().to_string()))
|
Ok(Directive::Regex(
|
||||||
|
var,
|
||||||
|
rest[varlen + 1..].trim_right().to_string(),
|
||||||
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -183,13 +192,13 @@ impl Checker {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
Directive::Regex(ref var, ref rx) => {
|
Directive::Regex(ref var, ref rx) => {
|
||||||
state
|
state.vars.insert(
|
||||||
.vars
|
var.clone(),
|
||||||
.insert(var.clone(),
|
VarDef {
|
||||||
VarDef {
|
value: Value::Regex(Cow::Borrowed(rx)),
|
||||||
value: Value::Regex(Cow::Borrowed(rx)),
|
offset: 0,
|
||||||
offset: 0,
|
},
|
||||||
});
|
);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -210,15 +219,16 @@ impl Checker {
|
|||||||
state.recorder.directive(not_idx);
|
state.recorder.directive(not_idx);
|
||||||
if let Some(mat) = rx.find(&text[not_begin..match_begin]) {
|
if let Some(mat) = rx.find(&text[not_begin..match_begin]) {
|
||||||
// Matched `not:` pattern.
|
// Matched `not:` pattern.
|
||||||
state
|
state.recorder.matched_not(rx.as_str(), (
|
||||||
.recorder
|
not_begin + mat.start(),
|
||||||
.matched_not(rx.as_str(),
|
not_begin + mat.end(),
|
||||||
(not_begin + mat.start(), not_begin + mat.end()));
|
));
|
||||||
return Ok(false);
|
return Ok(false);
|
||||||
} else {
|
} else {
|
||||||
state
|
state.recorder.missed_not(
|
||||||
.recorder
|
rx.as_str(),
|
||||||
.missed_not(rx.as_str(), (not_begin, match_begin));
|
(not_begin, match_begin),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -354,13 +364,13 @@ impl<'a> State<'a> {
|
|||||||
})
|
})
|
||||||
};
|
};
|
||||||
Ok(if let Some(mat) = matched_range {
|
Ok(if let Some(mat) = matched_range {
|
||||||
let r = (range.0 + mat.start(), range.0 + mat.end());
|
let r = (range.0 + mat.start(), range.0 + mat.end());
|
||||||
self.recorder.matched_check(rx.as_str(), r);
|
self.recorder.matched_check(rx.as_str(), r);
|
||||||
Some(r)
|
Some(r)
|
||||||
} else {
|
} else {
|
||||||
self.recorder.missed_check(rx.as_str(), range);
|
self.recorder.missed_check(rx.as_str(), range);
|
||||||
None
|
None
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -413,20 +423,32 @@ mod tests {
|
|||||||
let mut b = CheckerBuilder::new();
|
let mut b = CheckerBuilder::new();
|
||||||
|
|
||||||
assert_eq!(b.directive("not here: more text").map_err(e2s), Ok(false));
|
assert_eq!(b.directive("not here: more text").map_err(e2s), Ok(false));
|
||||||
assert_eq!(b.directive("not here: regex: X=more text").map_err(e2s),
|
assert_eq!(
|
||||||
Ok(true));
|
b.directive("not here: regex: X=more text").map_err(e2s),
|
||||||
assert_eq!(b.directive("regex: X = tommy").map_err(e2s),
|
Ok(true)
|
||||||
Err("expected '=' after variable 'X' in regex: X = tommy".to_string()));
|
);
|
||||||
assert_eq!(b.directive("[arm]not: patt $x $(y) here").map_err(e2s),
|
assert_eq!(
|
||||||
Ok(true));
|
b.directive("regex: X = tommy").map_err(e2s),
|
||||||
assert_eq!(b.directive("[x86]sameln: $x $(y=[^]]*) there").map_err(e2s),
|
Err(
|
||||||
Ok(true));
|
"expected '=' after variable 'X' in regex: X = tommy".to_string(),
|
||||||
|
)
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
b.directive("[arm]not: patt $x $(y) here").map_err(e2s),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
b.directive("[x86]sameln: $x $(y=[^]]*) there").map_err(e2s),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
// Windows line ending sneaking in.
|
// Windows line ending sneaking in.
|
||||||
assert_eq!(b.directive("regex: Y=foo\r").map_err(e2s), Ok(true));
|
assert_eq!(b.directive("regex: Y=foo\r").map_err(e2s), Ok(true));
|
||||||
|
|
||||||
let c = b.finish();
|
let c = b.finish();
|
||||||
assert_eq!(c.to_string(),
|
assert_eq!(
|
||||||
"#0 regex: X=more text\n#1 not: patt $(x) $(y) here\n#2 sameln: $(x) \
|
c.to_string(),
|
||||||
$(y=[^]]*) there\n#3 regex: Y=foo\n");
|
"#0 regex: X=more text\n#1 not: patt $(x) $(y) here\n#2 sameln: $(x) \
|
||||||
|
$(y=[^]]*) there\n#3 regex: Y=foo\n"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -111,16 +111,21 @@ impl<'a> Display for Explainer<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Emit the match message itself.
|
// Emit the match message itself.
|
||||||
writeln!(f,
|
writeln!(
|
||||||
"{} #{}{}: {}",
|
f,
|
||||||
if m.is_match { "Matched" } else { "Missed" },
|
"{} #{}{}: {}",
|
||||||
m.directive,
|
if m.is_match { "Matched" } else { "Missed" },
|
||||||
if m.is_not { " not" } else { "" },
|
m.directive,
|
||||||
m.regex)?;
|
if m.is_not { " not" } else { "" },
|
||||||
|
m.regex
|
||||||
|
)?;
|
||||||
|
|
||||||
// Emit any variable definitions.
|
// Emit any variable definitions.
|
||||||
if let Ok(found) = self.vardefs
|
if let Ok(found) = self.vardefs.binary_search_by_key(
|
||||||
.binary_search_by_key(&m.directive, |v| v.directive) {
|
&m.directive,
|
||||||
|
|v| v.directive,
|
||||||
|
)
|
||||||
|
{
|
||||||
let mut first = found;
|
let mut first = found;
|
||||||
while first > 0 && self.vardefs[first - 1].directive == m.directive {
|
while first > 0 && self.vardefs[first - 1].directive == m.directive {
|
||||||
first -= 1;
|
first -= 1;
|
||||||
@@ -148,55 +153,50 @@ impl<'a> Recorder for Explainer<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn matched_check(&mut self, regex: &str, matched: MatchRange) {
|
fn matched_check(&mut self, regex: &str, matched: MatchRange) {
|
||||||
self.matches
|
self.matches.push(Match {
|
||||||
.push(Match {
|
directive: self.directive,
|
||||||
directive: self.directive,
|
is_match: true,
|
||||||
is_match: true,
|
is_not: false,
|
||||||
is_not: false,
|
regex: regex.to_owned(),
|
||||||
regex: regex.to_owned(),
|
range: matched,
|
||||||
range: matched,
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn matched_not(&mut self, regex: &str, matched: MatchRange) {
|
fn matched_not(&mut self, regex: &str, matched: MatchRange) {
|
||||||
self.matches
|
self.matches.push(Match {
|
||||||
.push(Match {
|
directive: self.directive,
|
||||||
directive: self.directive,
|
is_match: true,
|
||||||
is_match: true,
|
is_not: true,
|
||||||
is_not: true,
|
regex: regex.to_owned(),
|
||||||
regex: regex.to_owned(),
|
range: matched,
|
||||||
range: matched,
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn missed_check(&mut self, regex: &str, searched: MatchRange) {
|
fn missed_check(&mut self, regex: &str, searched: MatchRange) {
|
||||||
self.matches
|
self.matches.push(Match {
|
||||||
.push(Match {
|
directive: self.directive,
|
||||||
directive: self.directive,
|
is_match: false,
|
||||||
is_match: false,
|
is_not: false,
|
||||||
is_not: false,
|
regex: regex.to_owned(),
|
||||||
regex: regex.to_owned(),
|
range: searched,
|
||||||
range: searched,
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn missed_not(&mut self, regex: &str, searched: MatchRange) {
|
fn missed_not(&mut self, regex: &str, searched: MatchRange) {
|
||||||
self.matches
|
self.matches.push(Match {
|
||||||
.push(Match {
|
directive: self.directive,
|
||||||
directive: self.directive,
|
is_match: false,
|
||||||
is_match: false,
|
is_not: true,
|
||||||
is_not: true,
|
regex: regex.to_owned(),
|
||||||
regex: regex.to_owned(),
|
range: searched,
|
||||||
range: searched,
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn defined_var(&mut self, varname: &str, value: &str) {
|
fn defined_var(&mut self, varname: &str, value: &str) {
|
||||||
self.vardefs
|
self.vardefs.push(VarDef {
|
||||||
.push(VarDef {
|
directive: self.directive,
|
||||||
directive: self.directive,
|
varname: varname.to_owned(),
|
||||||
varname: varname.to_owned(),
|
value: value.to_owned(),
|
||||||
value: value.to_owned(),
|
});
|
||||||
});
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -70,7 +70,9 @@ impl Pattern {
|
|||||||
/// Return the allocated def number.
|
/// Return the allocated def number.
|
||||||
fn add_def(&mut self, v: &str) -> Result<usize> {
|
fn add_def(&mut self, v: &str) -> Result<usize> {
|
||||||
if self.defines_var(v) {
|
if self.defines_var(v) {
|
||||||
Err(Error::DuplicateDef(format!("duplicate definition of ${} in same pattern", v)))
|
Err(Error::DuplicateDef(
|
||||||
|
format!("duplicate definition of ${} in same pattern", v),
|
||||||
|
))
|
||||||
} else {
|
} else {
|
||||||
let idx = self.defs.len();
|
let idx = self.defs.len();
|
||||||
self.defs.push(v.to_string());
|
self.defs.push(v.to_string());
|
||||||
@@ -111,8 +113,10 @@ impl Pattern {
|
|||||||
|
|
||||||
// All remaining possibilities start with `$(`.
|
// All remaining possibilities start with `$(`.
|
||||||
if s.len() < 2 || !s.starts_with("$(") {
|
if s.len() < 2 || !s.starts_with("$(") {
|
||||||
return Err(Error::Syntax("pattern syntax error, use $$ to match a single $"
|
return Err(Error::Syntax(
|
||||||
.to_string()));
|
"pattern syntax error, use $$ to match a single $"
|
||||||
|
.to_string(),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Match the variable name, allowing for an empty varname in `$()`, or `$(=...)`.
|
// Match the variable name, allowing for an empty varname in `$()`, or `$(=...)`.
|
||||||
@@ -137,7 +141,9 @@ impl Pattern {
|
|||||||
// Variable definition. Fall through.
|
// Variable definition. Fall through.
|
||||||
}
|
}
|
||||||
Some(ch) => {
|
Some(ch) => {
|
||||||
return Err(Error::Syntax(format!("syntax error in $({}... '{}'", varname, ch)));
|
return Err(Error::Syntax(
|
||||||
|
format!("syntax error in $({}... '{}'", varname, ch),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -155,23 +161,31 @@ impl Pattern {
|
|||||||
let refname_begin = varname_end + 2;
|
let refname_begin = varname_end + 2;
|
||||||
let refname_end = refname_begin + varname_prefix(&s[refname_begin..]);
|
let refname_end = refname_begin + varname_prefix(&s[refname_begin..]);
|
||||||
if refname_begin == refname_end {
|
if refname_begin == refname_end {
|
||||||
return Err(Error::Syntax(format!("expected variable name in $({}=$...", varname)));
|
return Err(Error::Syntax(
|
||||||
|
format!("expected variable name in $({}=$...", varname),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
if !s[refname_end..].starts_with(')') {
|
if !s[refname_end..].starts_with(')') {
|
||||||
return Err(Error::Syntax(format!("expected ')' after $({}=${}...",
|
return Err(Error::Syntax(format!(
|
||||||
varname,
|
"expected ')' after $({}=${}...",
|
||||||
&s[refname_begin..refname_end])));
|
varname,
|
||||||
|
&s[refname_begin..refname_end]
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
let refname = s[refname_begin..refname_end].to_string();
|
let refname = s[refname_begin..refname_end].to_string();
|
||||||
return if let Some(defidx) = def {
|
return if let Some(defidx) = def {
|
||||||
Ok((Part::DefVar {
|
Ok((
|
||||||
def: defidx,
|
Part::DefVar {
|
||||||
var: refname,
|
def: defidx,
|
||||||
},
|
var: refname,
|
||||||
refname_end + 1))
|
},
|
||||||
} else {
|
refname_end + 1,
|
||||||
Err(Error::Syntax(format!("expected variable name in $(=${})", refname)))
|
))
|
||||||
};
|
} else {
|
||||||
|
Err(Error::Syntax(
|
||||||
|
format!("expected variable name in $(=${})", refname),
|
||||||
|
))
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
// Last case: `$(var=...)` where `...` is a regular expression, possibly containing matched
|
// Last case: `$(var=...)` where `...` is a regular expression, possibly containing matched
|
||||||
@@ -193,9 +207,11 @@ impl Pattern {
|
|||||||
};
|
};
|
||||||
Ok((part, rx_end + 1))
|
Ok((part, rx_end + 1))
|
||||||
} else {
|
} else {
|
||||||
Err(Error::Syntax(format!("missing ')' after regex in $({}={}",
|
Err(Error::Syntax(format!(
|
||||||
varname,
|
"missing ')' after regex in $({}={}",
|
||||||
&s[rx_begin..rx_end])))
|
varname,
|
||||||
|
&s[rx_begin..rx_end]
|
||||||
|
)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -273,9 +289,11 @@ impl FromStr for Pattern {
|
|||||||
let (part, len) = pat.parse_part(&s[pos..])?;
|
let (part, len) = pat.parse_part(&s[pos..])?;
|
||||||
if let Some(v) = part.ref_var() {
|
if let Some(v) = part.ref_var() {
|
||||||
if pat.defines_var(v) {
|
if pat.defines_var(v) {
|
||||||
return Err(Error::Backref(format!("unsupported back-reference to '${}' \
|
return Err(Error::Backref(format!(
|
||||||
|
"unsupported back-reference to '${}' \
|
||||||
defined in same pattern",
|
defined in same pattern",
|
||||||
v)));
|
v
|
||||||
|
)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
pat.parts.push(part);
|
pat.parts.push(part);
|
||||||
@@ -410,49 +428,87 @@ mod tests {
|
|||||||
// This is dubious, should we panic instead?
|
// This is dubious, should we panic instead?
|
||||||
assert_eq!(pat.parse_part("").unwrap(), (Part::Text("".to_string()), 0));
|
assert_eq!(pat.parse_part("").unwrap(), (Part::Text("".to_string()), 0));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("x").unwrap(),
|
assert_eq!(
|
||||||
(Part::Text("x".to_string()), 1));
|
pat.parse_part("x").unwrap(),
|
||||||
assert_eq!(pat.parse_part("x2").unwrap(),
|
(Part::Text("x".to_string()), 1)
|
||||||
(Part::Text("x2".to_string()), 2));
|
);
|
||||||
assert_eq!(pat.parse_part("x$").unwrap(),
|
assert_eq!(pat.parse_part("x2").unwrap(), (
|
||||||
(Part::Text("x".to_string()), 1));
|
Part::Text("x2".to_string()),
|
||||||
assert_eq!(pat.parse_part("x$$").unwrap(),
|
2,
|
||||||
(Part::Text("x".to_string()), 1));
|
));
|
||||||
|
assert_eq!(pat.parse_part("x$").unwrap(), (
|
||||||
|
Part::Text("x".to_string()),
|
||||||
|
1,
|
||||||
|
));
|
||||||
|
assert_eq!(pat.parse_part("x$$").unwrap(), (
|
||||||
|
Part::Text("x".to_string()),
|
||||||
|
1,
|
||||||
|
));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$").unwrap_err().to_string(),
|
assert_eq!(
|
||||||
"pattern syntax error, use $$ to match a single $");
|
pat.parse_part("$").unwrap_err().to_string(),
|
||||||
|
"pattern syntax error, use $$ to match a single $"
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$$").unwrap(),
|
assert_eq!(pat.parse_part("$$").unwrap(), (
|
||||||
(Part::Text("$".to_string()), 2));
|
Part::Text("$".to_string()),
|
||||||
assert_eq!(pat.parse_part("$$ ").unwrap(),
|
2,
|
||||||
(Part::Text("$".to_string()), 2));
|
));
|
||||||
|
assert_eq!(pat.parse_part("$$ ").unwrap(), (
|
||||||
|
Part::Text("$".to_string()),
|
||||||
|
2,
|
||||||
|
));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$0").unwrap(),
|
assert_eq!(
|
||||||
(Part::Var("0".to_string()), 2));
|
pat.parse_part("$0").unwrap(),
|
||||||
assert_eq!(pat.parse_part("$xx=").unwrap(),
|
(Part::Var("0".to_string()), 2)
|
||||||
(Part::Var("xx".to_string()), 3));
|
);
|
||||||
assert_eq!(pat.parse_part("$xx$").unwrap(),
|
assert_eq!(pat.parse_part("$xx=").unwrap(), (
|
||||||
(Part::Var("xx".to_string()), 3));
|
Part::Var("xx".to_string()),
|
||||||
|
3,
|
||||||
|
));
|
||||||
|
assert_eq!(pat.parse_part("$xx$").unwrap(), (
|
||||||
|
Part::Var("xx".to_string()),
|
||||||
|
3,
|
||||||
|
));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$(0)").unwrap(),
|
assert_eq!(pat.parse_part("$(0)").unwrap(), (
|
||||||
(Part::Var("0".to_string()), 4));
|
Part::Var("0".to_string()),
|
||||||
assert_eq!(pat.parse_part("$()").unwrap(),
|
4,
|
||||||
(Part::Text("".to_string()), 3));
|
));
|
||||||
|
assert_eq!(pat.parse_part("$()").unwrap(), (
|
||||||
|
Part::Text("".to_string()),
|
||||||
|
3,
|
||||||
|
));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$(0").unwrap_err().to_string(),
|
assert_eq!(
|
||||||
("unterminated $(0..."));
|
pat.parse_part("$(0").unwrap_err().to_string(),
|
||||||
assert_eq!(pat.parse_part("$(foo:").unwrap_err().to_string(),
|
("unterminated $(0...")
|
||||||
("syntax error in $(foo... ':'"));
|
);
|
||||||
assert_eq!(pat.parse_part("$(foo =").unwrap_err().to_string(),
|
assert_eq!(
|
||||||
("syntax error in $(foo... ' '"));
|
pat.parse_part("$(foo:").unwrap_err().to_string(),
|
||||||
assert_eq!(pat.parse_part("$(eo0=$bar").unwrap_err().to_string(),
|
("syntax error in $(foo... ':'")
|
||||||
("expected ')' after $(eo0=$bar..."));
|
);
|
||||||
assert_eq!(pat.parse_part("$(eo1=$bar}").unwrap_err().to_string(),
|
assert_eq!(
|
||||||
("expected ')' after $(eo1=$bar..."));
|
pat.parse_part("$(foo =").unwrap_err().to_string(),
|
||||||
assert_eq!(pat.parse_part("$(eo2=$)").unwrap_err().to_string(),
|
("syntax error in $(foo... ' '")
|
||||||
("expected variable name in $(eo2=$..."));
|
);
|
||||||
assert_eq!(pat.parse_part("$(eo3=$-)").unwrap_err().to_string(),
|
assert_eq!(
|
||||||
("expected variable name in $(eo3=$..."));
|
pat.parse_part("$(eo0=$bar").unwrap_err().to_string(),
|
||||||
|
("expected ')' after $(eo0=$bar...")
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
pat.parse_part("$(eo1=$bar}").unwrap_err().to_string(),
|
||||||
|
("expected ')' after $(eo1=$bar...")
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
pat.parse_part("$(eo2=$)").unwrap_err().to_string(),
|
||||||
|
("expected variable name in $(eo2=$...")
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
pat.parse_part("$(eo3=$-)").unwrap_err().to_string(),
|
||||||
|
("expected variable name in $(eo3=$...")
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -460,48 +516,65 @@ mod tests {
|
|||||||
use super::{Pattern, Part};
|
use super::{Pattern, Part};
|
||||||
let mut pat = Pattern::new();
|
let mut pat = Pattern::new();
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$(foo=$bar)").unwrap(),
|
assert_eq!(pat.parse_part("$(foo=$bar)").unwrap(), (
|
||||||
(Part::DefVar {
|
Part::DefVar {
|
||||||
def: 0,
|
def: 0,
|
||||||
var: "bar".to_string(),
|
var: "bar".to_string(),
|
||||||
},
|
},
|
||||||
11));
|
11,
|
||||||
assert_eq!(pat.parse_part("$(foo=$bar)").unwrap_err().to_string(),
|
));
|
||||||
"duplicate definition of $foo in same pattern");
|
assert_eq!(
|
||||||
|
pat.parse_part("$(foo=$bar)").unwrap_err().to_string(),
|
||||||
|
"duplicate definition of $foo in same pattern"
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$(fxo=$bar)x").unwrap(),
|
assert_eq!(pat.parse_part("$(fxo=$bar)x").unwrap(), (
|
||||||
(Part::DefVar {
|
Part::DefVar {
|
||||||
def: 1,
|
def: 1,
|
||||||
var: "bar".to_string(),
|
var: "bar".to_string(),
|
||||||
},
|
},
|
||||||
11));
|
11,
|
||||||
|
));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$(fo2=[a-z])").unwrap(),
|
assert_eq!(pat.parse_part("$(fo2=[a-z])").unwrap(), (
|
||||||
(Part::DefLit {
|
Part::DefLit {
|
||||||
def: 2,
|
def: 2,
|
||||||
regex: "(?P<fo2>[a-z])".to_string(),
|
regex: "(?P<fo2>[a-z])".to_string(),
|
||||||
},
|
},
|
||||||
12));
|
12,
|
||||||
assert_eq!(pat.parse_part("$(fo3=[a-)])").unwrap(),
|
));
|
||||||
(Part::DefLit {
|
assert_eq!(pat.parse_part("$(fo3=[a-)])").unwrap(), (
|
||||||
def: 3,
|
Part::DefLit {
|
||||||
regex: "(?P<fo3>[a-)])".to_string(),
|
def: 3,
|
||||||
},
|
regex: "(?P<fo3>[a-)])".to_string(),
|
||||||
12));
|
},
|
||||||
assert_eq!(pat.parse_part("$(fo4=)").unwrap(),
|
12,
|
||||||
(Part::DefLit {
|
));
|
||||||
def: 4,
|
assert_eq!(pat.parse_part("$(fo4=)").unwrap(), (
|
||||||
regex: "(?P<fo4>)".to_string(),
|
Part::DefLit {
|
||||||
},
|
def: 4,
|
||||||
7));
|
regex: "(?P<fo4>)".to_string(),
|
||||||
|
},
|
||||||
|
7,
|
||||||
|
));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$(=.*)").unwrap(),
|
assert_eq!(pat.parse_part("$(=.*)").unwrap(), (
|
||||||
(Part::Regex("(?:.*)".to_string()), 6));
|
Part::Regex(
|
||||||
|
"(?:.*)".to_string(),
|
||||||
|
),
|
||||||
|
6,
|
||||||
|
));
|
||||||
|
|
||||||
assert_eq!(pat.parse_part("$(=)").unwrap(),
|
assert_eq!(pat.parse_part("$(=)").unwrap(), (
|
||||||
(Part::Regex("(?:)".to_string()), 4));
|
Part::Regex(
|
||||||
assert_eq!(pat.parse_part("$()").unwrap(),
|
"(?:)".to_string(),
|
||||||
(Part::Text("".to_string()), 3));
|
),
|
||||||
|
4,
|
||||||
|
));
|
||||||
|
assert_eq!(pat.parse_part("$()").unwrap(), (
|
||||||
|
Part::Text("".to_string()),
|
||||||
|
3,
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -512,7 +585,9 @@ mod tests {
|
|||||||
assert_eq!(format!("{:?}", p.parts), "[Text(\"Hello world!\")]");
|
assert_eq!(format!("{:?}", p.parts), "[Text(\"Hello world!\")]");
|
||||||
|
|
||||||
let p: Pattern = " $foo=$(bar) ".parse().unwrap();
|
let p: Pattern = " $foo=$(bar) ".parse().unwrap();
|
||||||
assert_eq!(format!("{:?}", p.parts),
|
assert_eq!(
|
||||||
"[Var(\"foo\"), Text(\"=\"), Var(\"bar\")]");
|
format!("{:?}", p.parts),
|
||||||
|
"[Var(\"foo\"), Text(\"=\"), Var(\"bar\")]"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -42,10 +42,12 @@ fn no_matches() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn simple() {
|
fn simple() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
check: one
|
check: one
|
||||||
check: two
|
check: two
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
@@ -71,10 +73,12 @@ fn simple() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn sameln() {
|
fn sameln() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
check: one
|
check: one
|
||||||
sameln: two
|
sameln: two
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
@@ -106,10 +110,12 @@ fn sameln() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn nextln() {
|
fn nextln() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
check: one
|
check: one
|
||||||
nextln: two
|
nextln: two
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
@@ -149,10 +155,12 @@ fn leading_nextln() {
|
|||||||
// A leading nextln directive should match from line 2.
|
// A leading nextln directive should match from line 2.
|
||||||
// This is somewhat arbitrary, but consistent with a preceding 'check: $()' directive.
|
// This is somewhat arbitrary, but consistent with a preceding 'check: $()' directive.
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
nextln: one
|
nextln: one
|
||||||
nextln: two
|
nextln: two
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
@@ -174,10 +182,12 @@ fn leading_nextln() {
|
|||||||
fn leading_sameln() {
|
fn leading_sameln() {
|
||||||
// A leading sameln directive should match from line 1.
|
// A leading sameln directive should match from line 1.
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
sameln: one
|
sameln: one
|
||||||
sameln: two
|
sameln: two
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
@@ -197,11 +207,13 @@ fn leading_sameln() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn not() {
|
fn not() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
check: one$()
|
check: one$()
|
||||||
not: $()eat$()
|
not: $()eat$()
|
||||||
check: $()two
|
check: $()two
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
@@ -221,12 +233,14 @@ fn not() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn notnot() {
|
fn notnot() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
check: one$()
|
check: one$()
|
||||||
not: $()eat$()
|
not: $()eat$()
|
||||||
not: half
|
not: half
|
||||||
check: $()two
|
check: $()two
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
@@ -254,87 +268,135 @@ fn notnot() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn unordered() {
|
fn unordered() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
check: one
|
check: one
|
||||||
unordered: two
|
unordered: two
|
||||||
unordered: three
|
unordered: three
|
||||||
check: four
|
check: four
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
assert_eq!(c.check("one two three four", NO_VARIABLES).map_err(e2s),
|
assert_eq!(
|
||||||
Ok(true));
|
c.check("one two three four", NO_VARIABLES).map_err(e2s),
|
||||||
assert_eq!(c.check("one three two four", NO_VARIABLES).map_err(e2s),
|
Ok(true)
|
||||||
Ok(true));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three two four", NO_VARIABLES).map_err(e2s),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(c.check("one two four three four", NO_VARIABLES)
|
assert_eq!(
|
||||||
.map_err(e2s),
|
c.check("one two four three four", NO_VARIABLES).map_err(
|
||||||
Ok(true));
|
e2s,
|
||||||
assert_eq!(c.check("one three four two four", NO_VARIABLES)
|
),
|
||||||
.map_err(e2s),
|
Ok(true)
|
||||||
Ok(true));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three four two four", NO_VARIABLES).map_err(
|
||||||
|
e2s,
|
||||||
|
),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(c.check("one two four three", NO_VARIABLES).map_err(e2s),
|
assert_eq!(
|
||||||
Ok(false));
|
c.check("one two four three", NO_VARIABLES).map_err(e2s),
|
||||||
assert_eq!(c.check("one three four two", NO_VARIABLES).map_err(e2s),
|
Ok(false)
|
||||||
Ok(false));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three four two", NO_VARIABLES).map_err(e2s),
|
||||||
|
Ok(false)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn leading_unordered() {
|
fn leading_unordered() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
unordered: two
|
unordered: two
|
||||||
unordered: three
|
unordered: three
|
||||||
check: four
|
check: four
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
assert_eq!(c.check("one two three four", NO_VARIABLES).map_err(e2s),
|
assert_eq!(
|
||||||
Ok(true));
|
c.check("one two three four", NO_VARIABLES).map_err(e2s),
|
||||||
assert_eq!(c.check("one three two four", NO_VARIABLES).map_err(e2s),
|
Ok(true)
|
||||||
Ok(true));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three two four", NO_VARIABLES).map_err(e2s),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(c.check("one two four three four", NO_VARIABLES)
|
assert_eq!(
|
||||||
.map_err(e2s),
|
c.check("one two four three four", NO_VARIABLES).map_err(
|
||||||
Ok(true));
|
e2s,
|
||||||
assert_eq!(c.check("one three four two four", NO_VARIABLES)
|
),
|
||||||
.map_err(e2s),
|
Ok(true)
|
||||||
Ok(true));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three four two four", NO_VARIABLES).map_err(
|
||||||
|
e2s,
|
||||||
|
),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(c.check("one two four three", NO_VARIABLES).map_err(e2s),
|
assert_eq!(
|
||||||
Ok(false));
|
c.check("one two four three", NO_VARIABLES).map_err(e2s),
|
||||||
assert_eq!(c.check("one three four two", NO_VARIABLES).map_err(e2s),
|
Ok(false)
|
||||||
Ok(false));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three four two", NO_VARIABLES).map_err(e2s),
|
||||||
|
Ok(false)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn trailing_unordered() {
|
fn trailing_unordered() {
|
||||||
let c = CheckerBuilder::new()
|
let c = CheckerBuilder::new()
|
||||||
.text("
|
.text(
|
||||||
|
"
|
||||||
check: one
|
check: one
|
||||||
unordered: two
|
unordered: two
|
||||||
unordered: three
|
unordered: three
|
||||||
")
|
",
|
||||||
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.finish();
|
.finish();
|
||||||
|
|
||||||
assert_eq!(c.check("one two three four", NO_VARIABLES).map_err(e2s),
|
assert_eq!(
|
||||||
Ok(true));
|
c.check("one two three four", NO_VARIABLES).map_err(e2s),
|
||||||
assert_eq!(c.check("one three two four", NO_VARIABLES).map_err(e2s),
|
Ok(true)
|
||||||
Ok(true));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three two four", NO_VARIABLES).map_err(e2s),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(c.check("one two four three four", NO_VARIABLES)
|
assert_eq!(
|
||||||
.map_err(e2s),
|
c.check("one two four three four", NO_VARIABLES).map_err(
|
||||||
Ok(true));
|
e2s,
|
||||||
assert_eq!(c.check("one three four two four", NO_VARIABLES)
|
),
|
||||||
.map_err(e2s),
|
Ok(true)
|
||||||
Ok(true));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three four two four", NO_VARIABLES).map_err(
|
||||||
|
e2s,
|
||||||
|
),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
|
|
||||||
assert_eq!(c.check("one two four three", NO_VARIABLES).map_err(e2s),
|
assert_eq!(
|
||||||
Ok(true));
|
c.check("one two four three", NO_VARIABLES).map_err(e2s),
|
||||||
assert_eq!(c.check("one three four two", NO_VARIABLES).map_err(e2s),
|
Ok(true)
|
||||||
Ok(true));
|
);
|
||||||
|
assert_eq!(
|
||||||
|
c.check("one three four two", NO_VARIABLES).map_err(e2s),
|
||||||
|
Ok(true)
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,7 +12,8 @@ use std::hash::Hash;
|
|||||||
|
|
||||||
/// Permanent structure used for translating into Cretonne IL.
|
/// Permanent structure used for translating into Cretonne IL.
|
||||||
pub struct ILBuilder<Variable>
|
pub struct ILBuilder<Variable>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
ssa: SSABuilder<Variable>,
|
ssa: SSABuilder<Variable>,
|
||||||
ebbs: EntityMap<Ebb, EbbData>,
|
ebbs: EntityMap<Ebb, EbbData>,
|
||||||
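
The frontend hunks that follow all make the same change to generic bounds: the `where` clause moves onto its own line, with each bound indented on a separate line and followed by a trailing comma. A small self-contained illustration of that convention is below; `unique_count` and its bounds are hypothetical and not part of this crate.

use std::collections::HashSet;
use std::fmt::Debug;
use std::hash::Hash;

// The old formatting kept `where T: Debug + Hash + Eq + Clone` on one indented line;
// the new formatting puts `where` on its own line and indents each bound.
fn unique_count<T>(items: &[T]) -> usize
where
    T: Debug + Hash + Eq + Clone,
{
    let set: HashSet<T> = items.iter().cloned().collect();
    set.len()
}

fn main() {
    println!("{}", unique_count(&[1, 2, 2, 3])); // prints: 3
}
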
@@ -23,7 +24,8 @@ pub struct ILBuilder<Variable>
|
|||||||
|
|
||||||
/// Temporary object used to build a Cretonne IL `Function`.
|
/// Temporary object used to build a Cretonne IL `Function`.
|
||||||
pub struct FunctionBuilder<'a, Variable: 'a>
|
pub struct FunctionBuilder<'a, Variable: 'a>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
func: &'a mut Function,
|
func: &'a mut Function,
|
||||||
builder: &'a mut ILBuilder<Variable>,
|
builder: &'a mut ILBuilder<Variable>,
|
||||||
@@ -44,7 +46,8 @@ struct Position {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<Variable> ILBuilder<Variable>
|
impl<Variable> ILBuilder<Variable>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
/// Creates an ILBuilder structure. The structure is automatically cleared each time it is
|
/// Creates an ILBuilder structure. The structure is automatically cleared each time it is
|
||||||
/// passed to a [`FunctionBuilder`](struct.FunctionBuilder.html) for creation.
|
/// passed to a [`FunctionBuilder`](struct.FunctionBuilder.html) for creation.
|
||||||
@@ -68,7 +71,8 @@ impl<Variable> ILBuilder<Variable>
|
|||||||
/// Implementation of the [`InstBuilder`](../cretonne/ir/builder/trait.InstBuilder.html) that has
|
/// Implementation of the [`InstBuilder`](../cretonne/ir/builder/trait.InstBuilder.html) that has
|
||||||
/// one convenience method per Cretonne IL instruction.
|
/// one convenience method per Cretonne IL instruction.
|
||||||
pub struct FuncInstBuilder<'short, 'long: 'short, Variable: 'long>
|
pub struct FuncInstBuilder<'short, 'long: 'short, Variable: 'long>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
builder: &'short mut FunctionBuilder<'long, Variable>,
|
builder: &'short mut FunctionBuilder<'long, Variable>,
|
||||||
ebb: Ebb,
|
ebb: Ebb,
|
||||||
@@ -103,7 +107,7 @@ impl<'short, 'long, Variable> InstBuilderBase<'short> for FuncInstBuilder<'short
|
|||||||
self.builder
|
self.builder
|
||||||
.check_return_args(data.arguments(&self.builder.func.dfg.value_lists))
|
.check_return_args(data.arguments(&self.builder.func.dfg.value_lists))
|
||||||
}
|
}
|
||||||
// We only insert the Ebb in the layout when an instruction is added to it
|
// We only insert the Ebb in the layout when an instruction is added to it
|
||||||
if self.builder.builder.ebbs[self.builder.position.ebb].pristine {
|
if self.builder.builder.ebbs[self.builder.position.ebb].pristine {
|
||||||
if !self.builder
|
if !self.builder
|
||||||
.func
|
.func
|
||||||
@@ -125,9 +129,9 @@ impl<'short, 'long, Variable> InstBuilderBase<'short> for FuncInstBuilder<'short
|
|||||||
if data.opcode().is_branch() {
|
if data.opcode().is_branch() {
|
||||||
match data.branch_destination() {
|
match data.branch_destination() {
|
||||||
Some(dest_ebb) => {
|
Some(dest_ebb) => {
|
||||||
// If the user has supplied jump arguments we must adapt the arguments of
|
// If the user has supplied jump arguments we must adapt the arguments of
|
||||||
// the destination ebb
|
// the destination ebb
|
||||||
// TODO: find a way not to allocate a vector
|
// TODO: find a way not to allocate a vector
|
||||||
let args_types: Vec<Type> =
|
let args_types: Vec<Type> =
|
||||||
match data.analyze_branch(&self.builder.func.dfg.value_lists) {
|
match data.analyze_branch(&self.builder.func.dfg.value_lists) {
|
||||||
BranchInfo::SingleDest(_, args) => {
|
BranchInfo::SingleDest(_, args) => {
|
||||||
@@ -142,14 +146,14 @@ impl<'short, 'long, Variable> InstBuilderBase<'short> for FuncInstBuilder<'short
|
|||||||
self.builder.declare_successor(dest_ebb, inst);
|
self.builder.declare_successor(dest_ebb, inst);
|
||||||
}
|
}
|
||||||
None => {
|
None => {
|
||||||
// branch_destination() doesn't detect jump_tables
|
// branch_destination() doesn't detect jump_tables
|
||||||
match data {
|
match data {
|
||||||
// If jump table we declare all entries successor
|
// If jump table we declare all entries successor
|
||||||
// TODO: not collect with vector?
|
// TODO: not collect with vector?
|
||||||
InstructionData::BranchTable { table, .. } => {
|
InstructionData::BranchTable { table, .. } => {
|
||||||
// Unlike all other jumps/branches, jump tables are
|
// Unlike all other jumps/branches, jump tables are
|
||||||
// capable of having the same successor appear
|
// capable of having the same successor appear
|
||||||
// multiple times. Use a HashSet to deduplicate.
|
// multiple times. Use a HashSet to deduplicate.
|
||||||
let mut unique = HashSet::new();
|
let mut unique = HashSet::new();
|
||||||
for dest_ebb in self.builder
|
for dest_ebb in self.builder
|
||||||
.func
|
.func
|
||||||
@@ -163,7 +167,7 @@ impl<'short, 'long, Variable> InstBuilderBase<'short> for FuncInstBuilder<'short
|
|||||||
self.builder.declare_successor(dest_ebb, inst)
|
self.builder.declare_successor(dest_ebb, inst)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// If not we do nothing
|
// If not we do nothing
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -211,13 +215,15 @@ impl<'short, 'long, Variable> InstBuilderBase<'short> for FuncInstBuilder<'short
|
|||||||
/// `Ebb` when you haven't filled the current one with a terminator instruction, inserting a
|
/// `Ebb` when you haven't filled the current one with a terminator instruction, inserting a
|
||||||
/// return instruction with arguments that don't match the function's signature.
|
/// return instruction with arguments that don't match the function's signature.
|
||||||
impl<'a, Variable> FunctionBuilder<'a, Variable>
|
impl<'a, Variable> FunctionBuilder<'a, Variable>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
/// Creates a new FunctionBuilder structure that will operate on a `Function` using an
|
/// Creates a new FunctionBuilder structure that will operate on a `Function` using an
|
||||||
/// `ILBuilder`.
|
/// `ILBuilder`.
|
||||||
pub fn new(func: &'a mut Function,
|
pub fn new(
|
||||||
builder: &'a mut ILBuilder<Variable>)
|
func: &'a mut Function,
|
||||||
-> FunctionBuilder<'a, Variable> {
|
builder: &'a mut ILBuilder<Variable>,
|
||||||
|
) -> FunctionBuilder<'a, Variable> {
|
||||||
builder.clear();
|
builder.clear();
|
||||||
FunctionBuilder {
|
FunctionBuilder {
|
||||||
func: func,
|
func: func,
|
||||||
@@ -255,12 +261,16 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
}
|
}
|
||||||
if !self.builder.ebbs[self.position.ebb].pristine {
|
if !self.builder.ebbs[self.position.ebb].pristine {
|
||||||
// First we check that the previous block has been filled.
|
// First we check that the previous block has been filled.
|
||||||
debug_assert!(self.is_unreachable() || self.builder.ebbs[self.position.ebb].filled,
|
debug_assert!(
|
||||||
"you have to fill your block before switching");
|
self.is_unreachable() || self.builder.ebbs[self.position.ebb].filled,
|
||||||
|
"you have to fill your block before switching"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
// We cannot switch to a filled block
|
// We cannot switch to a filled block
|
||||||
debug_assert!(!self.builder.ebbs[ebb].filled,
|
debug_assert!(
|
||||||
"you cannot switch to a block which is already filled");
|
!self.builder.ebbs[ebb].filled,
|
||||||
|
"you cannot switch to a block which is already filled"
|
||||||
|
);
|
||||||
|
|
||||||
let basic_block = self.builder.ssa.header_block(ebb);
|
let basic_block = self.builder.ssa.header_block(ebb);
|
||||||
// Then we change the cursor position.
|
// Then we change the cursor position.
|
||||||
@@ -278,12 +288,12 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
/// created. Forgetting to call this method on every block will cause inconsistencies in the
|
/// created. Forgetting to call this method on every block will cause inconsistencies in the
|
||||||
/// produced functions.
|
/// produced functions.
|
||||||
pub fn seal_block(&mut self, ebb: Ebb) {
|
pub fn seal_block(&mut self, ebb: Ebb) {
|
||||||
let side_effects = self.builder
|
let side_effects = self.builder.ssa.seal_ebb_header_block(
|
||||||
.ssa
|
ebb,
|
||||||
.seal_ebb_header_block(ebb,
|
&mut self.func.dfg,
|
||||||
&mut self.func.dfg,
|
&mut self.func.layout,
|
||||||
&mut self.func.layout,
|
&mut self.func.jump_tables,
|
||||||
&mut self.func.jump_tables);
|
);
|
||||||
self.handle_ssa_side_effects(side_effects);
|
self.handle_ssa_side_effects(side_effects);
|
||||||
}
|
}
|
||||||
|
|
||||||
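
The `seal_block` hunk above also shows how the new rustfmt handles a long call on a chained receiver: instead of breaking after each `.` segment, it keeps the receiver chain on one line (here `self.builder.ssa.seal_ebb_header_block(`) and breaks the argument list instead. A hypothetical sketch of the same layout follows; `Inner`, `Outer`, `combine`, and the local names are made-up examples, not items from this codebase.

struct Inner;

impl Inner {
    fn combine(&self, a: u32, b: u32, c: u32, d: u32) -> u32 {
        a + b + c + d
    }
}

struct Outer {
    inner: Inner,
}

impl Outer {
    fn total(&self) -> u32 {
        let first_component = 1;
        let second_component = 2;
        let third_component = 3;
        let fourth_component = 4;
        // The old output broke at the receiver chain (`self.inner` then `.combine(...)`);
        // the new output keeps the receiver on one line and breaks the arguments,
        // with a trailing comma after the last one.
        self.inner.combine(
            first_component,
            second_component,
            third_component,
            fourth_component,
        )
    }
}

fn main() {
    println!("{}", Outer { inner: Inner }.total()); // prints: 10
}
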
@@ -295,18 +305,17 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
/// Returns the Cretonne IL value corresponding to the utilization at the current program
|
/// Returns the Cretonne IL value corresponding to the utilization at the current program
|
||||||
/// position of a previously defined user variable.
|
/// position of a previously defined user variable.
|
||||||
pub fn use_var(&mut self, var: Variable) -> Value {
|
pub fn use_var(&mut self, var: Variable) -> Value {
|
||||||
let ty = *self.builder
|
let ty = *self.builder.types.get(var).expect(
|
||||||
.types
|
"this variable is used but its type has not been declared",
|
||||||
.get(var)
|
);
|
||||||
.expect("this variable is used but its type has not been declared");
|
let (val, side_effects) = self.builder.ssa.use_var(
|
||||||
let (val, side_effects) = self.builder
|
&mut self.func.dfg,
|
||||||
.ssa
|
&mut self.func.layout,
|
||||||
.use_var(&mut self.func.dfg,
|
&mut self.func.jump_tables,
|
||||||
&mut self.func.layout,
|
var,
|
||||||
&mut self.func.jump_tables,
|
ty,
|
||||||
var,
|
self.position.basic_block,
|
||||||
ty,
|
);
|
||||||
self.position.basic_block);
|
|
||||||
self.handle_ssa_side_effects(side_effects);
|
self.handle_ssa_side_effects(side_effects);
|
||||||
val
|
val
|
||||||
}
|
}
|
||||||
@@ -314,11 +323,15 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
/// Register a new definition of a user variable. Panics if the type of the value is not the
|
/// Register a new definition of a user variable. Panics if the type of the value is not the
|
||||||
/// same as the type registered for the variable.
|
/// same as the type registered for the variable.
|
||||||
pub fn def_var(&mut self, var: Variable, val: Value) {
|
pub fn def_var(&mut self, var: Variable, val: Value) {
|
||||||
debug_assert!(self.func.dfg.value_type(val) == self.builder.types[var],
|
debug_assert!(
|
||||||
"the type of the value is not the type registered for the variable");
|
self.func.dfg.value_type(val) == self.builder.types[var],
|
||||||
self.builder
|
"the type of the value is not the type registered for the variable"
|
||||||
.ssa
|
);
|
||||||
.def_var(var, val, self.position.basic_block);
|
self.builder.ssa.def_var(
|
||||||
|
var,
|
||||||
|
val,
|
||||||
|
self.position.basic_block,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the value corresponding to the `i`-th argument of the function as defined by
|
/// Returns the value corresponding to the `i`-th argument of the function as defined by
|
||||||
@@ -369,7 +382,8 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
/// function. The functions below help you inspect the function you're creating and modify it
|
/// function. The functions below help you inspect the function you're creating and modify it
|
||||||
/// in ways that can be unsafe if used incorrectly.
|
/// in ways that can be unsafe if used incorrectly.
|
||||||
impl<'a, Variable> FunctionBuilder<'a, Variable>
|
impl<'a, Variable> FunctionBuilder<'a, Variable>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
/// Retrieves all the arguments for an `Ebb` currently inferred from the jump instructions
|
/// Retrieves all the arguments for an `Ebb` currently inferred from the jump instructions
|
||||||
/// inserted that target it and the SSA construction.
|
/// inserted that target it and the SSA construction.
|
||||||
@@ -402,15 +416,16 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
/// **Note:** You are responsible for maintaining the coherence with the arguments of
|
/// **Note:** You are responsible for maintaining the coherence with the arguments of
|
||||||
/// other jump instructions.
|
/// other jump instructions.
|
||||||
pub fn change_jump_destination(&mut self, inst: Inst, new_dest: Ebb) {
|
pub fn change_jump_destination(&mut self, inst: Inst, new_dest: Ebb) {
|
||||||
let old_dest =
|
let old_dest = self.func.dfg[inst].branch_destination_mut().expect(
|
||||||
self.func.dfg[inst]
|
"you want to change the jump destination of a non-jump instruction",
|
||||||
.branch_destination_mut()
|
);
|
||||||
.expect("you want to change the jump destination of a non-jump instruction");
|
|
||||||
let pred = self.builder.ssa.remove_ebb_predecessor(*old_dest, inst);
|
let pred = self.builder.ssa.remove_ebb_predecessor(*old_dest, inst);
|
||||||
*old_dest = new_dest;
|
*old_dest = new_dest;
|
||||||
self.builder
|
self.builder.ssa.declare_ebb_predecessor(
|
||||||
.ssa
|
new_dest,
|
||||||
.declare_ebb_predecessor(new_dest, pred, inst);
|
pred,
|
||||||
|
inst,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns `true` if and only if the current `Ebb` is sealed and has no predecessors declared.
|
/// Returns `true` if and only if the current `Ebb` is sealed and has no predecessors declared.
|
||||||
@@ -422,7 +437,7 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
Some(entry) => self.position.ebb == entry,
|
Some(entry) => self.position.ebb == entry,
|
||||||
};
|
};
|
||||||
(!is_entry && self.builder.ssa.is_sealed(self.position.ebb) &&
|
(!is_entry && self.builder.ssa.is_sealed(self.position.ebb) &&
|
||||||
self.builder.ssa.predecessors(self.position.ebb).is_empty())
|
self.builder.ssa.predecessors(self.position.ebb).is_empty())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns `true` if and only if no instructions have been added since the last call to
|
/// Returns `true` if and only if no instructions have been added since the last call to
|
||||||
@@ -446,31 +461,31 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, Variable> Drop for FunctionBuilder<'a, Variable>
|
impl<'a, Variable> Drop for FunctionBuilder<'a, Variable>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
/// When a `FunctionBuilder` goes out of scope, it means that the function is fully built.
|
/// When a `FunctionBuilder` goes out of scope, it means that the function is fully built.
|
||||||
/// We then proceed to check if all the `Ebb`s are filled and sealed
|
/// We then proceed to check if all the `Ebb`s are filled and sealed
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
debug_assert!(self.builder
|
debug_assert!(
|
||||||
.ebbs
|
self.builder.ebbs.keys().all(|ebb| {
|
||||||
.keys()
|
self.builder.ebbs[ebb].pristine ||
|
||||||
.all(|ebb| {
|
(self.builder.ssa.is_sealed(ebb) && self.builder.ebbs[ebb].filled)
|
||||||
self.builder.ebbs[ebb].pristine ||
|
}),
|
||||||
(self.builder.ssa.is_sealed(ebb) &&
|
"all blocks should be filled and sealed before dropping a FunctionBuilder"
|
||||||
self.builder.ebbs[ebb].filled)
|
)
|
||||||
}),
|
|
||||||
"all blocks should be filled and sealed before dropping a FunctionBuilder")
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper functions
|
// Helper functions
|
||||||
impl<'a, Variable> FunctionBuilder<'a, Variable>
|
impl<'a, Variable> FunctionBuilder<'a, Variable>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
fn move_to_next_basic_block(&mut self) {
|
fn move_to_next_basic_block(&mut self) {
|
||||||
self.position.basic_block = self.builder
|
self.position.basic_block = self.builder.ssa.declare_ebb_body_block(
|
||||||
.ssa
|
self.position.basic_block,
|
||||||
.declare_ebb_body_block(self.position.basic_block);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fill_current_block(&mut self) {
|
fn fill_current_block(&mut self) {
|
||||||
@@ -478,30 +493,36 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn declare_successor(&mut self, dest_ebb: Ebb, jump_inst: Inst) {
|
fn declare_successor(&mut self, dest_ebb: Ebb, jump_inst: Inst) {
|
||||||
self.builder
|
self.builder.ssa.declare_ebb_predecessor(
|
||||||
.ssa
|
dest_ebb,
|
||||||
.declare_ebb_predecessor(dest_ebb, self.position.basic_block, jump_inst);
|
self.position.basic_block,
|
||||||
|
jump_inst,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_return_args(&self, args: &[Value]) {
|
fn check_return_args(&self, args: &[Value]) {
|
||||||
debug_assert_eq!(args.len(),
|
debug_assert_eq!(
|
||||||
self.func.signature.return_types.len(),
|
args.len(),
|
||||||
"the number of returned values doesn't match the function signature ");
|
self.func.signature.return_types.len(),
|
||||||
|
"the number of returned values doesn't match the function signature "
|
||||||
|
);
|
||||||
for (i, arg) in args.iter().enumerate() {
|
for (i, arg) in args.iter().enumerate() {
|
||||||
let valty = self.func.dfg.value_type(*arg);
|
let valty = self.func.dfg.value_type(*arg);
|
||||||
debug_assert_eq!(valty,
|
debug_assert_eq!(
|
||||||
self.func.signature.return_types[i].value_type,
|
valty,
|
||||||
"the types of the values returned don't match the \
|
self.func.signature.return_types[i].value_type,
|
||||||
function signature");
|
"the types of the values returned don't match the \
|
||||||
|
function signature"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fill_function_args_values(&mut self, ebb: Ebb) {
|
fn fill_function_args_values(&mut self, ebb: Ebb) {
|
||||||
debug_assert!(self.pristine);
|
debug_assert!(self.pristine);
|
||||||
for argtyp in &self.func.signature.argument_types {
|
for argtyp in &self.func.signature.argument_types {
|
||||||
self.builder
|
self.builder.function_args_values.push(
|
||||||
.function_args_values
|
self.func.dfg.append_ebb_arg(ebb, argtyp.value_type),
|
||||||
.push(self.func.dfg.append_ebb_arg(ebb, argtyp.value_type));
|
);
|
||||||
}
|
}
|
||||||
self.pristine = false;
|
self.pristine = false;
|
||||||
}
|
}
|
||||||
@@ -510,48 +531,56 @@ impl<'a, Variable> FunctionBuilder<'a, Variable>
|
|||||||
fn ebb_args_adjustement(&mut self, dest_ebb: Ebb, jump_args: &[Type]) {
|
fn ebb_args_adjustement(&mut self, dest_ebb: Ebb, jump_args: &[Type]) {
|
||||||
let ty_to_append: Option<Vec<Type>> =
|
let ty_to_append: Option<Vec<Type>> =
|
||||||
if self.builder.ssa.predecessors(dest_ebb).len() == 0 ||
|
if self.builder.ssa.predecessors(dest_ebb).len() == 0 ||
|
||||||
self.builder.ebbs[dest_ebb].pristine {
|
self.builder.ebbs[dest_ebb].pristine
|
||||||
|
{
|
||||||
// This is the first jump instruction targeting this Ebb
|
// This is the first jump instruction targeting this Ebb
|
||||||
// so the jump arguments supplied here are this Ebb's arguments
|
// so the jump arguments supplied here are this Ebb's arguments
|
||||||
// However some of the arguments might already be there
|
// However some of the arguments might already be there
|
||||||
// in the Ebb so we have to check they're consistent
|
// in the Ebb so we have to check they're consistent
|
||||||
let dest_ebb_args = self.func.dfg.ebb_args(dest_ebb);
|
let dest_ebb_args = self.func.dfg.ebb_args(dest_ebb);
|
||||||
debug_assert!(dest_ebb_args
|
debug_assert!(
|
||||||
.iter()
|
dest_ebb_args
|
||||||
.zip(jump_args.iter().take(dest_ebb_args.len()))
|
.iter()
|
||||||
.all(|(dest_arg, jump_arg)| {
|
.zip(jump_args.iter().take(dest_ebb_args.len()))
|
||||||
*jump_arg == self.func.dfg.value_type(*dest_arg)
|
.all(|(dest_arg, jump_arg)| {
|
||||||
}),
|
*jump_arg == self.func.dfg.value_type(*dest_arg)
|
||||||
"the jump argument supplied has not the \
|
}),
|
||||||
same type as the corresponding dest ebb argument");
|
"the jump argument supplied has not the \
|
||||||
|
same type as the corresponding dest ebb argument"
|
||||||
|
);
|
||||||
self.builder.ebbs[dest_ebb].user_arg_count = jump_args.len();
|
self.builder.ebbs[dest_ebb].user_arg_count = jump_args.len();
|
||||||
Some(jump_args
|
Some(
|
||||||
.iter()
|
jump_args
|
||||||
.skip(dest_ebb_args.len())
|
.iter()
|
||||||
.cloned()
|
.skip(dest_ebb_args.len())
|
||||||
.collect())
|
.cloned()
|
||||||
|
.collect(),
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
let dest_ebb_args = self.func.dfg.ebb_args(dest_ebb);
|
let dest_ebb_args = self.func.dfg.ebb_args(dest_ebb);
|
||||||
// The Ebb already has predecessors
|
// The Ebb already has predecessors
|
||||||
// We check that the arguments supplied match those supplied
|
// We check that the arguments supplied match those supplied
|
||||||
// previously.
|
// previously.
|
||||||
debug_assert!(jump_args.len() == self.builder.ebbs[dest_ebb].user_arg_count,
|
debug_assert!(
|
||||||
"the jump instruction doesn't have the same \
|
jump_args.len() == self.builder.ebbs[dest_ebb].user_arg_count,
|
||||||
|
"the jump instruction doesn't have the same \
|
||||||
number of arguments as its destination Ebb \
|
number of arguments as its destination Ebb \
|
||||||
({} vs {}).",
|
({} vs {}).",
|
||||||
jump_args.len(),
|
jump_args.len(),
|
||||||
dest_ebb_args.len());
|
dest_ebb_args.len()
|
||||||
debug_assert!(jump_args
|
);
|
||||||
.iter()
|
debug_assert!(
|
||||||
.zip(dest_ebb_args
|
jump_args
|
||||||
.iter()
|
.iter()
|
||||||
.take(self.builder.ebbs[dest_ebb].user_arg_count)
|
.zip(dest_ebb_args.iter().take(
|
||||||
)
|
self.builder.ebbs[dest_ebb].user_arg_count,
|
||||||
.all(|(jump_arg, dest_arg)| {
|
))
|
||||||
*jump_arg == self.func.dfg.value_type(*dest_arg)
|
.all(|(jump_arg, dest_arg)| {
|
||||||
}),
|
*jump_arg == self.func.dfg.value_type(*dest_arg)
|
||||||
"the jump argument supplied has not the \
|
}),
|
||||||
same type as the corresponding dest ebb argument");
|
"the jump argument supplied has not the \
|
||||||
|
same type as the corresponding dest ebb argument"
|
||||||
|
);
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
if let Some(ty_args) = ty_to_append {
|
if let Some(ty_args) = ty_to_append {
|
||||||
|
|||||||
@@ -33,7 +33,8 @@ use std::collections::HashMap;
|
|||||||
/// and it is said _sealed_ if all of its predecessors have been declared. Only filled predecessors
|
/// and it is said _sealed_ if all of its predecessors have been declared. Only filled predecessors
|
||||||
/// can be declared.
|
/// can be declared.
|
||||||
pub struct SSABuilder<Variable>
|
pub struct SSABuilder<Variable>
|
||||||
where Variable: EntityRef + Default
|
where
|
||||||
|
Variable: EntityRef + Default,
|
||||||
{
|
{
|
||||||
// Records for every variable and for every relevant block, the last definition of
|
// Records for every variable and for every relevant block, the last definition of
|
||||||
// the variable in the block.
|
// the variable in the block.
|
||||||
@@ -133,7 +134,8 @@ impl ReservedValue for Block {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<Variable> SSABuilder<Variable>
|
impl<Variable> SSABuilder<Variable>
|
||||||
where Variable: EntityRef + Default
|
where
|
||||||
|
Variable: EntityRef + Default,
|
||||||
{
|
{
|
||||||
/// Allocate a new blank SSA builder struct. Use the API functions to interact with the struct.
|
/// Allocate a new blank SSA builder struct. Use the API functions to interact with the struct.
|
||||||
pub fn new() -> SSABuilder<Variable> {
|
pub fn new() -> SSABuilder<Variable> {
|
||||||
@@ -191,7 +193,8 @@ enum UseVarCases {
|
|||||||
/// Phi functions.
|
/// Phi functions.
|
||||||
///
|
///
|
||||||
impl<Variable> SSABuilder<Variable>
|
impl<Variable> SSABuilder<Variable>
|
||||||
where Variable: EntityRef + Hash + Default
|
where
|
||||||
|
Variable: EntityRef + Hash + Default,
|
||||||
{
|
{
|
||||||
/// Declares a new definition of a variable in a given basic block.
|
/// Declares a new definition of a variable in a given basic block.
|
||||||
/// The SSA value is passed as an argument because it should be created with
|
/// The SSA value is passed as an argument because it should be created with
|
||||||
@@ -207,14 +210,15 @@ impl<Variable> SSABuilder<Variable>
|
|||||||
/// If the variable has never been defined in this block or recursively in its predecessors,
|
/// If the variable has never been defined in this block or recursively in its predecessors,
|
||||||
/// this method will silently create an initializer with `iconst` or `fconst`. You are
|
/// this method will silently create an initializer with `iconst` or `fconst`. You are
|
||||||
/// responsible for making sure that you initialize your variables.
|
/// responsible for making sure that you initialize your variables.
|
||||||
pub fn use_var(&mut self,
|
pub fn use_var(
|
||||||
dfg: &mut DataFlowGraph,
|
&mut self,
|
||||||
layout: &mut Layout,
|
dfg: &mut DataFlowGraph,
|
||||||
jts: &mut JumpTables,
|
layout: &mut Layout,
|
||||||
var: Variable,
|
jts: &mut JumpTables,
|
||||||
ty: Type,
|
var: Variable,
|
||||||
block: Block)
|
ty: Type,
|
||||||
-> (Value, SideEffects) {
|
block: Block,
|
||||||
|
) -> (Value, SideEffects) {
|
||||||
// First we lookup for the current definition of the variable in this block
|
// First we lookup for the current definition of the variable in this block
|
||||||
if let Some(var_defs) = self.variables.get(var) {
|
if let Some(var_defs) = self.variables.get(var) {
|
||||||
if let Some(val) = var_defs.get(&block) {
|
if let Some(val) = var_defs.get(&block) {
|
||||||
@@ -281,13 +285,12 @@ impl<Variable> SSABuilder<Variable>
     /// here and the block is not sealed.
     /// Predecessors have to be added with `declare_ebb_predecessor`.
    pub fn declare_ebb_header_block(&mut self, ebb: Ebb) -> Block {
-        let block = self.blocks
-            .push(BlockData::EbbHeader(EbbHeaderBlockData {
-                predecessors: Vec::new(),
-                sealed: false,
-                ebb: ebb,
-                undef_variables: Vec::new(),
-            }));
+        let block = self.blocks.push(BlockData::EbbHeader(EbbHeaderBlockData {
+            predecessors: Vec::new(),
+            sealed: false,
+            ebb: ebb,
+            undef_variables: Vec::new(),
+        }));
         self.ebb_headers[ebb] = block.into();
         block
     }
@@ -331,12 +334,13 @@ impl<Variable> SSABuilder<Variable>
     ///
     /// This method modifies the function's `Layout` by adding arguments to the `Ebb`s to
     /// take into account the Phi function placed by the SSA algorithm.
-    pub fn seal_ebb_header_block(&mut self,
-                                 ebb: Ebb,
-                                 dfg: &mut DataFlowGraph,
-                                 layout: &mut Layout,
-                                 jts: &mut JumpTables)
-                                 -> SideEffects {
+    pub fn seal_ebb_header_block(
+        &mut self,
+        ebb: Ebb,
+        dfg: &mut DataFlowGraph,
+        layout: &mut Layout,
+        jts: &mut JumpTables,
+    ) -> SideEffects {
         let block = self.header_block(ebb);
 
         // Sanity check
@@ -362,19 +366,24 @@ impl<Variable> SSABuilder<Variable>
     // jump argument to the branch instruction.
     // Panics if called with a non-header block.
    // Returns the list of newly created ebbs for critical edge splitting.
-    fn resolve_undef_vars(&mut self,
-                          block: Block,
-                          dfg: &mut DataFlowGraph,
-                          layout: &mut Layout,
-                          jts: &mut JumpTables)
-                          -> SideEffects {
+    fn resolve_undef_vars(
+        &mut self,
+        block: Block,
+        dfg: &mut DataFlowGraph,
+        layout: &mut Layout,
+        jts: &mut JumpTables,
+    ) -> SideEffects {
         // TODO: find a way to not allocate vectors
         let (predecessors, undef_vars, ebb): (Vec<(Block, Inst)>,
                                               Vec<(Variable, Value)>,
                                               Ebb) = match self.blocks[block] {
             BlockData::EbbBody { .. } => panic!("this should not happen"),
             BlockData::EbbHeader(ref mut data) => {
-                (data.predecessors.clone(), data.undef_variables.clone(), data.ebb)
+                (
+                    data.predecessors.clone(),
+                    data.undef_variables.clone(),
+                    data.ebb,
+                )
             }
         };
 
@@ -384,12 +393,13 @@ impl<Variable> SSABuilder<Variable>
         for (var, val) in undef_vars {
             let (_, mut local_side_effects) =
                 self.predecessors_lookup(dfg, layout, jts, val, var, ebb, &predecessors);
-            side_effects
-                .split_ebbs_created
-                .append(&mut local_side_effects.split_ebbs_created);
-            side_effects
-                .instructions_added_to_ebbs
-                .append(&mut local_side_effects.instructions_added_to_ebbs);
+            side_effects.split_ebbs_created.append(
+                &mut local_side_effects
+                    .split_ebbs_created,
+            );
+            side_effects.instructions_added_to_ebbs.append(
+                &mut local_side_effects.instructions_added_to_ebbs,
+            );
         }
 
         // Then we clear the undef_vars and mark the block as sealed.
@@ -405,15 +415,16 @@ impl<Variable> SSABuilder<Variable>
     /// Look up in the predecessors of an Ebb the def for a value an decides wether or not
     /// to keep the eeb arg, and act accordingly. Returns the chosen value and optionnaly a
     /// list of Ebb that are the middle of newly created critical edges splits.
-    fn predecessors_lookup(&mut self,
-                           dfg: &mut DataFlowGraph,
-                           layout: &mut Layout,
-                           jts: &mut JumpTables,
-                           temp_arg_val: Value,
-                           temp_arg_var: Variable,
-                           dest_ebb: Ebb,
-                           preds: &[(Block, Inst)])
-                           -> (Value, SideEffects) {
+    fn predecessors_lookup(
+        &mut self,
+        dfg: &mut DataFlowGraph,
+        layout: &mut Layout,
+        jts: &mut JumpTables,
+        temp_arg_val: Value,
+        temp_arg_var: Variable,
+        dest_ebb: Ebb,
+        preds: &[(Block, Inst)],
+    ) -> (Value, SideEffects) {
         let mut pred_values: ZeroOneOrMore<Value> = ZeroOneOrMore::Zero();
         // TODO: find a way not not allocate a vector
         let mut jump_args_to_append: Vec<(Block, Inst, Value)> = Vec::new();
@@ -442,12 +453,13 @@ impl<Variable> SSABuilder<Variable>
                 ZeroOneOrMore::More() => ZeroOneOrMore::More(),
             };
             jump_args_to_append.push((pred, last_inst, pred_val));
-            side_effects
-                .split_ebbs_created
-                .append(&mut local_side_effects.split_ebbs_created);
-            side_effects
-                .instructions_added_to_ebbs
-                .append(&mut local_side_effects.instructions_added_to_ebbs);
+            side_effects.split_ebbs_created.append(
+                &mut local_side_effects
+                    .split_ebbs_created,
+            );
+            side_effects.instructions_added_to_ebbs.append(
+                &mut local_side_effects.instructions_added_to_ebbs,
+            );
         }
         match pred_values {
             ZeroOneOrMore::Zero() => {
@@ -486,14 +498,16 @@ impl<Variable> SSABuilder<Variable>
                 // There is disagreement in the predecessors on which value to use so we have
                 // to keep the ebb argument.
                 for (pred_block, last_inst, pred_val) in jump_args_to_append {
-                    match self.append_jump_argument(dfg,
-                                                    layout,
-                                                    last_inst,
-                                                    pred_block,
-                                                    dest_ebb,
-                                                    pred_val,
-                                                    temp_arg_var,
-                                                    jts) {
+                    match self.append_jump_argument(
+                        dfg,
+                        layout,
+                        last_inst,
+                        pred_block,
+                        dest_ebb,
+                        pred_val,
+                        temp_arg_var,
+                        jts,
+                    ) {
                         None => (),
                         Some(middle_ebb) => side_effects.split_ebbs_created.push(middle_ebb),
                     };
@@ -505,16 +519,17 @@ impl<Variable> SSABuilder<Variable>
 
     /// Appends a jump argument to a jump instruction, returns ebb created in case of
     /// critical edge splitting.
-    fn append_jump_argument(&mut self,
-                            dfg: &mut DataFlowGraph,
-                            layout: &mut Layout,
-                            jump_inst: Inst,
-                            jump_inst_block: Block,
-                            dest_ebb: Ebb,
-                            val: Value,
-                            var: Variable,
-                            jts: &mut JumpTables)
-                            -> Option<Ebb> {
+    fn append_jump_argument(
+        &mut self,
+        dfg: &mut DataFlowGraph,
+        layout: &mut Layout,
+        jump_inst: Inst,
+        jump_inst_block: Block,
+        dest_ebb: Ebb,
+        val: Value,
+        var: Variable,
+        jts: &mut JumpTables,
+    ) -> Option<Ebb> {
         match dfg[jump_inst].analyze_branch(&dfg.value_lists) {
             BranchInfo::NotABranch => {
                 panic!("you have declared a non-branch instruction as a predecessor to an ebb");
@@ -529,14 +544,17 @@ impl<Variable> SSABuilder<Variable>
                 // In the case of a jump table, the situation is tricky because br_table doesn't
                 // support arguments.
                 // We have to split the critical edge
-                let indexes: Vec<usize> = jts[jt]
-                    .entries()
-                    .fold(Vec::new(), |mut acc, (index, dest)| if dest == dest_ebb {
+                let indexes: Vec<usize> = jts[jt].entries().fold(
+                    Vec::new(),
+                    |mut acc, (index, dest)| if dest ==
+                        dest_ebb
+                    {
                         acc.push(index);
                         acc
                     } else {
                         acc
-                    });
+                    },
+                );
                 let middle_ebb = dfg.make_ebb();
                 layout.append_ebb(middle_ebb);
                 let block = self.declare_ebb_header_block(middle_ebb);
@@ -632,79 +650,95 @@ mod tests {
         };
         ssa.def_var(y_var, y_ssa, block);
 
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               x_var,
-                               I32,
-                               block)
-                       .0,
-                   x_ssa);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               y_var,
-                               I32,
-                               block)
-                       .0,
-                   y_ssa);
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                x_var,
+                I32,
+                block,
+            ).0,
+            x_ssa
+        );
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                y_var,
+                I32,
+                block,
+            ).0,
+            y_ssa
+        );
         let z_var = Variable(2);
-        let x_use1 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 x_var,
-                                 I32,
-                                 block)
-            .0;
-        let y_use1 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 y_var,
-                                 I32,
-                                 block)
-            .0;
+        let x_use1 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block,
+        ).0;
+        let y_use1 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block,
+        ).0;
         let z1_ssa = {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb0);
             func.dfg.ins(cur).iadd(x_use1, y_use1)
         };
         ssa.def_var(z_var, z1_ssa, block);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               z_var,
-                               I32,
-                               block)
-                       .0,
-                   z1_ssa);
-        let x_use2 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 x_var,
-                                 I32,
-                                 block)
-            .0;
-        let z_use1 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 z_var,
-                                 I32,
-                                 block)
-            .0;
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                z_var,
+                I32,
+                block,
+            ).0,
+            z1_ssa
+        );
+        let x_use2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block,
+        ).0;
+        let z_use1 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            z_var,
+            I32,
+            block,
+        ).0;
         let z2_ssa = {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb0);
             func.dfg.ins(cur).iadd(x_use2, z_use1)
         };
         ssa.def_var(z_var, z2_ssa, block);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               z_var,
-                               I32,
-                               block)
-                       .0,
-                   z2_ssa);
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                z_var,
+                I32,
+                block,
+            ).0,
+            z2_ssa
+        );
     }
 
     #[test]
@@ -740,79 +774,93 @@ mod tests {
             func.dfg.ins(cur).iconst(I32, 2)
         };
         ssa.def_var(y_var, y_ssa, block0);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               x_var,
-                               I32,
-                               block0)
-                       .0,
-                   x_ssa);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               y_var,
-                               I32,
-                               block0)
-                       .0,
-                   y_ssa);
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                x_var,
+                I32,
+                block0,
+            ).0,
+            x_ssa
+        );
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                y_var,
+                I32,
+                block0,
+            ).0,
+            y_ssa
+        );
         let z_var = Variable(2);
-        let x_use1 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 x_var,
-                                 I32,
-                                 block0)
-            .0;
-        let y_use1 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 y_var,
-                                 I32,
-                                 block0)
-            .0;
+        let x_use1 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block0,
+        ).0;
+        let y_use1 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block0,
+        ).0;
         let z1_ssa = {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb0);
             func.dfg.ins(cur).iadd(x_use1, y_use1)
         };
         ssa.def_var(z_var, z1_ssa, block0);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               z_var,
-                               I32,
-                               block0)
-                       .0,
-                   z1_ssa);
-        let y_use2 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 y_var,
-                                 I32,
-                                 block0)
-            .0;
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                z_var,
+                I32,
+                block0,
+            ).0,
+            z1_ssa
+        );
+        let y_use2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block0,
+        ).0;
         let jump_inst: Inst = {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb0);
             func.dfg.ins(cur).brnz(y_use2, ebb1, &[])
         };
         let block1 = ssa.declare_ebb_body_block(block0);
-        let x_use2 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 x_var,
-                                 I32,
-                                 block1)
-            .0;
+        let x_use2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block1,
+        ).0;
         assert_eq!(x_use2, x_ssa);
-        let z_use1 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 z_var,
-                                 I32,
-                                 block1)
-            .0;
+        let z_use1 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            z_var,
+            I32,
+            block1,
+        ).0;
         assert_eq!(z_use1, z1_ssa);
         let z2_ssa = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -820,33 +868,38 @@ mod tests {
             func.dfg.ins(cur).iadd(x_use2, z_use1)
         };
         ssa.def_var(z_var, z2_ssa, block1);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               z_var,
-                               I32,
-                               block1)
-                       .0,
-                   z2_ssa);
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                z_var,
+                I32,
+                block1,
+            ).0,
+            z2_ssa
+        );
         ssa.seal_ebb_header_block(ebb0, &mut func.dfg, &mut func.layout, &mut func.jump_tables);
         let block2 = ssa.declare_ebb_header_block(ebb1);
         ssa.declare_ebb_predecessor(ebb1, block0, jump_inst);
         ssa.seal_ebb_header_block(ebb1, &mut func.dfg, &mut func.layout, &mut func.jump_tables);
-        let x_use3 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 x_var,
-                                 I32,
-                                 block2)
-            .0;
+        let x_use3 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block2,
+        ).0;
         assert_eq!(x_ssa, x_use3);
-        let y_use3 = ssa.use_var(&mut func.dfg,
-                                 &mut func.layout,
-                                 &mut func.jump_tables,
-                                 y_var,
-                                 I32,
-                                 block2)
-            .0;
+        let y_use3 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block2,
+        ).0;
         assert_eq!(y_ssa, y_use3);
         let y2_ssa = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -897,14 +950,17 @@ mod tests {
             func.dfg.ins(cur).iconst(I32, 1)
         };
         ssa.def_var(x_var, x1, block0);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               x_var,
-                               I32,
-                               block0)
-                       .0,
-                   x1);
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                x_var,
+                I32,
+                block0,
+            ).0,
+            x1
+        );
         let y_var = Variable(1);
         let y1 = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -912,30 +968,35 @@ mod tests {
             func.dfg.ins(cur).iconst(I32, 2)
         };
         ssa.def_var(y_var, y1, block0);
-        assert_eq!(ssa.use_var(&mut func.dfg,
-                               &mut func.layout,
-                               &mut func.jump_tables,
-                               y_var,
-                               I32,
-                               block0)
-                       .0,
-                   y1);
+        assert_eq!(
+            ssa.use_var(
+                &mut func.dfg,
+                &mut func.layout,
+                &mut func.jump_tables,
+                y_var,
+                I32,
+                block0,
+            ).0,
+            y1
+        );
         let z_var = Variable(2);
-        let x2 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             x_var,
-                             I32,
-                             block0)
-            .0;
+        let x2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block0,
+        ).0;
         assert_eq!(x2, x1);
-        let y2 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             y_var,
-                             I32,
-                             block0)
-            .0;
+        let y2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block0,
+        ).0;
         assert_eq!(y2, y1);
         let z1 = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -950,33 +1011,36 @@ mod tests {
         };
         let block1 = ssa.declare_ebb_header_block(ebb1);
         ssa.declare_ebb_predecessor(ebb1, block0, jump_ebb0_ebb1);
-        let z2 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             z_var,
-                             I32,
-                             block1)
-            .0;
-        let y3 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             y_var,
-                             I32,
-                             block1)
-            .0;
+        let z2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            z_var,
+            I32,
+            block1,
+        ).0;
+        let y3 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block1,
+        ).0;
         let z3 = {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb1);
             func.dfg.ins(cur).iadd(z2, y3)
         };
         ssa.def_var(z_var, z3, block1);
-        let y4 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             y_var,
-                             I32,
-                             block1)
-            .0;
+        let y4 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block1,
+        ).0;
         assert_eq!(y4, y3);
         let jump_ebb1_ebb2 = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -984,34 +1048,37 @@ mod tests {
             func.dfg.ins(cur).brnz(y4, ebb2, &[])
         };
         let block2 = ssa.declare_ebb_body_block(block1);
-        let z4 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             z_var,
-                             I32,
-                             block2)
-            .0;
+        let z4 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            z_var,
+            I32,
+            block2,
+        ).0;
         assert_eq!(z4, z3);
-        let x3 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             x_var,
-                             I32,
-                             block2)
-            .0;
+        let x3 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block2,
+        ).0;
         let z5 = {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb1);
             func.dfg.ins(cur).isub(z4, x3)
         };
         ssa.def_var(z_var, z5, block2);
-        let y5 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             y_var,
-                             I32,
-                             block2)
-            .0;
+        let y5 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block2,
+        ).0;
         assert_eq!(y5, y3);
         {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -1022,21 +1089,23 @@ mod tests {
         let block3 = ssa.declare_ebb_header_block(ebb2);
         ssa.declare_ebb_predecessor(ebb2, block1, jump_ebb1_ebb2);
         ssa.seal_ebb_header_block(ebb2, &mut func.dfg, &mut func.layout, &mut func.jump_tables);
-        let y6 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             y_var,
-                             I32,
-                             block3)
-            .0;
+        let y6 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block3,
+        ).0;
         assert_eq!(y6, y3);
-        let x4 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             x_var,
-                             I32,
-                             block3)
-            .0;
+        let x4 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block3,
+        ).0;
         assert_eq!(x4, x3);
         let y7 = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -1089,13 +1158,14 @@ mod tests {
         let mut jt_data = JumpTableData::new();
         jt_data.set_entry(0, ebb1);
         let jt = func.jump_tables.push(jt_data);
-        ssa.use_var(&mut func.dfg,
-                    &mut func.layout,
-                    &mut func.jump_tables,
-                    x_var,
-                    I32,
-                    block0)
-            .0;
+        ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block0,
+        ).0;
         let br_table = {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb0);
@@ -1117,13 +1187,14 @@ mod tests {
         ssa.declare_ebb_predecessor(ebb1, block1, jump_inst);
         ssa.declare_ebb_predecessor(ebb1, block0, br_table);
         ssa.seal_ebb_header_block(ebb1, &mut func.dfg, &mut func.layout, &mut func.jump_tables);
-        let x4 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             x_var,
-                             I32,
-                             block2)
-            .0;
+        let x4 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block2,
+        ).0;
         {
             let cur = &mut Cursor::new(&mut func.layout);
             cur.goto_bottom(ebb1);
@@ -1189,21 +1260,23 @@ mod tests {
         };
         let block1 = ssa.declare_ebb_header_block(ebb1);
         ssa.declare_ebb_predecessor(ebb1, block0, jump_inst);
-        let z2 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             z_var,
-                             I32,
-                             block1)
-            .0;
+        let z2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            z_var,
+            I32,
+            block1,
+        ).0;
         assert_eq!(func.dfg.ebb_args(ebb1)[0], z2);
-        let x2 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             x_var,
-                             I32,
-                             block1)
-            .0;
+        let x2 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block1,
+        ).0;
         assert_eq!(func.dfg.ebb_args(ebb1)[1], x2);
         let x3 = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -1211,20 +1284,22 @@ mod tests {
             func.dfg.ins(cur).iadd(x2, z2)
         };
         ssa.def_var(x_var, x3, block1);
-        let x4 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             x_var,
-                             I32,
-                             block1)
-            .0;
-        let y3 = ssa.use_var(&mut func.dfg,
-                             &mut func.layout,
-                             &mut func.jump_tables,
-                             y_var,
-                             I32,
-                             block1)
-            .0;
+        let x4 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            x_var,
+            I32,
+            block1,
+        ).0;
+        let y3 = ssa.use_var(
+            &mut func.dfg,
+            &mut func.layout,
+            &mut func.jump_tables,
+            y_var,
+            I32,
+            block1,
+        ).0;
         assert_eq!(func.dfg.ebb_args(ebb1)[2], y3);
         let y4 = {
             let cur = &mut Cursor::new(&mut func.layout);
@@ -36,7 +36,8 @@ impl IsaSpec {
 
     /// Parse an iterator of command line options and apply them to `config`.
     pub fn parse_options<'a, I>(iter: I, config: &mut Configurable, loc: &Location) -> Result<()>
-        where I: Iterator<Item = &'a str>
+    where
+        I: Iterator<Item = &'a str>,
     {
         for opt in iter.map(TestOption::new) {
             match opt {
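The hunks above are mechanical: rustfmt 0.9.0 moves `where` clauses onto their own line and breaks an argument list that no longer fits on one line into one argument per line with a trailing comma and the closing parenthesis on its own line. A minimal sketch of the two layouts, with hypothetical names that do not come from this repository:

    // `where` clause on its own line, each bound indented with a trailing comma.
    fn sum_as_u32<V>(a: V, b: V) -> u32
    where
        V: Into<u32>,
    {
        a.into() + b.into()
    }

    // A long call is broken one argument per line, trailing comma included,
    // with the closing parenthesis back at the statement's indentation.
    fn demo() -> u32 {
        sum_as_u32(
            1u16,
            2u16,
        )
    }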