Update to the rustfmt shipped with Rust 1.28, which is now stable.
Also, rustfmt's --write-mode=check option is now named --check.
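For context, a minimal sketch of the renamed check invocation, using the format-all.sh wrapper that the diff below touches (exact flag support depends on the rustfmt release):

    # older rustfmt: report formatting diffs without rewriting files
    ./format-all.sh --write-mode=check
    # rustfmt shipped with Rust 1.28: same behaviour, renamed flag
    ./format-all.sh --check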
@@ -27,7 +27,7 @@ function banner {
 # Run rustfmt if we have it.
 banner "Rust formatting"
 if type rustfmt > /dev/null; then
-    if ! "$topdir/format-all.sh" --write-mode=check ; then
+    if ! "$topdir/format-all.sh" --check ; then
         echo "Formatting diffs detected! Run \"cargo fmt --all\" to correct."
         exit 1
     fi
@@ -621,7 +621,8 @@ impl<F: Forest> Path<F> {
 
     /// Update the critical key for the right sibling node at `level`.
     fn update_right_crit_key(&self, level: usize, crit_key: F::Key, pool: &mut NodePool<F>) {
-        let bl = self.right_sibling_branch_level(level, pool)
+        let bl = self
+            .right_sibling_branch_level(level, pool)
             .expect("No right sibling exists");
         match pool[self.node[bl]] {
             NodeData::Inner { ref mut keys, .. } => {
@@ -151,7 +151,8 @@ fn relax_branch(
     // Pick the first encoding that can handle the branch range.
     let dfg = &cur.func.dfg;
     let ctrl_type = dfg.ctrl_typevar(inst);
-    if let Some(enc) = isa.legal_encodings(cur.func, &dfg[inst], ctrl_type)
+    if let Some(enc) = isa
+        .legal_encodings(cur.func, &dfg[inst], ctrl_type)
         .find(|&enc| {
             let range = encinfo.branch_range(enc).expect("Branch with no range");
             if !range.contains(offset, dest_offset) {
@@ -38,14 +38,15 @@ pub fn shrink_instructions(func: &mut Function, isa: &TargetIsa) {
            | InstructionData::RegSpill { .. } => {
                divert.apply(&func.dfg[inst]);
                continue;
            }
-            _ => ()
+            _ => (),
        }
 
        let ctrl_type = func.dfg.ctrl_typevar(inst);
 
        // Pick the last encoding with constraints that are satisfied.
-        let best_enc = isa.legal_encodings(func, &func.dfg[inst], ctrl_type)
+        let best_enc = isa
+            .legal_encodings(func, &func.dfg[inst], ctrl_type)
            .filter(|e| encinfo.constraints[e.recipe()].satisfied(inst, &divert, &func))
            .min_by_key(|e| encinfo.bytes(*e))
            .unwrap();
@@ -752,7 +752,8 @@ impl<'c, 'f> ir::InstInserterBase<'c> for &'c mut EncCursor<'f> {
         // Assign an encoding.
         // XXX Is there a way to describe this error to the user?
         #[cfg_attr(feature = "cargo-clippy", allow(match_wild_err_arm))]
-        match self.isa
+        match self
+            .isa
             .encode(&self.func, &self.func.dfg[inst], ctrl_typevar)
         {
             Ok(e) => self.func.encodings[inst] = e,
@@ -422,7 +422,8 @@ impl DominatorTree {
         // Get an iterator with just the reachable, already visited predecessors to `ebb`.
         // Note that during the first pass, `rpo_number` is 1 for reachable blocks that haven't
         // been visited yet, 0 for unreachable blocks.
-        let mut reachable_preds = cfg.pred_iter(ebb)
+        let mut reachable_preds = cfg
+            .pred_iter(ebb)
             .filter(|&BasicBlock { ebb: pred, .. }| self.nodes[pred].rpo_number > 1);
 
         // The RPO must visit at least one predecessor before this node.
@@ -453,7 +454,8 @@ impl DominatorTree {
         }
         // We use the RPO comparison on the postorder list so we invert the operands of the
         // comparison
-        let old_ebb_postorder_index = self.postorder
+        let old_ebb_postorder_index = self
+            .postorder
             .as_slice()
             .binary_search_by(|probe| self.rpo_cmp_ebb(old_ebb, *probe))
             .expect("the old ebb is not declared to the dominator tree");
@@ -62,7 +62,8 @@ impl Signature {
     /// Even if there are no stack arguments, this will set `params` to `Some(0)` instead
     /// of `None`. This indicates that the signature has been legalized.
     pub fn compute_argument_bytes(&mut self) {
-        let bytes = self.params
+        let bytes = self
+            .params
             .iter()
             .filter_map(|arg| match arg.location {
                 ArgumentLoc::Stack(offset) if offset >= 0 => {
@@ -182,7 +182,8 @@ impl Layout {
     /// Assign a valid sequence number to `inst` such that the numbers are still monotonic. This may
     /// require renumbering.
     fn assign_inst_seq(&mut self, inst: Inst) {
-        let ebb = self.inst_ebb(inst)
+        let ebb = self
+            .inst_ebb(inst)
             .expect("inst must be inserted before assigning an seq");
 
         // Get the sequence number immediately before `inst`.
@@ -569,7 +570,8 @@ impl Layout {
     /// Insert `inst` before the instruction `before` in the same EBB.
     pub fn insert_inst(&mut self, inst: Inst, before: Inst) {
         debug_assert_eq!(self.inst_ebb(inst), None);
-        let ebb = self.inst_ebb(before)
+        let ebb = self
+            .inst_ebb(before)
             .expect("Instruction before insertion point not in the layout");
         let after = self.insts[before].prev;
         {
@@ -643,7 +645,8 @@ impl Layout {
     /// i4
     /// ```
     pub fn split_ebb(&mut self, new_ebb: Ebb, before: Inst) {
-        let old_ebb = self.inst_ebb(before)
+        let old_ebb = self
+            .inst_ebb(before)
             .expect("The `before` instruction must be in the layout");
         debug_assert!(!self.is_ebb_inserted(new_ebb));
 
@@ -309,7 +309,8 @@ impl StackSlots {
         let size = spill_size(ty);
 
         // Find the smallest existing slot that can fit the type.
-        if let Some(&ss) = self.emergency
+        if let Some(&ss) = self
+            .emergency
             .iter()
             .filter(|&&ss| self[ss].size >= size && !in_use.contains(&ss.into()))
             .min_by_key(|&&ss| self[ss].size)
@@ -318,7 +319,8 @@ impl StackSlots {
         }
 
         // Alternatively, use the largest available slot and make it larger.
-        if let Some(&ss) = self.emergency
+        if let Some(&ss) = self
+            .emergency
             .iter()
             .filter(|&&ss| !in_use.contains(&ss.into()))
             .max_by_key(|&&ss| self[ss].size)
@@ -345,7 +345,8 @@ fn expand_fcvt_to_sint(
     let mut pos = FuncCursor::new(func).after_inst(inst);
     pos.use_srcloc(inst);
 
-    let is_done = pos.ins()
+    let is_done = pos
+        .ins()
         .icmp_imm(IntCC::NotEqual, result, 1 << (ty.lane_bits() - 1));
     pos.ins().brnz(is_done, done, &[]);
 
@@ -13,11 +13,7 @@ pub trait IteratorExtras: Iterator {
     }
 }
 
-impl<T> IteratorExtras for T
-where
-    T: Iterator,
-{
-}
+impl<T> IteratorExtras for T where T: Iterator {}
 
 /// Adjacent pairs iterator returned by `adjacent_pairs()`.
 ///
@@ -187,7 +187,8 @@ fn legalize_inst_results<ResType>(pos: &mut FuncCursor, mut get_abi_type: ResTyp
 where
     ResType: FnMut(&Function, usize) -> AbiParam,
 {
-    let call = pos.current_inst()
+    let call = pos
+        .current_inst()
         .expect("Cursor must point to a call instruction");
 
     // We theoretically allow for call instructions that return a number of fixed results before
@@ -419,7 +420,8 @@ fn legalize_inst_arguments<ArgType>(
 ) where
     ArgType: FnMut(&Function, usize) -> AbiParam,
 {
-    let inst = pos.current_inst()
+    let inst = pos
+        .current_inst()
         .expect("Cursor must point to a call instruction");
 
     // Lift the value list out of the call instruction so we modify it.
@@ -550,7 +552,8 @@ pub fn handle_return_abi(inst: Inst, func: &mut Function, cfg: &ControlFlowGraph
 
     // Count the special-purpose return values (`link`, `sret`, and `vmctx`) that were appended to
     // the legalized signature.
-    let special_args = func.signature
+    let special_args = func
+        .signature
         .returns
         .iter()
         .rev()
@@ -591,7 +594,8 @@ pub fn handle_return_abi(inst: Inst, func: &mut Function, cfg: &ControlFlowGraph
         // A `link`/`sret`/`vmctx` return value can only appear in a signature that has a
         // unique matching argument. They are appended at the end, so search the signature from
         // the end.
-        let idx = pos.func
+        let idx = pos
+            .func
             .signature
             .params
             .iter()
@@ -599,7 +603,8 @@ pub fn handle_return_abi(inst: Inst, func: &mut Function, cfg: &ControlFlowGraph
             .expect("No matching special purpose argument.");
         // Get the corresponding entry block value and add it to the return instruction's
         // arguments.
-        let val = pos.func
+        let val = pos
+            .func
             .dfg
             .ebb_params(pos.func.layout.entry_block().unwrap())[idx];
         debug_assert_eq!(pos.func.dfg.value_type(val), arg.value_type);
@@ -641,9 +646,11 @@ fn spill_entry_params(func: &mut Function, entry: Ebb) {
 /// or calls between writing the stack slots and the call instruction. Writing the slots earlier
 /// could help reduce register pressure before the call.
 fn spill_call_arguments(pos: &mut FuncCursor) -> bool {
-    let inst = pos.current_inst()
+    let inst = pos
+        .current_inst()
         .expect("Cursor must point to a call instruction");
-    let sig_ref = pos.func
+    let sig_ref = pos
+        .func
         .dfg
         .call_signature(inst)
         .expect("Call instruction expected.");
@@ -37,7 +37,8 @@ pub fn expand_global_value(
 /// Expand a `global_value` instruction for a vmctx global.
 fn vmctx_addr(inst: ir::Inst, func: &mut ir::Function, offset: i64) {
     // Get the value representing the `vmctx` argument.
-    let vmctx = func.special_param(ir::ArgumentPurpose::VMContext)
+    let vmctx = func
+        .special_param(ir::ArgumentPurpose::VMContext)
         .expect("Missing vmctx parameter");
 
     // Simply replace the `global_value` instruction with an `iadd_imm`, reusing the result value.
@@ -61,20 +61,23 @@ fn dynamic_addr(
     let oob;
     if access_size == 1 {
         // `offset > bound - 1` is the same as `offset >= bound`.
-        oob = pos.ins()
+        oob = pos
+            .ins()
             .icmp(IntCC::UnsignedGreaterThanOrEqual, offset, bound);
     } else if access_size <= min_size {
         // We know that bound >= min_size, so here we can compare `offset > bound - access_size` without
         // wrapping.
         let adj_bound = pos.ins().iadd_imm(bound, -access_size);
-        oob = pos.ins()
+        oob = pos
+            .ins()
             .icmp(IntCC::UnsignedGreaterThan, offset, adj_bound);
     } else {
         // We need an overflow check for the adjusted offset.
         let access_size_val = pos.ins().iconst(offset_ty, access_size);
         let (adj_offset, overflow) = pos.ins().iadd_cout(offset, access_size_val);
         pos.ins().trapnz(overflow, ir::TrapCode::HeapOutOfBounds);
-        oob = pos.ins()
+        oob = pos
+            .ins()
             .icmp(IntCC::UnsignedGreaterThan, adj_offset, bound);
     }
     pos.ins().trapnz(oob, ir::TrapCode::HeapOutOfBounds);
@@ -139,7 +139,8 @@ fn split_any(
             .expect("Branches must have value lists.");
         let num_args = args.len(&pos.func.dfg.value_lists);
         // Get the old value passed to the EBB argument we're repairing.
-        let old_arg = args.get(fixed_args + repair.num, &pos.func.dfg.value_lists)
+        let old_arg = args
+            .get(fixed_args + repair.num, &pos.func.dfg.value_lists)
             .expect("Too few branch arguments");
 
         // It's possible that the CFG's predecessor list has duplicates. Detect them here.
@@ -153,13 +154,15 @@ fn split_any(
         let (lo, hi) = split_value(pos, old_arg, repair.concat, &mut repairs);
 
         // The `lo` part replaces the original argument.
-        *args.get_mut(fixed_args + repair.num, &mut pos.func.dfg.value_lists)
+        *args
+            .get_mut(fixed_args + repair.num, &mut pos.func.dfg.value_lists)
             .unwrap() = lo;
 
         // The `hi` part goes at the end. Since multiple repairs may have been scheduled to the
         // same EBB, there could be multiple arguments missing.
         if num_args > fixed_args + repair.hi_num {
-            *args.get_mut(fixed_args + repair.hi_num, &mut pos.func.dfg.value_lists)
+            *args
+                .get_mut(fixed_args + repair.hi_num, &mut pos.func.dfg.value_lists)
                 .unwrap() = hi;
         } else {
             // We need to append one or more arguments. If we're adding more than one argument,
@@ -334,7 +334,8 @@ pub fn do_postopt(func: &mut Function, isa: &TargetIsa) {
         optimize_cpu_flags(&mut pos, inst, last_flags_clobber, isa);
 
         // Track the most recent seen instruction that clobbers the flags.
-        if let Some(constraints) = isa.encoding_info()
+        if let Some(constraints) = isa
+            .encoding_info()
             .operand_constraints(pos.func.encodings[inst])
         {
             if constraints.clobbers_flags {
@@ -307,7 +307,8 @@ impl<'a> Context<'a> {
         // Create a live range for the new value.
         // TODO: Should we handle ghost values?
         let affinity = Affinity::new(
-            &self.encinfo
+            &self
+                .encinfo
                 .operand_constraints(pos.func.encodings[inst])
                 .expect("Bad copy encoding")
                 .outs[0],
@@ -352,7 +353,8 @@ impl<'a> Context<'a> {
         // Create a live range for the new value.
         // TODO: Handle affinity for ghost values.
         let affinity = Affinity::new(
-            &self.encinfo
+            &self
+                .encinfo
                 .operand_constraints(pos.func.encodings[inst])
                 .expect("Bad copy encoding")
                 .outs[0],
@@ -419,7 +421,8 @@ impl<'a> Context<'a> {
             let node = Node::value(value, 0, self.func);
 
             // Push this value and get the nearest dominating def back.
-            let parent = match self.forest
+            let parent = match self
+                .forest
                 .push_node(node, self.func, self.domtree, self.preorder)
             {
                 None => continue,
@@ -527,7 +527,8 @@ impl<'a> Context<'a> {
     /// all values used by the instruction.
    fn program_complete_input_constraints(&mut self) {
        let inst = self.cur.current_inst().expect("Not on an instruction");
-        let constraints = self.encinfo
+        let constraints = self
+            .encinfo
            .operand_constraints(self.cur.func.encodings[inst])
            .expect("Current instruction not encoded")
            .ins;
@@ -643,7 +644,8 @@ impl<'a> Context<'a> {
        Pred: FnMut(&LiveRange, LiveRangeContext<Layout>) -> bool,
    {
        for rdiv in self.divert.all() {
-            let lr = self.liveness
+            let lr = self
+                .liveness
                .get(rdiv.value)
                .expect("Missing live range for diverted register");
            if pred(lr, self.liveness.context(&self.cur.func.layout)) {
@@ -942,7 +944,8 @@ impl<'a> Context<'a> {
                ..
            } => {
                debug_assert_eq!(slot[to_slot].expand(), None, "Overwriting slot in use");
-                let ss = self.cur
+                let ss = self
+                    .cur
                    .func
                    .stack_slots
                    .get_emergency_slot(self.cur.func.dfg.value_type(value), &slot[0..spills]);
@@ -187,7 +187,8 @@ impl LiveValueTracker {
            // If the immediate dominator exits, we must have a stored list for it. This is a
            // requirement to the order EBBs are visited: All dominators must have been processed
            // before the current EBB.
-            let idom_live_list = self.idom_sets
+            let idom_live_list = self
+                .idom_sets
                .get(&idom)
                .expect("No stored live set for dominator");
            let ctx = liveness.context(layout);
@@ -340,7 +340,8 @@ impl Liveness {
    where
        PP: Into<ProgramPoint>,
    {
-        let old = self.ranges
+        let old = self
+            .ranges
            .insert(LiveRange::new(value, def.into(), affinity));
        debug_assert!(old.is_none(), "{} already has a live range", value);
    }
@@ -114,7 +114,8 @@ impl Pressure {
        }
 
        // Compute per-class limits from `usable`.
-        for (toprc, rc) in p.toprc
+        for (toprc, rc) in p
+            .toprc
            .iter_mut()
            .take_while(|t| t.num_toprcs > 0)
            .zip(reginfo.classes)
@@ -166,7 +166,8 @@ impl<'a> Context<'a> {
            if arg.affinity.is_stack() {
                // An incoming register parameter was spilled. Replace the parameter value
                // with a temporary register value that is immediately spilled.
-                let reg = self.cur
+                let reg = self
+                    .cur
                    .func
                    .dfg
                    .replace_ebb_param(arg.value, abi.value_type);
@@ -199,7 +200,8 @@ impl<'a> Context<'a> {
        self.cur.use_srcloc(inst);
 
        // Get the operand constraints for `inst` that we are trying to satisfy.
-        let constraints = self.encinfo
+        let constraints = self
+            .encinfo
            .operand_constraints(encoding)
            .expect("Missing instruction encoding");
 
@@ -276,7 +278,8 @@ impl<'a> Context<'a> {
        // Same thing for spilled call return values.
        let retvals = &defs[constraints.outs.len()..];
        if !retvals.is_empty() {
-            let sig = self.cur
+            let sig = self
+                .cur
                .func
                .dfg
                .call_signature(inst)
@@ -125,7 +125,8 @@ impl<'a> Context<'a> {
        self.process_spills(tracker);
 
        while let Some(inst) = self.cur.next_inst() {
-            if let Some(constraints) = self.encinfo
+            if let Some(constraints) = self
+                .encinfo
                .operand_constraints(self.cur.func.encodings[inst])
            {
                self.visit_inst(inst, ebb, constraints, tracker);
@@ -494,7 +495,8 @@ impl<'a> Context<'a> {
        }
 
        // Assign a spill slot for the whole virtual register.
-        let ss = self.cur
+        let ss = self
+            .cur
            .func
            .stack_slots
            .make_spill_slot(self.cur.func.dfg.value_type(value));
@@ -152,7 +152,8 @@ impl VirtRegs {
        });
 
        // Determine the insertion position for `single`.
-        let index = match self.values(vreg)
+        let index = match self
+            .values(vreg)
            .binary_search_by(|&v| preorder.pre_cmp_def(v, single, func))
        {
            Ok(_) => panic!("{} already in {}", single, vreg),
@@ -49,7 +49,8 @@ pub fn layout_stack(frame: &mut StackSlots, alignment: StackSize) -> CodegenResu
                incoming_min = min(incoming_min, slot.offset.unwrap());
            }
            StackSlotKind::OutgoingArg => {
-                let offset = slot.offset
+                let offset = slot
+                    .offset
                    .unwrap()
                    .checked_add(slot.size as StackOffset)
                    .ok_or(CodegenError::ImplLimitExceeded)?;
@@ -98,7 +98,8 @@ impl<'a> FlagsVerifier<'a> {
            }
 
            // Does the instruction have an encoding that clobbers the CPU flags?
-            if self.encinfo
+            if self
+                .encinfo
                .as_ref()
                .and_then(|ei| ei.operand_constraints(self.func.encodings[inst]))
                .map_or(false, |c| c.clobbers_flags) && live_val.is_some()
@@ -87,7 +87,8 @@ impl<'a> LocationVerifier<'a> {
        enc: isa::Encoding,
        divert: &RegDiversions,
    ) -> VerifierResult<()> {
-        let constraints = self.encinfo
+        let constraints = self
+            .encinfo
            .operand_constraints(enc)
            .expect("check_enc_constraints requires a legal encoding");
 
@@ -191,7 +191,8 @@ impl<'a> Verifier<'a> {
        }
 
        if let ir::GlobalValueData::VMContext { .. } = self.func.global_values[cur] {
-            if self.func
+            if self
+                .func
                .special_param(ir::ArgumentPurpose::VMContext)
                .is_none()
            {
@@ -253,7 +254,8 @@ impl<'a> Verifier<'a> {
 
        let fixed_results = inst_data.opcode().constraints().fixed_results();
        // var_results is 0 if we aren't a call instruction
-        let var_results = dfg.call_signature(inst)
+        let var_results = dfg
+            .call_signature(inst)
            .map_or(0, |sig| dfg.signatures[sig].returns.len());
        let total_results = fixed_results + var_results;
 
@@ -498,7 +500,8 @@ impl<'a> Verifier<'a> {
                }
                // Defining instruction dominates the instruction that uses the value.
                if is_reachable {
-                    if !self.expected_domtree
+                    if !self
+                        .expected_domtree
                        .dominates(def_inst, loc_inst, &self.func.layout)
                    {
                        return err!(loc_inst, "uses value from non-dominating {}", def_inst);
@@ -529,7 +532,8 @@ impl<'a> Verifier<'a> {
                }
                // The defining EBB dominates the instruction using this value.
                if is_reachable
-                    && !self.expected_domtree
+                    && !self
+                        .expected_domtree
                        .dominates(ebb, loc_inst, &self.func.layout)
                {
                    return err!(loc_inst, "uses value arg from non-dominating {}", ebb);
@@ -604,7 +608,8 @@ impl<'a> Verifier<'a> {
        }
        // We verify rpo_cmp on pairs of adjacent ebbs in the postorder
        for (&prev_ebb, &next_ebb) in domtree.cfg_postorder().iter().adjacent_pairs() {
-            if self.expected_domtree
+            if self
+                .expected_domtree
                .rpo_cmp(prev_ebb, next_ebb, &self.func.layout) != Ordering::Greater
            {
                return err!(
@@ -743,7 +748,8 @@ impl<'a> Verifier<'a> {
    fn typecheck_variable_args(&self, inst: Inst) -> VerifierResult<()> {
        match self.func.dfg.analyze_branch(inst) {
            BranchInfo::SingleDest(ebb, _) => {
-                let iter = self.func
+                let iter = self
+                    .func
                    .dfg
                    .ebb_params(ebb)
                    .iter()
@@ -1038,11 +1044,12 @@ impl<'a> Verifier<'a> {
 
        let encoding = self.func.encodings[inst];
        if encoding.is_legal() {
-            let mut encodings = isa.legal_encodings(
-                &self.func,
-                &self.func.dfg[inst],
-                self.func.dfg.ctrl_typevar(inst),
-            ).peekable();
+            let mut encodings =
+                isa.legal_encodings(
+                    &self.func,
+                    &self.func.dfg[inst],
+                    self.func.dfg.ctrl_typevar(inst),
+                ).peekable();
 
            if encodings.peek().is_none() {
                return err!(
@@ -282,7 +282,8 @@ impl TestRunner {
    /// Print out a report of slow tests.
    fn report_slow_tests(&self) {
        // Collect runtimes of succeeded tests.
-        let mut times = self.tests
+        let mut times = self
+            .tests
            .iter()
            .filter_map(|entry| match *entry {
                QueueEntry {
@@ -116,7 +116,8 @@ impl SubTest for TestBinEmit {
        let mut func = func.into_owned();
 
        // Fix the stack frame layout so we can test spill/fill encodings.
-        let min_offset = func.stack_slots
+        let min_offset = func
+            .stack_slots
            .values()
            .map(|slot| slot.offset.unwrap())
            .min();
@@ -133,14 +134,12 @@ impl SubTest for TestBinEmit {
            // Find an encoding that satisfies both immediate field and register
            // constraints.
            if let Some(enc) = {
-                let mut legal_encodings = isa.legal_encodings(
-                    &func,
-                    &func.dfg[inst],
-                    func.dfg.ctrl_typevar(inst),
-                ).filter(|e| {
-                    let recipe_constraints = &encinfo.constraints[e.recipe()];
-                    recipe_constraints.satisfied(inst, &divert, &func)
-                });
+                let mut legal_encodings = isa
+                    .legal_encodings(&func, &func.dfg[inst], func.dfg.ctrl_typevar(inst))
+                    .filter(|e| {
+                        let recipe_constraints = &encinfo.constraints[e.recipe()];
+                        recipe_constraints.satisfied(inst, &divert, &func)
+                    });
 
                if opt_level == OptLevel::Best {
                    // Get the smallest legal encoding
@@ -207,7 +206,8 @@ impl SubTest for TestBinEmit {
            // Send legal encodings into the emitter.
            if enc.is_legal() {
                // Generate a better error message if output locations are not specified.
-                if let Some(&v) = func.dfg
+                if let Some(&v) = func
+                    .dfg
                    .inst_results(inst)
                    .iter()
                    .find(|&&v| !func.locations[v].is_assigned())
@@ -236,7 +236,8 @@ impl SubTest for TestBinEmit {
            if !enc.is_legal() {
                // A possible cause of an unencoded instruction is a missing location for
                // one of the input operands.
-                if let Some(&v) = func.dfg
+                if let Some(&v) = func
+                    .dfg
                    .inst_args(inst)
                    .iter()
                    .find(|&&v| !func.locations[v].is_assigned())
@@ -249,11 +250,9 @@ impl SubTest for TestBinEmit {
                }
 
                // Do any encodings exist?
-                let encodings = isa.legal_encodings(
-                    &func,
-                    &func.dfg[inst],
-                    func.dfg.ctrl_typevar(inst),
-                ).map(|e| encinfo.display(e))
+                let encodings = isa
+                    .legal_encodings(&func, &func.dfg[inst], func.dfg.ctrl_typevar(inst))
+                    .map(|e| encinfo.display(e))
                    .collect::<Vec<_>>();
 
                if encodings.is_empty() {
@@ -93,7 +93,8 @@ impl SubTest for TestDomtree {
 
        // Now we know that everything in `expected` is consistent with `domtree`.
        // All other EBB's should be either unreachable or the entry block.
-        for ebb in func.layout
+        for ebb in func
+            .layout
            .ebbs()
            .skip(1)
            .filter(|ebb| !expected.contains_key(ebb))
@@ -167,7 +167,8 @@ where
                // capable of having the same successor appear
                // multiple times, so we must deduplicate.
                let mut unique = EntitySet::<Ebb>::new();
-                for dest_ebb in self.builder
+                for dest_ebb in self
+                    .builder
                    .func
                    .jump_tables
                    .get(table)
@@ -544,7 +545,8 @@ where
            Some(entry) => self.position.ebb.unwrap() == entry,
        };
        !is_entry && self.func_ctx.ssa.is_sealed(self.position.ebb.unwrap())
-            && !self.func_ctx
+            && !self
+                .func_ctx
                .ssa
                .has_any_predecessors(self.position.ebb.unwrap())
    }
@@ -103,7 +103,8 @@ impl<Variable> BlockData<Variable> {
            BlockData::EbbHeader(ref mut data) => {
                // This a linear complexity operation but the number of predecessors is low
                // in all non-pathological cases
-                let pred: usize = data.predecessors
+                let pred: usize = data
+                    .predecessors
                    .iter()
                    .position(|&PredBlock { branch, .. }| branch == inst)
                    .expect("the predecessor you are trying to remove is not declared");
@@ -597,7 +598,8 @@ where
        } in &mut preds
        {
            // We already did a full `use_var` above, so we can do just the fast path.
-            let pred_val = self.variables
+            let pred_val = self
+                .variables
                .get(temp_arg_var)
                .unwrap()
                .get(*pred_block)
@@ -526,7 +526,8 @@ where
            "imported data cannot contain references"
        );
        self.backend.write_data_funcaddr(
-            &mut info.compiled
+            &mut info
+                .compiled
                .as_mut()
                .expect("`data` must refer to a defined data object"),
            offset,
@@ -549,7 +550,8 @@ where
            "imported data cannot contain references"
        );
        self.backend.write_data_dataaddr(
-            &mut info.compiled
+            &mut info
+                .compiled
                .as_mut()
                .expect("`data` must refer to a defined data object"),
            offset,
@@ -644,10 +644,12 @@ impl<'a> Parser<'a> {
        if let Some(Token::Name(name)) = self.token() {
            self.consume();
            match isa {
-                Some(isa) => isa.register_info()
+                Some(isa) => isa
+                    .register_info()
                    .parse_regunit(name)
                    .ok_or_else(|| self.error("invalid register name")),
-                None => name.parse()
+                None => name
+                    .parse()
                    .map_err(|_| self.error("invalid register number")),
            }
        } else {
@@ -1032,7 +1034,8 @@ impl<'a> Parser<'a> {
                self.parse_jump_table_decl()
                    .and_then(|(jt, dat)| ctx.add_jt(jt, dat, self.loc))
            }
-            Some(Token::Identifier("stack_limit")) => self.parse_stack_limit_decl()
+            Some(Token::Identifier("stack_limit")) => self
+                .parse_stack_limit_decl()
                .and_then(|gv| ctx.set_stack_limit(gv, self.loc)),
            // More to come..
            _ => return Ok(()),
@@ -1053,7 +1056,8 @@ impl<'a> Parser<'a> {
        let kind = self.match_enum("expected stack slot kind")?;
 
        // stack-slot-decl ::= StackSlot(ss) "=" stack-slot-kind * Bytes {"," stack-slot-flag}
-        let bytes: i64 = self.match_imm64("expected byte-size in stack_slot decl")?
+        let bytes: i64 = self
+            .match_imm64("expected byte-size in stack_slot decl")?
            .into();
        if bytes < 0 {
            return err!(self.loc, "negative stack slot size");
@@ -1708,7 +1712,8 @@ impl<'a> Parser<'a> {
        }
 
        if let Some(result_locations) = result_locations {
-            for (&value, loc) in ctx.function
+            for (&value, loc) in ctx
+                .function
                .dfg
                .inst_results(inst)
                .iter()
@@ -122,7 +122,8 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
        code_size: u32,
    ) -> ModuleResult<Self::CompiledFunction> {
        let size = code_size as usize;
-        let ptr = self.code_memory
+        let ptr = self
+            .code_memory
            .allocate(size)
            .expect("TODO: handle OOM etc.");
        let mut reloc_sink = SimpleJITRelocSink::new();
@@ -155,10 +156,12 @@ impl<'simple_jit_backend> Backend for SimpleJITBackend {
 
        let size = init.size();
        let storage = match writable {
-            Writability::Readonly => self.writable_memory
+            Writability::Readonly => self
+                .writable_memory
                .allocate(size)
                .expect("TODO: handle OOM etc."),
-            Writability::Writable => self.writable_memory
+            Writability::Writable => self
+                .writable_memory
                .allocate(size)
                .expect("TODO: handle OOM etc."),
        };
@@ -210,7 +210,8 @@ impl<'dummy_environment> FuncEnvironment for DummyFuncEnvironment<'dummy_environ
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        // Pass the current function's vmctx parameter on to the callee.
-        let vmctx = pos.func
+        let vmctx = pos
+            .func
            .special_param(ir::ArgumentPurpose::VMContext)
            .expect("Missing vmctx parameter");
 
@@ -236,7 +237,8 @@ impl<'dummy_environment> FuncEnvironment for DummyFuncEnvironment<'dummy_environ
        args.extend(call_args.iter().cloned(), &mut pos.func.dfg.value_lists);
        args.push(vmctx, &mut pos.func.dfg.value_lists);
 
-        Ok(pos.ins()
+        Ok(pos
+            .ins()
            .CallIndirect(ir::Opcode::CallIndirect, VOID, sig_ref, args)
            .0)
    }
@@ -249,7 +251,8 @@ impl<'dummy_environment> FuncEnvironment for DummyFuncEnvironment<'dummy_environ
        call_args: &[ir::Value],
    ) -> WasmResult<ir::Inst> {
        // Pass the current function's vmctx parameter on to the callee.
-        let vmctx = pos.func
+        let vmctx = pos
+            .func
            .special_param(ir::ArgumentPurpose::VMContext)
            .expect("Missing vmctx parameter");
 
@@ -248,10 +248,8 @@ mod tests {
    // )
    const BODY: [u8; 7] = [
        0x00, // local decl count
-        0x20,
-        0x00, // get_local 0
-        0x41,
-        0x01, // i32.const 1
+        0x20, 0x00, // get_local 0
+        0x41, 0x01, // i32.const 1
        0x6a, // i32.add
        0x0b, // end
    ];
@@ -280,10 +278,8 @@ mod tests {
    // )
    const BODY: [u8; 8] = [
        0x00, // local decl count
-        0x20,
-        0x00, // get_local 0
-        0x41,
-        0x01, // i32.const 1
+        0x20, 0x00, // get_local 0
+        0x41, 0x01, // i32.const 1
        0x6a, // i32.add
        0x0f, // return
        0x0b, // end
@@ -318,19 +314,13 @@ mod tests {
    // )
    const BODY: [u8; 16] = [
        0x01, // 1 local decl.
-        0x01,
-        0x7f, // 1 i32 local.
-        0x03,
-        0x7f, // loop i32
-        0x20,
-        0x00, // get_local 0
-        0x41,
-        0x01, // i32.const 0
+        0x01, 0x7f, // 1 i32 local.
+        0x03, 0x7f, // loop i32
+        0x20, 0x00, // get_local 0
+        0x41, 0x01, // i32.const 0
        0x6a, // i32.add
-        0x21,
-        0x00, // set_local 0
-        0x0c,
-        0x00, // br 0
+        0x21, 0x00, // set_local 0
+        0x0c, 0x00, // br 0
        0x0b, // end
        0x0b, // end
    ];
@@ -412,9 +412,11 @@ pub fn parse_code_section<'data>(
        }
        let mut reader = parser.create_binary_reader();
        let size = reader.bytes_remaining();
-        environ.define_function_body(reader
-            .read_bytes(size)
-            .map_err(WasmError::from_binary_reader_error)?)?;
+        environ.define_function_body(
+            reader
+                .read_bytes(size)
+                .map_err(WasmError::from_binary_reader_error)?,
+        )?;
    }
    Ok(())
 }
@@ -282,7 +282,8 @@ impl TranslationState {
        environ: &mut FE,
    ) -> GlobalVariable {
        let index = index as GlobalIndex;
-        *self.globals
+        *self
+            .globals
            .entry(index)
            .or_insert_with(|| environ.make_global(func, index))
    }
@@ -296,7 +297,8 @@ impl TranslationState {
        environ: &mut FE,
    ) -> ir::Heap {
        let index = index as MemoryIndex;
-        *self.heaps
+        *self
+            .heaps
            .entry(index)
            .or_insert_with(|| environ.make_heap(func, index))
    }