Use clippy (#276)
* cton-util: fix some clippy unnecessary pass-by-value warnings
* clippy: ignore too_many_arguments / cyclomatic_complexity in the cton-util module; since these functions take arguments coming from the command line, I don't think this is actually a valid lint: morally, the arguments all come from one structure
* cton-util: take care of remaining clippy warnings
* cton-reader: fix all non-suspicious clippy warnings
* cton-reader: disable clippy at the site of a suspicious lint
* cton-frontend: disable clippy at the site of an invalid lint
* cton-frontend: fix clippy warnings, or ignore benign ones
* clippy: ignore the camel-case word WebAssembly in docs
* cton-wasm: fix clippy complaints or ignore benign ones
* cton-wasm tests: fix clippy complaints
* cretonne: as a starting point, turn off all clippy warnings
* cretonne: clippy fixes, or lower allow() to the source of the problem
* cretonne: more clippy fixes
* cretonne: fix or disable the needless_lifetimes lint; this lint is buggy when the declared lifetime is used for another type constraint
* cretonne: fix clippy complaint about Pass::NoPass
* rustfmt
* fix the previous minor API changes that clippy suggested
* add clippy to test-all
* cton-filetests: clippy fixes
* simplify clippy reporting in test-all
* cretonne: document clippy allows better
* cretonne: fix some more clippy lints
* cretonne: fix clippy lints (mostly doc comments)
* cretonne: allow all needless_lifetimes clippy warnings; remove the overrides at the false positives
* rustfmt
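The pattern used throughout the diff is to scope clippy allow attributes behind the `cargo-clippy` cfg, either crate-wide (for lints that fire on generated code) or directly at the site of a false positive, so they have no effect on a normal build. Below is a minimal standalone sketch of that pattern; `copy_prefix` is a made-up example function, not code from this repository, and newer clippy versions expect `clippy::`-prefixed lint names rather than the bare names used at the time of this commit.

// Crate-level allows for lints that are noisy in generated code, active only
// when clippy itself is running (compare the lib.rs hunk below).
#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments, cyclomatic_complexity))]

// Site-level allow, scoped to the one item where the lint is a false positive.
#[cfg_attr(feature = "cargo-clippy", allow(needless_range_loop))]
fn copy_prefix(dst: &mut [u32], src: &[u32], n: usize) {
    // Indexing both slices by `i` is intentional, so `needless_range_loop` is noise here.
    for i in 0..n {
        dst[i] = src[i];
    }
}

fn main() {
    let mut dst = [0u32; 4];
    copy_prefix(&mut dst, &[1, 2, 3, 4], 3);
    assert_eq!(dst, [1, 2, 3, 0]);
}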
@@ -437,7 +437,7 @@ impl<F: Forest> Path<F> {
         // Discard the root node if it has shrunk to a single sub-tree.
         let mut ns = 0;
-        while let &NodeData::Inner { size: 0, ref tree, .. } = &pool[self.node[ns]] {
+        while let NodeData::Inner { size: 0, ref tree, .. } = pool[self.node[ns]] {
             ns += 1;
             self.node[ns] = tree[0];
         }
@@ -529,12 +529,11 @@ impl<F: Forest> Path<F> {
             // current entry[level] was one off the end of the node, it will now point at a proper
             // entry.
             debug_assert!(usize::from(self.entry[level]) < pool[self.node[level]].entries());
-        } else {
+        } else if usize::from(self.entry[level]) >= pool[self.node[level]].entries() {
             // There's no right sibling at this level, so the node can't be rebalanced.
             // Check if we are in an off-the-end position.
-            if usize::from(self.entry[level]) >= pool[self.node[level]].entries() {
-                self.size = 0;
-            }
+            self.size = 0;
         }
     }
@@ -57,6 +57,7 @@ impl<F: Forest> NodePool<F> {
     pub fn free_tree(&mut self, node: Node) {
         if let NodeData::Inner { size, tree, .. } = self[node] {
             // Note that we have to capture `tree` by value to avoid borrow checker trouble.
+            #[cfg_attr(feature = "cargo-clippy", allow(needless_range_loop))]
             for i in 0..usize::from(size + 1) {
                 // Recursively free sub-trees. This recursion can never be deeper than `MAX_PATH`,
                 // and since most trees have less than a handful of nodes, it is worthwhile to
@@ -76,14 +76,13 @@ pub fn relax_branches(func: &mut Function, isa: &TargetIsa) -> Result<CodeOffset
         if let Some(range) = encinfo.branch_range(enc) {
             if let Some(dest) = cur.func.dfg[inst].branch_destination() {
                 let dest_offset = cur.func.offsets[dest];
-                if !range.contains(offset, dest_offset) {
-                    // This is an out-of-range branch.
-                    // Relax it unless the destination offset has not been computed yet.
-                    if dest_offset != 0 || Some(dest) == cur.func.layout.entry_block() {
-                        offset +=
-                            relax_branch(&mut cur, offset, dest_offset, &encinfo, isa);
-                        continue;
-                    }
+                // This could be an out-of-range branch.
+                // Relax it unless the destination offset has not been computed yet.
+                if !range.contains(offset, dest_offset) &&
+                    (dest_offset != 0 || Some(dest) == cur.func.layout.entry_block())
+                {
+                    offset += relax_branch(&mut cur, offset, dest_offset, &encinfo, isa);
+                    continue;
                 }
             }
         }
@@ -132,12 +132,12 @@ impl Context {
     }

     /// Run the locations verifier on the function.
-    pub fn verify_locations<'a>(&self, isa: &TargetIsa) -> verifier::Result {
+    pub fn verify_locations(&self, isa: &TargetIsa) -> verifier::Result {
         verifier::verify_locations(isa, &self.func, None)
     }

     /// Run the locations verifier only if the `enable_verifier` setting is true.
-    pub fn verify_locations_if<'a>(&self, isa: &TargetIsa) -> CtonResult {
+    pub fn verify_locations_if(&self, isa: &TargetIsa) -> CtonResult {
         if isa.flags().enable_verifier() {
             self.verify_locations(isa).map_err(Into::into)
         } else {
@@ -744,8 +744,9 @@ impl<'c, 'f> ir::InstInserterBase<'c> for &'c mut EncCursor<'f> {
         if !self.srcloc.is_default() {
             self.func.srclocs[inst] = self.srcloc;
         }

         // Assign an encoding.
         // XXX Is there a way to describe this error to the user?
+        #[cfg_attr(feature = "cargo-clippy", allow(match_wild_err_arm))]
         match self.isa.encode(
             &self.func.dfg,
             &self.func.dfg[inst],
@@ -16,7 +16,7 @@ const TESTCASE_NAME_LENGTH: usize = 16;
 /// to keep track of a symbol table.
 ///
 /// External names are primarily used as keys by code using Cretonne to map
-/// from a cretonne::ir::FuncRef or similar to additional associated data.
+/// from a `cretonne::ir::FuncRef` or similar to additional associated data.
 ///
 /// External names can also serve as a primitive testing and debugging tool.
 /// In particular, many `.cton` test files use function names to identify
@@ -428,7 +428,7 @@ impl Layout {
     }

     /// Return an iterator over all EBBs in layout order.
-    pub fn ebbs<'f>(&'f self) -> Ebbs<'f> {
+    pub fn ebbs(&self) -> Ebbs {
         Ebbs {
             layout: self,
             next: self.first_ebb,
@@ -611,7 +611,7 @@ impl Layout {
     }

     /// Iterate over the instructions in `ebb` in layout order.
-    pub fn ebb_insts<'f>(&'f self, ebb: Ebb) -> Insts<'f> {
+    pub fn ebb_insts(&self, ebb: Ebb) -> Insts {
         Insts {
             layout: self,
             head: self.ebbs[ebb].first_inst.into(),
@@ -4,13 +4,44 @@
         trivial_numeric_casts,
         unused_extern_crates)]

+#![cfg_attr(feature="clippy",
+            plugin(clippy(conf_file="../../clippy.toml")))]
+
+#![cfg_attr(feature="cargo-clippy", allow(
+                // Rustfmt 0.9.0 is at odds with this lint:
+                block_in_if_condition_stmt,
+                // Produces only a false positive:
+                while_let_loop,
+                // Produces many false positives, but did produce some valid lints, now fixed:
+                needless_lifetimes,
+                // Generated code makes some style transgressions, but readability doesn't suffer much:
+                many_single_char_names,
+                identity_op,
+                needless_borrow,
+                cast_lossless,
+                unreadable_literal,
+                assign_op_pattern,
+                empty_line_after_outer_attr,
+                // Hard to avoid in generated code:
+                cyclomatic_complexity,
+                too_many_arguments,
+                // Code generator doesn't have a way to collapse identical arms:
+                match_same_arms,
+                // These are relatively minor style issues, but would be easy to fix:
+                new_without_default,
+                new_without_default_derive,
+                should_implement_trait,
+                redundant_field_names,
+                useless_let_if_seq,
+                len_without_is_empty))]
+
 pub use context::Context;
 pub use legalizer::legalize_function;
 pub use verifier::verify_function;
 pub use write::write_function;

 /// Version number of the cretonne crate.
-pub const VERSION: &'static str = env!("CARGO_PKG_VERSION");
+pub const VERSION: &str = env!("CARGO_PKG_VERSION");

 #[macro_use]
 pub mod dbg;
@@ -150,6 +150,7 @@ fn remove_loop_invariant_instructions(
             loop_values.insert(*val);
         }
         pos.goto_top(*ebb);
+        #[cfg_attr(feature = "cargo-clippy", allow(block_in_if_condition_stmt))]
         while let Some(inst) = pos.next_inst() {
             if pos.func.dfg.has_results(inst) &&
                 pos.func.dfg.inst_args(inst).into_iter().all(|arg| {
@@ -6,7 +6,7 @@
 /// The order of elements is not preserved, unless the slice is already partitioned.
 ///
 /// Returns the number of elements where `p(t)` is true.
-pub fn partition_slice<'a, T: 'a, F>(s: &'a mut [T], mut p: F) -> usize
+pub fn partition_slice<T, F>(s: &mut [T], mut p: F) -> usize
 where
     F: FnMut(&T) -> bool,
 {
@@ -7,7 +7,7 @@
 //! bound is implemented by all the native integer types as well as `Imm64`.
 //!
 //! Some of these predicates may be unused in certain ISA configurations, so we suppress the
-//! dead_code warning.
+//! dead code warning.

 /// Check that `x` is the same as `y`.
 #[allow(dead_code)]
@@ -114,7 +114,7 @@ fn package_up_divrem_info(
 fn get_div_info(inst: Inst, dfg: &DataFlowGraph) -> Option<DivRemByConstInfo> {
     let idata: &InstructionData = &dfg[inst];

-    if let &InstructionData::BinaryImm { opcode, arg, imm } = idata {
+    if let InstructionData::BinaryImm { opcode, arg, imm } = *idata {
         let (isSigned, isRem) = match opcode {
             Opcode::UdivImm => (false, false),
             Opcode::UremImm => (false, true),
@@ -132,7 +132,7 @@ fn get_div_info(inst: Inst, dfg: &DataFlowGraph) -> Option<DivRemByConstInfo> {
     // that some previous constant propagation pass has pushed all such
     // immediates to their use points, creating BinaryImm instructions
     // instead? For now we take the conservative approach.
-    if let &InstructionData::Binary { opcode, args } = idata {
+    if let InstructionData::Binary { opcode, args } = *idata {
         let (isSigned, isRem) = match opcode {
             Opcode::Udiv => (false, false),
             Opcode::Urem => (false, true),
@@ -484,7 +484,7 @@ fn get_const(value: Value, dfg: &DataFlowGraph) -> Option<i64> {
     match dfg.value_def(value) {
         ValueDef::Result(definingInst, resultNo) => {
             let definingIData: &InstructionData = &dfg[definingInst];
-            if let &InstructionData::UnaryImm { opcode, imm } = definingIData {
+            if let InstructionData::UnaryImm { opcode, imm } = *definingIData {
                 if opcode == Opcode::Iconst && resultNo == 0 {
                     return Some(imm.into());
                 }
@@ -10,7 +10,7 @@ use std::fmt::Write;
 pub fn pretty_verifier_error(
     func: &ir::Function,
     isa: Option<&TargetIsa>,
-    err: verifier::Error,
+    err: &verifier::Error,
 ) -> String {
     let mut msg = err.to_string();
     match err.location {
@@ -26,7 +26,7 @@ pub fn pretty_verifier_error(
 /// Pretty-print a Cretonne error.
 pub fn pretty_error(func: &ir::Function, isa: Option<&TargetIsa>, err: CtonError) -> String {
     if let CtonError::Verifier(e) = err {
-        pretty_verifier_error(func, isa, e)
+        pretty_verifier_error(func, isa, &e)
     } else {
         err.to_string()
     }
@@ -1,6 +1,6 @@
 //! Functions for converting a reference into a singleton slice.
 //!
-//! See also the ref_slice crate on crates.io.
+//! See also the [`ref_slice` crate](https://crates.io/crates/ref_slice).
 //!
 //! We define the functions here to avoid external dependencies, and to ensure that they are
 //! inlined in this crate.
@@ -197,9 +197,9 @@ impl<'a> fmt::Display for DisplayAllocatableSet<'a> {
                     "{}",
                     bank.names
                         .get(offset as usize)
-                        .and_then(|name| name.chars().skip(1).next())
-                        .unwrap_or(
-                            char::from_digit(u32::from(offset % 10), 10).unwrap(),
+                        .and_then(|name| name.chars().nth(1))
+                        .unwrap_or_else(
+                            || char::from_digit(u32::from(offset % 10), 10).unwrap(),
                         )
                 )?;
             }
@@ -276,33 +276,31 @@ impl<PO: ProgramOrder> GenLiveRange<PO> {
             } else {
                 return first_time_livein;
             }
-        } else {
+        } else if let Some((_, end)) = c.prev() {
             // There's no interval beginning at `ebb`, but we could still be live-in at `ebb` with
             // a coalesced interval that begins before and ends after.
-            if let Some((_, end)) = c.prev() {
-                if order.cmp(end, ebb) == Ordering::Greater {
-                    // Yep, the previous interval overlaps `ebb`.
-                    first_time_livein = false;
-                    if order.cmp(end, to) == Ordering::Less {
-                        *c.value_mut().unwrap() = to;
-                    } else {
-                        return first_time_livein;
-                    }
+            if order.cmp(end, ebb) == Ordering::Greater {
+                // Yep, the previous interval overlaps `ebb`.
+                first_time_livein = false;
+                if order.cmp(end, to) == Ordering::Less {
+                    *c.value_mut().unwrap() = to;
                 } else {
-                    first_time_livein = true;
-                    // The current interval does not overlap `ebb`, but it may still be possible to
-                    // coalesce with it.
-                    if order.is_ebb_gap(end, ebb) {
-                        *c.value_mut().unwrap() = to;
-                    } else {
-                        c.insert(ebb, to);
-                    }
+                    return first_time_livein;
                 }
             } else {
-                // There is no existing interval before `ebb`.
                 first_time_livein = true;
-                c.insert(ebb, to);
+                // The current interval does not overlap `ebb`, but it may still be possible to
+                // coalesce with it.
+                if order.is_ebb_gap(end, ebb) {
+                    *c.value_mut().unwrap() = to;
+                } else {
+                    c.insert(ebb, to);
+                }
             }
+        } else {
+            // There is no existing interval before `ebb`.
+            first_time_livein = true;
+            c.insert(ebb, to);
         }

         // Now `c` is left pointing at an interval that ends in `to`.
@@ -306,14 +306,12 @@ impl<'a> Context<'a> {
         let args = self.cur.func.dfg.inst_args(inst);

         for (argidx, (op, &arg)) in constraints.ins.iter().zip(args).enumerate() {
-            if op.kind != ConstraintKind::Stack {
-                if self.liveness[arg].affinity.is_stack() {
-                    self.candidates.push(ReloadCandidate {
-                        argidx,
-                        value: arg,
-                        regclass: op.regclass,
-                    })
-                }
+            if op.kind != ConstraintKind::Stack && self.liveness[arg].affinity.is_stack() {
+                self.candidates.push(ReloadCandidate {
+                    argidx,
+                    value: arg,
+                    regclass: op.regclass,
+                })
             }
         }
@@ -299,6 +299,7 @@ impl Move {
     }

     /// Get the "from" register and register class, if possible.
+    #[cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))]
     fn from_reg(&self) -> Option<(RegClass, RegUnit)> {
         match *self {
             Move::Reg { rc, from, .. } |
@@ -101,8 +101,10 @@ impl VirtRegs {
     where
         'a: 'b,
     {
-        self.get(*value).map(|vr| self.values(vr)).unwrap_or(
-            ref_slice(value),
+        self.get(*value).map(|vr| self.values(vr)).unwrap_or_else(
+            || {
+                ref_slice(value)
+            },
         )
     }
@@ -371,7 +373,7 @@ impl VirtRegs {
         let vreg = self.get(leader).unwrap_or_else(|| {
             // Allocate a vreg for `leader`, but leave it empty.
             let vr = self.alloc();
-            if let &mut Some(ref mut vec) = &mut new_vregs {
+            if let Some(ref mut vec) = new_vregs {
                 vec.push(vr);
             }
             self.value_vregs[leader] = vr.into();
@@ -1,4 +1,4 @@
-//! ScopedHashMap
+//! `ScopedHashMap`
 //!
 //! This module defines a struct `ScopedHashMap<K, V>` which defines a `HashMap`-like
 //! container that has a concept of scopes that can be entered and exited, such that
@@ -11,7 +11,7 @@ pub use self::details::{TimingToken, PassTimes, take_current, add_to_current};
 //
 // This macro defines:
 //
-// - A C-style enum containing all the pass names and a `NoPass` variant.
+// - A C-style enum containing all the pass names and a `None` variant.
 // - A usize constant with the number of defined passes.
 // - A const array of pass descriptions.
 // - A public function per pass used to start the timing of that pass.
@@ -21,9 +21,9 @@ macro_rules! define_passes {
     } => {
         #[allow(non_camel_case_types)]
         #[derive(Clone, Copy, Debug, PartialEq, Eq)]
-        enum $enum { $($pass,)+ NoPass }
+        enum $enum { $($pass,)+ None}

-        const $num_passes: usize = $enum::NoPass as usize;
+        const $num_passes: usize = $enum::None as usize;

         const $descriptions: [&str; $num_passes] = [ $($desc),+ ];
@@ -164,7 +164,7 @@ mod details {

     /// Information about passes in a single thread.
     thread_local!{
-        static CURRENT_PASS: Cell<Pass> = Cell::new(Pass::NoPass);
+        static CURRENT_PASS: Cell<Pass> = Cell::new(Pass::None);
         static PASS_TIME: RefCell<PassTimes> = RefCell::new(Default::default());
     }
@@ -204,7 +204,7 @@ mod details {
     }

     /// Add `timings` to the accumulated timings for the current thread.
-    pub fn add_to_current(times: PassTimes) {
+    pub fn add_to_current(times: &PassTimes) {
         PASS_TIME.with(|rc| for (a, b) in rc.borrow_mut().pass.iter_mut().zip(
             &times.pass,
         )
@@ -221,7 +221,7 @@ mod test {

     #[test]
     fn display() {
-        assert_eq!(Pass::NoPass.to_string(), "<no pass>");
+        assert_eq!(Pass::None.to_string(), "<no pass>");
         assert_eq!(Pass::regalloc.to_string(), "Register allocation");
     }
 }
@@ -86,16 +86,14 @@ impl<'a> LivenessVerifier<'a> {
                     self.isa.encoding_info().display(encoding)
                 );
             }
-        } else {
+        } else if !lr.affinity.is_none() {
             // A non-encoded instruction can only define ghost values.
-            if !lr.affinity.is_none() {
-                return err!(
-                    inst,
-                    "{} is a real {} value defined by a ghost instruction",
-                    val,
-                    lr.affinity.display(&self.isa.register_info())
-                );
-            }
+            return err!(
+                inst,
+                "{} is a real {} value defined by a ghost instruction",
+                val,
+                lr.affinity.display(&self.isa.register_info())
+            );
         }
     }
@@ -109,16 +107,14 @@ impl<'a> LivenessVerifier<'a> {
                 return err!(inst, "{} is not live at this use", val);
             }

-            if encoding.is_legal() {
-                // A legal instruction is not allowed to depend on ghost values.
-                if lr.affinity.is_none() {
-                    return err!(
-                        inst,
-                        "{} is a ghost value used by a real [{}] instruction",
-                        val,
-                        self.isa.encoding_info().display(encoding)
-                    );
-                }
+            // A legal instruction is not allowed to depend on ghost values.
+            if encoding.is_legal() && lr.affinity.is_none() {
+                return err!(
+                    inst,
+                    "{} is a ghost value used by a real [{}] instruction",
+                    val,
+                    self.isa.encoding_info().display(encoding)
+                );
             }
         }
     }
@@ -34,7 +34,7 @@
 //!   For polymorphic opcodes, determine the controlling type variable first.
 //! - Branches and jumps must pass arguments to destination EBBs that match the
 //!   expected types exactly. The number of arguments must match.
-//! - All EBBs in a jump_table must take no arguments.
+//! - All EBBs in a jump table must take no arguments.
 //! - Function calls are type checked against their signature.
 //! - The entry block must take arguments that match the signature of the current
 //!   function.
@@ -871,50 +871,47 @@ impl<'c, 'f> ir::InstInserterBase<'c> for &'c mut EncCursor<'f> {
     // Check special-purpose type constraints that can't be expressed in the normal opcode
    // constraints.
     fn typecheck_special(&self, inst: Inst, ctrl_type: Type) -> Result {
-        match self.func.dfg[inst] {
-            ir::InstructionData::Unary { opcode, arg } => {
-                let arg_type = self.func.dfg.value_type(arg);
-                match opcode {
-                    Opcode::Bextend | Opcode::Uextend | Opcode::Sextend | Opcode::Fpromote => {
-                        if arg_type.lane_count() != ctrl_type.lane_count() {
-                            return err!(
-                                inst,
-                                "input {} and output {} must have same number of lanes",
-                                arg_type,
-                                ctrl_type
-                            );
-                        }
-                        if arg_type.lane_bits() >= ctrl_type.lane_bits() {
-                            return err!(
-                                inst,
-                                "input {} must be smaller than output {}",
-                                arg_type,
-                                ctrl_type
-                            );
-                        }
+        if let ir::InstructionData::Unary { opcode, arg } = self.func.dfg[inst] {
+            let arg_type = self.func.dfg.value_type(arg);
+            match opcode {
+                Opcode::Bextend | Opcode::Uextend | Opcode::Sextend | Opcode::Fpromote => {
+                    if arg_type.lane_count() != ctrl_type.lane_count() {
+                        return err!(
+                            inst,
+                            "input {} and output {} must have same number of lanes",
+                            arg_type,
+                            ctrl_type
+                        );
                     }
-                    Opcode::Breduce | Opcode::Ireduce | Opcode::Fdemote => {
-                        if arg_type.lane_count() != ctrl_type.lane_count() {
-                            return err!(
-                                inst,
-                                "input {} and output {} must have same number of lanes",
-                                arg_type,
-                                ctrl_type
-                            );
-                        }
-                        if arg_type.lane_bits() <= ctrl_type.lane_bits() {
-                            return err!(
-                                inst,
-                                "input {} must be larger than output {}",
-                                arg_type,
-                                ctrl_type
-                            );
-                        }
+                    if arg_type.lane_bits() >= ctrl_type.lane_bits() {
+                        return err!(
+                            inst,
+                            "input {} must be smaller than output {}",
+                            arg_type,
+                            ctrl_type
+                        );
                     }
-                    _ => {}
                 }
-            }
-            _ => {}
+                Opcode::Breduce | Opcode::Ireduce | Opcode::Fdemote => {
+                    if arg_type.lane_count() != ctrl_type.lane_count() {
+                        return err!(
+                            inst,
+                            "input {} and output {} must have same number of lanes",
+                            arg_type,
+                            ctrl_type
+                        );
+                    }
+                    if arg_type.lane_bits() <= ctrl_type.lane_bits() {
+                        return err!(
+                            inst,
+                            "input {} must be larger than output {}",
+                            arg_type,
+                            ctrl_type
+                        );
+                    }
+                }
+                _ => {}
            }
+        }
         Ok(())
     }