Fix typos.
committed by Dan Gohman
parent 4224a95f0d
commit 4f8753fa11
@@ -42,7 +42,7 @@ class ClifObject(ObjectDescription):
 
     def add_target_and_index(self, name, sig, signode):
         """
-        Add ``name`` the the index.
+        Add ``name`` to the index.
 
         :param name: The object name returned by :func:`handle_signature`.
         :param sig: The signature text.
@@ -76,7 +76,7 @@ def parse_type(name, signode):
     """
     Parse a type with embedded type vars and append to signode.
 
-    Return a a string that can be compiled into a regular expression matching
+    Return a string that can be compiled into a regular expression matching
     the type.
     """
 
@@ -193,7 +193,7 @@ ebb0:
 ; Comparisons.
 ;
 ; Only `supported_floatccs` are tested here. Others are handled by
-; legalization paterns.
+; legalization patterns.
 
 ; asm: ucomiss %xmm2, %xmm5
 ; asm: setnp %bl
@@ -434,7 +434,7 @@ ebb0:
 ; Comparisons.
 ;
 ; Only `supported_floatccs` are tested here. Others are handled by
-; legalization paterns.
+; legalization patterns.
 
 ; asm: ucomisd %xmm2, %xmm5
 ; asm: setnp %bl
@@ -206,7 +206,7 @@ ebb0:
 ; Comparisons.
 ;
 ; Only `supported_floatccs` are tested here. Others are handled by
-; legalization paterns.
+; legalization patterns.
 
 ; asm: ucomiss %xmm10, %xmm5
 ; asm: setnp %bl
@@ -469,7 +469,7 @@ ebb0:
 ; Comparisons.
 ;
 ; Only `supported_floatccs` are tested here. Others are handled by
-; legalization paterns.
+; legalization patterns.
 
 ; asm: ucomisd %xmm10, %xmm5
 ; asm: setnp %bl
@@ -476,7 +476,7 @@ impl<F: Forest> Path<F> {
         match status {
             Removed::Healthy => {}
             Removed::Rightmost => {
-                // The rightmost entry was removed from the curent node, so move the path so it
+                // The rightmost entry was removed from the current node, so move the path so it
                 // points at the first entry of the next node at this level.
                 debug_assert_eq!(
                     usize::from(self.entry[level]),
@@ -2,7 +2,7 @@
 Cranelift DSL classes.
 
 This module defines the classes that are used to define Cranelift instructions
-and other entitties.
+and other entities.
 """
 from __future__ import absolute_import
 import re
@@ -2,7 +2,7 @@
 Abstract syntax trees.
 
 This module defines classes that can be used to create abstract syntax trees
-for patern matching an rewriting of cranelift instructions.
+for pattern matching an rewriting of cranelift instructions.
 """
 from __future__ import absolute_import
 from . import instructions
@@ -79,7 +79,7 @@ class Def(object):
         # type: (VarAtomMap) -> Def
         """
         Return a copy of this Def with vars replaced with fresh variables,
-        in accordance with the map m. Update m as neccessary.
+        in accordance with the map m. Update m as necessary.
         """
         new_expr = self.expr.copy(m)
         new_defs = [] # type: List[Var]
@@ -423,7 +423,7 @@ class Apply(Expr):
         # type: (VarAtomMap) -> Apply
         """
         Return a copy of this Expr with vars replaced with fresh variables,
-        in accordance with the map m. Update m as neccessary.
+        in accordance with the map m. Update m as necessary.
         """
         return Apply(self.inst, tuple(map(lambda e: replace_var(e, m),
                                           self.args)))
@@ -441,7 +441,7 @@ class Apply(Expr):
     def substitution(self, other, s):
         # type: (Apply, VarAtomMap) -> Optional[VarAtomMap]
         """
-        If there is a substituion from Var->Atom that converts self to other,
+        If there is a substitution from Var->Atom that converts self to other,
         return it, otherwise return None. Note that this is strictly weaker
         than unification (see TestXForm.test_subst_enum_bad_var_const for
         example).
@@ -513,7 +513,7 @@ class ConstantInt(Literal):
     A value of an integer immediate operand.
 
     Immediate operands like `imm64` or `offset32` can be specified in AST
-    expressions using the call syntax: `imm64(5)` which greates a `ConstantInt`
+    expressions using the call syntax: `imm64(5)` which creates a `ConstantInt`
     node.
     """
 
@@ -502,7 +502,7 @@ class TestXForm(TypeCheckingBaseTest):
             # For any patterns where the type env includes constraints, at
             # least one of the "theoretically possible" concrete typings must
             # be prevented by the constraints. (i.e. we are not emitting
-            # unneccessary constraints).
+            # unnecessary constraints).
             # We check that by asserting that the number of concrete typings is
             # less than the number of all possible free typevar assignments
             if (len(xform.ti.constraints) > 0):
@@ -233,7 +233,7 @@ class TestTypeVar(TestCase):
         intersect = ts1.copy()
         intersect &= ts2
 
-        # Propagate instersections backward
+        # Propagate intersections backward
         ts1_src = reduce(lambda ts, func: ts.preimage(func),
                          reversed(i1),
                          intersect)
@@ -276,7 +276,7 @@ class SameWidth(TypeConstraint):
 
 class TypeEnv(object):
     """
-    Class encapsulating the neccessary book keeping for type inference.
+    Class encapsulating the necessary book keeping for type inference.
     :attribute type_map: dict holding the equivalence relations between tvs
     :attribute constraints: a list of accumulated constraints - tuples
             (tv1, tv2)) where tv1 and tv2 are equal
@@ -331,7 +331,7 @@ class TypeEnv(object):
         """
         Record a that the free tv1 is part of the same equivalence class as
         tv2. The canonical representative of the merged class is tv2's
-        cannonical representative.
+        canonical representative.
         """
         assert not tv1.is_derived
         assert self[tv1] == tv1
@@ -376,7 +376,7 @@ class TypeEnv(object):
         non-derived TVs implicitly get the lowest rank (0). Derived variables
         get their rank from their free typevar. Singletons have the highest
         rank. TVs associated with vars in a source pattern have a higher rank
-        than TVs associted with temporary vars.
+        than TVs associated with temporary vars.
         """
         default_rank = TypeEnv.RANK_INTERNAL if tv.singleton_type() is None \
             else TypeEnv.RANK_SINGLETON
@@ -837,7 +837,7 @@ def ti_def(definition, typ):
     fresh_formal_tvs = move_first(fresh_formal_tvs, idx)
     actual_tvs = move_first(actual_tvs, idx)
 
-    # Unify each actual typevar with the correpsonding fresh formal tv
+    # Unify each actual typevar with the corresponding fresh formal tv
     for (actual_tv, formal_tv) in zip(actual_tvs, fresh_formal_tvs):
         typ_or_err = unify(actual_tv, formal_tv, typ)
         err = get_error(typ_or_err)
@@ -51,7 +51,7 @@ class Rtl(object):
         # type: (VarAtomMap) -> Rtl
         """
         Return a copy of this rtl with all Vars substituted with copies or
-        according to m. Update m as neccessary.
+        according to m. Update m as necessary.
         """
         return Rtl(*[d.copy(m) for d in self.rtl])
 
@@ -32,10 +32,10 @@ except ImportError:
 def get_runtime_typechecks(xform):
     # type: (XForm) -> List[TypeConstraint]
     """
-    Given a XForm build a list of runtime type checks neccessary to determine
+    Given a XForm build a list of runtime type checks necessary to determine
     if it applies. We have 2 types of runtime checks:
         1) typevar tv belongs to typeset T - needed for free tvs whose
-           typeset is constrainted by their use in the dst pattern
+           typeset is constrained by their use in the dst pattern
 
         2) tv1 == tv2 where tv1 and tv2 are derived TVs - caused by unification
            of non-bijective functions
@@ -201,7 +201,7 @@ def equivalent(r1, r2, inp_m, out_m):
         assert isinstance(v2, Var)
         results_eq_exp.append(mk_eq(m1[v1], m2[v2]))
 
-    # Put the whole query toghether
+    # Put the whole query together
    return q1 + q2 + args_eq_exp + [Not(And(*results_eq_exp))]
 
 
@@ -49,7 +49,7 @@ pub enum Reloc {
 
 impl fmt::Display for Reloc {
     /// Display trait implementation drops the arch, since its used in contexts where the arch is
-    /// already unambigious, e.g. clif syntax with isa specified. In other contexts, use Debug.
+    /// already unambiguous, e.g. clif syntax with isa specified. In other contexts, use Debug.
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
             Reloc::Abs4 => write!(f, "Abs4"),
@@ -505,7 +505,7 @@ impl DominatorTree {
 /// - An ordering of EBBs according to a dominator tree pre-order.
 /// - Constant time dominance checks at the EBB granularity.
 ///
-/// The information in this auxillary data structure is not easy to update when the control flow
+/// The information in this auxiliary data structure is not easy to update when the control flow
 /// graph changes, which is why it is kept separate.
 pub struct DominatorTreePreorder {
     nodes: SecondaryMap<Ebb, ExtraNode>,
@@ -162,7 +162,7 @@ where
     {
         let dfg = self.inserter.data_flow_graph_mut();
         inst = dfg.make_inst(data);
-        // Make an `Interator<Item = Option<Value>>`.
+        // Make an `Iterator<Item = Option<Value>>`.
         let ru = self.reuse.as_ref().iter().cloned();
         dfg.make_inst_results_reusing(inst, ctrl_typevar, ru);
     }
@@ -92,8 +92,8 @@ impl MemFlags {
 
     /// Test if the `readonly` flag is set.
     ///
-    /// Loads with this flag have no memory dependendies.
-    /// This results in indefined behavior if the dereferenced memory is mutated at any time
+    /// Loads with this flag have no memory dependencies.
+    /// This results in undefined behavior if the dereferenced memory is mutated at any time
     /// between when the function is called and when it is exited.
     pub fn readonly(self) -> bool {
         self.read(FlagBit::Readonly)
@@ -118,7 +118,7 @@ impl LoopAnalysis {
         self.valid
     }
 
-    /// Clear all the data structures contanted in the loop analysis. This will leave the
+    /// Clear all the data structures contained in the loop analysis. This will leave the
     /// analysis in a similar state to a context returned by `new()` except that allocated
     /// memory be retained.
     pub fn clear(&mut self) {
@@ -191,7 +191,7 @@ impl LoopAnalysis {
         let mut node_loop_parent_option = self.loops[node_loop].parent;
         while let Some(node_loop_parent) = node_loop_parent_option.expand() {
             if node_loop_parent == lp {
-                // We have encounterd lp so we stop (already visited)
+                // We have encountered lp so we stop (already visited)
                 break;
             } else {
                 //
@@ -157,7 +157,7 @@ mod details {
                 continue;
             }
 
-    // Write a duration as secs.milis, trailing space.
+    // Write a duration as secs.millis, trailing space.
    fn fmtdur(mut dur: Duration, f: &mut fmt::Formatter) -> fmt::Result {
        // Round to nearest ms by adding 500us.
        dur += Duration::new(0, 500_000);
@@ -377,7 +377,7 @@ fn write_instruction(
     write_operands(w, &func.dfg, isa, inst)?;
     writeln!(w)?;
 
-    // Value aliases come out on lines after the instruction defining the referrent.
+    // Value aliases come out on lines after the instruction defining the referent.
     for r in func.dfg.inst_results(inst) {
         write_value_aliases(w, aliases, *r, indent)?;
     }
@@ -1,7 +1,7 @@
 //! A double-ended iterator over entity references.
 //!
-//! When `std::iter::Step` is stablized, `Keys` could be implemented as a wrapper around
-//! `std::ops::Range`, but for now, we implment it manually.
+//! When `std::iter::Step` is stabilized, `Keys` could be implemented as a wrapper around
+//! `std::ops::Range`, but for now, we implement it manually.
 
 use std::marker::PhantomData;
 use EntityRef;
@@ -107,7 +107,7 @@ impl Backend for FaerieBackend {
     type CompiledFunction = FaerieCompiledFunction;
     type CompiledData = FaerieCompiledData;
 
-    // There's no need to return invidual artifacts; we're writing them into
+    // There's no need to return individual artifacts; we're writing them into
    // the output file instead.
     type FinalizedFunction = ();
     type FinalizedData = ();
@@ -580,7 +580,7 @@ impl<'a> FunctionBuilder<'a> {
         self.ins().call(libc_memcpy, &[dest, src, size]);
     }
 
-    /// Optimised memcpy for small copys.
+    /// Optimised memcpy for small copies.
     pub fn emit_small_memcpy(
         &mut self,
         config: TargetFrontendConfig,
@@ -1 +1 @@
-This crate performes early-stage optimizations on [Cranelift](https://crates.io/crates/cranelift) IR.
+This crate performs early-stage optimizations on [Cranelift](https://crates.io/crates/cranelift) IR.
@@ -1,4 +1,4 @@
-//! Performes early-stage optimizations on Cranelift IR.
+//! Performs early-stage optimizations on Cranelift IR.
 
 #![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
 #![warn(unused_import_braces)]
@@ -2034,7 +2034,7 @@ impl<'a> Parser<'a> {
                     opcode
                 );
             }
-            // Treat it as a syntax error to speficy a typevar on a non-polymorphic opcode.
+            // Treat it as a syntax error to specify a typevar on a non-polymorphic opcode.
         } else if ctrl_type != INVALID {
             return err!(self.loc, "{} does not take a typevar", opcode);
         }
@@ -2080,7 +2080,7 @@ impl<'a> Parser<'a> {
         Ok(args)
     }
 
-    // Parse an optional value list enclosed in parantheses.
+    // Parse an optional value list enclosed in parentheses.
     fn parse_opt_value_list(&mut self) -> ParseResult<VariableArgs> {
         if !self.optional(Token::LPar) {
             return Ok(VariableArgs::new());
@@ -897,7 +897,7 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
 #[cfg_attr(feature = "cargo-clippy", allow(unneeded_field_pattern))]
 /// Deals with a Wasm instruction located in an unreachable portion of the code. Most of them
 /// are dropped but special ones like `End` or `Else` signal the potential end of the unreachable
-/// portion so the translation state muts be updated accordingly.
+/// portion so the translation state must be updated accordingly.
 fn translate_unreachable_operator(
     op: &Operator,
     builder: &mut FunctionBuilder,