Return a function pointer from TargetIsa::encode().

Replace the isa::Legalize enumeration with a function pointer. This
allows an ISA to define its own specific legalization actions instead of
relying on the default two.
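
For reference, the replacement is a plain function pointer type in isa/mod.rs
(reproduced from the diff below as a sketch of the new shape):

    /// The `Encodings` iterator returns a legalization function to call.
    pub type Legalize = fn(&mut ir::DataFlowGraph,
                           &mut flowgraph::ControlFlowGraph,
                           &mut ir::Cursor)
                           -> bool;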

Generate a LEGALIZE_ACTIONS table for each ISA which contains
legalization function pointers indexed by the legalization codes that
are already in the encoding tables. Include this table in
isa/*/enc_tables.rs.
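
The generated table is just an array of these function pointers. For an ISA whose
encodings only reference the shared `narrow` and `expand` groups it would look
roughly like this (illustrative only: the element count and entries come from the
ISA's legalization codes, and shared groups are referenced through the
`::legalizer` module as produced by `XFormGroup.rust_name()`):

    pub static LEGALIZE_ACTIONS: [isa::Legalize; 2] = [
        ::legalizer::narrow,
        ::legalizer::expand,
    ];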

Give the `Encodings` iterator a reference to the action table and change
its `legalize()` method to return a function pointer instead of an
ISA-specific code.
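
Concretely, the iterator keeps the small legalization code it reads from the
encoding lists and only resolves it through the ISA's action table on demand
(new method body, reproduced from the diff below):

    pub fn legalize(&self) -> Legalize {
        debug_assert_eq!(self.offset, !0, "Premature Encodings::legalize()");
        self.legalize_actions[self.legalize as usize]
    }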

The Result<> returned from TargetIsa::encode() no longer implements
Debug, so eliminate uses of unwrap and expect on that type.
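
Call sites now either invoke the returned action directly or match on the error.
The legalizer's new handling looks roughly like this (the exact expression being
matched is abbreviated here; the match arms are taken from the diff below):

    match isa.encode(&func.dfg, &func.dfg[inst], ty) {
        Ok(encoding) => *func.encodings.ensure(inst) = encoding,
        Err(action) => {
            // The error value is the legalization function to run.
            let changed = action(&mut func.dfg, cfg, &mut pos);
            // ...
        }
    }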
author  Jakob Stoklund Olesen
date    2017-07-27 14:46:56 -07:00
parent  1bbc06e2d6
commit  b04a2c30d2

19 changed files with 140 additions and 102 deletions

View File

@@ -10,6 +10,7 @@ try:
     from typing import Union, Iterator, Sequence, Iterable, List, Dict  # noqa
     from typing import Optional, Set  # noqa
     from .ast import Expr, VarMap  # noqa
+    from .isa import TargetISA  # noqa
     from .ti import TypeConstraint  # noqa
     from .typevar import TypeVar  # noqa
     DefApply = Union[Def, Apply]
@@ -282,17 +283,37 @@ class XForm(object):
 class XFormGroup(object):
     """
     A group of related transformations.
+
+    :param isa: A target ISA whose instructions are allowed.
+    :param chain: A next level group to try if this one doesn't match.
     """
 
-    def __init__(self, name, doc):
-        # type: (str, str) -> None
+    def __init__(self, name, doc, isa=None, chain=None):
+        # type: (str, str, TargetISA, XFormGroup) -> None
         self.xforms = list()  # type: List[XForm]
         self.name = name
         self.__doc__ = doc
+        self.isa = isa
+        self.chain = chain
 
     def __str__(self):
         # type: () -> str
-        return self.name
+        if self.isa:
+            return '{}.{}'.format(self.isa.name, self.name)
+        else:
+            return self.name
+
+    def rust_name(self):
+        # type: () -> str
+        """
+        Get the Rust name of this function implementing this transform.
+        """
+        if self.isa:
+            # This is a function in the same module as the LEGALIZE_ACTION
+            # table referring to it.
+            return self.name
+        else:
+            return '::legalizer::{}'.format(self.name)
 
     def legalize(self, src, dst):
         # type: (Union[Def, Apply], Rtl) -> None

View File

@@ -875,7 +875,7 @@ def gen_isa(isa, fmt):
     emit_recipe_sizing(isa, fmt)
 
     # Finally, tie it all together in an `EncInfo`.
-    with fmt.indented('pub static INFO: EncInfo = EncInfo {', '};'):
+    with fmt.indented('pub static INFO: isa::EncInfo = isa::EncInfo {', '};'):
         fmt.line('constraints: &RECIPE_CONSTRAINTS,')
         fmt.line('sizing: &RECIPE_SIZING,')
         fmt.line('names: &RECIPE_NAMES,')

View File

@@ -345,6 +345,8 @@ def gen_typesets_table(fmt, type_sets):
     """
     Generate the table of ValueTypeSets described by type_sets.
     """
+    if len(type_sets.table) == 0:
+        return
     fmt.comment('Table of value type sets.')
     assert len(type_sets.table) <= typeset_limit, "Too many type sets"
     with fmt.indented(

View File

@@ -9,7 +9,7 @@ the input instruction.
 """
 from __future__ import absolute_import
 from srcgen import Formatter
-from base import legalize, instructions
+from base import instructions
 from cdsl.ast import Var
 from cdsl.ti import ti_rtl, TypeEnv, get_type_env, TypesEqual,\
     InTypeset, WiderOrEq
@@ -18,7 +18,7 @@ from gen_instr import gen_typesets_table
 from cdsl.typevar import TypeVar
 
 try:
-    from typing import Sequence, List, Dict  # noqa
+    from typing import Sequence, List, Dict, Set  # noqa
     from cdsl.isa import TargetISA  # noqa
     from cdsl.ast import Def  # noqa
     from cdsl.xform import XForm, XFormGroup  # noqa
@@ -167,7 +167,7 @@ def unwrap_inst(iref, node, fmt):
 
     # The tuple of locals we're extracting is `expr.args`.
     with fmt.indented(
-            'let ({}) = if let InstructionData::{} {{'
+            'let ({}) = if let ir::InstructionData::{} {{'
            .format(', '.join(map(str, expr.args)), iform.name), '};'):
         # Fields are encoded directly.
         for f in iform.imm_fields:
@@ -348,9 +348,11 @@ def gen_xform_group(xgrp, fmt, type_sets):
     fmt.doc_comment("Legalize the instruction pointed to by `pos`.")
     fmt.line('#[allow(unused_variables,unused_assignments)]')
     with fmt.indented(
-            'fn {}(dfg: &mut DataFlowGraph, '
-            'cfg: &mut ControlFlowGraph, pos: &mut Cursor) -> '
+            'pub fn {}(dfg: &mut ir::DataFlowGraph, '
+            'cfg: &mut ::flowgraph::ControlFlowGraph, '
+            'pos: &mut ir::Cursor) -> '
             'bool {{'.format(xgrp.name), '}'):
+        fmt.line('use ir::InstBuilder;')
 
         # Gen the instruction to be legalized. The cursor we're passed must be
         # pointing at an instruction.
@@ -360,21 +362,55 @@
             for xform in xgrp.xforms:
                 inst = xform.src.rtl[0].expr.inst
                 with fmt.indented(
-                        'Opcode::{} => {{'.format(inst.camel_name), '}'):
+                        'ir::Opcode::{} => {{'.format(inst.camel_name), '}'):
                     gen_xform(xform, fmt, type_sets)
 
             # We'll assume there are uncovered opcodes.
-            fmt.line('_ => return false,')
+            if xgrp.chain:
+                fmt.format('_ => return {}(dfg, cfg, pos),',
+                           xgrp.chain.rust_name())
+            else:
+                fmt.line('_ => return false,')
 
         fmt.line('true')
 
 
+def gen_isa(isa, fmt, shared_groups):
+    # type: (TargetISA, Formatter, Set[XFormGroup]) -> None
+    """
+    Generate legalization functions for `isa` and add any shared `XFormGroup`s
+    encountered to `shared_groups`.
+
+    Generate `TYPE_SETS` and `LEGALIZE_ACTION` tables.
+    """
+    type_sets = UniqueTable()
+    for xgrp in isa.legalize_codes.keys():
+        if xgrp.isa is None:
+            shared_groups.add(xgrp)
+        else:
+            assert xgrp.isa == isa
+            gen_xform_group(xgrp, fmt, type_sets)
+
+    gen_typesets_table(fmt, type_sets)
+
+    with fmt.indented(
+            'pub static LEGALIZE_ACTIONS: [isa::Legalize; {}] = ['
+            .format(len(isa.legalize_codes)), '];'):
+        for xgrp in isa.legalize_codes.keys():
+            fmt.format('{},', xgrp.rust_name())
+
+
 def generate(isas, out_dir):
     # type: (Sequence[TargetISA], str) -> None
+    shared_groups = set()  # type: Set[XFormGroup]
+
+    for isa in isas:
+        fmt = Formatter()
+        gen_isa(isa, fmt, shared_groups)
+        fmt.update_file('legalize-{}.rs'.format(isa.name), out_dir)
+
+    # Shared xform groups.
     fmt = Formatter()
-    # Table of TypeSet instances
     type_sets = UniqueTable()
-    gen_xform_group(legalize.narrow, fmt, type_sets)
-    gen_xform_group(legalize.expand, fmt, type_sets)
+    for xgrp in sorted(shared_groups, key=lambda g: g.name):
+        gen_xform_group(xgrp, fmt, type_sets)
     gen_typesets_table(fmt, type_sets)
     fmt.update_file('legalizer.rs', out_dir)

View File

@@ -1,9 +1,10 @@
 //! Encoding tables for ARM32 ISA.
 
 use ir::types;
-use isa::EncInfo;
+use isa;
 use isa::constraints::*;
 use isa::enc_tables::*;
 use isa::encoding::RecipeSizing;
 
 include!(concat!(env!("OUT_DIR"), "/encoding-arm32.rs"));
+include!(concat!(env!("OUT_DIR"), "/legalize-arm32.rs"));

View File

@@ -72,6 +72,7 @@ impl TargetIsa for Isa {
                        self.cpumode,
                        &enc_tables::LEVEL2[..],
                        &enc_tables::ENCLISTS[..],
+                       &enc_tables::LEGALIZE_ACTIONS[..],
                        &enc_tables::RECIPE_PREDICATES[..],
                        &enc_tables::INST_PREDICATES[..],
                        self.isa_flags.predicate_view())

View File

@@ -1,9 +1,10 @@
 //! Encoding tables for ARM64 ISA.
 
 use ir::types;
-use isa::EncInfo;
+use isa;
 use isa::constraints::*;
 use isa::enc_tables::*;
 use isa::encoding::RecipeSizing;
 
 include!(concat!(env!("OUT_DIR"), "/encoding-arm64.rs"));
+include!(concat!(env!("OUT_DIR"), "/legalize-arm64.rs"));

View File

@@ -65,6 +65,7 @@ impl TargetIsa for Isa {
                        &enc_tables::LEVEL1_A64[..],
                        &enc_tables::LEVEL2[..],
                        &enc_tables::ENCLISTS[..],
+                       &enc_tables::LEGALIZE_ACTIONS[..],
                        &enc_tables::RECIPE_PREDICATES[..],
                        &enc_tables::INST_PREDICATES[..],
                        self.isa_flags.predicate_view())

View File

@@ -5,7 +5,7 @@
 
 use constant_hash::{Table, probe};
 use ir::{Type, Opcode, DataFlowGraph, InstructionData};
-use isa::Encoding;
+use isa::{Encoding, Legalize};
 use settings::PredicateView;
 use std::ops::Range;
 
@@ -109,6 +109,7 @@ pub fn lookup_enclist<'a, OffT1, OffT2>(ctrl_typevar: Type,
                                        level1_table: &'static [Level1Entry<OffT1>],
                                        level2_table: &'static [Level2Entry<OffT2>],
                                        enclist: &'static [EncListEntry],
+                                       legalize_actions: &'static [Legalize],
                                        recipe_preds: &'static [RecipePredicate],
                                        inst_preds: &'static [InstPredicate],
                                        isa_preds: PredicateView<'a>)
@@ -148,6 +149,7 @@ pub fn lookup_enclist<'a, OffT1, OffT2>(ctrl_typevar: Type,
                    inst,
                    dfg,
                    enclist,
+                   legalize_actions,
                    recipe_preds,
                    inst_preds,
                    isa_preds)
@@ -173,6 +175,7 @@ pub struct Encodings<'a> {
     inst: &'a InstructionData,
     dfg: &'a DataFlowGraph,
     enclist: &'static [EncListEntry],
+    legalize_actions: &'static [Legalize],
     recipe_preds: &'static [RecipePredicate],
     inst_preds: &'static [InstPredicate],
     isa_preds: PredicateView<'a>,
@@ -189,6 +192,7 @@ impl<'a> Encodings<'a> {
               inst: &'a InstructionData,
               dfg: &'a DataFlowGraph,
               enclist: &'static [EncListEntry],
+              legalize_actions: &'static [Legalize],
               recipe_preds: &'static [RecipePredicate],
               inst_preds: &'static [InstPredicate],
               isa_preds: PredicateView<'a>)
@@ -202,6 +206,7 @@ impl<'a> Encodings<'a> {
             recipe_preds,
             inst_preds,
             enclist,
+            legalize_actions,
         }
     }
 
@@ -210,9 +215,9 @@ impl<'a> Encodings<'a> {
     /// instruction.
     ///
    /// This method must only be called after the iterator returns `None`.
-    pub fn legalize(&self) -> LegalizeCode {
+    pub fn legalize(&self) -> Legalize {
         debug_assert_eq!(self.offset, !0, "Premature Encodings::legalize()");
-        self.legalize
+        self.legalize_actions[self.legalize as usize]
     }
 
     /// Check if the `rpred` recipe predicate is satisfied.

View File

@@ -1,7 +1,7 @@
 //! Encoding tables for Intel ISAs.
 
 use ir::{self, types, Opcode};
-use isa::EncInfo;
+use isa;
 use isa::constraints::*;
 use isa::enc_tables::*;
 use isa::encoding::RecipeSizing;
@@ -9,3 +9,4 @@ use predicates;
 use super::registers::*;
 
 include!(concat!(env!("OUT_DIR"), "/encoding-intel.rs"));
+include!(concat!(env!("OUT_DIR"), "/legalize-intel.rs"));

View File

@@ -72,6 +72,7 @@ impl TargetIsa for Isa {
                        self.cpumode,
                        &enc_tables::LEVEL2[..],
                        &enc_tables::ENCLISTS[..],
+                       &enc_tables::LEGALIZE_ACTIONS[..],
                        &enc_tables::RECIPE_PREDICATES[..],
                        &enc_tables::INST_PREDICATES[..],
                        self.isa_flags.predicate_view())

View File

@@ -45,6 +45,7 @@ pub use isa::encoding::{Encoding, EncInfo};
 pub use isa::registers::{RegInfo, RegUnit, RegClass, RegClassIndex, regs_overlap};
 
 use binemit;
+use flowgraph;
 use settings;
 use ir;
 use regalloc;
@@ -116,29 +117,11 @@ impl settings::Configurable for Builder {
 /// After determining that an instruction doesn't have an encoding, how should we proceed to
 /// legalize it?
 ///
-/// These actions correspond to the transformation groups defined in `meta/cretonne/legalize.py`.
-#[derive(Clone, Copy, PartialEq, Eq, Debug)]
-pub enum Legalize {
-    /// Legalize in terms of narrower types.
-    Narrow,
-    /// Expanding in terms of other instructions using the same types.
-    Expand,
-}
-
-/// Translate a legalization code into a `Legalize` enum.
-///
-/// This mapping is going away soon. It depends on matching the `TargetISA.legalize_code()`
-/// mapping.
-impl From<u8> for Legalize {
-    fn from(x: u8) -> Legalize {
-        match x {
-            0 => Legalize::Narrow,
-            1 => Legalize::Expand,
-            _ => panic!("Unknown legalization code {}"),
-        }
-    }
-}
+/// The `Encodings` iterator returns a legalization function to call.
+pub type Legalize = fn(&mut ir::DataFlowGraph,
+                       &mut flowgraph::ControlFlowGraph,
+                       &mut ir::Cursor)
+                       -> bool;
 
 /// Methods that are specialized to a target ISA.
 pub trait TargetIsa {

View File

@@ -2,7 +2,7 @@
 
 use ir::condcodes::IntCC;
 use ir::{self, types, Opcode};
-use isa::EncInfo;
+use isa;
 use isa::constraints::*;
 use isa::enc_tables::*;
 use isa::encoding::RecipeSizing;
@@ -16,3 +16,4 @@ use super::registers::*;
 // - `ENCLIST`
 // - `INFO`
 include!(concat!(env!("OUT_DIR"), "/encoding-riscv.rs"));
+include!(concat!(env!("OUT_DIR"), "/legalize-riscv.rs"));

View File

@@ -72,6 +72,7 @@ impl TargetIsa for Isa {
                        self.cpumode,
                        &enc_tables::LEVEL2[..],
                        &enc_tables::ENCLISTS[..],
+                       &enc_tables::LEGALIZE_ACTIONS[..],
                        &enc_tables::RECIPE_PREDICATES[..],
                        &enc_tables::INST_PREDICATES[..],
                        self.isa_flags.predicate_view())
@@ -113,8 +114,11 @@ mod tests {
     use ir::{DataFlowGraph, InstructionData, Opcode};
     use ir::{types, immediates};
 
-    fn encstr(isa: &isa::TargetIsa, enc: isa::Encoding) -> String {
-        isa.encoding_info().display(enc).to_string()
+    fn encstr(isa: &isa::TargetIsa, enc: Result<isa::Encoding, isa::Legalize>) -> String {
+        match enc {
+            Ok(e) => isa.encoding_info().display(e).to_string(),
+            Err(_) => "no encoding".to_string(),
+        }
     }
 
     #[test]
@@ -137,8 +141,7 @@ mod tests {
         };
 
         // ADDI is I/0b00100
-        assert_eq!(encstr(&*isa, isa.encode(&dfg, &inst64, types::I64).unwrap()),
-                   "I#04");
+        assert_eq!(encstr(&*isa, isa.encode(&dfg, &inst64, types::I64)), "I#04");
 
         // Try to encode iadd_imm.i64 v1, -10000.
         let inst64_large = InstructionData::BinaryImm {
@@ -148,8 +151,7 @@ mod tests {
         };
 
         // Immediate is out of range for ADDI.
-        assert_eq!(isa.encode(&dfg, &inst64_large, types::I64),
-                   Err(isa::Legalize::Expand));
+        assert!(isa.encode(&dfg, &inst64_large, types::I64).is_err());
 
         // Create an iadd_imm.i32 which is encodable in RV64.
         let inst32 = InstructionData::BinaryImm {
@@ -159,8 +161,7 @@ mod tests {
         };
 
         // ADDIW is I/0b00110
-        assert_eq!(encstr(&*isa, isa.encode(&dfg, &inst32, types::I32).unwrap()),
-                   "I#06");
+        assert_eq!(encstr(&*isa, isa.encode(&dfg, &inst32, types::I32)), "I#06");
     }
 
     // Same as above, but for RV32.
@@ -184,8 +185,7 @@ mod tests {
         };
 
         // In 32-bit mode, an i64 bit add should be narrowed.
-        assert_eq!(isa.encode(&dfg, &inst64, types::I64),
-                   Err(isa::Legalize::Narrow));
+        assert!(isa.encode(&dfg, &inst64, types::I64).is_err());
 
         // Try to encode iadd_imm.i64 v1, -10000.
        let inst64_large = InstructionData::BinaryImm {
@@ -195,8 +195,7 @@ mod tests {
         };
 
         // In 32-bit mode, an i64 bit add should be narrowed.
-        assert_eq!(isa.encode(&dfg, &inst64_large, types::I64),
-                   Err(isa::Legalize::Narrow));
+        assert!(isa.encode(&dfg, &inst64_large, types::I64).is_err());
 
         // Create an iadd_imm.i32 which is encodable in RV32.
         let inst32 = InstructionData::BinaryImm {
@@ -206,8 +205,7 @@ mod tests {
         };
 
         // ADDI is I/0b00100
-        assert_eq!(encstr(&*isa, isa.encode(&dfg, &inst32, types::I32).unwrap()),
-                   "I#04");
+        assert_eq!(encstr(&*isa, isa.encode(&dfg, &inst32, types::I32)), "I#04");
 
         // Create an imul.i32 which is encodable in RV32, but only when use_m is true.
         let mul32 = InstructionData::Binary {
@@ -215,8 +213,7 @@ mod tests {
             args: [arg32, arg32],
         };
 
-        assert_eq!(isa.encode(&dfg, &mul32, types::I32),
-                   Err(isa::Legalize::Expand));
+        assert!(isa.encode(&dfg, &mul32, types::I32).is_err());
     }
 
     #[test]
@@ -241,7 +238,6 @@ mod tests {
             opcode: Opcode::Imul,
             args: [arg32, arg32],
         };
-        assert_eq!(encstr(&*isa, isa.encode(&dfg, &mul32, types::I32).unwrap()),
-                   "R#10c");
+        assert_eq!(encstr(&*isa, isa.encode(&dfg, &mul32, types::I32)), "R#10c");
     }
 }

View File

@@ -15,9 +15,9 @@
 
 use dominator_tree::DominatorTree;
 use flowgraph::ControlFlowGraph;
-use ir::{Function, Cursor, DataFlowGraph, InstructionData, Opcode, InstBuilder};
+use ir::{self, Function, Cursor};
 use ir::condcodes::IntCC;
-use isa::{TargetIsa, Legalize};
+use isa::TargetIsa;
 use bitset::BitSet;
 use ir::instructions::ValueTypeSet;
 
@@ -73,22 +73,7 @@ pub fn legalize_function(func: &mut Function,
             Ok(encoding) => *func.encodings.ensure(inst) = encoding,
             Err(action) => {
                 // We should transform the instruction into legal equivalents.
-                // Possible strategies are:
-                // 1. Legalize::Expand: Expand instruction into sequence of legal instructions.
-                //    Possibly iteratively. ()
-                // 2. Legalize::Narrow: Split the controlling type variable into high and low
-                //    parts. This applies both to SIMD vector types which can be halved and to
-                //    integer types such as `i64` used on a 32-bit ISA. ().
-                // 3. TODO: Promote the controlling type variable to a larger type. This
-                //    typically means expressing `i8` and `i16` arithmetic in terms if `i32`
-                //    operations on RISC targets. (It may or may not be beneficial to promote
-                //    small vector types versus splitting them.)
-                // 4. TODO: Convert to library calls. For example, floating point operations on
-                //    an ISA with no IEEE 754 support.
-                let changed = match action {
-                    Legalize::Expand => expand(&mut func.dfg, cfg, &mut pos),
-                    Legalize::Narrow => narrow(&mut func.dfg, cfg, &mut pos),
-                };
+                let changed = action(&mut func.dfg, cfg, &mut pos);
                 // If the current instruction was replaced, we need to double back and revisit
                 // the expanded sequence. This is both to assign encodings and possible to
                 // expand further.

View File

@@ -479,9 +479,10 @@ impl<'a> Context<'a> {
                      self.func.dfg.display_inst(pred_inst, self.isa));
 
             // Give it an encoding.
-            let encoding = self.isa
-                .encode(&self.func.dfg, &self.func.dfg[inst], ty)
-                .expect("Can't encode copy");
+            let encoding = match self.isa.encode(&self.func.dfg, &self.func.dfg[inst], ty) {
+                Ok(e) => e,
+                Err(_) => panic!("Can't encode copy.{}", ty),
+            };
             *self.func.encodings.ensure(inst) = encoding;
 
             // Create a live range for the new value.
@@ -525,9 +526,10 @@ impl<'a> Context<'a> {
                      ty);
 
             // Give it an encoding.
-            let encoding = self.isa
-                .encode(&self.func.dfg, &self.func.dfg[inst], ty)
-                .expect("Can't encode copy");
+            let encoding = match self.isa.encode(&self.func.dfg, &self.func.dfg[inst], ty) {
+                Ok(e) => e,
+                Err(_) => panic!("Can't encode copy.{}", ty),
+            };
             *self.func.encodings.ensure(inst) = encoding;
 
             // Create a live range for the new value.

View File

@@ -220,9 +220,10 @@ impl<'a> Context<'a> {
         let reg = dfg.ins(pos).fill(cand.value);
         let fill = dfg.value_def(reg).unwrap_inst();
 
-        *encodings.ensure(fill) = self.isa
-            .encode(dfg, &dfg[fill], dfg.value_type(reg))
-            .expect("Can't encode fill");
+        match self.isa.encode(dfg, &dfg[fill], dfg.value_type(reg)) {
+            Ok(e) => *encodings.ensure(fill) = e,
+            Err(_) => panic!("Can't encode fill {}", cand.value),
+        }
 
         self.reloads
             .insert(ReloadedValue {
@@ -351,9 +352,10 @@ impl<'a> Context<'a> {
             .Unary(Opcode::Spill, ty, reg);
 
         // Give it an encoding.
-        *encodings.ensure(inst) = self.isa
-            .encode(dfg, &dfg[inst], ty)
-            .expect("Can't encode spill");
+        match self.isa.encode(dfg, &dfg[inst], ty) {
+            Ok(e) => *encodings.ensure(inst) = e,
+            Err(_) => panic!("Can't encode spill.{}", ty),
+        }
 
         // Update live ranges.
         self.liveness.move_def_locally(stack, inst);

View File

@@ -533,10 +533,10 @@ impl<'a> Context<'a> {
         let ty = dfg.value_type(copy);
 
         // Give it an encoding.
-        let encoding = self.isa
-            .encode(dfg, &dfg[inst], ty)
-            .expect("Can't encode copy");
-        *self.encodings.ensure(inst) = encoding;
+        match self.isa.encode(dfg, &dfg[inst], ty) {
+            Ok(e) => *self.encodings.ensure(inst) = e,
+            Err(_) => panic!("Can't encode {}", dfg.display_inst(inst, self.isa)),
+        }
 
         // Update live ranges.
         self.liveness.create_dead(copy, inst, Affinity::Reg(rci));

View File

@@ -697,11 +697,10 @@ impl<'a> Verifier<'a> {
                                 isa.encoding_info().display(encoding));
                 }
             }
-            Err(e) => {
+            Err(_) => {
                 return err!(inst,
-                            "Instruction failed to re-encode {}: {:?}",
-                            isa.encoding_info().display(encoding),
-                            e)
+                            "Instruction failed to re-encode {}",
+                            isa.encoding_info().display(encoding))
             }
         }
         return Ok(());