cranelift: remove load_complex and store_complex (#3976)

This change removes all variants of `load*_complex` and `store*_complex`
from Cranelift; this is a breaking change to the instructions exposed by
CLIF. The complete list of instructions removed is: `load_complex`,
`store_complex`, `uload8_complex`, `sload8_complex`, `istore8_complex`,
`uload16_complex`, `sload16_complex`,
`istore16_complex`, `uload32_complex`, `sload32_complex`,
`istore32_complex`, `uload8x8_complex`, `sload8x8_complex`,
`sload16x4_complex`, `uload16x4_complex`, `uload32x2_complex`,
`sload32x2_complex`.

The rationale for this removal is that the Cranelift backends can now
pattern-match multiple additions upstream of a load or store when
computing the address to access. Previously this was not possible, so
the `*_complex` instructions were needed. Over time, these instructions
have fallen out of use in this repository, and the overhead of
maintaining them is no longer justified.
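
For illustration, here is a minimal CLIF sketch (not part of this commit; the function names are hypothetical) of how code that used the removed instructions can be rewritten with an explicit `iadd` feeding an ordinary `load`:

function %with_load_complex(i64, i64) -> i32 {
block0(v0: i64, v1: i64):
v2 = load_complex.i32 v0+v1+8   ; removed form: sum of the addends plus an immediate offset
return v2
}

function %with_plain_load(i64, i64) -> i32 {
block0(v0: i64, v1: i64):
v2 = iadd v0, v1                ; make the address addition explicit
v3 = load.i32 v2+8              ; backends can fold the iadd into their addressing modes
return v3
}

The same rewrite applies to the extending loads and to the stores (for example, `uload16_complex` becomes `iadd` plus `uload16`). As the aarch64 `lower_address` comment in this diff notes, the backends' address lowering already collects addends through trees of additions and sign/zero extends, so the explicit form can reach the same addressing modes the `*_complex` instructions produced.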
Authored by Andrew Brown on 2022-03-31 10:05:10 -07:00; committed by GitHub
parent c8daf0b8db
commit bd6fe11ca9
20 changed files with 51 additions and 892 deletions

View File

@@ -28,7 +28,6 @@ pub(crate) struct Formats {
pub(crate) int_select: Rc<InstructionFormat>, pub(crate) int_select: Rc<InstructionFormat>,
pub(crate) jump: Rc<InstructionFormat>, pub(crate) jump: Rc<InstructionFormat>,
pub(crate) load: Rc<InstructionFormat>, pub(crate) load: Rc<InstructionFormat>,
pub(crate) load_complex: Rc<InstructionFormat>,
pub(crate) load_no_offset: Rc<InstructionFormat>, pub(crate) load_no_offset: Rc<InstructionFormat>,
pub(crate) multiary: Rc<InstructionFormat>, pub(crate) multiary: Rc<InstructionFormat>,
pub(crate) nullary: Rc<InstructionFormat>, pub(crate) nullary: Rc<InstructionFormat>,
@@ -36,7 +35,6 @@ pub(crate) struct Formats {
pub(crate) stack_load: Rc<InstructionFormat>, pub(crate) stack_load: Rc<InstructionFormat>,
pub(crate) stack_store: Rc<InstructionFormat>, pub(crate) stack_store: Rc<InstructionFormat>,
pub(crate) store: Rc<InstructionFormat>, pub(crate) store: Rc<InstructionFormat>,
pub(crate) store_complex: Rc<InstructionFormat>,
pub(crate) store_no_offset: Rc<InstructionFormat>, pub(crate) store_no_offset: Rc<InstructionFormat>,
pub(crate) table_addr: Rc<InstructionFormat>, pub(crate) table_addr: Rc<InstructionFormat>,
pub(crate) ternary: Rc<InstructionFormat>, pub(crate) ternary: Rc<InstructionFormat>,
@@ -203,12 +201,6 @@ impl Formats {
.imm(&imm.offset32) .imm(&imm.offset32)
.build(), .build(),
load_complex: Builder::new("LoadComplex")
.imm(&imm.memflags)
.varargs()
.imm(&imm.offset32)
.build(),
load_no_offset: Builder::new("LoadNoOffset") load_no_offset: Builder::new("LoadNoOffset")
.imm(&imm.memflags) .imm(&imm.memflags)
.value() .value()
@@ -221,13 +213,6 @@ impl Formats {
.imm(&imm.offset32) .imm(&imm.offset32)
.build(), .build(),
store_complex: Builder::new("StoreComplex")
.imm(&imm.memflags)
.value()
.varargs()
.imm(&imm.offset32)
.build(),
store_no_offset: Builder::new("StoreNoOffset") store_no_offset: Builder::new("StoreNoOffset")
.imm(&imm.memflags) .imm(&imm.memflags)
.value() .value()

View File

@@ -792,7 +792,6 @@ pub(crate) fn define(
let a = &Operand::new("a", Mem).with_doc("Value loaded"); let a = &Operand::new("a", Mem).with_doc("Value loaded");
let p = &Operand::new("p", iAddr); let p = &Operand::new("p", iAddr);
let MemFlags = &Operand::new("MemFlags", &imm.memflags); let MemFlags = &Operand::new("MemFlags", &imm.memflags);
let args = &Operand::new("args", &entities.varargs).with_doc("Address arguments");
ig.push( ig.push(
Inst::new( Inst::new(
@@ -810,22 +809,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"load_complex",
r#"
Load from memory at ``sum(args) + Offset``.
This is a polymorphic instruction that can load any value type which
has a memory representation.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"store", "store",
@@ -841,21 +824,6 @@ pub(crate) fn define(
.can_store(true), .can_store(true),
); );
ig.push(
Inst::new(
"store_complex",
r#"
Store ``x`` to memory at ``sum(args) + Offset``.
This is a polymorphic instruction that can store any value type with a
memory representation.
"#,
&formats.store_complex,
)
.operands_in(vec![MemFlags, x, args, Offset])
.can_store(true),
);
let iExt8 = &TypeVar::new( let iExt8 = &TypeVar::new(
"iExt8", "iExt8",
"An integer type with more than 8 bits", "An integer type with more than 8 bits",
@@ -879,21 +847,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"uload8_complex",
r#"
Load 8 bits from memory at ``sum(args) + Offset`` and zero-extend.
This is equivalent to ``load.i8`` followed by ``uextend``.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"sload8", "sload8",
@@ -909,21 +862,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"sload8_complex",
r#"
Load 8 bits from memory at ``sum(args) + Offset`` and sign-extend.
This is equivalent to ``load.i8`` followed by ``sextend``.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"istore8", "istore8",
@@ -938,20 +876,6 @@ pub(crate) fn define(
.can_store(true), .can_store(true),
); );
ig.push(
Inst::new(
"istore8_complex",
r#"
Store the low 8 bits of ``x`` to memory at ``sum(args) + Offset``.
This is equivalent to ``ireduce.i8`` followed by ``store.i8``.
"#,
&formats.store_complex,
)
.operands_in(vec![MemFlags, x, args, Offset])
.can_store(true),
);
let iExt16 = &TypeVar::new( let iExt16 = &TypeVar::new(
"iExt16", "iExt16",
"An integer type with more than 16 bits", "An integer type with more than 16 bits",
@@ -975,21 +899,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"uload16_complex",
r#"
Load 16 bits from memory at ``sum(args) + Offset`` and zero-extend.
This is equivalent to ``load.i16`` followed by ``uextend``.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"sload16", "sload16",
@@ -1005,21 +914,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"sload16_complex",
r#"
Load 16 bits from memory at ``sum(args) + Offset`` and sign-extend.
This is equivalent to ``load.i16`` followed by ``sextend``.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"istore16", "istore16",
@@ -1034,20 +928,6 @@ pub(crate) fn define(
.can_store(true), .can_store(true),
); );
ig.push(
Inst::new(
"istore16_complex",
r#"
Store the low 16 bits of ``x`` to memory at ``sum(args) + Offset``.
This is equivalent to ``ireduce.i16`` followed by ``store.i16``.
"#,
&formats.store_complex,
)
.operands_in(vec![MemFlags, x, args, Offset])
.can_store(true),
);
let iExt32 = &TypeVar::new( let iExt32 = &TypeVar::new(
"iExt32", "iExt32",
"An integer type with more than 32 bits", "An integer type with more than 32 bits",
@@ -1071,21 +951,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"uload32_complex",
r#"
Load 32 bits from memory at ``sum(args) + Offset`` and zero-extend.
This is equivalent to ``load.i32`` followed by ``uextend``.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"sload32", "sload32",
@@ -1101,21 +966,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"sload32_complex",
r#"
Load 32 bits from memory at ``sum(args) + Offset`` and sign-extend.
This is equivalent to ``load.i32`` followed by ``sextend``.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"istore32", "istore32",
@@ -1130,20 +980,6 @@ pub(crate) fn define(
.can_store(true), .can_store(true),
); );
ig.push(
Inst::new(
"istore32_complex",
r#"
Store the low 32 bits of ``x`` to memory at ``sum(args) + Offset``.
This is equivalent to ``ireduce.i32`` followed by ``store.i32``.
"#,
&formats.store_complex,
)
.operands_in(vec![MemFlags, x, args, Offset])
.can_store(true),
);
let I16x8 = &TypeVar::new( let I16x8 = &TypeVar::new(
"I16x8", "I16x8",
"A SIMD vector with exactly 8 lanes of 16-bit values", "A SIMD vector with exactly 8 lanes of 16-bit values",
@@ -1169,20 +1005,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"uload8x8_complex",
r#"
Load an 8x8 vector (64 bits) from memory at ``sum(args) + Offset`` and zero-extend into an
i16x8 vector.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"sload8x8", "sload8x8",
@@ -1197,20 +1019,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"sload8x8_complex",
r#"
Load an 8x8 vector (64 bits) from memory at ``sum(args) + Offset`` and sign-extend into an
i16x8 vector.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
let I32x4 = &TypeVar::new( let I32x4 = &TypeVar::new(
"I32x4", "I32x4",
"A SIMD vector with exactly 4 lanes of 32-bit values", "A SIMD vector with exactly 4 lanes of 32-bit values",
@@ -1236,20 +1044,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"uload16x4_complex",
r#"
Load a 16x4 vector (64 bits) from memory at ``sum(args) + Offset`` and zero-extend into an
i32x4 vector.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"sload16x4", "sload16x4",
@@ -1264,20 +1058,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"sload16x4_complex",
r#"
Load a 16x4 vector (64 bits) from memory at ``sum(args) + Offset`` and sign-extend into an
i32x4 vector.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
let I64x2 = &TypeVar::new( let I64x2 = &TypeVar::new(
"I64x2", "I64x2",
"A SIMD vector with exactly 2 lanes of 64-bit values", "A SIMD vector with exactly 2 lanes of 64-bit values",
@@ -1303,20 +1083,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"uload32x2_complex",
r#"
Load a 32x2 vector (64 bits) from memory at ``sum(args) + Offset`` and zero-extend into an
i64x2 vector.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
ig.push( ig.push(
Inst::new( Inst::new(
"sload32x2", "sload32x2",
@@ -1331,20 +1097,6 @@ pub(crate) fn define(
.can_load(true), .can_load(true),
); );
ig.push(
Inst::new(
"sload32x2_complex",
r#"
Load a 32x2 vector (64 bits) from memory at ``sum(args) + Offset`` and sign-extend into an
i64x2 vector.
"#,
&formats.load_complex,
)
.operands_in(vec![MemFlags, args, Offset])
.operands_out(vec![a])
.can_load(true),
);
let x = &Operand::new("x", Mem).with_doc("Value to be stored"); let x = &Operand::new("x", Mem).with_doc("Value to be stored");
let a = &Operand::new("a", Mem).with_doc("Value loaded"); let a = &Operand::new("a", Mem).with_doc("Value loaded");
let Offset = let Offset =

View File

@@ -148,39 +148,22 @@
UsubSat UsubSat
SsubSat SsubSat
Load Load
LoadComplex
Store Store
StoreComplex
Uload8 Uload8
Uload8Complex
Sload8 Sload8
Sload8Complex
Istore8 Istore8
Istore8Complex
Uload16 Uload16
Uload16Complex
Sload16 Sload16
Sload16Complex
Istore16 Istore16
Istore16Complex
Uload32 Uload32
Uload32Complex
Sload32 Sload32
Sload32Complex
Istore32 Istore32
Istore32Complex
Uload8x8 Uload8x8
Uload8x8Complex
Sload8x8 Sload8x8
Sload8x8Complex
Uload16x4 Uload16x4
Uload16x4Complex
Sload16x4 Sload16x4
Sload16x4Complex
Uload32x2 Uload32x2
Uload32x2Complex
Sload32x2 Sload32x2
Sload32x2Complex
StackLoad StackLoad
StackStore StackStore
StackAddr StackAddr
@@ -364,7 +347,6 @@
(IntSelect (opcode Opcode) (args ValueArray3) (cond IntCC)) (IntSelect (opcode Opcode) (args ValueArray3) (cond IntCC))
(Jump (opcode Opcode) (args ValueList) (destination Block)) (Jump (opcode Opcode) (args ValueList) (destination Block))
(Load (opcode Opcode) (arg Value) (flags MemFlags) (offset Offset32)) (Load (opcode Opcode) (arg Value) (flags MemFlags) (offset Offset32))
(LoadComplex (opcode Opcode) (args ValueList) (flags MemFlags) (offset Offset32))
(LoadNoOffset (opcode Opcode) (arg Value) (flags MemFlags)) (LoadNoOffset (opcode Opcode) (arg Value) (flags MemFlags))
(MultiAry (opcode Opcode) (args ValueList)) (MultiAry (opcode Opcode) (args ValueList))
(NullAry (opcode Opcode)) (NullAry (opcode Opcode))
@@ -372,7 +354,6 @@
(StackLoad (opcode Opcode) (stack_slot StackSlot) (offset Offset32)) (StackLoad (opcode Opcode) (stack_slot StackSlot) (offset Offset32))
(StackStore (opcode Opcode) (arg Value) (stack_slot StackSlot) (offset Offset32)) (StackStore (opcode Opcode) (arg Value) (stack_slot StackSlot) (offset Offset32))
(Store (opcode Opcode) (args ValueArray2) (flags MemFlags) (offset Offset32)) (Store (opcode Opcode) (args ValueArray2) (flags MemFlags) (offset Offset32))
(StoreComplex (opcode Opcode) (args ValueList) (flags MemFlags) (offset Offset32))
(StoreNoOffset (opcode Opcode) (args ValueArray2) (flags MemFlags)) (StoreNoOffset (opcode Opcode) (args ValueArray2) (flags MemFlags))
(TableAddr (opcode Opcode) (arg Value) (table Table) (offset Offset32)) (TableAddr (opcode Opcode) (arg Value) (table Table) (offset Offset32))
(Ternary (opcode Opcode) (args ValueArray3)) (Ternary (opcode Opcode) (args ValueArray3))
@@ -594,204 +575,102 @@
(inst_data (InstructionData.Load (Opcode.Load) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Load) p MemFlags Offset))
) )
(decl load_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(load_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.LoadComplex) (value_list_slice args) MemFlags Offset))
)
(decl store (MemFlags Value Value Offset32) Inst) (decl store (MemFlags Value Value Offset32) Inst)
(extractor (extractor
(store MemFlags x p Offset) (store MemFlags x p Offset)
(inst_data (InstructionData.Store (Opcode.Store) (value_array_2 x p) MemFlags Offset)) (inst_data (InstructionData.Store (Opcode.Store) (value_array_2 x p) MemFlags Offset))
) )
(decl store_complex (MemFlags Value ValueSlice Offset32) Inst)
(extractor
(store_complex MemFlags x args Offset)
(inst_data (InstructionData.StoreComplex (Opcode.StoreComplex) (unwrap_head_value_list_1 x args) MemFlags Offset))
)
(decl uload8 (MemFlags Value Offset32) Inst) (decl uload8 (MemFlags Value Offset32) Inst)
(extractor (extractor
(uload8 MemFlags p Offset) (uload8 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Uload8) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Uload8) p MemFlags Offset))
) )
(decl uload8_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(uload8_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Uload8Complex) (value_list_slice args) MemFlags Offset))
)
(decl sload8 (MemFlags Value Offset32) Inst) (decl sload8 (MemFlags Value Offset32) Inst)
(extractor (extractor
(sload8 MemFlags p Offset) (sload8 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Sload8) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Sload8) p MemFlags Offset))
) )
(decl sload8_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(sload8_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Sload8Complex) (value_list_slice args) MemFlags Offset))
)
(decl istore8 (MemFlags Value Value Offset32) Inst) (decl istore8 (MemFlags Value Value Offset32) Inst)
(extractor (extractor
(istore8 MemFlags x p Offset) (istore8 MemFlags x p Offset)
(inst_data (InstructionData.Store (Opcode.Istore8) (value_array_2 x p) MemFlags Offset)) (inst_data (InstructionData.Store (Opcode.Istore8) (value_array_2 x p) MemFlags Offset))
) )
(decl istore8_complex (MemFlags Value ValueSlice Offset32) Inst)
(extractor
(istore8_complex MemFlags x args Offset)
(inst_data (InstructionData.StoreComplex (Opcode.Istore8Complex) (unwrap_head_value_list_1 x args) MemFlags Offset))
)
(decl uload16 (MemFlags Value Offset32) Inst) (decl uload16 (MemFlags Value Offset32) Inst)
(extractor (extractor
(uload16 MemFlags p Offset) (uload16 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Uload16) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Uload16) p MemFlags Offset))
) )
(decl uload16_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(uload16_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Uload16Complex) (value_list_slice args) MemFlags Offset))
)
(decl sload16 (MemFlags Value Offset32) Inst) (decl sload16 (MemFlags Value Offset32) Inst)
(extractor (extractor
(sload16 MemFlags p Offset) (sload16 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Sload16) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Sload16) p MemFlags Offset))
) )
(decl sload16_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(sload16_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Sload16Complex) (value_list_slice args) MemFlags Offset))
)
(decl istore16 (MemFlags Value Value Offset32) Inst) (decl istore16 (MemFlags Value Value Offset32) Inst)
(extractor (extractor
(istore16 MemFlags x p Offset) (istore16 MemFlags x p Offset)
(inst_data (InstructionData.Store (Opcode.Istore16) (value_array_2 x p) MemFlags Offset)) (inst_data (InstructionData.Store (Opcode.Istore16) (value_array_2 x p) MemFlags Offset))
) )
(decl istore16_complex (MemFlags Value ValueSlice Offset32) Inst)
(extractor
(istore16_complex MemFlags x args Offset)
(inst_data (InstructionData.StoreComplex (Opcode.Istore16Complex) (unwrap_head_value_list_1 x args) MemFlags Offset))
)
(decl uload32 (MemFlags Value Offset32) Inst) (decl uload32 (MemFlags Value Offset32) Inst)
(extractor (extractor
(uload32 MemFlags p Offset) (uload32 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Uload32) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Uload32) p MemFlags Offset))
) )
(decl uload32_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(uload32_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Uload32Complex) (value_list_slice args) MemFlags Offset))
)
(decl sload32 (MemFlags Value Offset32) Inst) (decl sload32 (MemFlags Value Offset32) Inst)
(extractor (extractor
(sload32 MemFlags p Offset) (sload32 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Sload32) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Sload32) p MemFlags Offset))
) )
(decl sload32_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(sload32_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Sload32Complex) (value_list_slice args) MemFlags Offset))
)
(decl istore32 (MemFlags Value Value Offset32) Inst) (decl istore32 (MemFlags Value Value Offset32) Inst)
(extractor (extractor
(istore32 MemFlags x p Offset) (istore32 MemFlags x p Offset)
(inst_data (InstructionData.Store (Opcode.Istore32) (value_array_2 x p) MemFlags Offset)) (inst_data (InstructionData.Store (Opcode.Istore32) (value_array_2 x p) MemFlags Offset))
) )
(decl istore32_complex (MemFlags Value ValueSlice Offset32) Inst)
(extractor
(istore32_complex MemFlags x args Offset)
(inst_data (InstructionData.StoreComplex (Opcode.Istore32Complex) (unwrap_head_value_list_1 x args) MemFlags Offset))
)
(decl uload8x8 (MemFlags Value Offset32) Inst) (decl uload8x8 (MemFlags Value Offset32) Inst)
(extractor (extractor
(uload8x8 MemFlags p Offset) (uload8x8 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Uload8x8) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Uload8x8) p MemFlags Offset))
) )
(decl uload8x8_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(uload8x8_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Uload8x8Complex) (value_list_slice args) MemFlags Offset))
)
(decl sload8x8 (MemFlags Value Offset32) Inst) (decl sload8x8 (MemFlags Value Offset32) Inst)
(extractor (extractor
(sload8x8 MemFlags p Offset) (sload8x8 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Sload8x8) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Sload8x8) p MemFlags Offset))
) )
(decl sload8x8_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(sload8x8_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Sload8x8Complex) (value_list_slice args) MemFlags Offset))
)
(decl uload16x4 (MemFlags Value Offset32) Inst) (decl uload16x4 (MemFlags Value Offset32) Inst)
(extractor (extractor
(uload16x4 MemFlags p Offset) (uload16x4 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Uload16x4) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Uload16x4) p MemFlags Offset))
) )
(decl uload16x4_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(uload16x4_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Uload16x4Complex) (value_list_slice args) MemFlags Offset))
)
(decl sload16x4 (MemFlags Value Offset32) Inst) (decl sload16x4 (MemFlags Value Offset32) Inst)
(extractor (extractor
(sload16x4 MemFlags p Offset) (sload16x4 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Sload16x4) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Sload16x4) p MemFlags Offset))
) )
(decl sload16x4_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(sload16x4_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Sload16x4Complex) (value_list_slice args) MemFlags Offset))
)
(decl uload32x2 (MemFlags Value Offset32) Inst) (decl uload32x2 (MemFlags Value Offset32) Inst)
(extractor (extractor
(uload32x2 MemFlags p Offset) (uload32x2 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Uload32x2) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Uload32x2) p MemFlags Offset))
) )
(decl uload32x2_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(uload32x2_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Uload32x2Complex) (value_list_slice args) MemFlags Offset))
)
(decl sload32x2 (MemFlags Value Offset32) Inst) (decl sload32x2 (MemFlags Value Offset32) Inst)
(extractor (extractor
(sload32x2 MemFlags p Offset) (sload32x2 MemFlags p Offset)
(inst_data (InstructionData.Load (Opcode.Sload32x2) p MemFlags Offset)) (inst_data (InstructionData.Load (Opcode.Sload32x2) p MemFlags Offset))
) )
(decl sload32x2_complex (MemFlags ValueSlice Offset32) Inst)
(extractor
(sload32x2_complex MemFlags args Offset)
(inst_data (InstructionData.LoadComplex (Opcode.Sload32x2Complex) (value_list_slice args) MemFlags Offset))
)
(decl stack_load (StackSlot Offset32) Inst) (decl stack_load (StackSlot Offset32) Inst)
(extractor (extractor
(stack_load SS Offset) (stack_load SS Offset)

View File

@@ -304,9 +304,7 @@ impl InstructionData {
Some(DataValue::from(imm as i32)) // Note the switch from unsigned to signed. Some(DataValue::from(imm as i32)) // Note the switch from unsigned to signed.
} }
&InstructionData::Load { offset, .. } &InstructionData::Load { offset, .. }
| &InstructionData::LoadComplex { offset, .. }
| &InstructionData::Store { offset, .. } | &InstructionData::Store { offset, .. }
| &InstructionData::StoreComplex { offset, .. }
| &InstructionData::StackLoad { offset, .. } | &InstructionData::StackLoad { offset, .. }
| &InstructionData::StackStore { offset, .. } | &InstructionData::StackStore { offset, .. }
| &InstructionData::TableAddr { offset, .. } => Some(DataValue::from(offset)), | &InstructionData::TableAddr { offset, .. } => Some(DataValue::from(offset)),
@@ -388,10 +386,8 @@ impl InstructionData {
match self { match self {
&InstructionData::Load { offset, .. } &InstructionData::Load { offset, .. }
| &InstructionData::StackLoad { offset, .. } | &InstructionData::StackLoad { offset, .. }
| &InstructionData::LoadComplex { offset, .. }
| &InstructionData::Store { offset, .. } | &InstructionData::Store { offset, .. }
| &InstructionData::StackStore { offset, .. } | &InstructionData::StackStore { offset, .. } => Some(offset.into()),
| &InstructionData::StoreComplex { offset, .. } => Some(offset.into()),
_ => None, _ => None,
} }
} }
@@ -400,10 +396,8 @@ impl InstructionData {
pub fn memflags(&self) -> Option<MemFlags> { pub fn memflags(&self) -> Option<MemFlags> {
match self { match self {
&InstructionData::Load { flags, .. } &InstructionData::Load { flags, .. }
| &InstructionData::LoadComplex { flags, .. }
| &InstructionData::LoadNoOffset { flags, .. } | &InstructionData::LoadNoOffset { flags, .. }
| &InstructionData::Store { flags, .. } | &InstructionData::Store { flags, .. }
| &InstructionData::StoreComplex { flags, .. }
| &InstructionData::StoreNoOffset { flags, .. } => Some(flags), | &InstructionData::StoreNoOffset { flags, .. } => Some(flags),
_ => None, _ => None,
} }

View File

@@ -662,8 +662,8 @@ pub(crate) fn lower_address<C: LowerCtx<I = Inst>>(
roots: &[InsnInput], roots: &[InsnInput],
offset: i32, offset: i32,
) -> AMode { ) -> AMode {
// TODO: support base_reg + scale * index_reg. For this, we would need to pattern-match shl or // TODO: support base_reg + scale * index_reg. For this, we would need to
// mul instructions (Load/StoreComplex don't include scale factors). // pattern-match shl or mul instructions.
// Collect addends through an arbitrary tree of 32-to-64-bit sign/zero // Collect addends through an arbitrary tree of 32-to-64-bit sign/zero
// extends and addition ops. We update these as we consume address // extends and addition ops. We update these as we consume address
@@ -1510,25 +1510,13 @@ pub(crate) fn emit_atomic_load<C: LowerCtx<I = Inst>>(
fn load_op_to_ty(op: Opcode) -> Option<Type> { fn load_op_to_ty(op: Opcode) -> Option<Type> {
match op { match op {
Opcode::Sload8 | Opcode::Uload8 | Opcode::Sload8Complex | Opcode::Uload8Complex => Some(I8), Opcode::Sload8 | Opcode::Uload8 => Some(I8),
Opcode::Sload16 | Opcode::Uload16 | Opcode::Sload16Complex | Opcode::Uload16Complex => { Opcode::Sload16 | Opcode::Uload16 => Some(I16),
Some(I16) Opcode::Sload32 | Opcode::Uload32 => Some(I32),
} Opcode::Load => None,
Opcode::Sload32 | Opcode::Uload32 | Opcode::Sload32Complex | Opcode::Uload32Complex => { Opcode::Sload8x8 | Opcode::Uload8x8 => Some(I8X8),
Some(I32) Opcode::Sload16x4 | Opcode::Uload16x4 => Some(I16X4),
} Opcode::Sload32x2 | Opcode::Uload32x2 => Some(I32X2),
Opcode::Load | Opcode::LoadComplex => None,
Opcode::Sload8x8 | Opcode::Uload8x8 | Opcode::Sload8x8Complex | Opcode::Uload8x8Complex => {
Some(I8X8)
}
Opcode::Sload16x4
| Opcode::Uload16x4
| Opcode::Sload16x4Complex
| Opcode::Uload16x4Complex => Some(I16X4),
Opcode::Sload32x2
| Opcode::Uload32x2
| Opcode::Sload32x2Complex
| Opcode::Uload32x2Complex => Some(I32X2),
_ => None, _ => None,
} }
} }

View File

@@ -1,4 +1,4 @@
src/clif.isle 9ea75a6f790b5c03 src/clif.isle 443b34b797fc8ace
src/prelude.isle 74d9514ac948e163 src/prelude.isle 74d9514ac948e163
src/isa/aarch64/inst.isle 19ccefb6a496d392 src/isa/aarch64/inst.isle 19ccefb6a496d392
src/isa/aarch64/lower.isle d88b62dd6b40622 src/isa/aarch64/lower.isle d88b62dd6b40622

View File

@@ -98,32 +98,14 @@ pub(crate) fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
| Opcode::Sload16 | Opcode::Sload16
| Opcode::Uload32 | Opcode::Uload32
| Opcode::Sload32 | Opcode::Sload32
| Opcode::LoadComplex
| Opcode::Uload8Complex
| Opcode::Sload8Complex
| Opcode::Uload16Complex
| Opcode::Sload16Complex
| Opcode::Uload32Complex
| Opcode::Sload32Complex
| Opcode::Sload8x8 | Opcode::Sload8x8
| Opcode::Uload8x8 | Opcode::Uload8x8
| Opcode::Sload16x4 | Opcode::Sload16x4
| Opcode::Uload16x4 | Opcode::Uload16x4
| Opcode::Sload32x2 | Opcode::Sload32x2
| Opcode::Uload32x2 | Opcode::Uload32x2 => {
| Opcode::Uload8x8Complex
| Opcode::Sload8x8Complex
| Opcode::Uload16x4Complex
| Opcode::Sload16x4Complex
| Opcode::Uload32x2Complex
| Opcode::Sload32x2Complex => {
let sign_extend = match op { let sign_extend = match op {
Opcode::Sload8 Opcode::Sload8 | Opcode::Sload16 | Opcode::Sload32 => true,
| Opcode::Sload8Complex
| Opcode::Sload16
| Opcode::Sload16Complex
| Opcode::Sload32
| Opcode::Sload32Complex => true,
_ => false, _ => false,
}; };
let flags = ctx let flags = ctx
@@ -174,17 +156,11 @@ pub(crate) fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
let vec_extend = match op { let vec_extend = match op {
Opcode::Sload8x8 => Some(VecExtendOp::Sxtl8), Opcode::Sload8x8 => Some(VecExtendOp::Sxtl8),
Opcode::Sload8x8Complex => Some(VecExtendOp::Sxtl8),
Opcode::Uload8x8 => Some(VecExtendOp::Uxtl8), Opcode::Uload8x8 => Some(VecExtendOp::Uxtl8),
Opcode::Uload8x8Complex => Some(VecExtendOp::Uxtl8),
Opcode::Sload16x4 => Some(VecExtendOp::Sxtl16), Opcode::Sload16x4 => Some(VecExtendOp::Sxtl16),
Opcode::Sload16x4Complex => Some(VecExtendOp::Sxtl16),
Opcode::Uload16x4 => Some(VecExtendOp::Uxtl16), Opcode::Uload16x4 => Some(VecExtendOp::Uxtl16),
Opcode::Uload16x4Complex => Some(VecExtendOp::Uxtl16),
Opcode::Sload32x2 => Some(VecExtendOp::Sxtl32), Opcode::Sload32x2 => Some(VecExtendOp::Sxtl32),
Opcode::Sload32x2Complex => Some(VecExtendOp::Sxtl32),
Opcode::Uload32x2 => Some(VecExtendOp::Uxtl32), Opcode::Uload32x2 => Some(VecExtendOp::Uxtl32),
Opcode::Uload32x2Complex => Some(VecExtendOp::Uxtl32),
_ => None, _ => None,
}; };
@@ -204,20 +180,13 @@ pub(crate) fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
} }
} }
Opcode::Store Opcode::Store | Opcode::Istore8 | Opcode::Istore16 | Opcode::Istore32 => {
| Opcode::Istore8
| Opcode::Istore16
| Opcode::Istore32
| Opcode::StoreComplex
| Opcode::Istore8Complex
| Opcode::Istore16Complex
| Opcode::Istore32Complex => {
let off = ctx.data(insn).load_store_offset().unwrap(); let off = ctx.data(insn).load_store_offset().unwrap();
let elem_ty = match op { let elem_ty = match op {
Opcode::Istore8 | Opcode::Istore8Complex => I8, Opcode::Istore8 => I8,
Opcode::Istore16 | Opcode::Istore16Complex => I16, Opcode::Istore16 => I16,
Opcode::Istore32 | Opcode::Istore32Complex => I32, Opcode::Istore32 => I32,
Opcode::Store | Opcode::StoreComplex => ctx.input_ty(insn, 0), Opcode::Store => ctx.input_ty(insn, 0),
_ => unreachable!(), _ => unreachable!(),
}; };
let is_float = ty_has_float_or_vec_representation(elem_ty); let is_float = ty_has_float_or_vec_representation(elem_ty);

View File

@@ -291,17 +291,11 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
| Opcode::FminPseudo | Opcode::FminPseudo
| Opcode::FmaxPseudo | Opcode::FmaxPseudo
| Opcode::Uload8x8 | Opcode::Uload8x8
| Opcode::Uload8x8Complex
| Opcode::Sload8x8 | Opcode::Sload8x8
| Opcode::Sload8x8Complex
| Opcode::Uload16x4 | Opcode::Uload16x4
| Opcode::Uload16x4Complex
| Opcode::Sload16x4 | Opcode::Sload16x4
| Opcode::Sload16x4Complex
| Opcode::Uload32x2 | Opcode::Uload32x2
| Opcode::Uload32x2Complex
| Opcode::Sload32x2 | Opcode::Sload32x2
| Opcode::Sload32x2Complex
| Opcode::Vconst | Opcode::Vconst
| Opcode::Shuffle | Opcode::Shuffle
| Opcode::Vsplit | Opcode::Vsplit
@@ -333,20 +327,6 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
panic!("Unused opcode should not be encountered."); panic!("Unused opcode should not be encountered.");
} }
Opcode::LoadComplex
| Opcode::Uload8Complex
| Opcode::Sload8Complex
| Opcode::Uload16Complex
| Opcode::Sload16Complex
| Opcode::Uload32Complex
| Opcode::Sload32Complex
| Opcode::StoreComplex
| Opcode::Istore8Complex
| Opcode::Istore16Complex
| Opcode::Istore32Complex => {
panic!("Load/store complex opcode should not be encountered.");
}
Opcode::Ifcmp Opcode::Ifcmp
| Opcode::Ffcmp | Opcode::Ffcmp
| Opcode::Trapff | Opcode::Trapff

View File

@@ -1,4 +1,4 @@
src/clif.isle 9ea75a6f790b5c03 src/clif.isle 443b34b797fc8ace
src/prelude.isle 74d9514ac948e163 src/prelude.isle 74d9514ac948e163
src/isa/s390x/inst.isle d91a16074ab186a8 src/isa/s390x/inst.isle d91a16074ab186a8
src/isa/s390x/lower.isle 1cc5a12adc8c75f9 src/isa/s390x/lower.isle 1cc5a12adc8c75f9

View File

@@ -2173,13 +2173,6 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
| Opcode::Sload16 | Opcode::Sload16
| Opcode::Uload32 | Opcode::Uload32
| Opcode::Sload32 | Opcode::Sload32
| Opcode::LoadComplex
| Opcode::Uload8Complex
| Opcode::Sload8Complex
| Opcode::Uload16Complex
| Opcode::Sload16Complex
| Opcode::Uload32Complex
| Opcode::Sload32Complex
| Opcode::Sload8x8 | Opcode::Sload8x8
| Opcode::Uload8x8 | Opcode::Uload8x8
| Opcode::Sload16x4 | Opcode::Sload16x4
@@ -2189,30 +2182,13 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
let offset = ctx.data(insn).load_store_offset().unwrap(); let offset = ctx.data(insn).load_store_offset().unwrap();
let elem_ty = match op { let elem_ty = match op {
Opcode::Sload8 | Opcode::Uload8 | Opcode::Sload8Complex | Opcode::Uload8Complex => { Opcode::Sload8 | Opcode::Uload8 => types::I8,
types::I8 Opcode::Sload16 | Opcode::Uload16 => types::I16,
} Opcode::Sload32 | Opcode::Uload32 => types::I32,
Opcode::Sload16 Opcode::Sload8x8 | Opcode::Uload8x8 => types::I8X8,
| Opcode::Uload16 Opcode::Sload16x4 | Opcode::Uload16x4 => types::I16X4,
| Opcode::Sload16Complex Opcode::Sload32x2 | Opcode::Uload32x2 => types::I32X2,
| Opcode::Uload16Complex => types::I16, Opcode::Load => ctx.output_ty(insn, 0),
Opcode::Sload32
| Opcode::Uload32
| Opcode::Sload32Complex
| Opcode::Uload32Complex => types::I32,
Opcode::Sload8x8
| Opcode::Uload8x8
| Opcode::Sload8x8Complex
| Opcode::Uload8x8Complex => types::I8X8,
Opcode::Sload16x4
| Opcode::Uload16x4
| Opcode::Sload16x4Complex
| Opcode::Uload16x4Complex => types::I16X4,
Opcode::Sload32x2
| Opcode::Uload32x2
| Opcode::Sload32x2Complex
| Opcode::Uload32x2Complex => types::I32X2,
Opcode::Load | Opcode::LoadComplex => ctx.output_ty(insn, 0),
_ => unimplemented!(), _ => unimplemented!(),
}; };
@@ -2220,17 +2196,11 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
let sign_extend = match op { let sign_extend = match op {
Opcode::Sload8 Opcode::Sload8
| Opcode::Sload8Complex
| Opcode::Sload16 | Opcode::Sload16
| Opcode::Sload16Complex
| Opcode::Sload32 | Opcode::Sload32
| Opcode::Sload32Complex
| Opcode::Sload8x8 | Opcode::Sload8x8
| Opcode::Sload8x8Complex
| Opcode::Sload16x4 | Opcode::Sload16x4
| Opcode::Sload16x4Complex | Opcode::Sload32x2 => true,
| Opcode::Sload32x2
| Opcode::Sload32x2Complex => true,
_ => false, _ => false,
}; };
@@ -2251,37 +2221,6 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
assert_eq!(inputs.len(), 1, "only one input for load operands"); assert_eq!(inputs.len(), 1, "only one input for load operands");
lower_to_amode(ctx, inputs[0], offset) lower_to_amode(ctx, inputs[0], offset)
} }
Opcode::LoadComplex
| Opcode::Uload8Complex
| Opcode::Sload8Complex
| Opcode::Uload16Complex
| Opcode::Sload16Complex
| Opcode::Uload32Complex
| Opcode::Sload32Complex
| Opcode::Sload8x8Complex
| Opcode::Uload8x8Complex
| Opcode::Sload16x4Complex
| Opcode::Uload16x4Complex
| Opcode::Sload32x2Complex
| Opcode::Uload32x2Complex => {
assert_eq!(
inputs.len(),
2,
"can't handle more than two inputs in complex load"
);
let base = put_input_in_reg(ctx, inputs[0]);
let index = put_input_in_reg(ctx, inputs[1]);
let shift = 0;
let flags = ctx.memflags(insn).expect("load should have memflags");
Amode::imm_reg_reg_shift(
offset as u32,
Gpr::new(base).unwrap(),
Gpr::new(index).unwrap(),
shift,
)
.with_flags(flags)
}
_ => unreachable!(), _ => unreachable!(),
}; };
@@ -2347,21 +2286,14 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
} }
} }
Opcode::Store Opcode::Store | Opcode::Istore8 | Opcode::Istore16 | Opcode::Istore32 => {
| Opcode::Istore8
| Opcode::Istore16
| Opcode::Istore32
| Opcode::StoreComplex
| Opcode::Istore8Complex
| Opcode::Istore16Complex
| Opcode::Istore32Complex => {
let offset = ctx.data(insn).load_store_offset().unwrap(); let offset = ctx.data(insn).load_store_offset().unwrap();
let elem_ty = match op { let elem_ty = match op {
Opcode::Istore8 | Opcode::Istore8Complex => types::I8, Opcode::Istore8 => types::I8,
Opcode::Istore16 | Opcode::Istore16Complex => types::I16, Opcode::Istore16 => types::I16,
Opcode::Istore32 | Opcode::Istore32Complex => types::I32, Opcode::Istore32 => types::I32,
Opcode::Store | Opcode::StoreComplex => ctx.input_ty(insn, 0), Opcode::Store => ctx.input_ty(insn, 0),
_ => unreachable!(), _ => unreachable!(),
}; };
@@ -2370,29 +2302,6 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
assert_eq!(inputs.len(), 2, "only one input for store memory operands"); assert_eq!(inputs.len(), 2, "only one input for store memory operands");
lower_to_amode(ctx, inputs[1], offset) lower_to_amode(ctx, inputs[1], offset)
} }
Opcode::StoreComplex
| Opcode::Istore8Complex
| Opcode::Istore16Complex
| Opcode::Istore32Complex => {
assert_eq!(
inputs.len(),
3,
"can't handle more than two inputs in complex store"
);
let base = put_input_in_reg(ctx, inputs[1]);
let index = put_input_in_reg(ctx, inputs[2]);
let shift = 0;
let flags = ctx.memflags(insn).expect("store should have memflags");
Amode::imm_reg_reg_shift(
offset as u32,
Gpr::new(base).unwrap(),
Gpr::new(index).unwrap(),
shift,
)
.with_flags(flags)
}
_ => unreachable!(), _ => unreachable!(),
}; };
@@ -3293,15 +3202,6 @@ fn lower_insn_to_regs<C: LowerCtx<I = Inst>>(
// Unimplemented opcodes below. These are not currently used by Wasm // Unimplemented opcodes below. These are not currently used by Wasm
// lowering or other known embeddings, but should be either supported or // lowering or other known embeddings, but should be either supported or
// removed eventually. // removed eventually.
Opcode::Uload8x8Complex
| Opcode::Sload8x8Complex
| Opcode::Uload16x4Complex
| Opcode::Sload16x4Complex
| Opcode::Uload32x2Complex
| Opcode::Sload32x2Complex => {
unimplemented!("Vector load {:?} not implemented", op);
}
Opcode::Cls => unimplemented!("Cls not supported"), Opcode::Cls => unimplemented!("Cls not supported"),
Opcode::Fma => unimplemented!("Fma not supported"), Opcode::Fma => unimplemented!("Fma not supported"),

View File

@@ -1,4 +1,4 @@
src/clif.isle 9ea75a6f790b5c03 src/clif.isle 443b34b797fc8ace
src/prelude.isle 74d9514ac948e163 src/prelude.isle 74d9514ac948e163
src/isa/x64/inst.isle a002d62dcfce285 src/isa/x64/inst.isle a002d62dcfce285
src/isa/x64/lower.isle 8f3e1ed2929fd07e src/isa/x64/lower.isle 8f3e1ed2929fd07e

View File

@@ -147,9 +147,7 @@ fn trivially_unsafe_for_licm(opcode: Opcode) -> bool {
fn is_unsafe_load(inst_data: &InstructionData) -> bool { fn is_unsafe_load(inst_data: &InstructionData) -> bool {
match *inst_data { match *inst_data {
InstructionData::Load { flags, .. } | InstructionData::LoadComplex { flags, .. } => { InstructionData::Load { flags, .. } => !flags.readonly() || !flags.notrap(),
!flags.readonly() || !flags.notrap()
}
_ => inst_data.opcode().can_load(), _ => inst_data.opcode().can_load(),
} }
} }

View File

@@ -1101,10 +1101,8 @@ impl<'func, I: VCodeInst> LowerCtx for Lower<'func, I> {
&InstructionData::AtomicCas { flags, .. } => Some(flags), &InstructionData::AtomicCas { flags, .. } => Some(flags),
&InstructionData::AtomicRmw { flags, .. } => Some(flags), &InstructionData::AtomicRmw { flags, .. } => Some(flags),
&InstructionData::Load { flags, .. } &InstructionData::Load { flags, .. }
| &InstructionData::LoadComplex { flags, .. }
| &InstructionData::LoadNoOffset { flags, .. } | &InstructionData::LoadNoOffset { flags, .. }
| &InstructionData::Store { flags, .. } | &InstructionData::Store { flags, .. } => Some(flags),
| &InstructionData::StoreComplex { flags, .. } => Some(flags),
&InstructionData::StoreNoOffset { flags, .. } => Some(flags), &InstructionData::StoreNoOffset { flags, .. } => Some(flags),
_ => None, _ => None,
} }

View File

@@ -24,9 +24,7 @@ fn trivially_unsafe_for_gvn(opcode: Opcode) -> bool {
/// Test that, if the specified instruction is a load, it doesn't have the `readonly` memflag. /// Test that, if the specified instruction is a load, it doesn't have the `readonly` memflag.
fn is_load_and_not_readonly(inst_data: &InstructionData) -> bool { fn is_load_and_not_readonly(inst_data: &InstructionData) -> bool {
match *inst_data { match *inst_data {
InstructionData::Load { flags, .. } | InstructionData::LoadComplex { flags, .. } => { InstructionData::Load { flags, .. } => !flags.readonly(),
!flags.readonly()
}
_ => inst_data.opcode().can_load(), _ => inst_data.opcode().can_load(),
} }
} }

View File

@@ -690,13 +690,6 @@ impl<'a> Verifier<'a> {
TableAddr { table, .. } => { TableAddr { table, .. } => {
self.verify_table(inst, table, errors)?; self.verify_table(inst, table, errors)?;
} }
LoadComplex { ref args, .. } => {
self.verify_value_list(inst, args, errors)?;
}
StoreComplex { ref args, .. } => {
self.verify_value_list(inst, args, errors)?;
}
NullAry { NullAry {
opcode: Opcode::GetPinnedReg, opcode: Opcode::GetPinnedReg,
} }
@@ -1627,8 +1620,7 @@ impl<'a> Verifier<'a> {
let inst_data = &self.func.dfg[inst]; let inst_data = &self.func.dfg[inst];
match *inst_data { match *inst_data {
ir::InstructionData::Store { flags, .. } ir::InstructionData::Store { flags, .. } => {
| ir::InstructionData::StoreComplex { flags, .. } => {
if flags.readonly() { if flags.readonly() {
errors.fatal(( errors.fatal((
inst, inst,

View File

@@ -498,43 +498,12 @@ pub fn write_operands(w: &mut dyn Write, dfg: &DataFlowGraph, inst: Inst) -> fmt
Load { Load {
flags, arg, offset, .. flags, arg, offset, ..
} => write!(w, "{} {}{}", flags, arg, offset), } => write!(w, "{} {}{}", flags, arg, offset),
LoadComplex {
flags,
ref args,
offset,
..
} => {
let args = args.as_slice(pool);
write!(
w,
"{} {}{}",
flags,
DisplayValuesWithDelimiter(&args, '+'),
offset
)
}
Store { Store {
flags, flags,
args, args,
offset, offset,
.. ..
} => write!(w, "{} {}, {}{}", flags, args[0], args[1], offset), } => write!(w, "{} {}, {}{}", flags, args[0], args[1], offset),
StoreComplex {
flags,
ref args,
offset,
..
} => {
let args = args.as_slice(pool);
write!(
w,
"{} {}, {}{}",
flags,
args[0],
DisplayValuesWithDelimiter(&args[1..], '+'),
offset
)
}
Trap { code, .. } => write!(w, " {}", code), Trap { code, .. } => write!(w, " {}", code),
CondTrap { arg, code, .. } => write!(w, " {}, {}", arg, code), CondTrap { arg, code, .. } => write!(w, " {}, {}", arg, code),
IntCondTrap { IntCondTrap {

View File

@@ -2,70 +2,6 @@ test compile precise-output
set unwind_info=false set unwind_info=false
target aarch64 target aarch64
function %f0(i64, i32) -> i32 {
block0(v0: i64, v1: i32):
v2 = uextend.i64 v1
v3 = load_complex.i32 v0+v2
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 2)
; Inst 0: ldr w0, [x0, w1, UXTW]
; Inst 1: ret
; }}
function %f2(i64, i32) -> i32 {
block0(v0: i64, v1: i32):
v2 = uextend.i64 v1
v3 = load_complex.i32 v2+v0
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 2)
; Inst 0: ldr w0, [x0, w1, UXTW]
; Inst 1: ret
; }}
function %f3(i64, i32) -> i32 {
block0(v0: i64, v1: i32):
v2 = sextend.i64 v1
v3 = load_complex.i32 v0+v2
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 2)
; Inst 0: ldr w0, [x0, w1, SXTW]
; Inst 1: ret
; }}
function %f4(i64, i32) -> i32 {
block0(v0: i64, v1: i32):
v2 = sextend.i64 v1
v3 = load_complex.i32 v2+v0
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 2)
; Inst 0: ldr w0, [x0, w1, SXTW]
; Inst 1: ret
; }}
function %f5(i64, i32) -> i32 { function %f5(i64, i32) -> i32 {
block0(v0: i64, v1: i32): block0(v0: i64, v1: i32):
v2 = sextend.i64 v1 v2 = sextend.i64 v1
@@ -294,91 +230,6 @@ block0(v0: i32, v1: i32):
; Inst 2: ret ; Inst 2: ret
; }} ; }}
function %f16(i64) -> i32 {
block0(v0: i64):
v1 = iconst.i32 0
v2 = uextend.i64 v1
v3 = load_complex.i32 v0+v2
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 2)
; Inst 0: ldr w0, [x0]
; Inst 1: ret
; }}
function %f17(i64) -> i32 {
block0(v0: i64):
v1 = iconst.i32 4
v2 = uextend.i64 v1
v3 = load_complex.i32 v0+v2
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 2)
; Inst 0: ldur w0, [x0, #4]
; Inst 1: ret
; }}
function %f18(i64, i32) -> i16x8 {
block0(v0: i64, v1: i32):
v2 = uextend.i64 v1
v3 = sload8x8_complex v2+v0
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 3)
; Inst 0: ldr d0, [x0, w1, UXTW]
; Inst 1: sxtl v0.8h, v0.8b
; Inst 2: ret
; }}
function %f19(i64, i64) -> i32x4 {
block0(v0: i64, v1: i64):
v2 = uload16x4_complex v0+v1+8
return v2
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 4)
; Inst 0: add x0, x0, x1
; Inst 1: ldr d0, [x0, #8]
; Inst 2: uxtl v0.4s, v0.4h
; Inst 3: ret
; }}
function %f20(i64, i32) -> i64x2 {
block0(v0: i64, v1: i32):
v2 = sextend.i64 v1
v3 = uload32x2_complex v2+v0
return v3
}
; VCode_ShowWithRRU {{
; Entry block: 0
; Block 0:
; (original IR block: block0)
; (instruction range: 0 .. 3)
; Inst 0: ldr d0, [x0, w1, SXTW]
; Inst 1: uxtl v0.2d, v0.2s
; Inst 2: ret
; }}
function %f18(i64, i64, i64) -> i32 { function %f18(i64, i64, i64) -> i32 {
block0(v0: i64, v1: i64, v2: i64): block0(v0: i64, v1: i64, v2: i64):
v3 = iconst.i32 -4098 v3 = iconst.i32 -4098

View File

@@ -152,13 +152,9 @@ block0(v1: i32):
v6 = load.i64 aligned notrap v1 v6 = load.i64 aligned notrap v1
v7 = load.i64 v1-12 v7 = load.i64 v1-12
v8 = load.i64 notrap v1+0x1_0000 v8 = load.i64 notrap v1+0x1_0000
v9 = load_complex.i64 v1+v2
v10 = load_complex.i64 v1+v2+0x1
store v2, v1 store v2, v1
store aligned v3, v1+12 store aligned v3, v1+12
store notrap aligned v3, v1-12 store notrap aligned v3, v1-12
store_complex v3, v1+v2
store_complex v3, v1+v2+0x1
} }
; sameln: function %memory(i32) fast { ; sameln: function %memory(i32) fast {
; nextln: block0(v1: i32): ; nextln: block0(v1: i32):
@@ -169,13 +165,9 @@ block0(v1: i32):
; nextln: v6 = load.i64 notrap aligned v1 ; nextln: v6 = load.i64 notrap aligned v1
; nextln: v7 = load.i64 v1-12 ; nextln: v7 = load.i64 v1-12
; nextln: v8 = load.i64 notrap v1+0x0001_0000 ; nextln: v8 = load.i64 notrap v1+0x0001_0000
; nextln: v9 = load_complex.i64 v1+v2
; nextln: v10 = load_complex.i64 v1+v2+1
; nextln: store v2, v1 ; nextln: store v2, v1
; nextln: store aligned v3, v1+12 ; nextln: store aligned v3, v1+12
; nextln: store notrap aligned v3, v1-12 ; nextln: store notrap aligned v3, v1-12
; nextln: store_complex v3, v1+v2
; nextln: store_complex v3, v1+v2+1
function %cond_traps(i32) { function %cond_traps(i32) {
block0(v0: i32): block0(v0: i32):

View File

@@ -285,64 +285,33 @@ where
Opcode::CallIndirect => unimplemented!("CallIndirect"), Opcode::CallIndirect => unimplemented!("CallIndirect"),
Opcode::FuncAddr => unimplemented!("FuncAddr"), Opcode::FuncAddr => unimplemented!("FuncAddr"),
Opcode::Load Opcode::Load
| Opcode::LoadComplex
| Opcode::Uload8 | Opcode::Uload8
| Opcode::Uload8Complex
| Opcode::Sload8 | Opcode::Sload8
| Opcode::Sload8Complex
| Opcode::Uload16 | Opcode::Uload16
| Opcode::Uload16Complex
| Opcode::Sload16 | Opcode::Sload16
| Opcode::Sload16Complex
| Opcode::Uload32 | Opcode::Uload32
| Opcode::Uload32Complex
| Opcode::Sload32 | Opcode::Sload32
| Opcode::Sload32Complex
| Opcode::Uload8x8 | Opcode::Uload8x8
| Opcode::Uload8x8Complex
| Opcode::Sload8x8 | Opcode::Sload8x8
| Opcode::Sload8x8Complex
| Opcode::Uload16x4 | Opcode::Uload16x4
| Opcode::Uload16x4Complex
| Opcode::Sload16x4 | Opcode::Sload16x4
| Opcode::Sload16x4Complex
| Opcode::Uload32x2 | Opcode::Uload32x2
| Opcode::Uload32x2Complex | Opcode::Sload32x2 => {
| Opcode::Sload32x2
| Opcode::Sload32x2Complex => {
let ctrl_ty = inst_context.controlling_type().unwrap(); let ctrl_ty = inst_context.controlling_type().unwrap();
let (load_ty, kind) = match inst.opcode() { let (load_ty, kind) = match inst.opcode() {
Opcode::Load | Opcode::LoadComplex => (ctrl_ty, None), Opcode::Load => (ctrl_ty, None),
Opcode::Uload8 | Opcode::Uload8Complex => { Opcode::Uload8 => (types::I8, Some(ValueConversionKind::ZeroExtend(ctrl_ty))),
(types::I8, Some(ValueConversionKind::ZeroExtend(ctrl_ty))) Opcode::Sload8 => (types::I8, Some(ValueConversionKind::SignExtend(ctrl_ty))),
} Opcode::Uload16 => (types::I16, Some(ValueConversionKind::ZeroExtend(ctrl_ty))),
Opcode::Sload8 | Opcode::Sload8Complex => { Opcode::Sload16 => (types::I16, Some(ValueConversionKind::SignExtend(ctrl_ty))),
(types::I8, Some(ValueConversionKind::SignExtend(ctrl_ty))) Opcode::Uload32 => (types::I32, Some(ValueConversionKind::ZeroExtend(ctrl_ty))),
} Opcode::Sload32 => (types::I32, Some(ValueConversionKind::SignExtend(ctrl_ty))),
Opcode::Uload16 | Opcode::Uload16Complex => {
(types::I16, Some(ValueConversionKind::ZeroExtend(ctrl_ty)))
}
Opcode::Sload16 | Opcode::Sload16Complex => {
(types::I16, Some(ValueConversionKind::SignExtend(ctrl_ty)))
}
Opcode::Uload32 | Opcode::Uload32Complex => {
(types::I32, Some(ValueConversionKind::ZeroExtend(ctrl_ty)))
}
Opcode::Sload32 | Opcode::Sload32Complex => {
(types::I32, Some(ValueConversionKind::SignExtend(ctrl_ty)))
}
Opcode::Uload8x8 Opcode::Uload8x8
| Opcode::Uload8x8Complex
| Opcode::Sload8x8 | Opcode::Sload8x8
| Opcode::Sload8x8Complex
| Opcode::Uload16x4 | Opcode::Uload16x4
| Opcode::Uload16x4Complex
| Opcode::Sload16x4 | Opcode::Sload16x4
| Opcode::Sload16x4Complex
| Opcode::Uload32x2 | Opcode::Uload32x2
| Opcode::Uload32x2Complex | Opcode::Sload32x2 => unimplemented!(),
| Opcode::Sload32x2
| Opcode::Sload32x2Complex => unimplemented!(),
_ => unreachable!(), _ => unreachable!(),
}; };
@@ -360,25 +329,12 @@ where
(cf, _) => cf, (cf, _) => cf,
} }
} }
Opcode::Store Opcode::Store | Opcode::Istore8 | Opcode::Istore16 | Opcode::Istore32 => {
| Opcode::StoreComplex
| Opcode::Istore8
| Opcode::Istore8Complex
| Opcode::Istore16
| Opcode::Istore16Complex
| Opcode::Istore32
| Opcode::Istore32Complex => {
let kind = match inst.opcode() { let kind = match inst.opcode() {
Opcode::Store | Opcode::StoreComplex => None, Opcode::Store => None,
Opcode::Istore8 | Opcode::Istore8Complex => { Opcode::Istore8 => Some(ValueConversionKind::Truncate(types::I8)),
Some(ValueConversionKind::Truncate(types::I8)) Opcode::Istore16 => Some(ValueConversionKind::Truncate(types::I16)),
} Opcode::Istore32 => Some(ValueConversionKind::Truncate(types::I32)),
Opcode::Istore16 | Opcode::Istore16Complex => {
Some(ValueConversionKind::Truncate(types::I16))
}
Opcode::Istore32 | Opcode::Istore32Complex => {
Some(ValueConversionKind::Truncate(types::I32))
}
_ => unreachable!(), _ => unreachable!(),
}; };

View File

@@ -2246,23 +2246,6 @@ impl<'a> Parser<'a> {
Ok(args) Ok(args)
} }
fn parse_value_sequence(&mut self) -> ParseResult<VariableArgs> {
let mut args = VariableArgs::new();
if let Some(Token::Value(v)) = self.token() {
args.push(v);
self.consume();
} else {
return Ok(args);
}
while self.optional(Token::Plus) {
args.push(self.match_value("expected value in argument list")?);
}
Ok(args)
}
// Parse an optional value list enclosed in parentheses. // Parse an optional value list enclosed in parentheses.
fn parse_opt_value_list(&mut self) -> ParseResult<VariableArgs> { fn parse_opt_value_list(&mut self) -> ParseResult<VariableArgs> {
if !self.optional(Token::LPar) { if !self.optional(Token::LPar) {
@@ -2880,17 +2863,6 @@ impl<'a> Parser<'a> {
offset, offset,
} }
} }
InstructionFormat::LoadComplex => {
let flags = self.optional_memflags();
let args = self.parse_value_sequence()?;
let offset = self.optional_offset32()?;
InstructionData::LoadComplex {
opcode,
flags,
args: args.into_value_list(&[], &mut ctx.function.dfg.value_lists),
offset,
}
}
InstructionFormat::Store => { InstructionFormat::Store => {
let flags = self.optional_memflags(); let flags = self.optional_memflags();
let arg = self.match_value("expected SSA value operand")?; let arg = self.match_value("expected SSA value operand")?;
@@ -2904,20 +2876,6 @@ impl<'a> Parser<'a> {
offset, offset,
} }
} }
InstructionFormat::StoreComplex => {
let flags = self.optional_memflags();
let src = self.match_value("expected SSA value operand")?;
self.match_token(Token::Comma, "expected ',' between operands")?;
let args = self.parse_value_sequence()?;
let offset = self.optional_offset32()?;
InstructionData::StoreComplex {
opcode,
flags,
args: args.into_value_list(&[src], &mut ctx.function.dfg.value_lists),
offset,
}
}
InstructionFormat::Trap => { InstructionFormat::Trap => {
let code = self.match_enum("expected trap code")?; let code = self.match_enum("expected trap code")?;
InstructionData::Trap { opcode, code } InstructionData::Trap { opcode, code }