This PR switches the default backend on x86, for both the `cranelift-codegen` crate and for Wasmtime, to the new (`MachInst`-style, `VCode`-based) backend that has been under development and testing for some time now. The old backend remains available: it is the default in builds with the `old-x86-backend` feature enabled, and it can be requested explicitly via `BackendVariant::Legacy` from the appropriate APIs. As part of the switch, this PR adds more runtime-configurable plumbing to the testing infrastructure so that tests can run on the appropriate backend: `clif-util test` can now parse a backend selector option from a filetest and instantiate the corresponding backend. CI has been updated so that the old x86 backend continues to run its tests, just as we previously ran the new x64 backend separately. Once we are satisfied that the new backend has not caused any unforeseen issues and we will not need to revert, we will remove the old x86 backend entirely.
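Backend selection is now exposed in two places: a filetest can pin a backend variant on its `target` line (the attached test uses `target x86_64 legacy` so that it keeps exercising the old backend's legalizer), and embedders can request a variant programmatically. Below is a minimal sketch of the programmatic side, assuming an `isa::lookup_variant` entry point that pairs with `BackendVariant`; treat the exact names and signatures as illustrative rather than definitive:

```rust
// Sketch only: explicitly requesting the legacy x86 backend.
// `isa::lookup_variant` is assumed here as the variant-aware
// counterpart of `isa::lookup`.
use cranelift_codegen::isa::{self, BackendVariant};
use cranelift_codegen::settings;
use target_lexicon::Triple;

fn main() {
    let flags = settings::Flags::new(settings::builder());
    // The default variant now resolves to the new MachInst/VCode
    // backend on x86; `Legacy` selects the old backend.
    let builder = isa::lookup_variant(Triple::host(), BackendVariant::Legacy)
        .expect("legacy x86 backend not available in this build");
    let _isa = builder.finish(flags);
}
```

The filetest below shows the filetest side: it pins the legacy backend and checks the legalizer's `bitrev` expansions at each integer width.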
test legalizer
target x86_64 legacy

function %reverse_bits_8(i8) -> i8 {
block0(v0: i8):
    v1 = bitrev.i8 v0
    return v1
}
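; The expected expansion widens each i8 step to i32, swapping adjacent
; bits (masks 170/85 = 0xaa/0x55), then bit pairs (204/51 = 0xcc/0x33),
; then nibbles (240/15 = 0xf0/0x0f), narrowing back after each step.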
; check: v16 = uextend.i32 v0
; check: v17 = band_imm v16, 170
; check: v2 = ireduce.i8 v17
; check: v18 = uextend.i32 v2
; check: v19 = ushr_imm v18, 1
; check: v3 = ireduce.i8 v19
; check: v20 = uextend.i32 v0
; check: v21 = band_imm v20, 85
; check: v4 = ireduce.i8 v21
; check: v22 = uextend.i32 v4
; check: v23 = ishl_imm v22, 1
; check: v5 = ireduce.i8 v23
; check: v24 = uextend.i32 v3
; check: v25 = uextend.i32 v5
; check: v26 = bor v24, v25
; check: v6 = ireduce.i8 v26
; check: v27 = uextend.i32 v6
; check: v28 = band_imm v27, 204
; check: v7 = ireduce.i8 v28
; check: v29 = uextend.i32 v7
; check: v30 = ushr_imm v29, 2
; check: v8 = ireduce.i8 v30
; check: v31 = uextend.i32 v6
; check: v32 = band_imm v31, 51
; check: v9 = ireduce.i8 v32
; check: v33 = uextend.i32 v9
; check: v34 = ishl_imm v33, 2
; check: v10 = ireduce.i8 v34
; check: v35 = uextend.i32 v8
; check: v36 = uextend.i32 v10
; check: v37 = bor v35, v36
; check: v11 = ireduce.i8 v37
; check: v38 = uextend.i32 v11
; check: v39 = band_imm v38, 240
; check: v12 = ireduce.i8 v39
; check: v40 = uextend.i32 v12
; check: v41 = ushr_imm v40, 4
; check: v13 = ireduce.i8 v41
; check: v42 = uextend.i32 v11
; check: v43 = band_imm v42, 15
; check: v14 = ireduce.i8 v43
; check: v44 = uextend.i32 v14
; check: v45 = ishl_imm v44, 4
; check: v15 = ireduce.i8 v45
; check: v46 = uextend.i32 v13
; check: v47 = uextend.i32 v15
; check: v48 = bor v46, v47
; check: v1 = ireduce.i8 v48
; check: return v1

function %reverse_bits_16(i16) -> i16 {
block0(v0: i16):
    v1 = bitrev.i16 v0
    return v1
}
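; Same widen-to-i32 pattern for i16, with one extra round to swap the
; two bytes (masks 0xff00/255, shift by 8).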
; check: v21 = uextend.i32 v0
; check: v22 = band_imm v21, 0xaaaa
; check: v2 = ireduce.i16 v22
; check: v23 = uextend.i32 v2
; check: v24 = ushr_imm v23, 1
; check: v3 = ireduce.i16 v24
; check: v25 = uextend.i32 v0
; check: v26 = band_imm v25, 0x5555
; check: v4 = ireduce.i16 v26
; check: v27 = uextend.i32 v4
; check: v28 = ishl_imm v27, 1
; check: v5 = ireduce.i16 v28
; check: v29 = uextend.i32 v3
; check: v30 = uextend.i32 v5
; check: v31 = bor v29, v30
; check: v6 = ireduce.i16 v31
; check: v32 = uextend.i32 v6
; check: v33 = band_imm v32, 0xcccc
; check: v7 = ireduce.i16 v33
; check: v34 = uextend.i32 v7
; check: v35 = ushr_imm v34, 2
; check: v8 = ireduce.i16 v35
; check: v36 = uextend.i32 v6
; check: v37 = band_imm v36, 0x3333
; check: v9 = ireduce.i16 v37
; check: v38 = uextend.i32 v9
; check: v39 = ishl_imm v38, 2
; check: v10 = ireduce.i16 v39
; check: v40 = uextend.i32 v8
; check: v41 = uextend.i32 v10
; check: v42 = bor v40, v41
; check: v11 = ireduce.i16 v42
; check: v43 = uextend.i32 v11
; check: v44 = band_imm v43, 0xf0f0
; check: v12 = ireduce.i16 v44
; check: v45 = uextend.i32 v12
; check: v46 = ushr_imm v45, 4
; check: v13 = ireduce.i16 v46
; check: v47 = uextend.i32 v11
; check: v48 = band_imm v47, 3855
; check: v14 = ireduce.i16 v48
; check: v49 = uextend.i32 v14
; check: v50 = ishl_imm v49, 4
; check: v15 = ireduce.i16 v50
; check: v51 = uextend.i32 v13
; check: v52 = uextend.i32 v15
; check: v53 = bor v51, v52
; check: v16 = ireduce.i16 v53
; check: v54 = uextend.i32 v16
; check: v55 = band_imm v54, 0xff00
; check: v17 = ireduce.i16 v55
; check: v56 = uextend.i32 v17
; check: v57 = ushr_imm v56, 8
; check: v18 = ireduce.i16 v57
; check: v58 = uextend.i32 v16
; check: v59 = band_imm v58, 255
; check: v19 = ireduce.i16 v59
; check: v60 = uextend.i32 v19
; check: v61 = ishl_imm v60, 8
; check: v20 = ireduce.i16 v61
; check: v62 = uextend.i32 v18
; check: v63 = uextend.i32 v20
; check: v64 = bor v62, v63
; check: v1 = ireduce.i16 v64
; check: return v1

function %reverse_bits_32(i32) -> i32 {
block0(v0: i32):
    v1 = bitrev.i32 v0
    return v1
}
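; At i32 the expansion runs at native width. Masks with the top bit set
; are materialized with an explicit iconst rather than a band_imm
; immediate, and the final halfword swap needs no mask at all.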
; check: v24 = iconst.i32 0xaaaa_aaaa
; check: v2 = band v0, v24
; check: v3 = ushr_imm v2, 1
; check: v4 = band_imm v0, 0x5555_5555
; check: v5 = ishl_imm v4, 1
; check: v6 = bor v3, v5
; check: v25 = iconst.i32 0xcccc_cccc
; check: v7 = band v6, v25
; check: v8 = ushr_imm v7, 2
; check: v9 = band_imm v6, 0x3333_3333
; check: v10 = ishl_imm v9, 2
; check: v11 = bor v8, v10
; check: v26 = iconst.i32 0xf0f0_f0f0
; check: v12 = band v11, v26
; check: v13 = ushr_imm v12, 4
; check: v14 = band_imm v11, 0x0f0f_0f0f
; check: v15 = ishl_imm v14, 4
; check: v16 = bor v13, v15
; check: v27 = iconst.i32 0xff00_ff00
; check: v17 = band v16, v27
; check: v18 = ushr_imm v17, 8
; check: v19 = band_imm v16, 0x00ff_00ff
; check: v20 = ishl_imm v19, 8
; check: v21 = bor v18, v20
; check: v22 = ushr_imm v21, 16
; check: v23 = ishl_imm v21, 16
; check: v1 = bor v22, v23

function %reverse_bits_64(i64) -> i64 {
block0(v0: i64):
    v1 = bitrev.i64 v0
    return v1
}
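; For i64 all of the masks are materialized with explicit iconsts; the
; final round swaps the two 32-bit halves with shifts alone.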
; check: v29 = iconst.i64 0xaaaa_aaaa_aaaa_aaaa
; check: v2 = band v0, v29
; check: v3 = ushr_imm v2, 1
; check: v30 = iconst.i64 0x5555_5555_5555_5555
; check: v4 = band v0, v30
; check: v5 = ishl_imm v4, 1
; check: v6 = bor v3, v5
; check: v31 = iconst.i64 0xcccc_cccc_cccc_cccc
; check: v7 = band v6, v31
; check: v8 = ushr_imm v7, 2
; check: v32 = iconst.i64 0x3333_3333_3333_3333
; check: v9 = band v6, v32
; check: v10 = ishl_imm v9, 2
; check: v11 = bor v8, v10
; check: v33 = iconst.i64 0xf0f0_f0f0_f0f0_f0f0
; check: v12 = band v11, v33
; check: v13 = ushr_imm v12, 4
; check: v34 = iconst.i64 0x0f0f_0f0f_0f0f_0f0f
; check: v14 = band v11, v34
; check: v15 = ishl_imm v14, 4
; check: v16 = bor v13, v15
; check: v35 = iconst.i64 0xff00_ff00_ff00_ff00
; check: v17 = band v16, v35
; check: v18 = ushr_imm v17, 8
; check: v36 = iconst.i64 0x00ff_00ff_00ff_00ff
; check: v19 = band v16, v36
; check: v20 = ishl_imm v19, 8
; check: v21 = bor v18, v20
; check: v37 = iconst.i64 0xffff_0000_ffff_0000
; check: v22 = band v21, v37
; check: v23 = ushr_imm v22, 16
; check: v38 = iconst.i64 0xffff_0000_ffff
; check: v24 = band v21, v38
; check: v25 = ishl_imm v24, 16
; check: v26 = bor v23, v25
; check: v27 = ushr_imm v26, 32
; check: v28 = ishl_imm v26, 32
; check: v1 = bor v27, v28