Following up on the discussion in https://github.com/bytecodealliance/wasmtime/pull/6011, this adds an improved implementation of TrapIf for s390x that uses a single conditional branch instruction. If the trap condition is true, we branch into the middle of the branch instruction itself: those middle two bytes are zero, which matches the encoding of the trap instruction. In addition, the trap code for Trap and TrapIf instructions is now shown in the assembler output.
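For reference, here is a sketch of the encoding trick described above, assuming the conditional trap is emitted as a 6-byte RIL-format conditional branch (BRCL) whose halfword offset is 1; the exact mnemonic and condition mask are up to the backend and may differ:

  offset 0:  0xc0, (mask << 4) | 0x4   ; brcl <mask>, . + 2
  offset 2:  0x00, 0x00                ; high halfword of the offset; also the 2-byte trap encoding (.word 0x0000)
  offset 4:  0x00, 0x01                ; low halfword of the offset: 1 halfword = 2 bytes, i.e. the branch targets offset 2

If the condition is false, execution falls through the full 6-byte instruction; if it is true, the taken branch lands on the zero halfword at offset 2 and raises the trap.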
test compile precise-output
target s390x

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; CALL
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

function %call(i64) -> i64 {
    fn0 = %g(i64) -> i64

block0(v0: i64):
    v1 = call fn0(v0)
    return v1
}

; VCode:
; stmg %r14, %r15, 112(%r15)
; aghi %r15, -160
; virtual_sp_offset_adjust 160
; block0:
; bras %r1, 12 ; data %g + 0 ; lg %r5, 0(%r1)
; basr %r14, %r5
; lmg %r14, %r15, 272(%r15)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r14, %r15, 0x70(%r15)
; aghi %r15, -0xa0
; block1: ; offset 0xa
; bras %r1, 0x16
; .byte 0x00, 0x00 ; reloc_external Abs8 %g 0
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; lg %r5, 0(%r1)
; basr %r14, %r5
; lmg %r14, %r15, 0x110(%r15)
; br %r14

function %call_uext(i32) -> i64 {
    fn0 = %g(i32 uext) -> i64

block0(v0: i32):
    v1 = call fn0(v0)
    return v1
}

; VCode:
; stmg %r14, %r15, 112(%r15)
; aghi %r15, -160
; virtual_sp_offset_adjust 160
; block0:
; llgfr %r2, %r2
; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1)
; basr %r14, %r3
; lmg %r14, %r15, 272(%r15)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r14, %r15, 0x70(%r15)
; aghi %r15, -0xa0
; block1: ; offset 0xa
; llgfr %r2, %r2
; bras %r1, 0x1a
; .byte 0x00, 0x00 ; reloc_external Abs8 %g 0
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; lg %r3, 0(%r1)
; basr %r14, %r3
; lmg %r14, %r15, 0x110(%r15)
; br %r14

function %ret_uext(i32) -> i32 uext {
block0(v0: i32):
    return v0
}

; VCode:
; block0:
; llgfr %r2, %r2
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; llgfr %r2, %r2
; br %r14

function %call_uext(i32) -> i64 {
    fn0 = %g(i32 sext) -> i64

block0(v0: i32):
    v1 = call fn0(v0)
    return v1
}

; VCode:
; stmg %r14, %r15, 112(%r15)
; aghi %r15, -160
; virtual_sp_offset_adjust 160
; block0:
; lgfr %r2, %r2
; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1)
; basr %r14, %r3
; lmg %r14, %r15, 272(%r15)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r14, %r15, 0x70(%r15)
; aghi %r15, -0xa0
; block1: ; offset 0xa
; lgfr %r2, %r2
; bras %r1, 0x1a
; .byte 0x00, 0x00 ; reloc_external Abs8 %g 0
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; lg %r3, 0(%r1)
; basr %r14, %r3
; lmg %r14, %r15, 0x110(%r15)
; br %r14

function %ret_uext(i32) -> i32 sext {
block0(v0: i32):
    return v0
}

; VCode:
; block0:
; lgfr %r2, %r2
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; lgfr %r2, %r2
; br %r14

function %call_colocated(i64) -> i64 {
    fn0 = colocated %g(i64) -> i64

block0(v0: i64):
    v1 = call fn0(v0)
    return v1
}

; VCode:
; stmg %r14, %r15, 112(%r15)
; aghi %r15, -160
; virtual_sp_offset_adjust 160
; block0:
; brasl %r14, %g
; lmg %r14, %r15, 272(%r15)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r14, %r15, 0x70(%r15)
; aghi %r15, -0xa0
; block1: ; offset 0xa
; brasl %r14, 0xa ; reloc_external PLTRel32Dbl %g 2
; lmg %r14, %r15, 0x110(%r15)
; br %r14

function %f2(i32) -> i64 {
    fn0 = %g(i32 uext) -> i64

block0(v0: i32):
    v1 = call fn0(v0)
    return v1
}

; VCode:
; stmg %r14, %r15, 112(%r15)
; aghi %r15, -160
; virtual_sp_offset_adjust 160
; block0:
; llgfr %r2, %r2
; bras %r1, 12 ; data %g + 0 ; lg %r3, 0(%r1)
; basr %r14, %r3
; lmg %r14, %r15, 272(%r15)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r14, %r15, 0x70(%r15)
; aghi %r15, -0xa0
; block1: ; offset 0xa
; llgfr %r2, %r2
; bras %r1, 0x1a
; .byte 0x00, 0x00 ; reloc_external Abs8 %g 0
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; .byte 0x00, 0x00
; lg %r3, 0(%r1)
; basr %r14, %r3
; lmg %r14, %r15, 0x110(%r15)
; br %r14

function %call_indirect(i64, i64) -> i64 {
    sig0 = (i64) -> i64
block0(v0: i64, v1: i64):
    v2 = call_indirect.i64 sig0, v1(v0)
    return v2
}

; VCode:
; stmg %r14, %r15, 112(%r15)
; aghi %r15, -160
; virtual_sp_offset_adjust 160
; block0:
; basr %r14, %r3
; lmg %r14, %r15, 272(%r15)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r14, %r15, 0x70(%r15)
; aghi %r15, -0xa0
; block1: ; offset 0xa
; basr %r14, %r3
; lmg %r14, %r15, 0x110(%r15)
; br %r14

function %incoming_args(i64, i32, i32 uext, i32 sext, i16, i16 uext, i16 sext, i8, i8 uext, i8 sext) -> i64 {
block0(v0: i64, v1: i32, v2: i32, v3: i32, v4: i16, v5: i16, v6: i16, v7: i8, v8: i8, v9: i8):
    v10 = uextend.i64 v1
    v11 = uextend.i64 v2
    v12 = uextend.i64 v3
    v13 = uextend.i64 v4
    v14 = uextend.i64 v5
    v15 = uextend.i64 v6
    v16 = uextend.i64 v7
    v17 = uextend.i64 v8
    v18 = uextend.i64 v9
    v19 = iadd v0, v10
    v20 = iadd v11, v12
    v21 = iadd v13, v14
    v22 = iadd v15, v16
    v23 = iadd v17, v18
    v24 = iadd v19, v20
    v25 = iadd v21, v22
    v26 = iadd v23, v24
    v27 = iadd v25, v26
    return v27
}

; VCode:
; stmg %r6, %r15, 48(%r15)
; block0:
; lg %r12, 160(%r15)
; lg %r14, 168(%r15)
; llgc %r7, 183(%r15)
; lg %r9, 184(%r15)
; lg %r11, 192(%r15)
; llgfr %r3, %r3
; llgfr %r4, %r4
; llgfr %r13, %r5
; llghr %r6, %r6
; llghr %r5, %r12
; llghr %r12, %r14
; llgcr %r14, %r7
; llgcr %r7, %r9
; llgcr %r8, %r11
; agrk %r3, %r2, %r3
; agr %r4, %r13
; agrk %r5, %r6, %r5
; agrk %r2, %r12, %r14
; agrk %r12, %r7, %r8
; agr %r3, %r4
; agrk %r4, %r5, %r2
; agrk %r3, %r12, %r3
; agrk %r2, %r4, %r3
; lmg %r6, %r15, 48(%r15)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r6, %r15, 0x30(%r15)
; block1: ; offset 0x6
; lg %r12, 0xa0(%r15)
; lg %r14, 0xa8(%r15)
; llgc %r7, 0xb7(%r15)
; lg %r9, 0xb8(%r15)
; lg %r11, 0xc0(%r15)
; llgfr %r3, %r3
; llgfr %r4, %r4
; llgfr %r13, %r5
; llghr %r6, %r6
; llghr %r5, %r12
; llghr %r12, %r14
; llgcr %r14, %r7
; llgcr %r7, %r9
; llgcr %r8, %r11
; agrk %r3, %r2, %r3
; agr %r4, %r13
; agrk %r5, %r6, %r5
; agrk %r2, %r12, %r14
; agrk %r12, %r7, %r8
; agr %r3, %r4
; agrk %r4, %r5, %r2
; agrk %r3, %r12, %r3
; agrk %r2, %r4, %r3
; lmg %r6, %r15, 0x30(%r15)
; br %r14

function %incoming_args_i128(i128, i128, i128, i128, i128, i128, i128, i128) -> i128 {
block0(v0: i128, v1: i128, v2: i128, v3: i128, v4: i128, v5: i128, v6: i128, v7: i128):
    v8 = iadd v0, v1
    v9 = iadd v2, v3
    v10 = iadd v4, v5
    v11 = iadd v6, v7
    v12 = iadd v8, v9
    v13 = iadd v10, v11
    v14 = iadd v12, v13
    return v14
}

; VCode:
; block0:
; vl %v1, 0(%r3)
; vl %v3, 0(%r4)
; vl %v5, 0(%r5)
; vl %v7, 0(%r6)
; lg %r3, 160(%r15)
; vl %v18, 0(%r3)
; lg %r3, 168(%r15)
; vl %v21, 0(%r3)
; lg %r5, 176(%r15)
; vl %v24, 0(%r5)
; lg %r4, 184(%r15)
; vl %v27, 0(%r4)
; vaq %v16, %v1, %v3
; vaq %v17, %v5, %v7
; vaq %v18, %v18, %v21
; vaq %v19, %v24, %v27
; vaq %v16, %v16, %v17
; vaq %v17, %v18, %v19
; vaq %v16, %v16, %v17
; vst %v16, 0(%r2)
; br %r14
;
; Disassembled:
; block0: ; offset 0x0
; vl %v1, 0(%r3)
; vl %v3, 0(%r4)
; vl %v5, 0(%r5)
; vl %v7, 0(%r6)
; lg %r3, 0xa0(%r15)
; vl %v18, 0(%r3)
; lg %r3, 0xa8(%r15)
; vl %v21, 0(%r3)
; lg %r5, 0xb0(%r15)
; vl %v24, 0(%r5)
; lg %r4, 0xb8(%r15)
; vl %v27, 0(%r4)
; vaq %v16, %v1, %v3
; vaq %v17, %v5, %v7
; vaq %v18, %v18, %v21
; vaq %v19, %v24, %v27
; vaq %v16, %v16, %v17
; vaq %v17, %v18, %v19
; vaq %v16, %v16, %v17
; vst %v16, 0(%r2)
; br %r14

function %call_sret() -> i64 {
    fn0 = colocated %g(i64 sret)

block0:
    v1 = iconst.i64 0
    call fn0(v1)
    trap user0
}

; VCode:
; stmg %r14, %r15, 112(%r15)
; aghi %r15, -160
; virtual_sp_offset_adjust 160
; block0:
; lghi %r2, 0
; brasl %r14, %g
; .word 0x0000 # trap=user0
;
; Disassembled:
; block0: ; offset 0x0
; stmg %r14, %r15, 0x70(%r15)
; aghi %r15, -0xa0
; block1: ; offset 0xa
; lghi %r2, 0
; brasl %r14, 0xe ; reloc_external PLTRel32Dbl %g 2
; .byte 0x00, 0x00 ; trap: user0