
When either one of the operands is all ones in its high or low part, splitting the operation opens up further opportunities for combines: one of the two new instructions is either removed outright or becomes a simple copy.
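As a minimal sketch of the idea (illustrative only: the function name and constant below are made up for this note, and the snippet is commented out so it is not part of the autogenerated test that follows): take a 64-bit OR whose constant operand has an all-ones high half. Split into two 32-bit ORs, the high half folds to all ones and that instruction disappears, while the low half survives as an ordinary OR; had the constant's low half been zero instead, that OR would have become a simple copy.

; define i64 @or_hi_allones(i64 %x) {
;   ; -4294967295 == 0xFFFFFFFF00000001: high 32 bits all ones
;   %r = or i64 %x, -4294967295
;   ret i64 %r
; }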
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
; RUN: llc -global-isel=0 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,SDAG %s
; RUN: llc -global-isel=1 -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 < %s | FileCheck -check-prefixes=GCN,GISEL %s

define i128 @fptosi_f64_to_i128(double %x) {
; SDAG-LABEL: fptosi_f64_to_i128:
; SDAG: ; %bb.0: ; %fp-to-i-entry
; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; SDAG-NEXT: v_mov_b32_e32 v5, v1
; SDAG-NEXT: v_bfe_u32 v6, v5, 20, 11
; SDAG-NEXT: v_mov_b32_e32 v7, 0
; SDAG-NEXT: s_mov_b64 s[4:5], 0x3fe
; SDAG-NEXT: v_mov_b32_e32 v4, v0
; SDAG-NEXT: v_cmp_lt_u64_e32 vcc, s[4:5], v[6:7]
; SDAG-NEXT: v_mov_b32_e32 v0, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mov_b32_e32 v1, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: s_and_saveexec_b64 s[8:9], vcc
; SDAG-NEXT: s_cbranch_execz .LBB0_10
; SDAG-NEXT: ; %bb.1: ; %fp-to-i-if-end
; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, -1, v7, vcc
; SDAG-NEXT: v_addc_co_u32_e32 v2, vcc, -1, v7, vcc
; SDAG-NEXT: s_movk_i32 s6, 0xff7f
; SDAG-NEXT: v_addc_co_u32_e32 v3, vcc, -1, v7, vcc
; SDAG-NEXT: s_mov_b32 s7, -1
; SDAG-NEXT: v_cmp_eq_u64_e64 s[4:5], -1, v[2:3]
; SDAG-NEXT: v_cmp_lt_u64_e64 s[6:7], s[6:7], v[0:1]
; SDAG-NEXT: v_cmp_lt_i64_e32 vcc, -1, v[4:5]
; SDAG-NEXT: s_and_b64 s[4:5], s[4:5], s[6:7]
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB0_7
; SDAG-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; SDAG-NEXT: v_add_co_u32_e64 v9, s[4:5], -1, v0
; SDAG-NEXT: s_mov_b64 s[4:5], 0x432
; SDAG-NEXT: v_and_b32_e32 v0, 0xfffff, v5
; SDAG-NEXT: v_cmp_lt_u64_e64 s[4:5], s[4:5], v[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v8, -1, 0, vcc
; SDAG-NEXT: v_cndmask_b32_e64 v10, -1, 1, vcc
; SDAG-NEXT: v_or_b32_e32 v5, 0x100000, v0
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[12:13], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB0_4
; SDAG-NEXT: ; %bb.3: ; %fp-to-i-if-else
; SDAG-NEXT: v_sub_u32_e32 v0, 0x473, v6
; SDAG-NEXT: v_add_u32_e32 v2, 0xfffffb8d, v6
; SDAG-NEXT: v_add_u32_e32 v7, 0xfffffbcd, v6
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v0, v[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[2:3], v2, v[4:5]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v7
; SDAG-NEXT: v_cndmask_b32_e64 v1, v3, v1, s[4:5]
; SDAG-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v7
; SDAG-NEXT: v_cndmask_b32_e64 v6, 0, v1, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, v2, v0, s[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[0:1], v7, v[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, v2, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v12, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v11, 0, v1, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v12, v10, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: v_mul_lo_u32 v13, v8, v2
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v2, 0
; SDAG-NEXT: v_mov_b32_e32 v2, v1
; SDAG-NEXT: v_mul_lo_u32 v14, v10, v6
; SDAG-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v11, v10, v[2:3]
; SDAG-NEXT: ; implicit-def: $vgpr10
; SDAG-NEXT: v_add3_u32 v5, v5, v14, v13
; SDAG-NEXT: v_mov_b32_e32 v2, v6
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v12, v8, v[2:3]
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v9, v12, v[4:5]
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v7, v2
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mul_lo_u32 v6, v9, v11
; SDAG-NEXT: v_mul_lo_u32 v9, v9, v12
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v11, v8, v[2:3]
; SDAG-NEXT: ; implicit-def: $vgpr8
; SDAG-NEXT: v_add3_u32 v5, v9, v5, v6
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v2, v4
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], v3, v5, s[4:5]
; SDAG-NEXT: ; implicit-def: $vgpr6_vgpr7
; SDAG-NEXT: ; implicit-def: $vgpr4_vgpr5
; SDAG-NEXT: ; implicit-def: $vgpr9
; SDAG-NEXT: .LBB0_4: ; %Flow
; SDAG-NEXT: s_andn2_saveexec_b64 s[12:13], s[12:13]
; SDAG-NEXT: s_cbranch_execz .LBB0_6
; SDAG-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; SDAG-NEXT: v_sub_u32_e32 v2, 0x433, v6
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v2, v[4:5]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v2
; SDAG-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v1, 0, v1, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v6, v0, v4, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v5, v1, v5, s[6:7]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v6, v10, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v5, v10, v[1:2]
; SDAG-NEXT: v_mov_b32_e32 v1, v3
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v6, v8, v[1:2]
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v4, v2
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v5, v8, v[2:3]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v9, v6, v[2:3]
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v9, v6, v[3:4]
; SDAG-NEXT: v_mad_i32_i24 v3, v9, v5, v3
; SDAG-NEXT: .LBB0_6: ; %Flow1
; SDAG-NEXT: s_or_b64 exec, exec, s[12:13]
; SDAG-NEXT: .LBB0_7: ; %Flow2
; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11]
; SDAG-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; SDAG-NEXT: v_bfrev_b32_e32 v0, 1
; SDAG-NEXT: v_bfrev_b32_e32 v1, -2
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, -1, vcc
; SDAG-NEXT: v_cndmask_b32_e32 v3, v0, v1, vcc
; SDAG-NEXT: v_mov_b32_e32 v0, v2
; SDAG-NEXT: v_mov_b32_e32 v1, v2
; SDAG-NEXT: ; %bb.9: ; %Flow3
; SDAG-NEXT: s_or_b64 exec, exec, s[4:5]
; SDAG-NEXT: .LBB0_10: ; %fp-to-i-cleanup
; SDAG-NEXT: s_or_b64 exec, exec, s[8:9]
; SDAG-NEXT: s_setpc_b64 s[30:31]
;
; GISEL-LABEL: fptosi_f64_to_i128:
; GISEL: ; %bb.0: ; %fp-to-i-entry
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v5, v1
; GISEL-NEXT: v_mov_b32_e32 v4, v0
; GISEL-NEXT: v_lshrrev_b32_e32 v2, 20, v5
; GISEL-NEXT: v_mov_b32_e32 v0, 0x3ff
; GISEL-NEXT: s_mov_b64 s[4:5], 0
; GISEL-NEXT: v_mov_b32_e32 v7, 0
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_and_b32_e32 v6, 0x7ff, v2
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
; GISEL-NEXT: s_mov_b64 s[6:7], s[4:5]
; GISEL-NEXT: v_mov_b32_e32 v0, s4
; GISEL-NEXT: v_mov_b32_e32 v1, s5
; GISEL-NEXT: v_mov_b32_e32 v2, s6
; GISEL-NEXT: v_mov_b32_e32 v3, s7
; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc
; GISEL-NEXT: s_cbranch_execz .LBB0_10
; GISEL-NEXT: ; %bb.1: ; %fp-to-i-if-end
; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
; GISEL-NEXT: v_mov_b32_e32 v2, 0xffffff80
; GISEL-NEXT: v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
; GISEL-NEXT: v_mov_b32_e32 v3, -1
; GISEL-NEXT: v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
; GISEL-NEXT: v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; GISEL-NEXT: v_cmp_le_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cmp_lt_i64_e64 s[4:5], -1, v[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, vcc
; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cndmask_b32_e32 v0, v1, v0, vcc
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[14:15], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB0_7
; GISEL-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; GISEL-NEXT: s_xor_b64 s[6:7], s[4:5], -1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, -1, s[6:7]
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, s[6:7]
; GISEL-NEXT: v_lshlrev_b16_e32 v2, 1, v0
; GISEL-NEXT: v_or_b32_e32 v1, v1, v2
; GISEL-NEXT: v_lshlrev_b16_e32 v3, 2, v0
; GISEL-NEXT: v_or_b32_e32 v2, v0, v2
; GISEL-NEXT: v_or_b32_e32 v1, v1, v3
; GISEL-NEXT: v_lshlrev_b16_e32 v8, 3, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v3
; GISEL-NEXT: v_or_b32_e32 v1, v1, v8
; GISEL-NEXT: v_lshlrev_b16_e32 v9, 4, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v8
; GISEL-NEXT: v_or_b32_e32 v1, v1, v9
; GISEL-NEXT: v_lshlrev_b16_e32 v10, 5, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v9
; GISEL-NEXT: v_or_b32_e32 v1, v1, v10
; GISEL-NEXT: v_lshlrev_b16_e32 v11, 6, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v10
; GISEL-NEXT: v_or_b32_e32 v1, v1, v11
; GISEL-NEXT: v_lshlrev_b16_e32 v12, 7, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v11
; GISEL-NEXT: v_or_b32_e32 v1, v1, v12
; GISEL-NEXT: v_lshlrev_b16_e32 v13, 8, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v12
; GISEL-NEXT: v_or_b32_e32 v1, v1, v13
; GISEL-NEXT: v_lshlrev_b16_e32 v14, 9, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v13
; GISEL-NEXT: v_or_b32_e32 v1, v1, v14
; GISEL-NEXT: v_lshlrev_b16_e32 v15, 10, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v14
; GISEL-NEXT: v_or_b32_e32 v1, v1, v15
; GISEL-NEXT: v_lshlrev_b16_e32 v16, 11, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v15
; GISEL-NEXT: v_or_b32_e32 v1, v1, v16
; GISEL-NEXT: v_lshlrev_b16_e32 v17, 12, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v16
; GISEL-NEXT: v_or_b32_e32 v1, v1, v17
; GISEL-NEXT: v_lshlrev_b16_e32 v18, 13, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v17
; GISEL-NEXT: v_or_b32_e32 v1, v1, v18
; GISEL-NEXT: v_lshlrev_b16_e32 v19, 14, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v18
; GISEL-NEXT: v_or_b32_e32 v1, v1, v19
; GISEL-NEXT: v_lshlrev_b16_e32 v0, 15, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v19
; GISEL-NEXT: v_or_b32_e32 v1, v1, v0
; GISEL-NEXT: v_or_b32_e32 v0, v2, v0
; GISEL-NEXT: v_and_b32_e32 v0, 0xffff, v0
; GISEL-NEXT: v_and_b32_e32 v1, 0xffff, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 16, v0
; GISEL-NEXT: v_lshl_or_b32 v9, v0, 16, v0
; GISEL-NEXT: v_or3_b32 v8, v1, v2, 1
; GISEL-NEXT: v_mov_b32_e32 v0, 0x433
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_mov_b32_e32 v2, 0xfffff
; GISEL-NEXT: s_mov_b32 s6, 0x100000
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
; GISEL-NEXT: v_and_or_b32 v5, v5, v2, s6
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[16:17], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB0_4
; GISEL-NEXT: ; %bb.3: ; %fp-to-i-if-else
; GISEL-NEXT: v_add_u32_e32 v7, 0xfffffbcd, v6
; GISEL-NEXT: v_lshlrev_b64 v[0:1], v7, v[4:5]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v7
; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v0, vcc
; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v1, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v10, v9, 0
; GISEL-NEXT: v_add_u32_e32 v6, 0xfffffb8d, v6
; GISEL-NEXT: v_sub_u32_e32 v2, 64, v7
; GISEL-NEXT: v_lshrrev_b64 v[2:3], v2, v[4:5]
; GISEL-NEXT: v_lshlrev_b64 v[4:5], v6, v[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v7
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v11, v9, v[0:1]
; GISEL-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v12, v2, 0, s[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v8, v[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v10, v8, 0
; GISEL-NEXT: v_mov_b32_e32 v2, v6
; GISEL-NEXT: v_mul_lo_u32 v6, v10, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[8:9], v10, v9, v[1:2]
; GISEL-NEXT: v_mul_lo_u32 v4, v11, v9
; GISEL-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[10:11], v11, v8, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
; GISEL-NEXT: v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, 0, s[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr9
; GISEL-NEXT: v_mad_u64_u32 v[3:4], s[6:7], v3, v8, v[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr6
; GISEL-NEXT: ; implicit-def: $vgpr4_vgpr5
; GISEL-NEXT: ; implicit-def: $vgpr8
; GISEL-NEXT: .LBB0_4: ; %Flow
; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[16:17]
; GISEL-NEXT: s_cbranch_execz .LBB0_6
; GISEL-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; GISEL-NEXT: v_sub_co_u32_e32 v2, vcc, 0x433, v6
; GISEL-NEXT: v_lshrrev_b64 v[0:1], v2, v[4:5]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2
; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc
; GISEL-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc
; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v2
; GISEL-NEXT: v_cndmask_b32_e32 v4, v0, v4, vcc
; GISEL-NEXT: v_mad_u64_u32 v[2:3], s[6:7], v4, v9, 0
; GISEL-NEXT: v_cndmask_b32_e32 v5, v1, v5, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v4, v8, 0
; GISEL-NEXT: v_mad_u64_u32 v[2:3], s[6:7], v5, v9, v[2:3]
; GISEL-NEXT: v_mul_lo_u32 v6, v5, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], vcc, v4, v9, v[1:2]
; GISEL-NEXT: v_mul_lo_u32 v4, v4, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[6:7], v5, v8, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e64 v3, s[6:7], v3, v4, s[6:7]
; GISEL-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v6, vcc
; GISEL-NEXT: .LBB0_6: ; %Flow1
; GISEL-NEXT: s_or_b64 exec, exec, s[8:9]
; GISEL-NEXT: .LBB0_7: ; %Flow2
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[14:15]
; GISEL-NEXT: s_cbranch_execz .LBB0_9
; GISEL-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, -1, s[4:5]
; GISEL-NEXT: v_and_b32_e32 v1, 1, v1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[4:5]
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 1, v1
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 2, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 3, v1
; GISEL-NEXT: v_or_b32_e32 v2, v1, v2
; GISEL-NEXT: v_or3_b32 v0, v0, v3, v4
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 4, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 5, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v3, v4
; GISEL-NEXT: v_or3_b32 v0, v0, v5, v6
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 6, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 7, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v5, v6
; GISEL-NEXT: v_or3_b32 v0, v0, v7, v8
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 8, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 9, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v7, v8
; GISEL-NEXT: v_or3_b32 v0, v0, v9, v10
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 10, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 11, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v9, v10
; GISEL-NEXT: v_or3_b32 v0, v0, v11, v12
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 12, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 13, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v11, v12
; GISEL-NEXT: v_or3_b32 v0, v0, v13, v14
; GISEL-NEXT: v_lshlrev_b32_e32 v15, 14, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v16, 15, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v13, v14
; GISEL-NEXT: v_or3_b32 v0, v0, v15, v16
; GISEL-NEXT: v_lshlrev_b32_e32 v17, 16, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v18, 17, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v15, v16
; GISEL-NEXT: v_or3_b32 v0, v0, v17, v18
; GISEL-NEXT: v_lshlrev_b32_e32 v19, 18, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 19, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v17, v18
; GISEL-NEXT: v_or3_b32 v0, v0, v19, v3
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 20, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 21, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v19, v3
; GISEL-NEXT: v_or3_b32 v0, v0, v4, v5
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 22, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 23, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v4, v5
; GISEL-NEXT: v_or3_b32 v0, v0, v6, v7
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 24, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 25, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v6, v7
; GISEL-NEXT: v_or3_b32 v0, v0, v8, v9
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 26, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 27, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v8, v9
; GISEL-NEXT: v_or3_b32 v0, v0, v10, v11
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 28, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 29, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v10, v11
; GISEL-NEXT: v_or3_b32 v0, v0, v12, v13
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 30, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v1, 31, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v12, v13
; GISEL-NEXT: v_or3_b32 v0, v0, v14, v1
; GISEL-NEXT: v_or3_b32 v1, v2, v14, v1
; GISEL-NEXT: v_add_u32_e32 v3, 0x80000000, v1
; GISEL-NEXT: v_mov_b32_e32 v2, v1
; GISEL-NEXT: .LBB0_9: ; %Flow3
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB0_10: ; %fp-to-i-cleanup
; GISEL-NEXT: s_or_b64 exec, exec, s[12:13]
; GISEL-NEXT: s_setpc_b64 s[30:31]
%cvt = fptosi double %x to i128
ret i128 %cvt
}

define i128 @fptoui_f64_to_i128(double %x) {
; SDAG-LABEL: fptoui_f64_to_i128:
; SDAG: ; %bb.0: ; %fp-to-i-entry
; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; SDAG-NEXT: v_mov_b32_e32 v5, v1
; SDAG-NEXT: v_bfe_u32 v6, v5, 20, 11
; SDAG-NEXT: v_mov_b32_e32 v7, 0
; SDAG-NEXT: s_mov_b64 s[4:5], 0x3fe
; SDAG-NEXT: v_mov_b32_e32 v4, v0
; SDAG-NEXT: v_cmp_lt_u64_e32 vcc, s[4:5], v[6:7]
; SDAG-NEXT: v_mov_b32_e32 v0, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mov_b32_e32 v1, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: s_and_saveexec_b64 s[8:9], vcc
; SDAG-NEXT: s_cbranch_execz .LBB1_10
; SDAG-NEXT: ; %bb.1: ; %fp-to-i-if-end
; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, -1, v7, vcc
; SDAG-NEXT: v_addc_co_u32_e32 v2, vcc, -1, v7, vcc
; SDAG-NEXT: s_movk_i32 s6, 0xff7f
; SDAG-NEXT: v_addc_co_u32_e32 v3, vcc, -1, v7, vcc
; SDAG-NEXT: s_mov_b32 s7, -1
; SDAG-NEXT: v_cmp_eq_u64_e64 s[4:5], -1, v[2:3]
; SDAG-NEXT: v_cmp_lt_u64_e64 s[6:7], s[6:7], v[0:1]
; SDAG-NEXT: v_cmp_lt_i64_e32 vcc, -1, v[4:5]
; SDAG-NEXT: s_and_b64 s[4:5], s[4:5], s[6:7]
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB1_7
; SDAG-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; SDAG-NEXT: v_add_co_u32_e64 v9, s[4:5], -1, v0
; SDAG-NEXT: s_mov_b64 s[4:5], 0x432
; SDAG-NEXT: v_and_b32_e32 v0, 0xfffff, v5
; SDAG-NEXT: v_cmp_lt_u64_e64 s[4:5], s[4:5], v[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v8, -1, 0, vcc
; SDAG-NEXT: v_cndmask_b32_e64 v10, -1, 1, vcc
; SDAG-NEXT: v_or_b32_e32 v5, 0x100000, v0
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[12:13], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB1_4
; SDAG-NEXT: ; %bb.3: ; %fp-to-i-if-else
; SDAG-NEXT: v_sub_u32_e32 v0, 0x473, v6
; SDAG-NEXT: v_add_u32_e32 v2, 0xfffffb8d, v6
; SDAG-NEXT: v_add_u32_e32 v7, 0xfffffbcd, v6
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v0, v[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[2:3], v2, v[4:5]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v7
; SDAG-NEXT: v_cndmask_b32_e64 v1, v3, v1, s[4:5]
; SDAG-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v7
; SDAG-NEXT: v_cndmask_b32_e64 v6, 0, v1, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, v2, v0, s[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[0:1], v7, v[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, v2, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v12, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v11, 0, v1, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v12, v10, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: v_mul_lo_u32 v13, v8, v2
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v10, v2, 0
; SDAG-NEXT: v_mov_b32_e32 v2, v1
; SDAG-NEXT: v_mul_lo_u32 v14, v10, v6
; SDAG-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v11, v10, v[2:3]
; SDAG-NEXT: ; implicit-def: $vgpr10
; SDAG-NEXT: v_add3_u32 v5, v5, v14, v13
; SDAG-NEXT: v_mov_b32_e32 v2, v6
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v12, v8, v[2:3]
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v9, v12, v[4:5]
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v7, v2
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mul_lo_u32 v6, v9, v11
; SDAG-NEXT: v_mul_lo_u32 v9, v9, v12
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v11, v8, v[2:3]
; SDAG-NEXT: ; implicit-def: $vgpr8
; SDAG-NEXT: v_add3_u32 v5, v9, v5, v6
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v2, v4
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], v3, v5, s[4:5]
; SDAG-NEXT: ; implicit-def: $vgpr6_vgpr7
; SDAG-NEXT: ; implicit-def: $vgpr4_vgpr5
; SDAG-NEXT: ; implicit-def: $vgpr9
; SDAG-NEXT: .LBB1_4: ; %Flow
; SDAG-NEXT: s_andn2_saveexec_b64 s[12:13], s[12:13]
; SDAG-NEXT: s_cbranch_execz .LBB1_6
; SDAG-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; SDAG-NEXT: v_sub_u32_e32 v2, 0x433, v6
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v2, v[4:5]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v2
; SDAG-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v1, 0, v1, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v6, v0, v4, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v5, v1, v5, s[6:7]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v6, v10, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v5, v10, v[1:2]
; SDAG-NEXT: v_mov_b32_e32 v1, v3
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v6, v8, v[1:2]
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v4, v2
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v5, v8, v[2:3]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v9, v6, v[2:3]
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v9, v6, v[3:4]
; SDAG-NEXT: v_mad_i32_i24 v3, v9, v5, v3
; SDAG-NEXT: .LBB1_6: ; %Flow1
; SDAG-NEXT: s_or_b64 exec, exec, s[12:13]
; SDAG-NEXT: .LBB1_7: ; %Flow2
; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11]
; SDAG-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; SDAG-NEXT: v_bfrev_b32_e32 v0, 1
; SDAG-NEXT: v_bfrev_b32_e32 v1, -2
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, -1, vcc
; SDAG-NEXT: v_cndmask_b32_e32 v3, v0, v1, vcc
; SDAG-NEXT: v_mov_b32_e32 v0, v2
; SDAG-NEXT: v_mov_b32_e32 v1, v2
; SDAG-NEXT: ; %bb.9: ; %Flow3
; SDAG-NEXT: s_or_b64 exec, exec, s[4:5]
; SDAG-NEXT: .LBB1_10: ; %fp-to-i-cleanup
; SDAG-NEXT: s_or_b64 exec, exec, s[8:9]
; SDAG-NEXT: s_setpc_b64 s[30:31]
;
; GISEL-LABEL: fptoui_f64_to_i128:
; GISEL: ; %bb.0: ; %fp-to-i-entry
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v5, v1
; GISEL-NEXT: v_mov_b32_e32 v4, v0
; GISEL-NEXT: v_lshrrev_b32_e32 v2, 20, v5
; GISEL-NEXT: v_mov_b32_e32 v0, 0x3ff
; GISEL-NEXT: s_mov_b64 s[4:5], 0
; GISEL-NEXT: v_mov_b32_e32 v7, 0
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_and_b32_e32 v6, 0x7ff, v2
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
; GISEL-NEXT: s_mov_b64 s[6:7], s[4:5]
; GISEL-NEXT: v_mov_b32_e32 v0, s4
; GISEL-NEXT: v_mov_b32_e32 v1, s5
; GISEL-NEXT: v_mov_b32_e32 v2, s6
; GISEL-NEXT: v_mov_b32_e32 v3, s7
; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc
; GISEL-NEXT: s_cbranch_execz .LBB1_10
; GISEL-NEXT: ; %bb.1: ; %fp-to-i-if-end
; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 0xfffffb81, v6
; GISEL-NEXT: v_mov_b32_e32 v2, 0xffffff80
; GISEL-NEXT: v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
; GISEL-NEXT: v_mov_b32_e32 v3, -1
; GISEL-NEXT: v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
; GISEL-NEXT: v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; GISEL-NEXT: v_cmp_le_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cmp_lt_i64_e64 s[4:5], -1, v[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, vcc
; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cndmask_b32_e32 v0, v1, v0, vcc
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[14:15], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB1_7
; GISEL-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; GISEL-NEXT: s_xor_b64 s[6:7], s[4:5], -1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, -1, s[6:7]
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, s[6:7]
; GISEL-NEXT: v_lshlrev_b16_e32 v2, 1, v0
; GISEL-NEXT: v_or_b32_e32 v1, v1, v2
; GISEL-NEXT: v_lshlrev_b16_e32 v3, 2, v0
; GISEL-NEXT: v_or_b32_e32 v2, v0, v2
; GISEL-NEXT: v_or_b32_e32 v1, v1, v3
; GISEL-NEXT: v_lshlrev_b16_e32 v8, 3, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v3
; GISEL-NEXT: v_or_b32_e32 v1, v1, v8
; GISEL-NEXT: v_lshlrev_b16_e32 v9, 4, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v8
; GISEL-NEXT: v_or_b32_e32 v1, v1, v9
; GISEL-NEXT: v_lshlrev_b16_e32 v10, 5, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v9
; GISEL-NEXT: v_or_b32_e32 v1, v1, v10
; GISEL-NEXT: v_lshlrev_b16_e32 v11, 6, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v10
; GISEL-NEXT: v_or_b32_e32 v1, v1, v11
; GISEL-NEXT: v_lshlrev_b16_e32 v12, 7, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v11
; GISEL-NEXT: v_or_b32_e32 v1, v1, v12
; GISEL-NEXT: v_lshlrev_b16_e32 v13, 8, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v12
; GISEL-NEXT: v_or_b32_e32 v1, v1, v13
; GISEL-NEXT: v_lshlrev_b16_e32 v14, 9, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v13
; GISEL-NEXT: v_or_b32_e32 v1, v1, v14
; GISEL-NEXT: v_lshlrev_b16_e32 v15, 10, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v14
; GISEL-NEXT: v_or_b32_e32 v1, v1, v15
; GISEL-NEXT: v_lshlrev_b16_e32 v16, 11, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v15
; GISEL-NEXT: v_or_b32_e32 v1, v1, v16
; GISEL-NEXT: v_lshlrev_b16_e32 v17, 12, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v16
; GISEL-NEXT: v_or_b32_e32 v1, v1, v17
; GISEL-NEXT: v_lshlrev_b16_e32 v18, 13, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v17
; GISEL-NEXT: v_or_b32_e32 v1, v1, v18
; GISEL-NEXT: v_lshlrev_b16_e32 v19, 14, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v18
; GISEL-NEXT: v_or_b32_e32 v1, v1, v19
; GISEL-NEXT: v_lshlrev_b16_e32 v0, 15, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v19
; GISEL-NEXT: v_or_b32_e32 v1, v1, v0
; GISEL-NEXT: v_or_b32_e32 v0, v2, v0
; GISEL-NEXT: v_and_b32_e32 v0, 0xffff, v0
; GISEL-NEXT: v_and_b32_e32 v1, 0xffff, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 16, v0
; GISEL-NEXT: v_lshl_or_b32 v9, v0, 16, v0
; GISEL-NEXT: v_or3_b32 v8, v1, v2, 1
; GISEL-NEXT: v_mov_b32_e32 v0, 0x433
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_mov_b32_e32 v2, 0xfffff
; GISEL-NEXT: s_mov_b32 s6, 0x100000
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
; GISEL-NEXT: v_and_or_b32 v5, v5, v2, s6
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[16:17], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB1_4
; GISEL-NEXT: ; %bb.3: ; %fp-to-i-if-else
; GISEL-NEXT: v_add_u32_e32 v7, 0xfffffbcd, v6
; GISEL-NEXT: v_lshlrev_b64 v[0:1], v7, v[4:5]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v7
; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v0, vcc
; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v1, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v10, v9, 0
; GISEL-NEXT: v_add_u32_e32 v6, 0xfffffb8d, v6
; GISEL-NEXT: v_sub_u32_e32 v2, 64, v7
; GISEL-NEXT: v_lshrrev_b64 v[2:3], v2, v[4:5]
; GISEL-NEXT: v_lshlrev_b64 v[4:5], v6, v[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v7
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v11, v9, v[0:1]
; GISEL-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v12, v2, 0, s[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v8, v[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v10, v8, 0
; GISEL-NEXT: v_mov_b32_e32 v2, v6
; GISEL-NEXT: v_mul_lo_u32 v6, v10, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[8:9], v10, v9, v[1:2]
; GISEL-NEXT: v_mul_lo_u32 v4, v11, v9
; GISEL-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[10:11], v11, v8, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
; GISEL-NEXT: v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, 0, s[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr9
; GISEL-NEXT: v_mad_u64_u32 v[3:4], s[6:7], v3, v8, v[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr6
; GISEL-NEXT: ; implicit-def: $vgpr4_vgpr5
; GISEL-NEXT: ; implicit-def: $vgpr8
; GISEL-NEXT: .LBB1_4: ; %Flow
; GISEL-NEXT: s_andn2_saveexec_b64 s[8:9], s[16:17]
; GISEL-NEXT: s_cbranch_execz .LBB1_6
; GISEL-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; GISEL-NEXT: v_sub_co_u32_e32 v2, vcc, 0x433, v6
; GISEL-NEXT: v_lshrrev_b64 v[0:1], v2, v[4:5]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2
; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc
; GISEL-NEXT: v_cndmask_b32_e32 v1, 0, v1, vcc
; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v2
; GISEL-NEXT: v_cndmask_b32_e32 v4, v0, v4, vcc
; GISEL-NEXT: v_mad_u64_u32 v[2:3], s[6:7], v4, v9, 0
; GISEL-NEXT: v_cndmask_b32_e32 v5, v1, v5, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v4, v8, 0
; GISEL-NEXT: v_mad_u64_u32 v[2:3], s[6:7], v5, v9, v[2:3]
; GISEL-NEXT: v_mul_lo_u32 v6, v5, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], vcc, v4, v9, v[1:2]
; GISEL-NEXT: v_mul_lo_u32 v4, v4, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[6:7], v5, v8, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e64 v3, s[6:7], v3, v4, s[6:7]
; GISEL-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v6, vcc
; GISEL-NEXT: .LBB1_6: ; %Flow1
; GISEL-NEXT: s_or_b64 exec, exec, s[8:9]
; GISEL-NEXT: .LBB1_7: ; %Flow2
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[14:15]
; GISEL-NEXT: s_cbranch_execz .LBB1_9
; GISEL-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, -1, s[4:5]
; GISEL-NEXT: v_and_b32_e32 v1, 1, v1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[4:5]
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 1, v1
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 2, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 3, v1
; GISEL-NEXT: v_or_b32_e32 v2, v1, v2
; GISEL-NEXT: v_or3_b32 v0, v0, v3, v4
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 4, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 5, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v3, v4
; GISEL-NEXT: v_or3_b32 v0, v0, v5, v6
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 6, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 7, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v5, v6
; GISEL-NEXT: v_or3_b32 v0, v0, v7, v8
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 8, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 9, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v7, v8
; GISEL-NEXT: v_or3_b32 v0, v0, v9, v10
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 10, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 11, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v9, v10
; GISEL-NEXT: v_or3_b32 v0, v0, v11, v12
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 12, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 13, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v11, v12
; GISEL-NEXT: v_or3_b32 v0, v0, v13, v14
; GISEL-NEXT: v_lshlrev_b32_e32 v15, 14, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v16, 15, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v13, v14
; GISEL-NEXT: v_or3_b32 v0, v0, v15, v16
; GISEL-NEXT: v_lshlrev_b32_e32 v17, 16, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v18, 17, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v15, v16
; GISEL-NEXT: v_or3_b32 v0, v0, v17, v18
; GISEL-NEXT: v_lshlrev_b32_e32 v19, 18, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 19, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v17, v18
; GISEL-NEXT: v_or3_b32 v0, v0, v19, v3
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 20, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 21, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v19, v3
; GISEL-NEXT: v_or3_b32 v0, v0, v4, v5
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 22, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 23, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v4, v5
; GISEL-NEXT: v_or3_b32 v0, v0, v6, v7
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 24, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 25, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v6, v7
; GISEL-NEXT: v_or3_b32 v0, v0, v8, v9
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 26, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 27, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v8, v9
; GISEL-NEXT: v_or3_b32 v0, v0, v10, v11
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 28, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 29, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v10, v11
; GISEL-NEXT: v_or3_b32 v0, v0, v12, v13
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 30, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v1, 31, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v12, v13
; GISEL-NEXT: v_or3_b32 v0, v0, v14, v1
; GISEL-NEXT: v_or3_b32 v1, v2, v14, v1
; GISEL-NEXT: v_add_u32_e32 v3, 0x80000000, v1
; GISEL-NEXT: v_mov_b32_e32 v2, v1
; GISEL-NEXT: .LBB1_9: ; %Flow3
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB1_10: ; %fp-to-i-cleanup
; GISEL-NEXT: s_or_b64 exec, exec, s[12:13]
; GISEL-NEXT: s_setpc_b64 s[30:31]
%cvt = fptoui double %x to i128
ret i128 %cvt
}

define i128 @fptosi_f32_to_i128(float %x) {
; SDAG-LABEL: fptosi_f32_to_i128:
; SDAG: ; %bb.0: ; %fp-to-i-entry
; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; SDAG-NEXT: v_mov_b32_e32 v4, v0
; SDAG-NEXT: v_bfe_u32 v5, v4, 23, 8
; SDAG-NEXT: s_movk_i32 s4, 0x7e
; SDAG-NEXT: v_mov_b32_e32 v0, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, 0
; SDAG-NEXT: v_mov_b32_e32 v1, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: v_cmp_lt_u32_e32 vcc, s4, v5
; SDAG-NEXT: s_and_saveexec_b64 s[8:9], vcc
; SDAG-NEXT: s_cbranch_execz .LBB2_10
; SDAG-NEXT: ; %bb.1: ; %fp-to-i-if-end
; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, -1, v6, vcc
; SDAG-NEXT: v_addc_co_u32_e32 v2, vcc, -1, v6, vcc
; SDAG-NEXT: s_movk_i32 s6, 0xff7f
; SDAG-NEXT: v_addc_co_u32_e32 v3, vcc, -1, v6, vcc
; SDAG-NEXT: s_mov_b32 s7, -1
; SDAG-NEXT: v_cmp_eq_u64_e64 s[4:5], -1, v[2:3]
; SDAG-NEXT: v_cmp_lt_u64_e64 s[6:7], s[6:7], v[0:1]
; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, -1, v4
; SDAG-NEXT: s_and_b64 s[4:5], s[4:5], s[6:7]
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB2_7
; SDAG-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; SDAG-NEXT: v_add_co_u32_e64 v9, s[4:5], -1, v0
; SDAG-NEXT: s_mov_b64 s[4:5], 0x95
; SDAG-NEXT: v_and_b32_e32 v0, 0x7fffff, v4
; SDAG-NEXT: v_cmp_lt_u64_e64 s[4:5], s[4:5], v[5:6]
; SDAG-NEXT: v_mov_b32_e32 v7, 0
; SDAG-NEXT: v_cndmask_b32_e64 v8, -1, 0, vcc
; SDAG-NEXT: v_cndmask_b32_e64 v10, -1, 1, vcc
; SDAG-NEXT: v_or_b32_e32 v6, 0x800000, v0
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[12:13], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB2_4
; SDAG-NEXT: ; %bb.3: ; %fp-to-i-if-else
; SDAG-NEXT: v_sub_u32_e32 v0, 0xd6, v5
; SDAG-NEXT: v_add_u32_e32 v2, 0xffffff2a, v5
; SDAG-NEXT: v_add_u32_e32 v4, 0xffffff6a, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v0, v[6:7]
; SDAG-NEXT: v_lshlrev_b64 v[2:3], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v4
; SDAG-NEXT: v_cndmask_b32_e64 v1, v3, v1, s[4:5]
; SDAG-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v4
; SDAG-NEXT: v_cndmask_b32_e64 v3, 0, v1, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, v2, v0, s[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[0:1], v4, v[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, v2, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v12, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v11, 0, v1, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v12, v10, 0
; SDAG-NEXT: v_mul_lo_u32 v13, v8, v2
; SDAG-NEXT: v_mul_lo_u32 v14, v10, v3
; SDAG-NEXT: v_mov_b32_e32 v6, v1
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v11, v10, v[6:7]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v10, v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, v4
; SDAG-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v12, v8, v[6:7]
; SDAG-NEXT: v_add3_u32 v3, v3, v14, v13
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v9, v12, v[2:3]
; SDAG-NEXT: v_add_co_u32_e64 v3, s[4:5], v5, v7
; SDAG-NEXT: v_addc_co_u32_e64 v4, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mul_lo_u32 v10, v9, v11
; SDAG-NEXT: v_mul_lo_u32 v9, v9, v12
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v11, v8, v[3:4]
; SDAG-NEXT: ; implicit-def: $vgpr8
; SDAG-NEXT: v_add3_u32 v5, v9, v2, v10
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v3, v1
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], v4, v5, s[4:5]
; SDAG-NEXT: v_mov_b32_e32 v1, v6
; SDAG-NEXT: ; implicit-def: $vgpr5_vgpr6
; SDAG-NEXT: ; implicit-def: $vgpr6_vgpr7
; SDAG-NEXT: ; implicit-def: $vgpr10
; SDAG-NEXT: ; implicit-def: $vgpr9
; SDAG-NEXT: .LBB2_4: ; %Flow
; SDAG-NEXT: s_andn2_saveexec_b64 s[6:7], s[12:13]
; SDAG-NEXT: s_cbranch_execz .LBB2_6
; SDAG-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; SDAG-NEXT: v_sub_u32_e32 v2, 0x96, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, v0, s[4:5]
; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v2
; SDAG-NEXT: v_cndmask_b32_e64 v3, v0, v6, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v3, v10, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v3, v8, v[1:2]
; SDAG-NEXT: v_mov_b32_e32 v1, v5
; SDAG-NEXT: v_mad_i64_i32 v[2:3], s[4:5], v9, v3, v[1:2]
; SDAG-NEXT: v_mov_b32_e32 v1, v4
; SDAG-NEXT: .LBB2_6: ; %Flow1
; SDAG-NEXT: s_or_b64 exec, exec, s[6:7]
; SDAG-NEXT: .LBB2_7: ; %Flow2
; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11]
; SDAG-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; SDAG-NEXT: v_bfrev_b32_e32 v0, 1
; SDAG-NEXT: v_bfrev_b32_e32 v1, -2
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, -1, vcc
; SDAG-NEXT: v_cndmask_b32_e32 v3, v0, v1, vcc
; SDAG-NEXT: v_mov_b32_e32 v0, v2
; SDAG-NEXT: v_mov_b32_e32 v1, v2
; SDAG-NEXT: ; %bb.9: ; %Flow3
; SDAG-NEXT: s_or_b64 exec, exec, s[4:5]
; SDAG-NEXT: .LBB2_10: ; %fp-to-i-cleanup
; SDAG-NEXT: s_or_b64 exec, exec, s[8:9]
; SDAG-NEXT: s_setpc_b64 s[30:31]
;
; GISEL-LABEL: fptosi_f32_to_i128:
; GISEL: ; %bb.0: ; %fp-to-i-entry
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v4, v0
; GISEL-NEXT: v_mov_b32_e32 v5, 0
; GISEL-NEXT: v_lshrrev_b64 v[0:1], 23, v[4:5]
; GISEL-NEXT: s_mov_b64 s[4:5], 0
; GISEL-NEXT: v_bfe_u32 v6, v0, 0, 8
; GISEL-NEXT: v_mov_b32_e32 v0, 0x7f
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_mov_b32_e32 v7, v5
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
; GISEL-NEXT: s_mov_b64 s[6:7], s[4:5]
; GISEL-NEXT: v_mov_b32_e32 v0, s4
; GISEL-NEXT: v_mov_b32_e32 v1, s5
; GISEL-NEXT: v_mov_b32_e32 v2, s6
; GISEL-NEXT: v_mov_b32_e32 v3, s7
; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc
; GISEL-NEXT: s_cbranch_execz .LBB2_10
; GISEL-NEXT: ; %bb.1: ; %fp-to-i-if-end
; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v6
; GISEL-NEXT: v_mov_b32_e32 v2, 0xffffff80
; GISEL-NEXT: v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
; GISEL-NEXT: v_mov_b32_e32 v3, -1
; GISEL-NEXT: v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
; GISEL-NEXT: v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; GISEL-NEXT: v_cmp_le_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cmp_lt_i32_e64 s[4:5], -1, v4
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, vcc
; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cndmask_b32_e32 v0, v1, v0, vcc
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[14:15], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB2_7
; GISEL-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; GISEL-NEXT: s_xor_b64 s[6:7], s[4:5], -1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, -1, s[6:7]
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, s[6:7]
; GISEL-NEXT: v_lshlrev_b16_e32 v2, 1, v0
; GISEL-NEXT: v_or_b32_e32 v1, v1, v2
; GISEL-NEXT: v_lshlrev_b16_e32 v3, 2, v0
; GISEL-NEXT: v_or_b32_e32 v2, v0, v2
; GISEL-NEXT: v_or_b32_e32 v1, v1, v3
; GISEL-NEXT: v_lshlrev_b16_e32 v8, 3, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v3
; GISEL-NEXT: v_or_b32_e32 v1, v1, v8
; GISEL-NEXT: v_lshlrev_b16_e32 v9, 4, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v8
; GISEL-NEXT: v_or_b32_e32 v1, v1, v9
; GISEL-NEXT: v_lshlrev_b16_e32 v10, 5, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v9
; GISEL-NEXT: v_or_b32_e32 v1, v1, v10
; GISEL-NEXT: v_lshlrev_b16_e32 v11, 6, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v10
; GISEL-NEXT: v_or_b32_e32 v1, v1, v11
; GISEL-NEXT: v_lshlrev_b16_e32 v12, 7, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v11
; GISEL-NEXT: v_or_b32_e32 v1, v1, v12
; GISEL-NEXT: v_lshlrev_b16_e32 v13, 8, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v12
; GISEL-NEXT: v_or_b32_e32 v1, v1, v13
; GISEL-NEXT: v_lshlrev_b16_e32 v14, 9, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v13
; GISEL-NEXT: v_or_b32_e32 v1, v1, v14
; GISEL-NEXT: v_lshlrev_b16_e32 v15, 10, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v14
; GISEL-NEXT: v_or_b32_e32 v1, v1, v15
; GISEL-NEXT: v_lshlrev_b16_e32 v16, 11, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v15
; GISEL-NEXT: v_or_b32_e32 v1, v1, v16
; GISEL-NEXT: v_lshlrev_b16_e32 v17, 12, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v16
; GISEL-NEXT: v_or_b32_e32 v1, v1, v17
; GISEL-NEXT: v_lshlrev_b16_e32 v18, 13, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v17
; GISEL-NEXT: v_or_b32_e32 v1, v1, v18
; GISEL-NEXT: v_lshlrev_b16_e32 v19, 14, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v18
; GISEL-NEXT: v_or_b32_e32 v1, v1, v19
; GISEL-NEXT: v_lshlrev_b16_e32 v0, 15, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v19
; GISEL-NEXT: v_or_b32_e32 v1, v1, v0
; GISEL-NEXT: v_or_b32_e32 v0, v2, v0
; GISEL-NEXT: v_and_b32_e32 v0, 0xffff, v0
; GISEL-NEXT: v_and_b32_e32 v1, 0xffff, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 16, v0
; GISEL-NEXT: v_lshl_or_b32 v8, v0, 16, v0
; GISEL-NEXT: v_or3_b32 v9, v1, v2, 1
; GISEL-NEXT: v_mov_b32_e32 v0, 0x96
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_and_b32_e32 v2, 0x7fffff, v4
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
; GISEL-NEXT: v_or_b32_e32 v4, 0x800000, v2
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[16:17], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB2_4
; GISEL-NEXT: ; %bb.3: ; %fp-to-i-if-else
; GISEL-NEXT: v_add_u32_e32 v7, 0xffffff6a, v6
; GISEL-NEXT: v_lshlrev_b64 v[0:1], v7, v[4:5]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v7
; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v0, vcc
; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v1, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v10, v8, 0
; GISEL-NEXT: v_add_u32_e32 v6, 0xffffff2a, v6
; GISEL-NEXT: v_sub_u32_e32 v2, 64, v7
; GISEL-NEXT: v_lshrrev_b64 v[2:3], v2, v[4:5]
; GISEL-NEXT: v_lshlrev_b64 v[4:5], v6, v[4:5]
; GISEL-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v7
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v11, v8, v[0:1]
; GISEL-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
; GISEL-NEXT: v_cndmask_b32_e64 v12, v2, 0, s[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v10, v9, 0
; GISEL-NEXT: v_mov_b32_e32 v2, v6
; GISEL-NEXT: v_mul_lo_u32 v6, v10, v8
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[8:9], v10, v8, v[1:2]
; GISEL-NEXT: v_mul_lo_u32 v4, v11, v8
; GISEL-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[10:11], v11, v9, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
; GISEL-NEXT: v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v8, v[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, 0, s[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr8
; GISEL-NEXT: v_mad_u64_u32 v[3:4], s[6:7], v3, v9, v[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr6
; GISEL-NEXT: ; implicit-def: $vgpr4_vgpr5
; GISEL-NEXT: ; implicit-def: $vgpr9
; GISEL-NEXT: .LBB2_4: ; %Flow
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[16:17]
; GISEL-NEXT: s_cbranch_execz .LBB2_6
; GISEL-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; GISEL-NEXT: v_sub_co_u32_e32 v2, vcc, 0x96, v6
; GISEL-NEXT: v_lshrrev_b64 v[0:1], v2, v[4:5]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2
; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc
; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v2
; GISEL-NEXT: v_cndmask_b32_e32 v4, v0, v4, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v4, v9, 0
; GISEL-NEXT: v_mad_u64_u32 v[2:3], s[8:9], v4, v8, 0
; GISEL-NEXT: v_mul_lo_u32 v5, v4, v8
; GISEL-NEXT: v_mad_u64_u32 v[1:2], vcc, v4, v8, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v5, vcc
; GISEL-NEXT: .LBB2_6: ; %Flow1
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB2_7: ; %Flow2
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[14:15]
; GISEL-NEXT: s_cbranch_execz .LBB2_9
; GISEL-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, -1, s[4:5]
; GISEL-NEXT: v_and_b32_e32 v1, 1, v1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[4:5]
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 1, v1
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 2, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 3, v1
; GISEL-NEXT: v_or_b32_e32 v2, v1, v2
; GISEL-NEXT: v_or3_b32 v0, v0, v3, v4
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 4, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 5, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v3, v4
; GISEL-NEXT: v_or3_b32 v0, v0, v5, v6
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 6, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 7, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v5, v6
; GISEL-NEXT: v_or3_b32 v0, v0, v7, v8
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 8, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 9, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v7, v8
; GISEL-NEXT: v_or3_b32 v0, v0, v9, v10
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 10, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 11, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v9, v10
; GISEL-NEXT: v_or3_b32 v0, v0, v11, v12
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 12, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 13, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v11, v12
; GISEL-NEXT: v_or3_b32 v0, v0, v13, v14
; GISEL-NEXT: v_lshlrev_b32_e32 v15, 14, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v16, 15, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v13, v14
; GISEL-NEXT: v_or3_b32 v0, v0, v15, v16
; GISEL-NEXT: v_lshlrev_b32_e32 v17, 16, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v18, 17, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v15, v16
; GISEL-NEXT: v_or3_b32 v0, v0, v17, v18
; GISEL-NEXT: v_lshlrev_b32_e32 v19, 18, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 19, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v17, v18
; GISEL-NEXT: v_or3_b32 v0, v0, v19, v3
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 20, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 21, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v19, v3
; GISEL-NEXT: v_or3_b32 v0, v0, v4, v5
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 22, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 23, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v4, v5
; GISEL-NEXT: v_or3_b32 v0, v0, v6, v7
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 24, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 25, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v6, v7
; GISEL-NEXT: v_or3_b32 v0, v0, v8, v9
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 26, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 27, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v8, v9
; GISEL-NEXT: v_or3_b32 v0, v0, v10, v11
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 28, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 29, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v10, v11
; GISEL-NEXT: v_or3_b32 v0, v0, v12, v13
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 30, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v1, 31, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v12, v13
; GISEL-NEXT: v_or3_b32 v0, v0, v14, v1
; GISEL-NEXT: v_or3_b32 v1, v2, v14, v1
; GISEL-NEXT: v_add_u32_e32 v3, 0x80000000, v1
; GISEL-NEXT: v_mov_b32_e32 v2, v1
; GISEL-NEXT: .LBB2_9: ; %Flow3
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB2_10: ; %fp-to-i-cleanup
; GISEL-NEXT: s_or_b64 exec, exec, s[12:13]
; GISEL-NEXT: s_setpc_b64 s[30:31]
%cvt = fptosi float %x to i128
ret i128 %cvt
}

define i128 @fptoui_f32_to_i128(float %x) {
; SDAG-LABEL: fptoui_f32_to_i128:
; SDAG: ; %bb.0: ; %fp-to-i-entry
; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; SDAG-NEXT: v_mov_b32_e32 v4, v0
; SDAG-NEXT: v_bfe_u32 v5, v4, 23, 8
; SDAG-NEXT: s_movk_i32 s4, 0x7e
; SDAG-NEXT: v_mov_b32_e32 v0, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, 0
; SDAG-NEXT: v_mov_b32_e32 v1, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: v_cmp_lt_u32_e32 vcc, s4, v5
; SDAG-NEXT: s_and_saveexec_b64 s[8:9], vcc
; SDAG-NEXT: s_cbranch_execz .LBB3_10
; SDAG-NEXT: ; %bb.1: ; %fp-to-i-if-end
; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, -1, v6, vcc
; SDAG-NEXT: v_addc_co_u32_e32 v2, vcc, -1, v6, vcc
; SDAG-NEXT: s_movk_i32 s6, 0xff7f
; SDAG-NEXT: v_addc_co_u32_e32 v3, vcc, -1, v6, vcc
; SDAG-NEXT: s_mov_b32 s7, -1
; SDAG-NEXT: v_cmp_eq_u64_e64 s[4:5], -1, v[2:3]
; SDAG-NEXT: v_cmp_lt_u64_e64 s[6:7], s[6:7], v[0:1]
; SDAG-NEXT: v_cmp_lt_i32_e32 vcc, -1, v4
; SDAG-NEXT: s_and_b64 s[4:5], s[4:5], s[6:7]
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB3_7
; SDAG-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; SDAG-NEXT: v_add_co_u32_e64 v9, s[4:5], -1, v0
; SDAG-NEXT: s_mov_b64 s[4:5], 0x95
; SDAG-NEXT: v_and_b32_e32 v0, 0x7fffff, v4
; SDAG-NEXT: v_cmp_lt_u64_e64 s[4:5], s[4:5], v[5:6]
; SDAG-NEXT: v_mov_b32_e32 v7, 0
; SDAG-NEXT: v_cndmask_b32_e64 v8, -1, 0, vcc
; SDAG-NEXT: v_cndmask_b32_e64 v10, -1, 1, vcc
; SDAG-NEXT: v_or_b32_e32 v6, 0x800000, v0
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[12:13], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB3_4
; SDAG-NEXT: ; %bb.3: ; %fp-to-i-if-else
; SDAG-NEXT: v_sub_u32_e32 v0, 0xd6, v5
; SDAG-NEXT: v_add_u32_e32 v2, 0xffffff2a, v5
; SDAG-NEXT: v_add_u32_e32 v4, 0xffffff6a, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v0, v[6:7]
; SDAG-NEXT: v_lshlrev_b64 v[2:3], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v4
; SDAG-NEXT: v_cndmask_b32_e64 v1, v3, v1, s[4:5]
; SDAG-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v4
; SDAG-NEXT: v_cndmask_b32_e64 v3, 0, v1, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, v2, v0, s[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[0:1], v4, v[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, v2, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v12, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v11, 0, v1, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v12, v10, 0
; SDAG-NEXT: v_mul_lo_u32 v13, v8, v2
; SDAG-NEXT: v_mul_lo_u32 v14, v10, v3
; SDAG-NEXT: v_mov_b32_e32 v6, v1
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v11, v10, v[6:7]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v10, v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, v4
; SDAG-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v12, v8, v[6:7]
; SDAG-NEXT: v_add3_u32 v3, v3, v14, v13
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v9, v12, v[2:3]
; SDAG-NEXT: v_add_co_u32_e64 v3, s[4:5], v5, v7
; SDAG-NEXT: v_addc_co_u32_e64 v4, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mul_lo_u32 v10, v9, v11
; SDAG-NEXT: v_mul_lo_u32 v9, v9, v12
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v11, v8, v[3:4]
; SDAG-NEXT: ; implicit-def: $vgpr8
; SDAG-NEXT: v_add3_u32 v5, v9, v2, v10
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v3, v1
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], v4, v5, s[4:5]
; SDAG-NEXT: v_mov_b32_e32 v1, v6
; SDAG-NEXT: ; implicit-def: $vgpr5_vgpr6
; SDAG-NEXT: ; implicit-def: $vgpr6_vgpr7
; SDAG-NEXT: ; implicit-def: $vgpr10
; SDAG-NEXT: ; implicit-def: $vgpr9
; SDAG-NEXT: .LBB3_4: ; %Flow
; SDAG-NEXT: s_andn2_saveexec_b64 s[6:7], s[12:13]
; SDAG-NEXT: s_cbranch_execz .LBB3_6
; SDAG-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; SDAG-NEXT: v_sub_u32_e32 v2, 0x96, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, v0, s[4:5]
; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v2
; SDAG-NEXT: v_cndmask_b32_e64 v3, v0, v6, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v3, v10, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v3, v8, v[1:2]
; SDAG-NEXT: v_mov_b32_e32 v1, v5
; SDAG-NEXT: v_mad_i64_i32 v[2:3], s[4:5], v9, v3, v[1:2]
; SDAG-NEXT: v_mov_b32_e32 v1, v4
; SDAG-NEXT: .LBB3_6: ; %Flow1
; SDAG-NEXT: s_or_b64 exec, exec, s[6:7]
; SDAG-NEXT: .LBB3_7: ; %Flow2
; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11]
; SDAG-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; SDAG-NEXT: v_bfrev_b32_e32 v0, 1
; SDAG-NEXT: v_bfrev_b32_e32 v1, -2
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, -1, vcc
; SDAG-NEXT: v_cndmask_b32_e32 v3, v0, v1, vcc
; SDAG-NEXT: v_mov_b32_e32 v0, v2
; SDAG-NEXT: v_mov_b32_e32 v1, v2
; SDAG-NEXT: ; %bb.9: ; %Flow3
; SDAG-NEXT: s_or_b64 exec, exec, s[4:5]
; SDAG-NEXT: .LBB3_10: ; %fp-to-i-cleanup
; SDAG-NEXT: s_or_b64 exec, exec, s[8:9]
; SDAG-NEXT: s_setpc_b64 s[30:31]
;
; GISEL-LABEL: fptoui_f32_to_i128:
; GISEL: ; %bb.0: ; %fp-to-i-entry
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v4, v0
; GISEL-NEXT: v_mov_b32_e32 v5, 0
; GISEL-NEXT: v_lshrrev_b64 v[0:1], 23, v[4:5]
; GISEL-NEXT: s_mov_b64 s[4:5], 0
; GISEL-NEXT: v_bfe_u32 v6, v0, 0, 8
; GISEL-NEXT: v_mov_b32_e32 v0, 0x7f
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_mov_b32_e32 v7, v5
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
; GISEL-NEXT: s_mov_b64 s[6:7], s[4:5]
; GISEL-NEXT: v_mov_b32_e32 v0, s4
; GISEL-NEXT: v_mov_b32_e32 v1, s5
; GISEL-NEXT: v_mov_b32_e32 v2, s6
; GISEL-NEXT: v_mov_b32_e32 v3, s7
; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc
; GISEL-NEXT: s_cbranch_execz .LBB3_10
; GISEL-NEXT: ; %bb.1: ; %fp-to-i-if-end
; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v6
; GISEL-NEXT: v_mov_b32_e32 v2, 0xffffff80
; GISEL-NEXT: v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
; GISEL-NEXT: v_mov_b32_e32 v3, -1
; GISEL-NEXT: v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
; GISEL-NEXT: v_addc_co_u32_e64 v9, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; GISEL-NEXT: v_cmp_le_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cmp_lt_i32_e64 s[4:5], -1, v4
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, vcc
; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, -1, v[8:9]
; GISEL-NEXT: v_cndmask_b32_e32 v0, v1, v0, vcc
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[14:15], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB3_7
; GISEL-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; GISEL-NEXT: s_xor_b64 s[6:7], s[4:5], -1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, -1, s[6:7]
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, s[6:7]
; GISEL-NEXT: v_lshlrev_b16_e32 v2, 1, v0
; GISEL-NEXT: v_or_b32_e32 v1, v1, v2
; GISEL-NEXT: v_lshlrev_b16_e32 v3, 2, v0
; GISEL-NEXT: v_or_b32_e32 v2, v0, v2
; GISEL-NEXT: v_or_b32_e32 v1, v1, v3
; GISEL-NEXT: v_lshlrev_b16_e32 v8, 3, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v3
; GISEL-NEXT: v_or_b32_e32 v1, v1, v8
; GISEL-NEXT: v_lshlrev_b16_e32 v9, 4, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v8
; GISEL-NEXT: v_or_b32_e32 v1, v1, v9
; GISEL-NEXT: v_lshlrev_b16_e32 v10, 5, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v9
; GISEL-NEXT: v_or_b32_e32 v1, v1, v10
; GISEL-NEXT: v_lshlrev_b16_e32 v11, 6, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v10
; GISEL-NEXT: v_or_b32_e32 v1, v1, v11
; GISEL-NEXT: v_lshlrev_b16_e32 v12, 7, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v11
; GISEL-NEXT: v_or_b32_e32 v1, v1, v12
; GISEL-NEXT: v_lshlrev_b16_e32 v13, 8, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v12
; GISEL-NEXT: v_or_b32_e32 v1, v1, v13
; GISEL-NEXT: v_lshlrev_b16_e32 v14, 9, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v13
; GISEL-NEXT: v_or_b32_e32 v1, v1, v14
; GISEL-NEXT: v_lshlrev_b16_e32 v15, 10, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v14
; GISEL-NEXT: v_or_b32_e32 v1, v1, v15
; GISEL-NEXT: v_lshlrev_b16_e32 v16, 11, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v15
; GISEL-NEXT: v_or_b32_e32 v1, v1, v16
; GISEL-NEXT: v_lshlrev_b16_e32 v17, 12, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v16
; GISEL-NEXT: v_or_b32_e32 v1, v1, v17
; GISEL-NEXT: v_lshlrev_b16_e32 v18, 13, v0
; GISEL-NEXT: v_or_b32_e32 v2, v2, v17
; GISEL-NEXT: v_or_b32_e32 v1, v1, v18
; GISEL-NEXT: v_lshlrev_b16_e32 v19, 14, v0
|
|
; GISEL-NEXT: v_or_b32_e32 v2, v2, v18
|
|
; GISEL-NEXT: v_or_b32_e32 v1, v1, v19
|
|
; GISEL-NEXT: v_lshlrev_b16_e32 v0, 15, v0
|
|
; GISEL-NEXT: v_or_b32_e32 v2, v2, v19
|
|
; GISEL-NEXT: v_or_b32_e32 v1, v1, v0
|
|
; GISEL-NEXT: v_or_b32_e32 v0, v2, v0
|
|
; GISEL-NEXT: v_and_b32_e32 v0, 0xffff, v0
|
|
; GISEL-NEXT: v_and_b32_e32 v1, 0xffff, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 16, v0
|
|
; GISEL-NEXT: v_lshl_or_b32 v8, v0, 16, v0
|
|
; GISEL-NEXT: v_or3_b32 v9, v1, v2, 1
|
|
; GISEL-NEXT: v_mov_b32_e32 v0, 0x96
|
|
; GISEL-NEXT: v_mov_b32_e32 v1, 0
|
|
; GISEL-NEXT: v_and_b32_e32 v2, 0x7fffff, v4
|
|
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[6:7], v[0:1]
|
|
; GISEL-NEXT: v_or_b32_e32 v4, 0x800000, v2
|
|
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
|
|
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
|
|
; GISEL-NEXT: s_xor_b64 s[16:17], exec, s[6:7]
|
|
; GISEL-NEXT: s_cbranch_execz .LBB3_4
|
|
; GISEL-NEXT: ; %bb.3: ; %fp-to-i-if-else
|
|
; GISEL-NEXT: v_add_u32_e32 v7, 0xffffff6a, v6
|
|
; GISEL-NEXT: v_lshlrev_b64 v[0:1], v7, v[4:5]
|
|
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v7
|
|
; GISEL-NEXT: v_cndmask_b32_e32 v10, 0, v0, vcc
|
|
; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v1, vcc
|
|
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v10, v8, 0
|
|
; GISEL-NEXT: v_add_u32_e32 v6, 0xffffff2a, v6
|
|
; GISEL-NEXT: v_sub_u32_e32 v2, 64, v7
|
|
; GISEL-NEXT: v_lshrrev_b64 v[2:3], v2, v[4:5]
|
|
; GISEL-NEXT: v_lshlrev_b64 v[4:5], v6, v[4:5]
|
|
; GISEL-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v7
|
|
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v11, v8, v[0:1]
|
|
; GISEL-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
|
|
; GISEL-NEXT: v_cndmask_b32_e64 v12, v2, 0, s[6:7]
|
|
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[6:7]
|
|
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v10, v9, 0
|
|
; GISEL-NEXT: v_mov_b32_e32 v2, v6
|
|
; GISEL-NEXT: v_mul_lo_u32 v6, v10, v8
|
|
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[8:9], v10, v8, v[1:2]
|
|
; GISEL-NEXT: v_mul_lo_u32 v4, v11, v8
|
|
; GISEL-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
|
|
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[10:11], v11, v9, v[1:2]
|
|
; GISEL-NEXT: v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
|
|
; GISEL-NEXT: v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
|
|
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v8, v[4:5]
|
|
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, 0, s[6:7]
|
|
; GISEL-NEXT: ; implicit-def: $vgpr8
|
|
; GISEL-NEXT: v_mad_u64_u32 v[3:4], s[6:7], v3, v9, v[6:7]
|
|
; GISEL-NEXT: ; implicit-def: $vgpr6
|
|
; GISEL-NEXT: ; implicit-def: $vgpr4_vgpr5
|
|
; GISEL-NEXT: ; implicit-def: $vgpr9
|
|
; GISEL-NEXT: .LBB3_4: ; %Flow
|
|
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[16:17]
|
|
; GISEL-NEXT: s_cbranch_execz .LBB3_6
|
|
; GISEL-NEXT: ; %bb.5: ; %fp-to-i-if-then12
|
|
; GISEL-NEXT: v_sub_co_u32_e32 v2, vcc, 0x96, v6
|
|
; GISEL-NEXT: v_lshrrev_b64 v[0:1], v2, v[4:5]
|
|
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2
|
|
; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc
|
|
; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v2
|
|
; GISEL-NEXT: v_cndmask_b32_e32 v4, v0, v4, vcc
|
|
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v4, v9, 0
|
|
; GISEL-NEXT: v_mad_u64_u32 v[2:3], s[8:9], v4, v8, 0
|
|
; GISEL-NEXT: v_mul_lo_u32 v5, v4, v8
|
|
; GISEL-NEXT: v_mad_u64_u32 v[1:2], vcc, v4, v8, v[1:2]
|
|
; GISEL-NEXT: v_addc_co_u32_e32 v3, vcc, v3, v5, vcc
|
|
; GISEL-NEXT: .LBB3_6: ; %Flow1
|
|
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
|
|
; GISEL-NEXT: .LBB3_7: ; %Flow2
|
|
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[14:15]
|
|
; GISEL-NEXT: s_cbranch_execz .LBB3_9
|
|
; GISEL-NEXT: ; %bb.8: ; %fp-to-i-if-then5
|
|
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, -1, s[4:5]
|
|
; GISEL-NEXT: v_and_b32_e32 v1, 1, v1
|
|
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[4:5]
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 1, v1
|
|
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 2, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 3, v1
|
|
; GISEL-NEXT: v_or_b32_e32 v2, v1, v2
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v3, v4
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 4, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 5, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v3, v4
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v5, v6
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 6, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 7, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v5, v6
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v7, v8
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 8, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 9, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v7, v8
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v9, v10
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 10, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 11, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v9, v10
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v11, v12
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 12, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 13, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v11, v12
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v13, v14
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v15, 14, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v16, 15, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v13, v14
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v15, v16
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v17, 16, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v18, 17, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v15, v16
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v17, v18
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v19, 18, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 19, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v17, v18
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v19, v3
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 20, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 21, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v19, v3
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v4, v5
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 22, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 23, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v4, v5
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v6, v7
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 24, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 25, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v6, v7
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v8, v9
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 26, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 27, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v8, v9
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v10, v11
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 28, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 29, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v10, v11
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v12, v13
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 30, v1
|
|
; GISEL-NEXT: v_lshlrev_b32_e32 v1, 31, v1
|
|
; GISEL-NEXT: v_or3_b32 v2, v2, v12, v13
|
|
; GISEL-NEXT: v_or3_b32 v0, v0, v14, v1
|
|
; GISEL-NEXT: v_or3_b32 v1, v2, v14, v1
|
|
; GISEL-NEXT: v_add_u32_e32 v3, 0x80000000, v1
|
|
; GISEL-NEXT: v_mov_b32_e32 v2, v1
|
|
; GISEL-NEXT: .LBB3_9: ; %Flow3
|
|
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
|
|
; GISEL-NEXT: .LBB3_10: ; %fp-to-i-cleanup
|
|
; GISEL-NEXT: s_or_b64 exec, exec, s[12:13]
|
|
; GISEL-NEXT: s_setpc_b64 s[30:31]
|
|
%cvt = fptoui float %x to i128
|
|
ret i128 %cvt
|
|
}
|
|
|
|
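; The entire f16 range fits in i32, so no i128 expansion is needed: the input is
; promoted to f32, converted with v_cvt_i32_f32, and the result is sign-extended
; to fill the upper words.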
define i128 @fptosi_f16_to_i128(half %x) {
; SDAG-LABEL: fptosi_f16_to_i128:
; SDAG: ; %bb.0:
; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; SDAG-NEXT: v_cvt_f32_f16_e32 v0, v0
; SDAG-NEXT: v_cvt_i32_f32_e32 v0, v0
; SDAG-NEXT: v_ashrrev_i32_e32 v1, 31, v0
; SDAG-NEXT: v_ashrrev_i32_e32 v2, 31, v1
; SDAG-NEXT: v_mov_b32_e32 v3, v2
; SDAG-NEXT: s_setpc_b64 s[30:31]
;
; GISEL-LABEL: fptosi_f16_to_i128:
; GISEL: ; %bb.0:
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_cvt_f32_f16_e32 v0, v0
; GISEL-NEXT: v_cvt_i32_f32_e32 v0, v0
; GISEL-NEXT: v_ashrrev_i32_e32 v1, 31, v0
; GISEL-NEXT: v_mov_b32_e32 v2, v1
; GISEL-NEXT: v_mov_b32_e32 v3, v1
; GISEL-NEXT: s_setpc_b64 s[30:31]
%cvt = fptosi half %x to i128
ret i128 %cvt
}

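; The unsigned f16 case likewise needs only a single v_cvt_u32_f32; the upper
; 96 bits of the result are simply zero.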
define i128 @fptoui_f16_to_i128(half %x) {
; GCN-LABEL: fptoui_f16_to_i128:
; GCN: ; %bb.0:
; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GCN-NEXT: v_cvt_f32_f16_e32 v0, v0
; GCN-NEXT: v_mov_b32_e32 v1, 0
; GCN-NEXT: v_mov_b32_e32 v2, 0
; GCN-NEXT: v_mov_b32_e32 v3, 0
; GCN-NEXT: v_cvt_u32_f32_e32 v0, v0
; GCN-NEXT: s_setpc_b64 s[30:31]
%cvt = fptoui half %x to i128
ret i128 %cvt
}

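; bf16 has an 8-bit exponent like f32, so its range exceeds i64 and the
; conversion goes through the full i128 expansion (fp-to-i-* blocks) below.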
define i128 @fptosi_bf16_to_i128(bfloat %x) {
; SDAG-LABEL: fptosi_bf16_to_i128:
; SDAG: ; %bb.0: ; %fp-to-i-entry
; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; SDAG-NEXT: v_mov_b32_e32 v4, v0
; SDAG-NEXT: v_bfe_u32 v5, v4, 7, 8
; SDAG-NEXT: s_movk_i32 s4, 0x7e
; SDAG-NEXT: v_mov_b32_e32 v0, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, 0
; SDAG-NEXT: v_mov_b32_e32 v1, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: v_cmp_lt_u32_e32 vcc, s4, v5
; SDAG-NEXT: s_and_saveexec_b64 s[8:9], vcc
; SDAG-NEXT: s_cbranch_execz .LBB6_10
; SDAG-NEXT: ; %bb.1: ; %fp-to-i-if-end
; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, -1, v6, vcc
; SDAG-NEXT: v_addc_co_u32_e32 v2, vcc, -1, v6, vcc
; SDAG-NEXT: s_movk_i32 s6, 0xff7f
; SDAG-NEXT: v_addc_co_u32_e32 v3, vcc, -1, v6, vcc
; SDAG-NEXT: s_mov_b32 s7, -1
; SDAG-NEXT: v_cmp_eq_u64_e64 s[4:5], -1, v[2:3]
; SDAG-NEXT: v_cmp_lt_u64_e64 s[6:7], s[6:7], v[0:1]
; SDAG-NEXT: v_cmp_lt_i16_e32 vcc, -1, v4
; SDAG-NEXT: s_and_b64 s[4:5], s[4:5], s[6:7]
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB6_7
; SDAG-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; SDAG-NEXT: s_movk_i32 s4, 0x7f
; SDAG-NEXT: v_and_b32_sdwa v0, v4, s4 dst_sel:DWORD dst_unused:UNUSED_PAD src0_sel:WORD_0 src1_sel:DWORD
; SDAG-NEXT: s_mov_b64 s[4:5], 0x85
; SDAG-NEXT: v_cmp_lt_u64_e64 s[4:5], s[4:5], v[5:6]
; SDAG-NEXT: v_mov_b32_e32 v7, 0
; SDAG-NEXT: v_cndmask_b32_e64 v9, -1, 0, vcc
; SDAG-NEXT: v_cndmask_b32_e64 v8, -1, 1, vcc
; SDAG-NEXT: v_or_b32_e32 v6, 0x80, v0
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[12:13], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB6_4
; SDAG-NEXT: ; %bb.3: ; %fp-to-i-if-else
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; SDAG-NEXT: v_add_co_u32_e64 v10, s[4:5], -1, v0
; SDAG-NEXT: v_sub_u32_e32 v0, 0xc6, v5
; SDAG-NEXT: v_add_u32_e32 v2, 0xffffff3a, v5
; SDAG-NEXT: v_add_u32_e32 v4, 0xffffff7a, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v0, v[6:7]
; SDAG-NEXT: v_lshlrev_b64 v[2:3], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v4
; SDAG-NEXT: v_cndmask_b32_e64 v1, v3, v1, s[4:5]
; SDAG-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v4
; SDAG-NEXT: v_cndmask_b32_e64 v3, 0, v1, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, v2, v0, s[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[0:1], v4, v[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, v2, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v12, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v11, 0, v1, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v12, v8, 0
; SDAG-NEXT: v_mul_lo_u32 v13, v9, v2
; SDAG-NEXT: v_mul_lo_u32 v14, v8, v3
; SDAG-NEXT: v_mov_b32_e32 v6, v1
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v11, v8, v[6:7]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v8, v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, v4
; SDAG-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v12, v9, v[6:7]
; SDAG-NEXT: v_add3_u32 v3, v3, v14, v13
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v10, v12, v[2:3]
; SDAG-NEXT: v_add_co_u32_e64 v3, s[4:5], v5, v7
; SDAG-NEXT: v_addc_co_u32_e64 v4, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mul_lo_u32 v8, v10, v11
; SDAG-NEXT: v_mul_lo_u32 v10, v10, v12
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v11, v9, v[3:4]
; SDAG-NEXT: v_add3_u32 v5, v10, v2, v8
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v3, v1
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], v4, v5, s[4:5]
; SDAG-NEXT: v_mov_b32_e32 v1, v6
; SDAG-NEXT: ; implicit-def: $vgpr5_vgpr6
; SDAG-NEXT: ; implicit-def: $vgpr6_vgpr7
; SDAG-NEXT: ; implicit-def: $vgpr8
; SDAG-NEXT: .LBB6_4: ; %Flow
; SDAG-NEXT: s_andn2_saveexec_b64 s[6:7], s[12:13]
; SDAG-NEXT: s_cbranch_execz .LBB6_6
; SDAG-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; SDAG-NEXT: v_sub_u32_e32 v2, 0x86, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, v0, s[4:5]
; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, v0, v6, s[4:5]
; SDAG-NEXT: v_mul_hi_i32_i24_e32 v1, v0, v8
; SDAG-NEXT: v_ashrrev_i32_e32 v2, 31, v1
; SDAG-NEXT: v_mul_i32_i24_e32 v0, v0, v8
; SDAG-NEXT: v_mov_b32_e32 v3, v2
; SDAG-NEXT: .LBB6_6: ; %Flow1
; SDAG-NEXT: s_or_b64 exec, exec, s[6:7]
; SDAG-NEXT: .LBB6_7: ; %Flow2
; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11]
; SDAG-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; SDAG-NEXT: v_bfrev_b32_e32 v0, 1
; SDAG-NEXT: v_bfrev_b32_e32 v1, -2
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, -1, vcc
; SDAG-NEXT: v_cndmask_b32_e32 v3, v0, v1, vcc
; SDAG-NEXT: v_mov_b32_e32 v0, v2
; SDAG-NEXT: v_mov_b32_e32 v1, v2
; SDAG-NEXT: ; %bb.9: ; %Flow3
; SDAG-NEXT: s_or_b64 exec, exec, s[4:5]
; SDAG-NEXT: .LBB6_10: ; %fp-to-i-cleanup
; SDAG-NEXT: s_or_b64 exec, exec, s[8:9]
; SDAG-NEXT: s_setpc_b64 s[30:31]
;
; GISEL-LABEL: fptosi_bf16_to_i128:
; GISEL: ; %bb.0: ; %fp-to-i-entry
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v4, v0
; GISEL-NEXT: v_and_b32_e32 v5, 0xffff, v4
; GISEL-NEXT: v_mov_b32_e32 v6, 0
; GISEL-NEXT: v_lshrrev_b64 v[0:1], 7, v[5:6]
; GISEL-NEXT: v_mov_b32_e32 v1, 0x7f
; GISEL-NEXT: s_mov_b64 s[4:5], 0
; GISEL-NEXT: v_mov_b32_e32 v2, 0
; GISEL-NEXT: v_bfe_u32 v5, v0, 0, 8
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[5:6], v[1:2]
; GISEL-NEXT: s_mov_b64 s[6:7], s[4:5]
; GISEL-NEXT: v_mov_b32_e32 v0, s4
; GISEL-NEXT: v_mov_b32_e32 v1, s5
; GISEL-NEXT: v_mov_b32_e32 v2, s6
; GISEL-NEXT: v_mov_b32_e32 v3, s7
; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc
; GISEL-NEXT: s_cbranch_execz .LBB6_10
; GISEL-NEXT: ; %bb.1: ; %fp-to-i-if-end
; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
; GISEL-NEXT: v_mov_b32_e32 v2, 0xffffff80
; GISEL-NEXT: v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
; GISEL-NEXT: v_mov_b32_e32 v3, -1
; GISEL-NEXT: v_addc_co_u32_e64 v7, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
; GISEL-NEXT: v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; GISEL-NEXT: v_cmp_le_u64_e32 vcc, -1, v[7:8]
; GISEL-NEXT: v_cmp_lt_i16_e64 s[4:5], -1, v4
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, vcc
; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, -1, v[7:8]
; GISEL-NEXT: v_cndmask_b32_e32 v0, v1, v0, vcc
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[14:15], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB6_7
; GISEL-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; GISEL-NEXT: s_xor_b64 s[6:7], s[4:5], -1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, -1, s[6:7]
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v2, 1, v0
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, s[6:7]
; GISEL-NEXT: v_lshlrev_b16_e32 v3, 2, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v7, 3, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v8, 4, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v9, 5, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v10, 6, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v11, 7, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v12, 8, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v13, 9, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v14, 10, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v15, 11, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v16, 12, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v17, 13, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v18, 14, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v19, 15, v0
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
; GISEL-NEXT: v_or_b32_e32 v1, v1, v2
; GISEL-NEXT: v_or_b32_e32 v0, v0, v3
; GISEL-NEXT: v_or_b32_e32 v1, v1, v3
; GISEL-NEXT: v_or_b32_e32 v0, v0, v7
; GISEL-NEXT: v_or_b32_e32 v1, v1, v7
; GISEL-NEXT: v_or_b32_e32 v0, v0, v8
; GISEL-NEXT: v_or_b32_e32 v1, v1, v8
; GISEL-NEXT: v_or_b32_e32 v0, v0, v9
; GISEL-NEXT: v_or_b32_e32 v1, v1, v9
; GISEL-NEXT: v_or_b32_e32 v0, v0, v10
; GISEL-NEXT: v_or_b32_e32 v1, v1, v10
; GISEL-NEXT: v_or_b32_e32 v0, v0, v11
; GISEL-NEXT: v_or_b32_e32 v1, v1, v11
; GISEL-NEXT: v_or_b32_e32 v0, v0, v12
; GISEL-NEXT: v_or_b32_e32 v1, v1, v12
; GISEL-NEXT: v_or_b32_e32 v0, v0, v13
; GISEL-NEXT: v_or_b32_e32 v1, v1, v13
; GISEL-NEXT: v_or_b32_e32 v0, v0, v14
; GISEL-NEXT: v_or_b32_e32 v1, v1, v14
; GISEL-NEXT: v_or_b32_e32 v0, v0, v15
; GISEL-NEXT: v_or_b32_e32 v1, v1, v15
; GISEL-NEXT: v_or_b32_e32 v0, v0, v16
; GISEL-NEXT: v_or_b32_e32 v1, v1, v16
; GISEL-NEXT: v_or_b32_e32 v0, v0, v17
; GISEL-NEXT: v_or_b32_e32 v1, v1, v17
; GISEL-NEXT: v_or_b32_e32 v0, v0, v18
; GISEL-NEXT: v_or_b32_e32 v1, v1, v18
; GISEL-NEXT: v_or_b32_e32 v0, v0, v19
; GISEL-NEXT: v_or_b32_e32 v1, v1, v19
; GISEL-NEXT: v_and_b32_e32 v0, 0xffff, v0
; GISEL-NEXT: v_and_b32_e32 v1, 0xffff, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 16, v0
; GISEL-NEXT: v_lshl_or_b32 v9, v0, 16, v0
; GISEL-NEXT: v_or3_b32 v8, v1, v2, 1
; GISEL-NEXT: v_mov_b32_e32 v0, 0x86
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_and_b32_e32 v2, 0x7f, v4
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[5:6], v[0:1]
; GISEL-NEXT: v_mov_b32_e32 v7, 0
; GISEL-NEXT: v_or_b32_e32 v6, 0x80, v2
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[16:17], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB6_4
; GISEL-NEXT: ; %bb.3: ; %fp-to-i-if-else
; GISEL-NEXT: v_add_u32_e32 v10, 0xffffff7a, v5
; GISEL-NEXT: v_lshlrev_b64 v[0:1], v10, v[6:7]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v10
; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v0, vcc
; GISEL-NEXT: v_cndmask_b32_e32 v12, 0, v1, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v11, v9, 0
; GISEL-NEXT: v_add_u32_e32 v4, 0xffffff3a, v5
; GISEL-NEXT: v_sub_u32_e32 v2, 64, v10
; GISEL-NEXT: v_lshrrev_b64 v[2:3], v2, v[6:7]
; GISEL-NEXT: v_lshlrev_b64 v[4:5], v4, v[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[0:1]
; GISEL-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
; GISEL-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v10
; GISEL-NEXT: v_cndmask_b32_e64 v10, v2, 0, s[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v10, v8, v[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v11, v8, 0
; GISEL-NEXT: v_mov_b32_e32 v2, v6
; GISEL-NEXT: v_mul_lo_u32 v6, v11, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[8:9], v11, v9, v[1:2]
; GISEL-NEXT: v_mul_lo_u32 v4, v12, v9
; GISEL-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[10:11], v12, v8, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
; GISEL-NEXT: v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v10, v9, v[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, 0, s[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr5
; GISEL-NEXT: v_mad_u64_u32 v[3:4], s[6:7], v3, v8, v[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr6_vgpr7
; GISEL-NEXT: ; implicit-def: $vgpr8
; GISEL-NEXT: .LBB6_4: ; %Flow
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[16:17]
; GISEL-NEXT: s_cbranch_execz .LBB6_6
; GISEL-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; GISEL-NEXT: v_sub_co_u32_e32 v2, vcc, 0x86, v5
; GISEL-NEXT: v_lshrrev_b64 v[0:1], v2, v[6:7]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2
; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc
; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v2
; GISEL-NEXT: v_cndmask_b32_e32 v0, v0, v6, vcc
; GISEL-NEXT: v_mul_hi_i32_i24_e32 v1, v0, v8
; GISEL-NEXT: v_ashrrev_i32_e32 v2, 31, v1
; GISEL-NEXT: v_mul_i32_i24_e32 v0, v0, v8
; GISEL-NEXT: v_mov_b32_e32 v3, v2
; GISEL-NEXT: .LBB6_6: ; %Flow1
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB6_7: ; %Flow2
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[14:15]
; GISEL-NEXT: s_cbranch_execz .LBB6_9
; GISEL-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, -1, s[4:5]
; GISEL-NEXT: v_and_b32_e32 v1, 1, v1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[4:5]
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 1, v1
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 2, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 3, v1
; GISEL-NEXT: v_or_b32_e32 v2, v1, v2
; GISEL-NEXT: v_or3_b32 v0, v0, v3, v4
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 4, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 5, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v3, v4
; GISEL-NEXT: v_or3_b32 v0, v0, v5, v6
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 6, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 7, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v5, v6
; GISEL-NEXT: v_or3_b32 v0, v0, v7, v8
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 8, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 9, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v7, v8
; GISEL-NEXT: v_or3_b32 v0, v0, v9, v10
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 10, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 11, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v9, v10
; GISEL-NEXT: v_or3_b32 v0, v0, v11, v12
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 12, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 13, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v11, v12
; GISEL-NEXT: v_or3_b32 v0, v0, v13, v14
; GISEL-NEXT: v_lshlrev_b32_e32 v15, 14, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v16, 15, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v13, v14
; GISEL-NEXT: v_or3_b32 v0, v0, v15, v16
; GISEL-NEXT: v_lshlrev_b32_e32 v17, 16, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v18, 17, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v15, v16
; GISEL-NEXT: v_or3_b32 v0, v0, v17, v18
; GISEL-NEXT: v_lshlrev_b32_e32 v19, 18, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 19, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v17, v18
; GISEL-NEXT: v_or3_b32 v0, v0, v19, v3
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 20, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 21, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v19, v3
; GISEL-NEXT: v_or3_b32 v0, v0, v4, v5
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 22, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 23, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v4, v5
; GISEL-NEXT: v_or3_b32 v0, v0, v6, v7
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 24, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 25, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v6, v7
; GISEL-NEXT: v_or3_b32 v0, v0, v8, v9
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 26, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 27, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v8, v9
; GISEL-NEXT: v_or3_b32 v0, v0, v10, v11
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 28, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 29, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v10, v11
; GISEL-NEXT: v_or3_b32 v0, v0, v12, v13
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 30, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v1, 31, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v12, v13
; GISEL-NEXT: v_or3_b32 v0, v0, v14, v1
; GISEL-NEXT: v_or3_b32 v1, v2, v14, v1
; GISEL-NEXT: v_add_u32_e32 v3, 0x80000000, v1
; GISEL-NEXT: v_mov_b32_e32 v2, v1
; GISEL-NEXT: .LBB6_9: ; %Flow3
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB6_10: ; %fp-to-i-cleanup
; GISEL-NEXT: s_or_b64 exec, exec, s[12:13]
; GISEL-NEXT: s_setpc_b64 s[30:31]
%cvt = fptosi bfloat %x to i128
ret i128 %cvt
}

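; The unsigned bf16 conversion takes the same i128 expansion; the generated
; code matches the signed version above apart from the block labels.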
define i128 @fptoui_bf16_to_i128(bfloat %x) {
; SDAG-LABEL: fptoui_bf16_to_i128:
; SDAG: ; %bb.0: ; %fp-to-i-entry
; SDAG-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; SDAG-NEXT: v_mov_b32_e32 v4, v0
; SDAG-NEXT: v_bfe_u32 v5, v4, 7, 8
; SDAG-NEXT: s_movk_i32 s4, 0x7e
; SDAG-NEXT: v_mov_b32_e32 v0, 0
; SDAG-NEXT: v_mov_b32_e32 v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, 0
; SDAG-NEXT: v_mov_b32_e32 v1, 0
; SDAG-NEXT: v_mov_b32_e32 v3, 0
; SDAG-NEXT: v_cmp_lt_u32_e32 vcc, s4, v5
; SDAG-NEXT: s_and_saveexec_b64 s[8:9], vcc
; SDAG-NEXT: s_cbranch_execz .LBB7_10
; SDAG-NEXT: ; %bb.1: ; %fp-to-i-if-end
; SDAG-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
; SDAG-NEXT: v_addc_co_u32_e32 v1, vcc, -1, v6, vcc
; SDAG-NEXT: v_addc_co_u32_e32 v2, vcc, -1, v6, vcc
; SDAG-NEXT: s_movk_i32 s6, 0xff7f
; SDAG-NEXT: v_addc_co_u32_e32 v3, vcc, -1, v6, vcc
; SDAG-NEXT: s_mov_b32 s7, -1
; SDAG-NEXT: v_cmp_eq_u64_e64 s[4:5], -1, v[2:3]
; SDAG-NEXT: v_cmp_lt_u64_e64 s[6:7], s[6:7], v[0:1]
; SDAG-NEXT: v_cmp_lt_i16_e32 vcc, -1, v4
; SDAG-NEXT: s_and_b64 s[4:5], s[4:5], s[6:7]
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[10:11], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB7_7
; SDAG-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; SDAG-NEXT: s_movk_i32 s4, 0x7f
; SDAG-NEXT: v_and_b32_sdwa v0, v4, s4 dst_sel:DWORD dst_unused:UNUSED_PAD src0_sel:WORD_0 src1_sel:DWORD
; SDAG-NEXT: s_mov_b64 s[4:5], 0x85
; SDAG-NEXT: v_cmp_lt_u64_e64 s[4:5], s[4:5], v[5:6]
; SDAG-NEXT: v_mov_b32_e32 v7, 0
; SDAG-NEXT: v_cndmask_b32_e64 v9, -1, 0, vcc
; SDAG-NEXT: v_cndmask_b32_e64 v8, -1, 1, vcc
; SDAG-NEXT: v_or_b32_e32 v6, 0x80, v0
; SDAG-NEXT: ; implicit-def: $vgpr0_vgpr1
; SDAG-NEXT: ; implicit-def: $vgpr2_vgpr3
; SDAG-NEXT: s_and_saveexec_b64 s[6:7], s[4:5]
; SDAG-NEXT: s_xor_b64 s[12:13], exec, s[6:7]
; SDAG-NEXT: s_cbranch_execz .LBB7_4
; SDAG-NEXT: ; %bb.3: ; %fp-to-i-if-else
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; SDAG-NEXT: v_add_co_u32_e64 v10, s[4:5], -1, v0
; SDAG-NEXT: v_sub_u32_e32 v0, 0xc6, v5
; SDAG-NEXT: v_add_u32_e32 v2, 0xffffff3a, v5
; SDAG-NEXT: v_add_u32_e32 v4, 0xffffff7a, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v0, v[6:7]
; SDAG-NEXT: v_lshlrev_b64 v[2:3], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v4
; SDAG-NEXT: v_cndmask_b32_e64 v1, v3, v1, s[4:5]
; SDAG-NEXT: v_cmp_ne_u32_e64 s[6:7], 0, v4
; SDAG-NEXT: v_cndmask_b32_e64 v3, 0, v1, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, v2, v0, s[4:5]
; SDAG-NEXT: v_lshlrev_b64 v[0:1], v4, v[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, v2, s[6:7]
; SDAG-NEXT: v_cndmask_b32_e64 v12, 0, v0, s[4:5]
; SDAG-NEXT: v_cndmask_b32_e64 v11, 0, v1, s[4:5]
; SDAG-NEXT: v_mad_u64_u32 v[0:1], s[4:5], v12, v8, 0
; SDAG-NEXT: v_mul_lo_u32 v13, v9, v2
; SDAG-NEXT: v_mul_lo_u32 v14, v8, v3
; SDAG-NEXT: v_mov_b32_e32 v6, v1
; SDAG-NEXT: v_mad_u64_u32 v[4:5], s[4:5], v11, v8, v[6:7]
; SDAG-NEXT: v_mad_u64_u32 v[2:3], s[4:5], v8, v2, 0
; SDAG-NEXT: v_mov_b32_e32 v6, v4
; SDAG-NEXT: v_mad_u64_u32 v[6:7], s[4:5], v12, v9, v[6:7]
; SDAG-NEXT: v_add3_u32 v3, v3, v14, v13
; SDAG-NEXT: v_mad_u64_u32 v[1:2], s[4:5], v10, v12, v[2:3]
; SDAG-NEXT: v_add_co_u32_e64 v3, s[4:5], v5, v7
; SDAG-NEXT: v_addc_co_u32_e64 v4, s[4:5], 0, 0, s[4:5]
; SDAG-NEXT: v_mul_lo_u32 v8, v10, v11
; SDAG-NEXT: v_mul_lo_u32 v10, v10, v12
; SDAG-NEXT: v_mad_u64_u32 v[3:4], s[4:5], v11, v9, v[3:4]
; SDAG-NEXT: v_add3_u32 v5, v10, v2, v8
; SDAG-NEXT: v_add_co_u32_e64 v2, s[4:5], v3, v1
; SDAG-NEXT: v_addc_co_u32_e64 v3, s[4:5], v4, v5, s[4:5]
; SDAG-NEXT: v_mov_b32_e32 v1, v6
; SDAG-NEXT: ; implicit-def: $vgpr5_vgpr6
; SDAG-NEXT: ; implicit-def: $vgpr6_vgpr7
; SDAG-NEXT: ; implicit-def: $vgpr8
; SDAG-NEXT: .LBB7_4: ; %Flow
; SDAG-NEXT: s_andn2_saveexec_b64 s[6:7], s[12:13]
; SDAG-NEXT: s_cbranch_execz .LBB7_6
; SDAG-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; SDAG-NEXT: v_sub_u32_e32 v2, 0x86, v5
; SDAG-NEXT: v_lshrrev_b64 v[0:1], v2, v[6:7]
; SDAG-NEXT: v_cmp_gt_u32_e64 s[4:5], 64, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, 0, v0, s[4:5]
; SDAG-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v2
; SDAG-NEXT: v_cndmask_b32_e64 v0, v0, v6, s[4:5]
; SDAG-NEXT: v_mul_hi_i32_i24_e32 v1, v0, v8
; SDAG-NEXT: v_ashrrev_i32_e32 v2, 31, v1
; SDAG-NEXT: v_mul_i32_i24_e32 v0, v0, v8
; SDAG-NEXT: v_mov_b32_e32 v3, v2
; SDAG-NEXT: .LBB7_6: ; %Flow1
; SDAG-NEXT: s_or_b64 exec, exec, s[6:7]
; SDAG-NEXT: .LBB7_7: ; %Flow2
; SDAG-NEXT: s_andn2_saveexec_b64 s[4:5], s[10:11]
; SDAG-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; SDAG-NEXT: v_bfrev_b32_e32 v0, 1
; SDAG-NEXT: v_bfrev_b32_e32 v1, -2
; SDAG-NEXT: v_cndmask_b32_e64 v2, 0, -1, vcc
; SDAG-NEXT: v_cndmask_b32_e32 v3, v0, v1, vcc
; SDAG-NEXT: v_mov_b32_e32 v0, v2
; SDAG-NEXT: v_mov_b32_e32 v1, v2
; SDAG-NEXT: ; %bb.9: ; %Flow3
; SDAG-NEXT: s_or_b64 exec, exec, s[4:5]
; SDAG-NEXT: .LBB7_10: ; %fp-to-i-cleanup
; SDAG-NEXT: s_or_b64 exec, exec, s[8:9]
; SDAG-NEXT: s_setpc_b64 s[30:31]
;
; GISEL-LABEL: fptoui_bf16_to_i128:
; GISEL: ; %bb.0: ; %fp-to-i-entry
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: v_mov_b32_e32 v4, v0
; GISEL-NEXT: v_and_b32_e32 v5, 0xffff, v4
; GISEL-NEXT: v_mov_b32_e32 v6, 0
; GISEL-NEXT: v_lshrrev_b64 v[0:1], 7, v[5:6]
; GISEL-NEXT: v_mov_b32_e32 v1, 0x7f
; GISEL-NEXT: s_mov_b64 s[4:5], 0
; GISEL-NEXT: v_mov_b32_e32 v2, 0
; GISEL-NEXT: v_bfe_u32 v5, v0, 0, 8
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[5:6], v[1:2]
; GISEL-NEXT: s_mov_b64 s[6:7], s[4:5]
; GISEL-NEXT: v_mov_b32_e32 v0, s4
; GISEL-NEXT: v_mov_b32_e32 v1, s5
; GISEL-NEXT: v_mov_b32_e32 v2, s6
; GISEL-NEXT: v_mov_b32_e32 v3, s7
; GISEL-NEXT: s_and_saveexec_b64 s[12:13], vcc
; GISEL-NEXT: s_cbranch_execz .LBB7_10
; GISEL-NEXT: ; %bb.1: ; %fp-to-i-if-end
; GISEL-NEXT: v_add_co_u32_e32 v0, vcc, 0xffffff01, v5
; GISEL-NEXT: v_mov_b32_e32 v2, 0xffffff80
; GISEL-NEXT: v_addc_co_u32_e64 v1, s[6:7], 0, -1, vcc
; GISEL-NEXT: v_mov_b32_e32 v3, -1
; GISEL-NEXT: v_addc_co_u32_e64 v7, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[0:1], v[2:3]
; GISEL-NEXT: v_addc_co_u32_e64 v8, s[6:7], 0, -1, s[6:7]
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; GISEL-NEXT: v_cmp_le_u64_e32 vcc, -1, v[7:8]
; GISEL-NEXT: v_cmp_lt_i16_e64 s[4:5], -1, v4
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, vcc
; GISEL-NEXT: v_cmp_eq_u64_e32 vcc, -1, v[7:8]
; GISEL-NEXT: v_cndmask_b32_e32 v0, v1, v0, vcc
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_cmp_ne_u32_e32 vcc, 0, v0
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[14:15], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB7_7
; GISEL-NEXT: ; %bb.2: ; %fp-to-i-if-end9
; GISEL-NEXT: s_xor_b64 s[6:7], s[4:5], -1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, -1, s[6:7]
; GISEL-NEXT: v_and_b32_e32 v0, 1, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v2, 1, v0
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, 1, s[6:7]
; GISEL-NEXT: v_lshlrev_b16_e32 v3, 2, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v7, 3, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v8, 4, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v9, 5, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v10, 6, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v11, 7, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v12, 8, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v13, 9, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v14, 10, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v15, 11, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v16, 12, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v17, 13, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v18, 14, v0
; GISEL-NEXT: v_lshlrev_b16_e32 v19, 15, v0
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
; GISEL-NEXT: v_or_b32_e32 v1, v1, v2
; GISEL-NEXT: v_or_b32_e32 v0, v0, v3
; GISEL-NEXT: v_or_b32_e32 v1, v1, v3
; GISEL-NEXT: v_or_b32_e32 v0, v0, v7
; GISEL-NEXT: v_or_b32_e32 v1, v1, v7
; GISEL-NEXT: v_or_b32_e32 v0, v0, v8
; GISEL-NEXT: v_or_b32_e32 v1, v1, v8
; GISEL-NEXT: v_or_b32_e32 v0, v0, v9
; GISEL-NEXT: v_or_b32_e32 v1, v1, v9
; GISEL-NEXT: v_or_b32_e32 v0, v0, v10
; GISEL-NEXT: v_or_b32_e32 v1, v1, v10
; GISEL-NEXT: v_or_b32_e32 v0, v0, v11
; GISEL-NEXT: v_or_b32_e32 v1, v1, v11
; GISEL-NEXT: v_or_b32_e32 v0, v0, v12
; GISEL-NEXT: v_or_b32_e32 v1, v1, v12
; GISEL-NEXT: v_or_b32_e32 v0, v0, v13
; GISEL-NEXT: v_or_b32_e32 v1, v1, v13
; GISEL-NEXT: v_or_b32_e32 v0, v0, v14
; GISEL-NEXT: v_or_b32_e32 v1, v1, v14
; GISEL-NEXT: v_or_b32_e32 v0, v0, v15
; GISEL-NEXT: v_or_b32_e32 v1, v1, v15
; GISEL-NEXT: v_or_b32_e32 v0, v0, v16
; GISEL-NEXT: v_or_b32_e32 v1, v1, v16
; GISEL-NEXT: v_or_b32_e32 v0, v0, v17
; GISEL-NEXT: v_or_b32_e32 v1, v1, v17
; GISEL-NEXT: v_or_b32_e32 v0, v0, v18
; GISEL-NEXT: v_or_b32_e32 v1, v1, v18
; GISEL-NEXT: v_or_b32_e32 v0, v0, v19
; GISEL-NEXT: v_or_b32_e32 v1, v1, v19
; GISEL-NEXT: v_and_b32_e32 v0, 0xffff, v0
; GISEL-NEXT: v_and_b32_e32 v1, 0xffff, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 16, v0
; GISEL-NEXT: v_lshl_or_b32 v9, v0, 16, v0
; GISEL-NEXT: v_or3_b32 v8, v1, v2, 1
; GISEL-NEXT: v_mov_b32_e32 v0, 0x86
; GISEL-NEXT: v_mov_b32_e32 v1, 0
; GISEL-NEXT: v_and_b32_e32 v2, 0x7f, v4
; GISEL-NEXT: v_cmp_ge_u64_e32 vcc, v[5:6], v[0:1]
; GISEL-NEXT: v_mov_b32_e32 v7, 0
; GISEL-NEXT: v_or_b32_e32 v6, 0x80, v2
; GISEL-NEXT: ; implicit-def: $vgpr0_vgpr1_vgpr2_vgpr3
; GISEL-NEXT: s_and_saveexec_b64 s[6:7], vcc
; GISEL-NEXT: s_xor_b64 s[16:17], exec, s[6:7]
; GISEL-NEXT: s_cbranch_execz .LBB7_4
; GISEL-NEXT: ; %bb.3: ; %fp-to-i-if-else
; GISEL-NEXT: v_add_u32_e32 v10, 0xffffff7a, v5
; GISEL-NEXT: v_lshlrev_b64 v[0:1], v10, v[6:7]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v10
; GISEL-NEXT: v_cndmask_b32_e32 v11, 0, v0, vcc
; GISEL-NEXT: v_cndmask_b32_e32 v12, 0, v1, vcc
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[6:7], v11, v9, 0
; GISEL-NEXT: v_add_u32_e32 v4, 0xffffff3a, v5
; GISEL-NEXT: v_sub_u32_e32 v2, 64, v10
; GISEL-NEXT: v_lshrrev_b64 v[2:3], v2, v[6:7]
; GISEL-NEXT: v_lshlrev_b64 v[4:5], v4, v[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v12, v9, v[0:1]
; GISEL-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
; GISEL-NEXT: v_cmp_eq_u32_e64 s[6:7], 0, v10
; GISEL-NEXT: v_cndmask_b32_e64 v10, v2, 0, s[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v10, v8, v[6:7]
; GISEL-NEXT: v_mad_u64_u32 v[0:1], s[8:9], v11, v8, 0
; GISEL-NEXT: v_mov_b32_e32 v2, v6
; GISEL-NEXT: v_mul_lo_u32 v6, v11, v9
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[8:9], v11, v9, v[1:2]
; GISEL-NEXT: v_mul_lo_u32 v4, v12, v9
; GISEL-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; GISEL-NEXT: v_mad_u64_u32 v[1:2], s[10:11], v12, v8, v[1:2]
; GISEL-NEXT: v_addc_co_u32_e64 v6, s[10:11], v7, v6, s[10:11]
; GISEL-NEXT: v_addc_co_u32_e64 v4, s[8:9], v6, v4, s[8:9]
; GISEL-NEXT: v_mad_u64_u32 v[6:7], s[8:9], v10, v9, v[4:5]
; GISEL-NEXT: v_cndmask_b32_e64 v3, v3, 0, s[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr5
; GISEL-NEXT: v_mad_u64_u32 v[3:4], s[6:7], v3, v8, v[6:7]
; GISEL-NEXT: ; implicit-def: $vgpr6_vgpr7
; GISEL-NEXT: ; implicit-def: $vgpr8
; GISEL-NEXT: .LBB7_4: ; %Flow
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[16:17]
; GISEL-NEXT: s_cbranch_execz .LBB7_6
; GISEL-NEXT: ; %bb.5: ; %fp-to-i-if-then12
; GISEL-NEXT: v_sub_co_u32_e32 v2, vcc, 0x86, v5
; GISEL-NEXT: v_lshrrev_b64 v[0:1], v2, v[6:7]
; GISEL-NEXT: v_cmp_gt_u32_e32 vcc, 64, v2
; GISEL-NEXT: v_cndmask_b32_e32 v0, 0, v0, vcc
; GISEL-NEXT: v_cmp_eq_u32_e32 vcc, 0, v2
; GISEL-NEXT: v_cndmask_b32_e32 v0, v0, v6, vcc
; GISEL-NEXT: v_mul_hi_i32_i24_e32 v1, v0, v8
; GISEL-NEXT: v_ashrrev_i32_e32 v2, 31, v1
; GISEL-NEXT: v_mul_i32_i24_e32 v0, v0, v8
; GISEL-NEXT: v_mov_b32_e32 v3, v2
; GISEL-NEXT: .LBB7_6: ; %Flow1
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB7_7: ; %Flow2
; GISEL-NEXT: s_andn2_saveexec_b64 s[6:7], s[14:15]
; GISEL-NEXT: s_cbranch_execz .LBB7_9
; GISEL-NEXT: ; %bb.8: ; %fp-to-i-if-then5
; GISEL-NEXT: v_cndmask_b32_e64 v1, 0, -1, s[4:5]
; GISEL-NEXT: v_and_b32_e32 v1, 1, v1
; GISEL-NEXT: v_cndmask_b32_e64 v0, 0, 1, s[4:5]
; GISEL-NEXT: v_lshlrev_b32_e32 v2, 1, v1
; GISEL-NEXT: v_or_b32_e32 v0, v0, v2
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 2, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 3, v1
; GISEL-NEXT: v_or_b32_e32 v2, v1, v2
; GISEL-NEXT: v_or3_b32 v0, v0, v3, v4
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 4, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 5, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v3, v4
; GISEL-NEXT: v_or3_b32 v0, v0, v5, v6
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 6, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 7, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v5, v6
; GISEL-NEXT: v_or3_b32 v0, v0, v7, v8
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 8, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 9, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v7, v8
; GISEL-NEXT: v_or3_b32 v0, v0, v9, v10
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 10, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 11, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v9, v10
; GISEL-NEXT: v_or3_b32 v0, v0, v11, v12
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 12, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 13, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v11, v12
; GISEL-NEXT: v_or3_b32 v0, v0, v13, v14
; GISEL-NEXT: v_lshlrev_b32_e32 v15, 14, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v16, 15, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v13, v14
; GISEL-NEXT: v_or3_b32 v0, v0, v15, v16
; GISEL-NEXT: v_lshlrev_b32_e32 v17, 16, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v18, 17, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v15, v16
; GISEL-NEXT: v_or3_b32 v0, v0, v17, v18
; GISEL-NEXT: v_lshlrev_b32_e32 v19, 18, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v3, 19, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v17, v18
; GISEL-NEXT: v_or3_b32 v0, v0, v19, v3
; GISEL-NEXT: v_lshlrev_b32_e32 v4, 20, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v5, 21, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v19, v3
; GISEL-NEXT: v_or3_b32 v0, v0, v4, v5
; GISEL-NEXT: v_lshlrev_b32_e32 v6, 22, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v7, 23, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v4, v5
; GISEL-NEXT: v_or3_b32 v0, v0, v6, v7
; GISEL-NEXT: v_lshlrev_b32_e32 v8, 24, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v9, 25, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v6, v7
; GISEL-NEXT: v_or3_b32 v0, v0, v8, v9
; GISEL-NEXT: v_lshlrev_b32_e32 v10, 26, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v11, 27, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v8, v9
; GISEL-NEXT: v_or3_b32 v0, v0, v10, v11
; GISEL-NEXT: v_lshlrev_b32_e32 v12, 28, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v13, 29, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v10, v11
; GISEL-NEXT: v_or3_b32 v0, v0, v12, v13
; GISEL-NEXT: v_lshlrev_b32_e32 v14, 30, v1
; GISEL-NEXT: v_lshlrev_b32_e32 v1, 31, v1
; GISEL-NEXT: v_or3_b32 v2, v2, v12, v13
; GISEL-NEXT: v_or3_b32 v0, v0, v14, v1
; GISEL-NEXT: v_or3_b32 v1, v2, v14, v1
; GISEL-NEXT: v_add_u32_e32 v3, 0x80000000, v1
; GISEL-NEXT: v_mov_b32_e32 v2, v1
; GISEL-NEXT: .LBB7_9: ; %Flow3
; GISEL-NEXT: s_or_b64 exec, exec, s[6:7]
; GISEL-NEXT: .LBB7_10: ; %fp-to-i-cleanup
; GISEL-NEXT: s_or_b64 exec, exec, s[12:13]
; GISEL-NEXT: s_setpc_b64 s[30:31]
%cvt = fptoui bfloat %x to i128
ret i128 %cvt
}