
Generalize the code over the properties of the mov instruction, rather than maintaining parallel logic to figure out which type of mov to use. I've maintained the behavior with 16-bit physical SGPRs, though I think that behavior is broken and corrupts any value that happens to be live in the high bits. It just happens there's no way to separately write those bits with a real instruction, but I don't think we should be making assumptions around that property. This is NFC-ish: it now does a better job with immediate pseudos, which practically won't reach here, and it will also make it easier to support more folds in a future patch. I added a couple of new tests with 16-bit extracts of 64-bit sources. The only other test change is an immediate rendering change from zero-extended to sign-extended.
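For reference, a minimal standalone sketch (not part of the patch or the test file) of the constant splitting the tests below exercise: the low half of the 64-bit test constant now prints sign-extended (-1412567312) rather than zero-extended (2882399984), and the 16-bit extracts of the S_MOV_B64_IMM_PSEUDO constant 1125912791875585 (0x0004000300020001) pick out 1, 2, 3, and 4.

```cpp
// Illustration only: how the 32-bit and 16-bit pieces of the 64-bit test
// constants render as immediates. Not LLVM code.
#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t Wide = 1311768467750121200ull;
  // sub0 keeps the low 32 bits; MIR prints immediates as signed values, so it
  // renders sign-extended as -1412567312 instead of zero-extended as 2882399984.
  printf("sub0 = %d\n", static_cast<int32_t>(static_cast<uint32_t>(Wide)));
  // sub1 keeps the high 32 bits: 305419896.
  printf("sub1 = %d\n", static_cast<int32_t>(static_cast<uint32_t>(Wide >> 32)));

  const uint64_t Packed = 1125912791875585ull; // 0x0004000300020001
  // lo16, hi16, sub1_lo16, sub1_hi16 are successive 16-bit halves: 1, 2, 3, 4.
  for (int I = 0; I < 4; ++I)
    printf("half%d = %u\n", I, static_cast<unsigned>((Packed >> (16 * I)) & 0xffff));
  return 0;
}
```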
# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn--amdhsa -mcpu=gfx942 -verify-machineinstrs -run-pass peephole-opt -o - %s | FileCheck -check-prefixes=GCN,GFX942 %s
# RUN: llc -mtriple=amdgcn--amdhsa -mcpu=gfx1250 -run-pass peephole-opt -o - %s | FileCheck -check-prefixes=GCN,GFX1250 %s

---
name: fold_simm_virtual
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_virtual
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 0
    ; GCN-NEXT: SI_RETURN_TO_EPILOG
    %0:sreg_32 = S_MOV_B32 0
    %1:sreg_32 = COPY killed %0
    SI_RETURN_TO_EPILOG

...
---
name: fold_simm_physical
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_physical
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 0
    ; GCN-NEXT: $sgpr1 = S_MOV_B32 0
    ; GCN-NEXT: SI_RETURN_TO_EPILOG
    %0:sreg_32 = S_MOV_B32 0
    $sgpr1 = COPY killed %0
    SI_RETURN_TO_EPILOG

...
---
name: dont_fold_simm_scc
body: |
  bb.0:

    ; GCN-LABEL: name: dont_fold_simm_scc
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 0
    ; GCN-NEXT: $scc = COPY killed [[S_MOV_B32_]]
    ; GCN-NEXT: SI_RETURN_TO_EPILOG
    %0:sreg_32 = S_MOV_B32 0
    $scc = COPY killed %0
    SI_RETURN_TO_EPILOG

...
---
name: fold_simm_16_sub_to_lo
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_16_sub_to_lo
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2048
    ; GCN-NEXT: [[COPY:%[0-9]+]]:sgpr_lo16 = COPY killed [[S_MOV_B32_]].lo16
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
    %0:sreg_32 = S_MOV_B32 2048
    %1:sgpr_lo16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_simm_16_sub_to_phys
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_16_sub_to_phys
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2048
    ; GCN-NEXT: $sgpr0 = S_MOV_B32 2048
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $sgpr0_lo16
    %0:sreg_32 = S_MOV_B32 2048
    $sgpr0_lo16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG $sgpr0_lo16

...
---
name: fold_aimm_16_sub_to_phys
body: |
  bb.0:

    ; GCN-LABEL: name: fold_aimm_16_sub_to_phys
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 0
    ; GCN-NEXT: $agpr0 = V_ACCVGPR_WRITE_B32_e64 0, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $agpr0_lo16
    %0:sreg_32 = S_MOV_B32 0
    $agpr0_lo16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG $agpr0_lo16

...
---
name: fold_vimm_16_sub_to_lo
body: |
  bb.0:

    ; GCN-LABEL: name: fold_vimm_16_sub_to_lo
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2048
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_16 = COPY killed [[S_MOV_B32_]].lo16
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
    %0:sreg_32 = S_MOV_B32 2048
    %1:vgpr_16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_vimm_16_sub_to_phys
body: |
  bb.0:

    ; GCN-LABEL: name: fold_vimm_16_sub_to_phys
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2048
    ; GCN-NEXT: $vgpr0_lo16 = COPY killed [[S_MOV_B32_]].lo16
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $vgpr0_lo16
    %0:sreg_32 = S_MOV_B32 2048
    $vgpr0_lo16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG $vgpr0_lo16

...
---
name: fold_sreg_64_sub0_to_vgpr_32
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_sub0_to_vgpr_32
    ; GCN: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1412567312, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %1:vgpr_32 = COPY killed %0.sub0
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_sreg_64_sub1_to_vgpr_32
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_sub1_to_vgpr_32
    ; GCN: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 1311768467750121200
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 305419896, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:sreg_64 = S_MOV_B64 1311768467750121200
    %1:vgpr_32 = COPY killed %0.sub1
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_vreg_64_sub1_to_vgpr_32
body: |
  bb.0:

    ; GCN-LABEL: name: fold_vreg_64_sub1_to_vgpr_32
    ; GCN: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 305419896, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    %1:vgpr_32 = COPY killed %0.sub1
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_sreg_64_to_vreg_64
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_to_vreg_64
    ; GCN: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B]]
    %0:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %1:vreg_64_align2 = COPY killed %0
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_sreg_64_to_sreg_64
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_to_sreg_64
    ; GCN: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 1311768467750121200
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[S_MOV_B64_]]
    %0:sreg_64 = S_MOV_B64 1311768467750121200
    %1:sreg_64 = COPY killed %0
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_sreg_64_lo16_to_sgpr_lo16
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_lo16_to_sgpr_lo16
    ; GCN: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    ; GCN-NEXT: $sgpr0 = S_MOV_B32 1
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $sgpr0_lo16
    %0:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    $sgpr0_lo16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG $sgpr0_lo16

...
---
name: fold_sreg_64_hi16_to_sgpr_lo16
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_hi16_to_sgpr_lo16
    ; GCN: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    ; GCN-NEXT: $sgpr0 = S_MOV_B32 2
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $sgpr0_lo16
    %0:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    $sgpr0_lo16 = COPY killed %0.hi16
    SI_RETURN_TO_EPILOG $sgpr0_lo16

...
---
name: fold_sreg_64_sub1_lo16_to_sgpr_lo16
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_sub1_lo16_to_sgpr_lo16
    ; GCN: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    ; GCN-NEXT: $sgpr0 = S_MOV_B32 3
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $sgpr0_lo16
    %0:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    $sgpr0_lo16 = COPY killed %0.sub1_lo16
    SI_RETURN_TO_EPILOG $sgpr0_lo16

...
---
name: fold_sreg_64_sub1_hi16_to_sgpr_lo16
body: |
  bb.0:

    ; GCN-LABEL: name: fold_sreg_64_sub1_hi16_to_sgpr_lo16
    ; GCN: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    ; GCN-NEXT: $sgpr0 = S_MOV_B32 4
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $sgpr0_lo16
    %0:sreg_64 = S_MOV_B64_IMM_PSEUDO 1125912791875585
    $sgpr0_lo16 = COPY killed %0.sub1_hi16
    SI_RETURN_TO_EPILOG $sgpr0_lo16

...
---
name: fmac_sreg_64_sub0_src0_to_fmamk
tracksRegLiveness: true
body: |
  bb.0:

    ; GCN-LABEL: name: fmac_sreg_64_sub0_src0_to_fmamk
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[V_FMAMK_F32_:%[0-9]+]]:vgpr_32 = V_FMAMK_F32 [[DEF]], -1412567312, [[DEF1]], implicit $mode, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_FMAMK_F32_]]
    %0:vgpr_32 = IMPLICIT_DEF
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vgpr_32 = V_FMAC_F32_e64 0, %2.sub0, 0, %0, 0, %1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fmac_sreg_64_sub1_src0_to_fmamk
tracksRegLiveness: true
body: |
  bb.0:

    ; GCN-LABEL: name: fmac_sreg_64_sub1_src0_to_fmamk
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[V_FMAMK_F32_:%[0-9]+]]:vgpr_32 = V_FMAMK_F32 [[DEF]], 305419896, [[DEF1]], implicit $mode, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_FMAMK_F32_]]
    %0:vgpr_32 = IMPLICIT_DEF
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vgpr_32 = V_FMAC_F32_e64 0, %2.sub1, 0, %0, 0, %1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fmac_sreg_64_sub1_src1_to_fmaak
tracksRegLiveness: true
body: |
  bb.0:

    ; GCN-LABEL: name: fmac_sreg_64_sub1_src1_to_fmaak
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[V_FMAMK_F32_:%[0-9]+]]:vgpr_32 = V_FMAMK_F32 [[DEF]], 305419896, [[DEF1]], implicit $mode, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_FMAMK_F32_]]
    %0:vgpr_32 = IMPLICIT_DEF
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vgpr_32 = V_FMAC_F32_e64 0, %0, 0, %2.sub1, 0, %1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fma_sreg_64_sub0_to_fmaak
tracksRegLiveness: true
body: |
  bb.0:

    ; GCN-LABEL: name: fma_sreg_64_sub0_to_fmaak
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[V_FMAAK_F32_:%[0-9]+]]:vgpr_32 = V_FMAAK_F32 [[DEF]], [[DEF1]], -1412567312, implicit $mode, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_FMAAK_F32_]]
    %0:vgpr_32 = IMPLICIT_DEF
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vgpr_32 = V_FMA_F32_e64 0, %0, 0, %1, 0, %2.sub0, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fma_sreg_64_sub1_to_fmaak
tracksRegLiveness: true
body: |
  bb.0:

    ; GCN-LABEL: name: fma_sreg_64_sub1_to_fmaak
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[V_FMAAK_F32_:%[0-9]+]]:vgpr_32 = V_FMAAK_F32 [[DEF]], [[DEF1]], 305419896, implicit $mode, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_FMAAK_F32_]]
    %0:vgpr_32 = IMPLICIT_DEF
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vgpr_32 = V_FMA_F32_e64 0, %0, 0, %1, 0, %2.sub1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fold_aimm_virtual
body: |
  bb.0:

    ; GCN-LABEL: name: fold_aimm_virtual
    ; GCN: [[V_ACCVGPR_WRITE_B32_e64_:%[0-9]+]]:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_ACCVGPR_WRITE_B32_e64_]]
    %0:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
    %1:agpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1

...
---
name: fold_aimm_virtual_copy_to_vgpr
body: |
  bb.0:

    ; GCN-LABEL: name: fold_aimm_virtual_copy_to_vgpr
    ; GCN: [[V_ACCVGPR_WRITE_B32_e64_:%[0-9]+]]:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B32_e32_]]
    %0:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
    %1:vgpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1

...
---
name: fold_v_mov_b64_64_sub0_to_vgpr_32
body: |
  bb.0:

    ; GCN-LABEL: name: fold_v_mov_b64_64_sub0_to_vgpr_32
    ; GCN: [[V_MOV_B64_e32_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1412567312, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    %1:vgpr_32 = COPY killed %0.sub0
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_v_mov_b64_64_sub1_to_vgpr_32
body: |
  bb.0:

    ; GCN-LABEL: name: fold_v_mov_b64_64_sub1_to_vgpr_32
    ; GCN: [[V_MOV_B64_e32_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 305419896, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    %1:vgpr_32 = COPY killed %0.sub1
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_v_mov_b64_64
body: |
  bb.0:

    ; GCN-LABEL: name: fold_v_mov_b64_64
    ; GCN: [[V_MOV_B64_e32_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B]]
    %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    %1:vreg_64_align2 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1

...
---
name: fold_v_mov_b64_64_to_unaligned
body: |
  bb.0:
    ; GCN-LABEL: name: fold_v_mov_b64_64_to_unaligned
    ; GCN: [[V_MOV_B64_e32_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B]]
    %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
    %1:vreg_64 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_v_mov_b64_pseudo_64_to_unaligned
body: |
  bb.0:
    ; GCN-LABEL: name: fold_v_mov_b64_pseudo_64_to_unaligned
    ; GCN: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B]]
    %0:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    %1:vreg_64 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_s_brev_b32_simm_virtual_0
body: |
  bb.0:

    ; GCN-LABEL: name: fold_s_brev_b32_simm_virtual_0
    ; GCN: [[S_BREV_B32_:%[0-9]+]]:sreg_32 = S_BREV_B32 1
    ; GCN-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 -2147483648
    ; GCN-NEXT: SI_RETURN_TO_EPILOG
    %0:sreg_32 = S_BREV_B32 1
    %1:sreg_32 = COPY killed %0
    SI_RETURN_TO_EPILOG

...
---
name: fold_s_brev_b32_simm_virtual_1
body: |
  bb.0:

    ; GCN-LABEL: name: fold_s_brev_b32_simm_virtual_1
    ; GCN: [[S_BREV_B32_:%[0-9]+]]:sreg_32 = S_BREV_B32 -64
    ; GCN-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 67108863
    ; GCN-NEXT: SI_RETURN_TO_EPILOG
    %0:sreg_32 = S_BREV_B32 -64
    %1:sreg_32 = COPY killed %0
    SI_RETURN_TO_EPILOG

...
---
name: fold_v_bfrev_b32_e32_imm
body: |
  bb.0:

    ; GCN-LABEL: name: fold_v_bfrev_b32_e32_imm
    ; GCN: [[V_BFREV_B32_e32_:%[0-9]+]]:vgpr_32 = V_BFREV_B32_e32 1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -2147483648, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:vgpr_32 = V_BFREV_B32_e32 1, implicit $exec
    %1:vgpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_v_bfrev_b32_e64_imm
body: |
  bb.0:

    ; GCN-LABEL: name: fold_v_bfrev_b32_e64_imm
    ; GCN: [[V_BFREV_B32_e64_:%[0-9]+]]:vgpr_32 = V_BFREV_B32_e64 1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -2147483648, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:vgpr_32 = V_BFREV_B32_e64 1, implicit $exec
    %1:vgpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_s_not_b32_simm_virtual_0
body: |
  bb.0:

    ; GCN-LABEL: name: fold_s_not_b32_simm_virtual_0
    ; GCN: [[S_NOT_B32_:%[0-9]+]]:sreg_32 = S_NOT_B32 1, implicit-def $scc
    ; GCN-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 -2
    ; GCN-NEXT: SI_RETURN_TO_EPILOG
    %0:sreg_32 = S_NOT_B32 1, implicit-def $scc
    %1:sreg_32 = COPY killed %0
    SI_RETURN_TO_EPILOG

...
---
name: fold_s_not_b32_simm_virtual_1
body: |
  bb.0:

    ; GCN-LABEL: name: fold_s_not_b32_simm_virtual_1
    ; GCN: [[S_NOT_B32_:%[0-9]+]]:sreg_32 = S_NOT_B32 -64, implicit-def $scc
    ; GCN-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 63
    ; GCN-NEXT: SI_RETURN_TO_EPILOG
    %0:sreg_32 = S_NOT_B32 -64, implicit-def $scc
    %1:sreg_32 = COPY killed %0
    SI_RETURN_TO_EPILOG

...
---
name: fold_v_not_b32_e32_imm
body: |
  bb.0:

    ; GCN-LABEL: name: fold_v_not_b32_e32_imm
    ; GCN: [[V_NOT_B32_e32_:%[0-9]+]]:vgpr_32 = V_NOT_B32_e32 1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -2, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:vgpr_32 = V_NOT_B32_e32 1, implicit $exec
    %1:vgpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_v_not_b32_e64_imm
body: |
  bb.0:

    ; GCN-LABEL: name: fold_v_not_b32_e64_imm
    ; GCN: [[V_NOT_B32_e64_:%[0-9]+]]:vgpr_32 = V_NOT_B32_e64 1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -2, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
    %0:vgpr_32 = V_NOT_B32_e64 1, implicit $exec
    %1:vgpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG %1

...
---
name: fmac_sreg_64_src0_to_fmamk_f64
tracksRegLiveness: true
body: |
  bb.0:

    ; GFX942-LABEL: name: fmac_sreg_64_src0_to_fmamk_f64
    ; GFX942: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    ; GFX942-NEXT: [[V_FMAC_F64_e64_:%[0-9]+]]:vreg_64_align2 = V_FMAC_F64_e64 0, [[S_MOV_B]], 0, [[DEF]], 0, [[DEF1]], 0, 0, implicit $mode, implicit $exec
    ; GFX942-NEXT: SI_RETURN_TO_EPILOG [[V_FMAC_F64_e64_]]
    ;
    ; GFX1250-LABEL: name: fmac_sreg_64_src0_to_fmamk_f64
    ; GFX1250: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[V_FMAMK_F64_:%[0-9]+]]:vreg_64_align2 = V_FMAMK_F64 [[DEF]], 1311768467750121200, [[DEF1]], implicit $mode, implicit $exec
    ; GFX1250-NEXT: SI_RETURN_TO_EPILOG [[V_FMAMK_F64_]]
    %0:vreg_64_align2 = IMPLICIT_DEF
    %1:vreg_64_align2 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vreg_64_align2 = V_FMAC_F64_e64 0, %2, 0, %0, 0, %1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fmac_sreg_64_src1_to_fmamk_f64
tracksRegLiveness: true
body: |
  bb.0:

    ; GCN-LABEL: name: fmac_sreg_64_src1_to_fmamk_f64
    ; GCN: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GCN-NEXT: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    ; GCN-NEXT: [[V_FMAC_F64_e64_:%[0-9]+]]:vreg_64_align2 = V_FMAC_F64_e64 0, [[DEF]], 0, [[DEF1]], 0, [[DEF1]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_FMAC_F64_e64_]]
    %0:vreg_64_align2 = IMPLICIT_DEF
    %1:vreg_64_align2 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vreg_64_align2 = V_FMAC_F64_e64 0, %0, 0, %1, 0, %1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fmac_vreg_64_to_fmaak_f64
tracksRegLiveness: true
body: |
  bb.0:

    ; GFX942-LABEL: name: fmac_vreg_64_to_fmaak_f64
    ; GFX942: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    ; GFX942-NEXT: [[V_FMAC_F64_e64_:%[0-9]+]]:vreg_64_align2 = V_FMAC_F64_e64 0, [[DEF]], 0, [[DEF1]], 0, [[V_MOV_B]], 0, 0, implicit $mode, implicit $exec
    ; GFX942-NEXT: SI_RETURN_TO_EPILOG [[V_FMAC_F64_e64_]]
    ;
    ; GFX1250-LABEL: name: fmac_vreg_64_to_fmaak_f64
    ; GFX1250: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[V_FMAAK_F64_:%[0-9]+]]:vreg_64_align2 = V_FMAAK_F64 [[DEF]], [[DEF1]], 1311768467750121200, implicit $mode, implicit $exec
    ; GFX1250-NEXT: SI_RETURN_TO_EPILOG [[V_FMAAK_F64_]]
    %0:vreg_64_align2 = IMPLICIT_DEF
    %1:vreg_64_align2 = IMPLICIT_DEF
    %2:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    %3:vreg_64_align2 = V_FMAC_F64_e64 0, %0, 0, %1, 0, %2, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fma_sreg_64_src0_to_fmamk_f64
tracksRegLiveness: true
body: |
  bb.0:

    ; GFX942-LABEL: name: fma_sreg_64_src0_to_fmamk_f64
    ; GFX942: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    ; GFX942-NEXT: [[V_FMA_F64_e64_:%[0-9]+]]:vreg_64_align2 = V_FMA_F64_e64 0, [[S_MOV_B]], 0, [[DEF]], 0, [[DEF1]], 0, 0, implicit $mode, implicit $exec
    ; GFX942-NEXT: SI_RETURN_TO_EPILOG [[V_FMA_F64_e64_]]
    ;
    ; GFX1250-LABEL: name: fma_sreg_64_src0_to_fmamk_f64
    ; GFX1250: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[V_FMAMK_F64_:%[0-9]+]]:vreg_64_align2 = V_FMAMK_F64 [[DEF]], 1311768467750121200, [[DEF1]], implicit $mode, implicit $exec
    ; GFX1250-NEXT: SI_RETURN_TO_EPILOG [[V_FMAMK_F64_]]
    %0:vreg_64_align2 = IMPLICIT_DEF
    %1:vreg_64_align2 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vreg_64_align2 = V_FMA_F64_e64 0, %2, 0, %0, 0, %1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fma_sreg_64_src1_to_fmamk_f64
tracksRegLiveness: true
body: |
  bb.0:

    ; GCN-LABEL: name: fma_sreg_64_src1_to_fmamk_f64
    ; GCN: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GCN-NEXT: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    ; GCN-NEXT: [[V_FMA_F64_e64_:%[0-9]+]]:vreg_64_align2 = V_FMA_F64_e64 0, [[DEF]], 0, [[DEF1]], 0, [[DEF1]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_FMA_F64_e64_]]
    %0:vreg_64_align2 = IMPLICIT_DEF
    %1:vreg_64_align2 = IMPLICIT_DEF
    %2:sreg_64 = S_MOV_B64_IMM_PSEUDO 1311768467750121200
    %3:vreg_64_align2 = V_FMA_F64_e64 0, %0, 0, %1, 0, %1, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fma_vreg_64_to_fmaak_f64
tracksRegLiveness: true
body: |
  bb.0:

    ; GFX942-LABEL: name: fma_vreg_64_to_fmaak_f64
    ; GFX942: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX942-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    ; GFX942-NEXT: [[V_FMA_F64_e64_:%[0-9]+]]:vreg_64_align2 = V_FMA_F64_e64 0, [[DEF]], 0, [[DEF1]], 0, [[V_MOV_B]], 0, 0, implicit $mode, implicit $exec
    ; GFX942-NEXT: SI_RETURN_TO_EPILOG [[V_FMA_F64_e64_]]
    ;
    ; GFX1250-LABEL: name: fma_vreg_64_to_fmaak_f64
    ; GFX1250: [[DEF:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[DEF1:%[0-9]+]]:vreg_64_align2 = IMPLICIT_DEF
    ; GFX1250-NEXT: [[V_FMAAK_F64_:%[0-9]+]]:vreg_64_align2 = V_FMAAK_F64 [[DEF]], [[DEF1]], 1311768467750121200, implicit $mode, implicit $exec
    ; GFX1250-NEXT: SI_RETURN_TO_EPILOG [[V_FMAAK_F64_]]
    %0:vreg_64_align2 = IMPLICIT_DEF
    %1:vreg_64_align2 = IMPLICIT_DEF
    %2:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
    %3:vreg_64_align2 = V_FMA_F64_e64 0, %0, 0, %1, 0, %2, 0, 0, implicit $mode, implicit $exec
    SI_RETURN_TO_EPILOG %3
...
---
name: fold_v_mov_b32_e32_literal_to_agpr
body: |
  bb.0:
    ; GCN-LABEL: name: fold_v_mov_b32_e32_literal_to_agpr
    ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 999, implicit $exec
    ; GCN-NEXT: [[COPY:%[0-9]+]]:agpr_32 = COPY killed [[V_MOV_B32_e32_]]
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[COPY]]
    %0:vgpr_32 = V_MOV_B32_e32 999, implicit $exec
    %1:agpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_v_mov_b32_e32_inlineimm_to_agpr
body: |
  bb.0:
    ; GCN-LABEL: name: fold_v_mov_b32_e32_inlineimm_to_agpr
    ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 64, implicit $exec
    ; GCN-NEXT: [[V_ACCVGPR_WRITE_B32_e64_:%[0-9]+]]:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_ACCVGPR_WRITE_B32_e64_]]
    %0:vgpr_32 = V_MOV_B32_e32 64, implicit $exec
    %1:agpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_av_mov_b32_imm_pseudo_inlineimm_to_vgpr
body: |
  bb.0:
    ; GCN-LABEL: name: fold_av_mov_b32_imm_pseudo_inlineimm_to_vgpr
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_32 = AV_MOV_B32_IMM_PSEUDO 64, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B32_e32_]]
    %0:av_32 = AV_MOV_B32_IMM_PSEUDO 64, implicit $exec
    %1:vgpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_av_mov_b32_imm_pseudo_inlineimm_to_agpr
body: |
  bb.0:
    ; GCN-LABEL: name: fold_av_mov_b32_imm_pseudo_inlineimm_to_agpr
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_32 = AV_MOV_B32_IMM_PSEUDO 64, implicit $exec
    ; GCN-NEXT: [[V_ACCVGPR_WRITE_B32_e64_:%[0-9]+]]:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_ACCVGPR_WRITE_B32_e64_]]
    %0:av_32 = AV_MOV_B32_IMM_PSEUDO 64, implicit $exec
    %1:agpr_32 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_av_mov_b32_imm_pseudo_inlineimm_to_av
body: |
  bb.0:
    ; GCN-LABEL: name: fold_av_mov_b32_imm_pseudo_inlineimm_to_av
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_32 = AV_MOV_B32_IMM_PSEUDO 64, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B32_e32_]]
    %0:av_32 = AV_MOV_B32_IMM_PSEUDO 64, implicit $exec
    %1:av_32 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_av_mov_b64_imm_pseudo_inlineimm_to_vgpr
body: |
  bb.0:
    ; GCN-LABEL: name: fold_av_mov_b64_imm_pseudo_inlineimm_to_vgpr
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_64_align2 = AV_MOV_B64_IMM_PSEUDO 64, implicit $exec
    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B]]
    %0:av_64_align2 = AV_MOV_B64_IMM_PSEUDO 64, implicit $exec
    %1:vreg_64_align2 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_av_mov_b64_imm_pseudo_inlineimm_to_agpr
body: |
  bb.0:
    ; GCN-LABEL: name: fold_av_mov_b64_imm_pseudo_inlineimm_to_agpr
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_64_align2 = AV_MOV_B64_IMM_PSEUDO 64, implicit $exec
    ; GCN-NEXT: [[COPY:%[0-9]+]]:areg_64_align2 = COPY killed [[AV_MOV_]]
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[COPY]]
    %0:av_64_align2 = AV_MOV_B64_IMM_PSEUDO 64, implicit $exec
    %1:areg_64_align2 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_av_mov_b64_imm_pseudo_inlineimm_to_av
body: |
  bb.0:
    ; GCN-LABEL: name: fold_av_mov_b64_imm_pseudo_inlineimm_to_av
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_64_align2 = AV_MOV_B64_IMM_PSEUDO 64, implicit $exec
    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 64, implicit $exec
    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B]]
    %0:av_64_align2 = AV_MOV_B64_IMM_PSEUDO 64, implicit $exec
    %1:av_64_align2 = COPY killed %0
    SI_RETURN_TO_EPILOG implicit %1
...
---
name: fold_simm_16_sub_to_lo_from_mov_64_virt_sgpr16
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_16_sub_to_lo_from_mov_64_virt_sgpr16
    ; GCN: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 64
    ; GCN-NEXT: [[COPY:%[0-9]+]]:sgpr_lo16 = COPY killed [[S_MOV_B64_]].lo16
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
    %0:sreg_64 = S_MOV_B64 64
    %1:sgpr_lo16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_simm_16_sub_to_hi_from_mov_64_inline_imm_virt_sgpr16
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_16_sub_to_hi_from_mov_64_inline_imm_virt_sgpr16
    ; GCN: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 64
    ; GCN-NEXT: [[COPY:%[0-9]+]]:sgpr_lo16 = COPY killed [[S_MOV_B64_]].hi16
    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
    %0:sreg_64 = S_MOV_B64 64
    %1:sgpr_lo16 = COPY killed %0.hi16
    SI_RETURN_TO_EPILOG %1

...
---
name: fold_simm_16_sub_to_lo_from_mov_64_phys_sgpr16_lo
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_16_sub_to_lo_from_mov_64_phys_sgpr16_lo
    ; GCN: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 64
    ; GCN-NEXT: $sgpr0 = S_MOV_B32 64
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $sgpr0_lo16
    %0:sreg_64 = S_MOV_B64 64
    $sgpr0_lo16 = COPY killed %0.lo16
    SI_RETURN_TO_EPILOG $sgpr0_lo16

...
---
name: fold_simm_16_sub_to_hi_from_mov_64_inline_imm_phys_sgpr16_lo
body: |
  bb.0:

    ; GCN-LABEL: name: fold_simm_16_sub_to_hi_from_mov_64_inline_imm_phys_sgpr16_lo
    ; GCN: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 64
    ; GCN-NEXT: $sgpr0 = S_MOV_B32 0
    ; GCN-NEXT: SI_RETURN_TO_EPILOG $sgpr0_lo16
    %0:sreg_64 = S_MOV_B64 64
    $sgpr0_lo16 = COPY killed %0.hi16
    SI_RETURN_TO_EPILOG $sgpr0_lo16

...