# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 5
# RUN: llc -mtriple=amdgcn -run-pass si-fold-operands -verify-machineinstrs %s -o - | FileCheck -check-prefix=GCN %s

---
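# The shift amount (2) and the mask (65535) are defined by S_MOV_B32; both
# should end up folded as immediates into the VALU users (V_LSHLREV_B32 and
# V_AND_B32).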
name: fold-imm-copy
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0, $sgpr0_sgpr1
    ; GCN-LABEL: name: fold-imm-copy
    ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; GCN-NEXT: [[COPY1:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
    ; GCN-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM [[COPY1]], 9, 0
    ; GCN-NEXT: [[V_LSHLREV_B32_e64_:%[0-9]+]]:vgpr_32 = V_LSHLREV_B32_e64 2, [[COPY]], implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE killed [[V_LSHLREV_B32_e64_]], %subreg.sub0, killed [[V_MOV_B32_e32_]], %subreg.sub1
    ; GCN-NEXT: [[BUFFER_LOAD_DWORD_ADDR64_:%[0-9]+]]:vgpr_32 = BUFFER_LOAD_DWORD_ADDR64 [[REG_SEQUENCE]], [[S_LOAD_DWORDX4_IMM]], 0, 4, 0, 0, implicit $exec
    ; GCN-NEXT: [[V_AND_B32_e32_:%[0-9]+]]:vgpr_32 = V_AND_B32_e32 65535, [[BUFFER_LOAD_DWORD_ADDR64_]], implicit $exec
    %0:vgpr_32 = COPY $vgpr0
    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %2:sgpr_128 = S_LOAD_DWORDX4_IMM %1, 9, 0
    %3:sreg_32_xm0 = S_MOV_B32 2
    %4:vgpr_32 = V_LSHLREV_B32_e64 killed %3, %0, implicit $exec
    %5:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %6:vreg_64 = REG_SEQUENCE killed %4, %subreg.sub0, killed %5, %subreg.sub1
    %7:vgpr_32 = BUFFER_LOAD_DWORD_ADDR64 %6, %2, 0, 4, 0, 0, implicit $exec
    %8:sreg_32_xm0 = S_MOV_B32 65535
    %9:vgpr_32 = COPY %8
    %10:vgpr_32 = V_AND_B32_e32 %7, %9, implicit $exec
...

---
# The first XOR needs commuting to fold that immediate operand.
name: no_extra_fold_on_same_opnd
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: no_extra_fold_on_same_opnd
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[DEF2:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE killed [[DEF]], %subreg.sub0, killed [[V_MOV_B32_e32_]], %subreg.sub1
    ; GCN-NEXT: [[V_XOR_B32_e32_:%[0-9]+]]:vgpr_32 = V_XOR_B32_e32 0, [[DEF1]], implicit $exec
    ; GCN-NEXT: [[V_XOR_B32_e32_1:%[0-9]+]]:vgpr_32 = V_XOR_B32_e32 [[DEF2]], [[REG_SEQUENCE]].sub0, implicit $exec
    %0:vgpr_32 = IMPLICIT_DEF
    %1:vgpr_32 = IMPLICIT_DEF
    %2:vgpr_32 = IMPLICIT_DEF
    %3:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %4:vreg_64 = REG_SEQUENCE killed %0, %subreg.sub0, killed %3, %subreg.sub1
    %5:vgpr_32 = V_XOR_B32_e32 %1, %4.sub1, implicit $exec
    %6:vgpr_32 = V_XOR_B32_e32 %2, %4.sub0, implicit $exec
...

---

# Make sure the subreg index is not reinterpreted when folding
# immediates
#
name: clear_subreg_imm_fold
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: clear_subreg_imm_fold
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sgpr_32 = S_MOV_B32 4294967288
    ; GCN-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sgpr_32 = S_MOV_B32 4294967295
    ; GCN-NEXT: S_ENDPGM 0, implicit [[S_MOV_B32_]], implicit [[S_MOV_B32_1]]
    %0:sreg_64 = S_MOV_B64 -8
    %1:sgpr_32 = COPY %0.sub0
    %2:sgpr_32 = COPY %0.sub1
    S_ENDPGM 0, implicit %1, implicit %2

...

---
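# -8 is an inline constant, but it should not be folded into the copy to $m0.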
name: no_fold_imm_into_m0
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: no_fold_imm_into_m0
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 -8
    ; GCN-NEXT: $m0 = COPY [[S_MOV_B32_]]
    ; GCN-NEXT: S_ENDPGM 0, implicit $m0
    %0:sreg_32 = S_MOV_B32 -8
    $m0 = COPY %0
    S_ENDPGM 0, implicit $m0

...

---
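# The same inline constant is folded across the copy to $vgpr0, so the physreg
# is defined directly by a V_MOV_B32.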
name: fold_sgpr_imm_to_vgpr_copy
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: fold_sgpr_imm_to_vgpr_copy
    ; GCN: $vgpr0 = V_MOV_B32_e32 -8, implicit $exec
    ; GCN-NEXT: S_ENDPGM 0, implicit $vgpr0
    %0:sreg_32 = S_MOV_B32 -8
    $vgpr0 = COPY %0
    S_ENDPGM 0, implicit $vgpr0

...

# The users of $vgpr1 should not be visited for further immediate
# folding.

---
name: no_fold_physreg_users_vgpr
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: no_fold_physreg_users_vgpr
    ; GCN: $vgpr1 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: S_NOP 0, implicit-def $vgpr1
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1
    ; GCN-NEXT: $vgpr2 = COPY [[COPY]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:sreg_32 = S_MOV_B32 0
    %1:vgpr_32 = COPY %0
    $vgpr1 = COPY %0
    S_NOP 0, implicit-def $vgpr1
    %2:vgpr_32 = COPY $vgpr1
    $vgpr2 = COPY %2
    S_ENDPGM 0

...

---
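# An AV_MOV_B32_IMM_PSEUDO copied to a physical register is left alone, both
# for an AGPR destination (this test) and a VGPR destination (the next one).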
name: av_mov_b32_imm_pseudo_copy_av_32_to_physreg_agpr
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: av_mov_b32_imm_pseudo_copy_av_32_to_physreg_agpr
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_32 = AV_MOV_B32_IMM_PSEUDO 0, implicit $exec
    ; GCN-NEXT: $agpr0 = COPY [[AV_MOV_]]
    ; GCN-NEXT: S_ENDPGM 0, implicit $agpr0
    %0:av_32 = AV_MOV_B32_IMM_PSEUDO 0, implicit $exec
    $agpr0 = COPY %0
    S_ENDPGM 0, implicit $agpr0

...

---
name: av_mov_b32_imm_pseudo_copy_av_32_to_physreg_vgpr
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: av_mov_b32_imm_pseudo_copy_av_32_to_physreg_vgpr
    ; GCN: [[AV_MOV_:%[0-9]+]]:av_32 = AV_MOV_B32_IMM_PSEUDO 0, implicit $exec
    ; GCN-NEXT: $vgpr0 = COPY [[AV_MOV_]]
    ; GCN-NEXT: S_ENDPGM 0, implicit $vgpr0
    %0:av_32 = AV_MOV_B32_IMM_PSEUDO 0, implicit $exec
    $vgpr0 = COPY %0
    S_ENDPGM 0, implicit $vgpr0

...

---
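# Copying the pseudo to a virtual AGPR folds the immediate into a
# V_ACCVGPR_WRITE_B32_e64.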
name: av_mov_b32_imm_pseudo_copy_av_32_to_virtreg_agpr
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: av_mov_b32_imm_pseudo_copy_av_32_to_virtreg_agpr
    ; GCN: [[V_ACCVGPR_WRITE_B32_e64_:%[0-9]+]]:agpr_32 = V_ACCVGPR_WRITE_B32_e64 0, implicit $exec
    ; GCN-NEXT: S_ENDPGM 0, implicit [[V_ACCVGPR_WRITE_B32_e64_]]
    %0:av_32 = AV_MOV_B32_IMM_PSEUDO 0, implicit $exec
    %1:agpr_32 = COPY %0
    S_ENDPGM 0, implicit %1

...

---
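# Copying it to a virtual VGPR folds the immediate into a plain V_MOV_B32_e32.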
name: av_mov_b32_imm_pseudo_copy_av_32_to_virtreg_vgpr
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: av_mov_b32_imm_pseudo_copy_av_32_to_virtreg_vgpr
    ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: S_ENDPGM 0, implicit [[V_MOV_B32_e32_]]
    %0:av_32 = AV_MOV_B32_IMM_PSEUDO 0, implicit $exec
    %1:vgpr_32 = COPY %0
    S_ENDPGM 0, implicit %1

...

---
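# 999 is a literal, not an inline constant, so nothing is folded along the
# copy chain to the AGPR.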
name: v_mov_b32_imm_literal_copy_v_to_agpr_32
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: v_mov_b32_imm_literal_copy_v_to_agpr_32
    ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 999, implicit $exec
    ; GCN-NEXT: [[COPY:%[0-9]+]]:agpr_32 = COPY [[V_MOV_B32_e32_]]
    ; GCN-NEXT: $agpr0 = COPY [[COPY]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vgpr_32 = V_MOV_B32_e32 999, implicit $exec
    %1:agpr_32 = COPY %0
    $agpr0 = COPY %1
    S_ENDPGM 0

...

---
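# In the next four tests the source is an S_MOV/V_MOV of an inline constant
# (32) or a literal (999); in all of them the copy into the av_32 class is not
# folded.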
name: s_mov_b32_inlineimm_copy_s_to_av_32
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: s_mov_b32_inlineimm_copy_s_to_av_32
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 32
    ; GCN-NEXT: [[COPY:%[0-9]+]]:av_32 = COPY [[S_MOV_B32_]]
    ; GCN-NEXT: $agpr0 = COPY [[COPY]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:sreg_32 = S_MOV_B32 32
    %1:av_32 = COPY %0
    $agpr0 = COPY %1
    S_ENDPGM 0

...

---
name: v_mov_b32_inlineimm_copy_v_to_av_32
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: v_mov_b32_inlineimm_copy_v_to_av_32
    ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 32, implicit $exec
    ; GCN-NEXT: [[COPY:%[0-9]+]]:av_32 = COPY [[V_MOV_B32_e32_]]
    ; GCN-NEXT: $agpr0 = COPY [[COPY]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vgpr_32 = V_MOV_B32_e32 32, implicit $exec
    %1:av_32 = COPY %0
    $agpr0 = COPY %1
    S_ENDPGM 0

...

---
name: s_mov_b32_imm_literal_copy_s_to_av_32
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: s_mov_b32_imm_literal_copy_s_to_av_32
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 999
    ; GCN-NEXT: [[COPY:%[0-9]+]]:av_32 = COPY [[S_MOV_B32_]]
    ; GCN-NEXT: $agpr0 = COPY [[COPY]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:sreg_32 = S_MOV_B32 999
    %1:av_32 = COPY %0
    $agpr0 = COPY %1
    S_ENDPGM 0

...

---
name: v_mov_b32_imm_literal_copy_v_to_av_32
tracksRegLiveness: true
body: |
  bb.0:
    ; GCN-LABEL: name: v_mov_b32_imm_literal_copy_v_to_av_32
    ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 999, implicit $exec
    ; GCN-NEXT: [[COPY:%[0-9]+]]:av_32 = COPY [[V_MOV_B32_e32_]]
    ; GCN-NEXT: $agpr0 = COPY [[COPY]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vgpr_32 = V_MOV_B32_e32 999, implicit $exec
    %1:av_32 = COPY %0
    $agpr0 = COPY %1
    S_ENDPGM 0

...

---
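# A full copy of V_MOV_B64_PSEUDO 0 feeding an f64 add: 0 is a 64-bit inline
# constant, so it folds straight into the V_ADD_F64 operand.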
name: v_mov_b64_pseudo_imm_0_full_copy_to_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1
    ; GCN-LABEL: name: v_mov_b64_pseudo_imm_0_full_copy_to_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, 0, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 0, implicit $exec
    %2:vreg_64_align2 = COPY %1
    %3:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %3
    S_ENDPGM 0

...

---
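# fp64 1.0 (0x3ff0000000000000) is also an inline constant for f64 operands
# and folds the same way.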
name: v_mov_b64_pseudo_imm_fp64_1_full_copy_to_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1
    ; GCN-LABEL: name: v_mov_b64_pseudo_imm_fp64_1_full_copy_to_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, 4607182418800017408, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 4607182418800017408, implicit $exec
    %2:vreg_64_align2 = COPY %1
    %3:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %3
    S_ENDPGM 0

...

---
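# An arbitrary 64-bit literal is not an inline constant; the V_MOV_B64_PSEUDO
# stays and the add keeps using it by register.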
name: v_mov_b64_pseudo_lit_full_copy_to_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1
    ; GCN-LABEL: name: v_mov_b64_pseudo_lit_full_copy_to_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 4290672329592, implicit $exec
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[V_MOV_B]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 4290672329592, implicit $exec
    %2:vreg_64_align2 = COPY %1
    %3:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %3
    S_ENDPGM 0

...

---
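# The fp32 1.0 splat (0x3f800000 in both halves) is not an f64 inline
# constant, so nothing folds into the 64-bit add.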
name: v_mov_b64_pseudo_imm_fp32_1_splat_full_copy_to_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1
    ; GCN-LABEL: name: v_mov_b64_pseudo_imm_fp32_1_splat_full_copy_to_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 4575657222473777152, implicit $exec
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[V_MOV_B]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 4575657222473777152, implicit $exec
    %2:vreg_64_align2 = COPY %1
    %3:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %3
    S_ENDPGM 0

...

---
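# Extracting either half of the fp32 1.0 splat yields 0x3f800000 (1065353216),
# which is an inline constant for the f32 add and folds in this test and the
# next one.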
name: v_mov_b64_pseudo_imm_fp32_1_splat_copy_extract_sub0_to_f32_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0
    ; GCN-LABEL: name: v_mov_b64_pseudo_imm_fp32_1_splat_copy_extract_sub0_to_f32_use
    ; GCN: liveins: $vgpr0
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; GCN-NEXT: [[V_ADD_F32_e64_:%[0-9]+]]:vgpr_32 = nofpexcept V_ADD_F32_e64 0, [[COPY]], 1, 1065353216, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0 = COPY [[V_ADD_F32_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vgpr_32 = COPY $vgpr0
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 4575657222473777152, implicit $exec
    %2:vgpr_32 = COPY %1.sub0
    %3:vgpr_32 = nofpexcept V_ADD_F32_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0 = COPY %3
    S_ENDPGM 0

...

---
name: v_mov_b64_pseudo_imm_fp32_1_splat_copy_extract_sub1_to_f32_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0
    ; GCN-LABEL: name: v_mov_b64_pseudo_imm_fp32_1_splat_copy_extract_sub1_to_f32_use
    ; GCN: liveins: $vgpr0
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; GCN-NEXT: [[V_ADD_F32_e64_:%[0-9]+]]:vgpr_32 = nofpexcept V_ADD_F32_e64 0, [[COPY]], 1, 1065353216, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0 = COPY [[V_ADD_F32_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vgpr_32 = COPY $vgpr0
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 4575657222473777152, implicit $exec
    %2:vgpr_32 = COPY %1.sub1
    %3:vgpr_32 = nofpexcept V_ADD_F32_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0 = COPY %3
    S_ENDPGM 0

...

---
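# The low half of fp64 1.0 is 0, which folds into the f32 add as an inline
# constant.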
name: v_mov_b64_pseudo_imm_fp64_1_copy_extract_sub0_to_f32_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0
    ; GCN-LABEL: name: v_mov_b64_pseudo_imm_fp64_1_copy_extract_sub0_to_f32_use
    ; GCN: liveins: $vgpr0
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; GCN-NEXT: [[V_ADD_F32_e64_:%[0-9]+]]:vgpr_32 = nofpexcept V_ADD_F32_e64 0, [[COPY]], 1, 0, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0 = COPY [[V_ADD_F32_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vgpr_32 = COPY $vgpr0
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 4607182418800017408, implicit $exec
    %2:vgpr_32 = COPY %1.sub0
    %3:vgpr_32 = nofpexcept V_ADD_F32_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0 = COPY %3
    S_ENDPGM 0

...

---
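# The high half of fp64 1.0 is 0x3ff00000 (1072693248), which is not an f32
# inline constant; the expected output materializes it in a V_MOV_B32 that the
# add reads.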
name: v_mov_b64_pseudo_imm_fp64_1_copy_extract_sub1_to_f32_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0
    ; GCN-LABEL: name: v_mov_b64_pseudo_imm_fp64_1_copy_extract_sub1_to_f32_use
    ; GCN: liveins: $vgpr0
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    ; GCN-NEXT: [[V_ADD_F32_e64_:%[0-9]+]]:vgpr_32 = nofpexcept V_ADD_F32_e64 0, [[COPY]], 1, [[V_MOV_B32_e32_]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0 = COPY [[V_ADD_F32_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vgpr_32 = COPY $vgpr0
    %1:vreg_64_align2 = V_MOV_B64_PSEUDO 4607182418800017408, implicit $exec
    %2:vgpr_32 = COPY %1.sub1
    %3:vgpr_32 = nofpexcept V_ADD_F32_e64 0, %0, 1, %2, 0, 0, implicit $mode, implicit $exec
    $vgpr0 = COPY %3
    S_ENDPGM 0

...

---
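# A REG_SEQUENCE built from three zero movs: the sub0_sub1 use folds to the
# 64-bit inline constant 0.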
name: reg_sequence_3x_0_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_3x_0_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_96_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_2]], %subreg.sub2
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, 0, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %3:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %4:vreg_96_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %3, %subreg.sub2
    %5:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %4.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %5
    S_ENDPGM 0

...

---
name: reg_sequence_3x_neg1_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_3x_neg1_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_96_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_2]], %subreg.sub2
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, -1, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    %3:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    %4:vreg_96_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %3, %subreg.sub2
    %5:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %4.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %5
    S_ENDPGM 0

...

---
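# A splat of 1s gives 0x0000000100000001 for sub0_sub1, which is not an f64
# inline constant, so the REG_SEQUENCE use is kept.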
name: reg_sequence_3x_1_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_3x_1_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_96_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_2]], %subreg.sub2
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]].sub0_sub1, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    %3:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    %4:vreg_96_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %3, %subreg.sub2
    %5:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %4.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %5
    S_ENDPGM 0

...

---
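# sub0 and sub1 are both 0 here, but the mixed 0/0/1 sequence is not folded;
# the REG_SEQUENCE use is kept.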
name: reg_sequence_0_0_1_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_0_0_1_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_96_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_2]], %subreg.sub2
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]].sub0_sub1, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %3:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    %4:vreg_96_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %3, %subreg.sub2
    %5:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %4.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %5
    S_ENDPGM 0

...

---
name: reg_sequence_neg1_neg1_0_f64_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_neg1_neg1_0_f64_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_96_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_2]], %subreg.sub2
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]].sub0_sub1, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    %3:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %4:vreg_96_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %3, %subreg.sub2
    %5:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %4.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %5
    S_ENDPGM 0

...

---
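# A two-element REG_SEQUENCE whose halves together form fp64 1.0; built from
# two different movs, the value is not folded and the sequence still feeds the
# add. The remaining tests vary the sub-register placement, REG_SEQUENCE
# operand order, and register classes, and none of them are folded either.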
name: reg_sequence_split_f64_1_use
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_split_f64_1_use
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    %3:vreg_64_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1
    %4:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %3, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %4
    S_ENDPGM 0

...

---
name: reg_sequence_split_f64_1_use_wrong_order
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_split_f64_1_use_wrong_order
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %3:vreg_64_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1
    %4:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %3, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %4
    S_ENDPGM 0

...

---
name: reg_sequence_split_f64_1_use_backwards_sequence
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_split_f64_1_use_backwards_sequence
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64_align2 = REG_SEQUENCE [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_]], %subreg.sub0
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]], 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    %3:vreg_64_align2 = REG_SEQUENCE %2, %subreg.sub1, %1, %subreg.sub0
    %4:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %3, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %4
    S_ENDPGM 0

...

---
name: reg_sequence_split_x2_f64_1_use_0
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_split_x2_f64_1_use_0
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_128_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_]], %subreg.sub2, [[V_MOV_B32_e32_1]], %subreg.sub3
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]].sub0_sub1, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    %3:vreg_128_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %1, %subreg.sub2, %2, %subreg.sub3
    %4:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %3.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %4
    S_ENDPGM 0

...

---
name: reg_sequence_split_x2_f64_1_use_1
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_split_x2_f64_1_use_1
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_128_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_]], %subreg.sub2, [[V_MOV_B32_e32_1]], %subreg.sub3
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]].sub1_sub2, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    %3:vreg_128_align2 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %1, %subreg.sub2, %2, %subreg.sub3
    %4:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %3.sub1_sub2, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %4
    S_ENDPGM 0

...

---
name: reg_sequence_split_x2_f64_1_use_0_sgpr
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_split_x2_f64_1_use_0_sgpr
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 0
    ; GCN-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sreg_32 = S_MOV_B32 1072693248
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:sgpr_128 = REG_SEQUENCE [[S_MOV_B32_]], %subreg.sub0, [[S_MOV_B32_1]], %subreg.sub1, [[S_MOV_B32_]], %subreg.sub2, [[S_MOV_B32_1]], %subreg.sub3
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]].sub0_sub1, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:sreg_32 = S_MOV_B32 0
    %2:sreg_32 = S_MOV_B32 1072693248
    %3:sgpr_128 = REG_SEQUENCE %1, %subreg.sub0, %2, %subreg.sub1, %1, %subreg.sub2, %2, %subreg.sub3
    %4:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %3.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %4
    S_ENDPGM 0

...

---
name: reg_sequence_split_x2_f64_1_use_nonconsecutive_reg_sequence
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0_vgpr1

    ; GCN-LABEL: name: reg_sequence_split_x2_f64_1_use_nonconsecutive_reg_sequence
    ; GCN: liveins: $vgpr0_vgpr1
    ; GCN-NEXT: {{ $}}
    ; GCN-NEXT: [[COPY:%[0-9]+]]:vreg_64_align2 = COPY $vgpr0_vgpr1
    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    ; GCN-NEXT: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    ; GCN-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_128_align2 = REG_SEQUENCE [[V_MOV_B32_e32_]], %subreg.sub0, [[V_MOV_B32_e32_]], %subreg.sub2, [[V_MOV_B32_e32_1]], %subreg.sub1, [[V_MOV_B32_e32_1]], %subreg.sub3
    ; GCN-NEXT: [[V_ADD_F64_e64_:%[0-9]+]]:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, [[COPY]], 1, [[REG_SEQUENCE]].sub0_sub1, 0, 0, implicit $mode, implicit $exec
    ; GCN-NEXT: $vgpr0_vgpr1 = COPY [[V_ADD_F64_e64_]]
    ; GCN-NEXT: S_ENDPGM 0
    %0:vreg_64_align2 = COPY $vgpr0_vgpr1
    %1:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    %2:vgpr_32 = V_MOV_B32_e32 1072693248, implicit $exec
    %3:vreg_128_align2 = REG_SEQUENCE %1, %subreg.sub0, %1, %subreg.sub2, %2, %subreg.sub1, %2, %subreg.sub3
    %4:vreg_64_align2 = nofpexcept V_ADD_F64_e64 0, %0, 1, %3.sub0_sub1, 0, 0, implicit $mode, implicit $exec
    $vgpr0_vgpr1 = COPY %4
    S_ENDPGM 0

...