# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-amd-amdhsa -verify-machineinstrs -run-pass si-fold-operands,dead-mi-elimination %s -o - | FileCheck -check-prefix=GCN %s

---

# Uses a carry out in an instruction that can't be shrunk.

name: shrink_scalar_imm_vgpr_v_add_i32_e64_other_carry_out_use
tracksRegLiveness: true

body: |
  bb.0:
    ; GCN-LABEL: name: shrink_scalar_imm_vgpr_v_add_i32_e64_other_carry_out_use
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 12345
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN: [[V_ADD_I32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_I32_e32 [[S_MOV_B32_]], [[DEF]], implicit-def $vcc, implicit $exec
    ; GCN: [[COPY:%[0-9]+]]:sreg_64_xexec = COPY killed $vcc
    ; GCN: S_ENDPGM implicit [[COPY]]
    %0:sreg_32_xm0 = S_MOV_B32 12345
    %1:vgpr_32 = IMPLICIT_DEF
    %2:vgpr_32 = IMPLICIT_DEF
    %3:vgpr_32 = IMPLICIT_DEF

    %4:vgpr_32, %5:sreg_64_xexec = V_ADD_I32_e64 %0, %1, implicit $exec
    S_ENDPGM implicit %5

...
---

name: shrink_scalar_imm_multi_use_with_used_carry
tracksRegLiveness: true

body: |
  bb.0:
    ; GCN-LABEL: name: shrink_scalar_imm_multi_use_with_used_carry
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 12345
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN: [[V_ADD_I32_e64_:%[0-9]+]]:vgpr_32, [[V_ADD_I32_e64_1:%[0-9]+]]:sreg_64_xexec = V_ADD_I32_e64 [[S_MOV_B32_]], [[DEF]], implicit $exec
    ; GCN: [[V_ADD_I32_e64_2:%[0-9]+]]:vgpr_32, [[V_ADD_I32_e64_3:%[0-9]+]]:sreg_64_xexec = V_ADD_I32_e64 [[S_MOV_B32_]], [[DEF1]], implicit $exec
    ; GCN: S_ENDPGM implicit [[V_ADD_I32_e64_1]], implicit [[V_ADD_I32_e64_2]]
    %0:sreg_32_xm0 = S_MOV_B32 12345
    %1:vgpr_32 = IMPLICIT_DEF
    %2:vgpr_32 = IMPLICIT_DEF
    %3:vgpr_32 = IMPLICIT_DEF
    %4:vgpr_32 = IMPLICIT_DEF

    %5:vgpr_32, %6:sreg_64_xexec = V_ADD_I32_e64 %0, %1, implicit $exec
    %7:vgpr_32, %8:sreg_64_xexec = V_ADD_I32_e64 %0, %2, implicit $exec
    S_ENDPGM implicit %6, implicit %7

...
---

# TODO: Is it OK to leave the broken use around on the DBG_VALUE?

name: shrink_scalar_imm_vgpr_v_add_i32_e64_dbg_only_carry_out_use
tracksRegLiveness: true

body: |
  bb.0:
    ; GCN-LABEL: name: shrink_scalar_imm_vgpr_v_add_i32_e64_dbg_only_carry_out_use
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 12345
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN: [[V_ADD_I32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_I32_e32 [[S_MOV_B32_]], [[DEF]], implicit-def $vcc, implicit $exec
    ; GCN: DBG_VALUE %5:sreg_64_xexec, $noreg
    ; GCN: S_ENDPGM implicit [[V_ADD_I32_e32_]]
    %0:sreg_32_xm0 = S_MOV_B32 12345
    %1:vgpr_32 = IMPLICIT_DEF
    %2:vgpr_32 = IMPLICIT_DEF
    %3:vgpr_32 = IMPLICIT_DEF

    %4:vgpr_32, %5:sreg_64_xexec = V_ADD_I32_e64 %0, %1, implicit $exec
    DBG_VALUE %5, $noreg
    S_ENDPGM implicit %4

...

---

# Uses carry out in a normal pattern

name: shrink_scalar_imm_vgpr_v_add_i32_e64_carry_out_use
tracksRegLiveness: true

body: |
  bb.0:
    ; GCN-LABEL: name: shrink_scalar_imm_vgpr_v_add_i32_e64_carry_out_use
    ; GCN: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 12345
    ; GCN: [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN: [[DEF1:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN: [[DEF2:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
    ; GCN: [[V_ADD_I32_e32_:%[0-9]+]]:vgpr_32 = V_ADD_I32_e32 [[S_MOV_B32_]], [[DEF]], implicit-def $vcc, implicit $exec
    ; GCN: [[COPY:%[0-9]+]]:sreg_64_xexec = COPY killed $vcc
    ; GCN: [[V_ADDC_U32_e64_:%[0-9]+]]:vgpr_32, [[V_ADDC_U32_e64_1:%[0-9]+]]:sreg_64_xexec = V_ADDC_U32_e64 [[DEF1]], [[DEF2]], [[COPY]], implicit $exec
    ; GCN: S_ENDPGM implicit [[V_ADDC_U32_e64_]]
    %0:sreg_32_xm0 = S_MOV_B32 12345
    %1:vgpr_32 = IMPLICIT_DEF
    %2:vgpr_32 = IMPLICIT_DEF
    %3:vgpr_32 = IMPLICIT_DEF

    %4:vgpr_32, %5:sreg_64_xexec = V_ADD_I32_e64 %0, %1, implicit $exec
    %6:vgpr_32, %7:sreg_64_xexec = V_ADDC_U32_e64 %2, %3, %5, implicit $exec
    S_ENDPGM implicit %6

...