# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 5
# RUN: llc -mtriple=amdgcn--amdhsa -mcpu=gfx942 -run-pass=peephole-opt -o - %s | FileCheck %s
# Breaking mov of 64-bit inline immediate will increase instruction
# count.
---
# Both halves of an inline-immediate S_MOV_B64 are extracted; splitting the
# mov would add an instruction, so the peephole must leave it alone.
name: no_break_s_mov_b64_multi_use_copy_inline_imm_extract
body: |
  bb.0:
    ; CHECK-LABEL: name: no_break_s_mov_b64_multi_use_copy_inline_imm_extract
    ; CHECK: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY killed [[S_MOV_B64_]].sub0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY killed [[S_MOV_B64_]].sub1
    ; CHECK-NEXT: SI_RETURN_TO_EPILOG [[COPY]], [[COPY1]]
    %0:sreg_64 = S_MOV_B64 0
    %1:sreg_32 = COPY killed %0.sub0
    %2:sreg_32 = COPY killed %0.sub1
    SI_RETURN_TO_EPILOG %1, %2
...
---
# Vector variant: both halves of an inline-immediate V_MOV_B64_e64 are
# extracted, so the 64-bit mov must not be broken into two 32-bit movs.
name: no_break_v_mov_b64_multi_use_copy_inline_imm_extract
body: |
  bb.0:
    ; CHECK-LABEL: name: no_break_v_mov_b64_multi_use_copy_inline_imm_extract
    ; CHECK: [[V_MOV_B64_e64_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e64 0, implicit $exec
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY killed [[V_MOV_B64_e64_]].sub0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY killed [[V_MOV_B64_e64_]].sub1
    ; CHECK-NEXT: SI_RETURN_TO_EPILOG [[COPY]], [[COPY1]]
    %0:vreg_64_align2 = V_MOV_B64_e64 0, implicit $exec
    %1:vgpr_32 = COPY killed %0.sub0
    %2:vgpr_32 = COPY killed %0.sub1
    SI_RETURN_TO_EPILOG %1, %2
...
# The high half extract is an inline immediate in the use context, so
# this should fold despite multiple uses.
---
name: break_s_mov_b64_multi_use_copy_extract_use_is_inline_imm
body: |
  bb.0:
    ; CHECK-LABEL: name: break_s_mov_b64_multi_use_copy_extract_use_is_inline_imm
    ; CHECK: [[S_MOV_B:%[0-9]+]]:sreg_64 = S_MOV_B64_IMM_PSEUDO -96
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY killed [[S_MOV_B]].sub0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY killed [[S_MOV_B]].sub1
    ; CHECK-NEXT: SI_RETURN_TO_EPILOG [[COPY]], [[COPY1]]
    %0:sreg_64 = S_MOV_B64_IMM_PSEUDO -96
    %1:sreg_32 = COPY killed %0.sub0
    %2:sreg_32 = COPY killed %0.sub1
    SI_RETURN_TO_EPILOG %1, %2
...
---
# Vector variant of the case above, using V_MOV_B64_PSEUDO.
name: break_v_mov_b64_multi_use_copy_extract_use_is_inline_imm
body: |
  bb.0:
    ; CHECK-LABEL: name: break_v_mov_b64_multi_use_copy_extract_use_is_inline_imm
    ; CHECK: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO -96, implicit $exec
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY killed [[V_MOV_B]].sub0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY killed [[V_MOV_B]].sub1
    ; CHECK-NEXT: SI_RETURN_TO_EPILOG [[COPY]], [[COPY1]]
    %0:vreg_64_align2 = V_MOV_B64_PSEUDO -96, implicit $exec
    %1:vgpr_32 = COPY killed %0.sub0
    %2:vgpr_32 = COPY killed %0.sub1
    SI_RETURN_TO_EPILOG %1, %2
...