# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 2
# RUN: llc -march=amdgcn -mcpu=gfx1100 -verify-machineinstrs -run-pass si-load-store-opt -o - %s | FileCheck %s

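# Check that si-load-store-opt merges adjacent scalar loads from the same base pointer
# into wider loads, as asserted by the autogenerated CHECK lines below.

# Two dword loads at offsets 0 and 4 merge into one S_LOAD_DWORDX2_IMM; the original
# results are recovered through sub0/sub1 copies.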
---
name: merge_s_load_x1_x1
body: |
  bb.0:
    ; CHECK-LABEL: name: merge_s_load_x1_x1
    ; CHECK: [[DEF:%[0-9]+]]:sgpr_64 = IMPLICIT_DEF
    ; CHECK-NEXT: [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM [[DEF]], 0, 0 :: (dereferenceable invariant load (s64), align 4)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sreg_32_xm0_xexec = COPY [[S_LOAD_DWORDX2_IMM]].sub0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32_xm0_xexec = COPY killed [[S_LOAD_DWORDX2_IMM]].sub1
    %0:sgpr_64 = IMPLICIT_DEF
    %1:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 0, 0 :: (dereferenceable invariant load (s32))
    %2:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 4, 0 :: (dereferenceable invariant load (s32))
...

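# Four dword loads at offsets 0-12 merge into one S_LOAD_DWORDX4_IMM, unpacked
# through 64-bit and then 32-bit subregister copies.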
---
name: merge_s_load_x1_x1_x1_x1
body: |
  bb.0:
    ; CHECK-LABEL: name: merge_s_load_x1_x1_x1_x1
    ; CHECK: [[DEF:%[0-9]+]]:sgpr_64 = IMPLICIT_DEF
    ; CHECK-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM [[DEF]], 0, 0 :: (dereferenceable invariant load (s128), align 4)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sreg_64_xexec = COPY [[S_LOAD_DWORDX4_IMM]].sub0_sub1
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_64_xexec = COPY killed [[S_LOAD_DWORDX4_IMM]].sub2_sub3
    ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32_xm0_xexec = COPY [[COPY]].sub0
    ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32_xm0_xexec = COPY killed [[COPY]].sub1
    ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32_xm0_xexec = COPY [[COPY1]].sub0
    ; CHECK-NEXT: [[COPY5:%[0-9]+]]:sreg_32_xm0_xexec = COPY killed [[COPY1]].sub1
    %0:sgpr_64 = IMPLICIT_DEF
    %1:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 0, 0 :: (dereferenceable invariant load (s32))
    %2:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 4, 0 :: (dereferenceable invariant load (s32))
    %3:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 8, 0 :: (dereferenceable invariant load (s32))
    %4:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 12, 0 :: (dereferenceable invariant load (s32))
...

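# Eight dword loads at offsets 0-28 merge into one S_LOAD_DWORDX8_IMM, unpacked
# through 128-bit, 64-bit, and 32-bit subregister copies.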
---
name: merge_s_load_x1_x1_x1_x1_x1_x1_x1_x1
body: |
  bb.0:
    ; CHECK-LABEL: name: merge_s_load_x1_x1_x1_x1_x1_x1_x1_x1
    ; CHECK: [[DEF:%[0-9]+]]:sgpr_64 = IMPLICIT_DEF
    ; CHECK-NEXT: [[S_LOAD_DWORDX8_IMM:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM [[DEF]], 0, 0 :: (dereferenceable invariant load (s256), align 4)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_128 = COPY [[S_LOAD_DWORDX8_IMM]].sub0_sub1_sub2_sub3
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sgpr_128 = COPY killed [[S_LOAD_DWORDX8_IMM]].sub4_sub5_sub6_sub7
    ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_64_xexec = COPY [[COPY]].sub0_sub1
    ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_64_xexec = COPY killed [[COPY]].sub2_sub3
    ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32_xm0_xexec = COPY [[COPY2]].sub0
    ; CHECK-NEXT: [[COPY5:%[0-9]+]]:sreg_32_xm0_xexec = COPY killed [[COPY2]].sub1
    ; CHECK-NEXT: [[COPY6:%[0-9]+]]:sreg_32_xm0_xexec = COPY [[COPY3]].sub0
    ; CHECK-NEXT: [[COPY7:%[0-9]+]]:sreg_32_xm0_xexec = COPY killed [[COPY3]].sub1
    ; CHECK-NEXT: [[COPY8:%[0-9]+]]:sreg_64_xexec = COPY [[COPY1]].sub0_sub1
    ; CHECK-NEXT: [[COPY9:%[0-9]+]]:sreg_64_xexec = COPY killed [[COPY1]].sub2_sub3
    ; CHECK-NEXT: [[COPY10:%[0-9]+]]:sreg_32_xm0_xexec = COPY [[COPY8]].sub0
    ; CHECK-NEXT: [[COPY11:%[0-9]+]]:sreg_32_xm0_xexec = COPY killed [[COPY8]].sub1
    ; CHECK-NEXT: [[COPY12:%[0-9]+]]:sreg_32_xm0_xexec = COPY [[COPY9]].sub0
    ; CHECK-NEXT: [[COPY13:%[0-9]+]]:sreg_32_xm0_xexec = COPY killed [[COPY9]].sub1
    %0:sgpr_64 = IMPLICIT_DEF
    %1:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 0, 0 :: (dereferenceable invariant load (s32))
    %2:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 4, 0 :: (dereferenceable invariant load (s32))
    %3:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 8, 0 :: (dereferenceable invariant load (s32))
    %4:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 12, 0 :: (dereferenceable invariant load (s32))
    %5:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 16, 0 :: (dereferenceable invariant load (s32))
    %6:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 20, 0 :: (dereferenceable invariant load (s32))
    %7:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 24, 0 :: (dereferenceable invariant load (s32))
    %8:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %0:sgpr_64, 28, 0 :: (dereferenceable invariant load (s32))
...

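# Two dwordx2 loads at offsets 0 and 8 merge into one S_LOAD_DWORDX4_IMM.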
---
name: merge_s_load_x2_x2
body: |
  bb.0:
    ; CHECK-LABEL: name: merge_s_load_x2_x2
    ; CHECK: [[DEF:%[0-9]+]]:sgpr_64 = IMPLICIT_DEF
    ; CHECK-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM [[DEF]], 0, 0 :: (dereferenceable invariant load (s128), align 8)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64 = COPY [[S_LOAD_DWORDX4_IMM]].sub0_sub1
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sgpr_64 = COPY killed [[S_LOAD_DWORDX4_IMM]].sub2_sub3
    %0:sgpr_64 = IMPLICIT_DEF
    %1:sgpr_64 = S_LOAD_DWORDX2_IMM %0:sgpr_64, 0, 0 :: (dereferenceable invariant load (s64))
    %2:sgpr_64 = S_LOAD_DWORDX2_IMM %0:sgpr_64, 8, 0 :: (dereferenceable invariant load (s64))
...

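# Four dwordx2 loads at offsets 0-24 merge into one S_LOAD_DWORDX8_IMM, unpacked
# through 128-bit and then 64-bit subregister copies.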
---
name: merge_s_load_x2_x2_x2_x2
body: |
  bb.0:
    ; CHECK-LABEL: name: merge_s_load_x2_x2_x2_x2
    ; CHECK: [[DEF:%[0-9]+]]:sgpr_64 = IMPLICIT_DEF
    ; CHECK-NEXT: [[S_LOAD_DWORDX8_IMM:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM [[DEF]], 0, 0 :: (dereferenceable invariant load (s256), align 8)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_128 = COPY [[S_LOAD_DWORDX8_IMM]].sub0_sub1_sub2_sub3
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sgpr_128 = COPY killed [[S_LOAD_DWORDX8_IMM]].sub4_sub5_sub6_sub7
    ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sgpr_64 = COPY [[COPY]].sub0_sub1
    ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY killed [[COPY]].sub2_sub3
    ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sgpr_64 = COPY [[COPY1]].sub0_sub1
    ; CHECK-NEXT: [[COPY5:%[0-9]+]]:sgpr_64 = COPY killed [[COPY1]].sub2_sub3
    %0:sgpr_64 = IMPLICIT_DEF
    %1:sgpr_64 = S_LOAD_DWORDX2_IMM %0:sgpr_64, 0, 0 :: (dereferenceable invariant load (s64))
    %2:sgpr_64 = S_LOAD_DWORDX2_IMM %0:sgpr_64, 8, 0 :: (dereferenceable invariant load (s64))
    %3:sgpr_64 = S_LOAD_DWORDX2_IMM %0:sgpr_64, 16, 0 :: (dereferenceable invariant load (s64))
    %4:sgpr_64 = S_LOAD_DWORDX2_IMM %0:sgpr_64, 24, 0 :: (dereferenceable invariant load (s64))
...

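# Two dwordx4 loads at offsets 0 and 16 merge into one S_LOAD_DWORDX8_IMM.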
---
name: merge_s_load_x4_x4
body: |
  bb.0:
    ; CHECK-LABEL: name: merge_s_load_x4_x4
    ; CHECK: [[DEF:%[0-9]+]]:sgpr_64 = IMPLICIT_DEF
    ; CHECK-NEXT: [[S_LOAD_DWORDX8_IMM:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM [[DEF]], 0, 0 :: (dereferenceable invariant load (s256), align 16)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_128 = COPY [[S_LOAD_DWORDX8_IMM]].sub0_sub1_sub2_sub3
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sgpr_128 = COPY killed [[S_LOAD_DWORDX8_IMM]].sub4_sub5_sub6_sub7
    %0:sgpr_64 = IMPLICIT_DEF
    %1:sgpr_128 = S_LOAD_DWORDX4_IMM %0:sgpr_64, 0, 0 :: (dereferenceable invariant load (s128))
    %2:sgpr_128 = S_LOAD_DWORDX4_IMM %0:sgpr_64, 16, 0 :: (dereferenceable invariant load (s128))
...