# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx908 -start-before=greedy,0 -stop-after=virtregrewriter,0 -o - %s | FileCheck %s
# This testcase used to fail because a spill of a 1024-bit SGPR tuple
# was introduced for every subregister use inside the loop. With
# overlapping unspillable split ranges, the allocator was unable to
# allocate one of the tuples. We avoid this by ensuring wide tuples are
# always allocated first (although the allocator should probably be
# smart enough to handle this without that hint; ideally it would
# understand that it only needs to spill/restore a single subregister
# at a time).
---
name: greedy_fail_alloc_sgpr1024_spill
tracksRegLiveness: true
frameInfo:
  hasCalls: true
machineFunctionInfo:
  explicitKernArgSize: 16
  maxKernArgAlign: 8
  isEntryFunction: true
  waveLimiter: true
  scratchRSrcReg: '$sgpr0_sgpr1_sgpr2_sgpr3'
  stackPtrOffsetReg: '$sgpr32'
  occupancy: 6
body: |
; CHECK-LABEL: name: greedy_fail_alloc_sgpr1024_spill
; CHECK: bb.0:
; CHECK: successors: %bb.1(0x80000000)
; CHECK: liveins: $sgpr14, $sgpr15, $vgpr0, $vgpr1, $vgpr2, $sgpr4_sgpr5, $sgpr6_sgpr7, $sgpr8_sgpr9, $sgpr10_sgpr11
; CHECK: renamable $sgpr34_sgpr35 = COPY $sgpr8_sgpr9
; CHECK: renamable $sgpr33 = COPY $sgpr15
; CHECK: renamable $sgpr42 = COPY $sgpr14
; CHECK: renamable $sgpr36_sgpr37 = COPY $sgpr10_sgpr11
; CHECK: renamable $sgpr38_sgpr39 = COPY $sgpr6_sgpr7
; CHECK: renamable $sgpr40_sgpr41 = COPY $sgpr4_sgpr5
; CHECK: renamable $sgpr66_sgpr67 = S_LOAD_DWORDX2_IMM renamable $sgpr34_sgpr35, 0, 0 :: (dereferenceable invariant load (s64), align 16, addrspace 4)
; CHECK: renamable $sgpr44 = S_MOV_B32 0
; CHECK: renamable $sgpr45 = S_MOV_B32 0
; CHECK: renamable $sgpr46 = S_MOV_B32 0
; CHECK: renamable $sgpr47 = S_MOV_B32 0
; CHECK: renamable $sgpr48 = S_MOV_B32 0
; CHECK: renamable $sgpr49 = S_MOV_B32 0
; CHECK: renamable $sgpr50 = S_MOV_B32 0
; CHECK: renamable $sgpr51 = S_MOV_B32 0
; CHECK: renamable $sgpr52 = S_MOV_B32 0
; CHECK: renamable $sgpr53 = S_MOV_B32 0
; CHECK: renamable $sgpr54 = S_MOV_B32 0
; CHECK: renamable $sgpr55 = S_MOV_B32 0
; CHECK: renamable $sgpr56 = S_MOV_B32 0
; CHECK: renamable $sgpr57 = S_MOV_B32 0
; CHECK: renamable $sgpr58 = S_MOV_B32 0
; CHECK: renamable $sgpr59 = S_MOV_B32 0
; CHECK: renamable $sgpr60 = S_MOV_B32 0
; CHECK: renamable $sgpr61 = S_MOV_B32 0
; CHECK: renamable $sgpr62 = S_MOV_B32 0
; CHECK: renamable $sgpr63 = S_MOV_B32 0
; CHECK: renamable $sgpr64 = S_MOV_B32 0
; CHECK: renamable $sgpr68_sgpr69 = IMPLICIT_DEF
; CHECK: ADJCALLSTACKUP 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
; CHECK: dead $sgpr30_sgpr31 = SI_CALL renamable $sgpr68_sgpr69, 0, csr_amdgpu_highregs, implicit $sgpr0_sgpr1_sgpr2_sgpr3
; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
; CHECK: ADJCALLSTACKUP 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
; CHECK: $sgpr4_sgpr5 = COPY killed renamable $sgpr40_sgpr41
; CHECK: $sgpr6_sgpr7 = COPY killed renamable $sgpr38_sgpr39
; CHECK: $sgpr8_sgpr9 = COPY killed renamable $sgpr34_sgpr35
; CHECK: $sgpr10_sgpr11 = COPY killed renamable $sgpr36_sgpr37
; CHECK: $sgpr12 = COPY killed renamable $sgpr42
; CHECK: $sgpr13 = COPY killed renamable $sgpr33
; CHECK: dead $sgpr30_sgpr31 = SI_CALL killed renamable $sgpr68_sgpr69, 0, csr_amdgpu_highregs, implicit $sgpr4_sgpr5, implicit $sgpr6_sgpr7, implicit $sgpr8_sgpr9, implicit $sgpr10_sgpr11, implicit killed $sgpr12, implicit killed $sgpr13, implicit $sgpr0_sgpr1_sgpr2_sgpr3
; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
; CHECK: renamable $sgpr4_sgpr5 = COPY $exec, implicit-def $exec
; CHECK: dead renamable $sgpr6_sgpr7 = IMPLICIT_DEF
; CHECK: bb.1:
; CHECK: successors: %bb.2(0x40000000), %bb.4(0x40000000)
; CHECK: liveins: $sgpr44_sgpr45_sgpr46_sgpr47_sgpr48_sgpr49_sgpr50_sgpr51_sgpr52_sgpr53_sgpr54_sgpr55_sgpr56_sgpr57_sgpr58_sgpr59_sgpr60_sgpr61_sgpr62_sgpr63_sgpr64_sgpr65_sgpr66_sgpr67_sgpr68_sgpr69_sgpr70_sgpr71_sgpr72_sgpr73_sgpr74_sgpr75:0x000003FFFFFFFFFF, $sgpr4_sgpr5, $sgpr66_sgpr67:0x000000000000000F
; CHECK: renamable $sgpr6_sgpr7 = COPY $exec, implicit-def $exec
; CHECK: S_CBRANCH_EXECZ %bb.4, implicit $exec
; CHECK: bb.2:
; CHECK: successors: %bb.3(0x80000000)
; CHECK: liveins: $sgpr44_sgpr45_sgpr46_sgpr47_sgpr48_sgpr49_sgpr50_sgpr51_sgpr52_sgpr53_sgpr54_sgpr55_sgpr56_sgpr57_sgpr58_sgpr59_sgpr60_sgpr61_sgpr62_sgpr63_sgpr64_sgpr65_sgpr66_sgpr67_sgpr68_sgpr69_sgpr70_sgpr71_sgpr72_sgpr73_sgpr74_sgpr75:0x000003FFFFFFFFFF, $sgpr4_sgpr5, $sgpr66_sgpr67:0x000000000000000F
; CHECK: [[COPY:%[0-9]+]]:vreg_1024 = COPY renamable $sgpr44_sgpr45_sgpr46_sgpr47_sgpr48_sgpr49_sgpr50_sgpr51_sgpr52_sgpr53_sgpr54_sgpr55_sgpr56_sgpr57_sgpr58_sgpr59_sgpr60_sgpr61_sgpr62_sgpr63_sgpr64_sgpr65_sgpr66_sgpr67_sgpr68_sgpr69_sgpr70_sgpr71_sgpr72_sgpr73_sgpr74_sgpr75
; CHECK: renamable $sgpr6 = S_LSHL_B32 renamable $sgpr67, 1, implicit-def dead $scc
; CHECK: dead [[COPY]]:vreg_1024 = V_INDIRECT_REG_WRITE_GPR_IDX_B32_V32 [[COPY]], 0, killed $sgpr6, 3, implicit-def $m0, implicit $m0, implicit $exec
; CHECK: bb.3:
; CHECK: successors: %bb.5(0x40000000), %bb.1(0x40000000)
; CHECK: liveins: $sgpr44_sgpr45_sgpr46_sgpr47_sgpr48_sgpr49_sgpr50_sgpr51_sgpr52_sgpr53_sgpr54_sgpr55_sgpr56_sgpr57_sgpr58_sgpr59_sgpr60_sgpr61_sgpr62_sgpr63_sgpr64_sgpr65_sgpr66_sgpr67_sgpr68_sgpr69_sgpr70_sgpr71_sgpr72_sgpr73_sgpr74_sgpr75:0x000003FFFFFFFFFF, $sgpr4_sgpr5, $sgpr66_sgpr67:0x000000000000000F
; CHECK: renamable $sgpr6_sgpr7 = S_OR_SAVEEXEC_B64 renamable $sgpr4_sgpr5, implicit-def $exec, implicit-def $scc, implicit $exec
; CHECK: renamable $sgpr68 = COPY renamable $sgpr44
; CHECK: renamable $sgpr69 = COPY renamable $sgpr44
; CHECK: renamable $sgpr70 = COPY renamable $sgpr44
; CHECK: renamable $sgpr71 = COPY renamable $sgpr44
; CHECK: renamable $sgpr72 = COPY renamable $sgpr44
; CHECK: renamable $sgpr73 = COPY renamable $sgpr44
; CHECK: renamable $sgpr74 = COPY renamable $sgpr44
; CHECK: renamable $sgpr75 = COPY renamable $sgpr44
; CHECK: renamable $sgpr76 = COPY renamable $sgpr44
; CHECK: renamable $sgpr77 = COPY renamable $sgpr44
; CHECK: renamable $sgpr78 = COPY renamable $sgpr44
; CHECK: renamable $sgpr79 = COPY renamable $sgpr44
; CHECK: renamable $sgpr80 = COPY renamable $sgpr44
; CHECK: renamable $sgpr81 = COPY renamable $sgpr44
; CHECK: renamable $sgpr82 = COPY renamable $sgpr44
; CHECK: renamable $sgpr83 = COPY renamable $sgpr44
; CHECK: renamable $sgpr84 = COPY renamable $sgpr44
; CHECK: renamable $sgpr85 = COPY renamable $sgpr44
; CHECK: renamable $sgpr86 = COPY renamable $sgpr44
; CHECK: renamable $sgpr87 = COPY renamable $sgpr44
; CHECK: renamable $sgpr88 = COPY renamable $sgpr44
; CHECK: renamable $sgpr89 = COPY renamable $sgpr44
; CHECK: dead %18:vreg_1024 = COPY renamable $sgpr68_sgpr69_sgpr70_sgpr71_sgpr72_sgpr73_sgpr74_sgpr75_sgpr76_sgpr77_sgpr78_sgpr79_sgpr80_sgpr81_sgpr82_sgpr83_sgpr84_sgpr85_sgpr86_sgpr87_sgpr88_sgpr89_sgpr90_sgpr91_sgpr92_sgpr93_sgpr94_sgpr95_sgpr96_sgpr97_sgpr98_sgpr99, implicit $exec
; CHECK: $exec = S_XOR_B64_term $exec, killed renamable $sgpr6_sgpr7, implicit-def $scc
; CHECK: S_CBRANCH_EXECZ %bb.5, implicit $exec
; CHECK: S_BRANCH %bb.1
; CHECK: bb.4:
; CHECK: successors: %bb.5(0x80000000)
; CHECK: liveins: $sgpr44_sgpr45_sgpr46_sgpr47_sgpr48_sgpr49_sgpr50_sgpr51_sgpr52_sgpr53_sgpr54_sgpr55_sgpr56_sgpr57_sgpr58_sgpr59_sgpr60_sgpr61_sgpr62_sgpr63_sgpr64_sgpr65_sgpr66_sgpr67_sgpr68_sgpr69_sgpr70_sgpr71_sgpr72_sgpr73_sgpr74_sgpr75:0x000003FFFFFFFFFF, $sgpr6_sgpr7, $sgpr66_sgpr67:0x0000000000000003
; CHECK: $exec = S_OR_B64 $exec, killed renamable $sgpr6_sgpr7, implicit-def $scc
; CHECK: dead renamable $sgpr4 = S_LSHL_B32 killed renamable $sgpr66, 1, implicit-def dead $scc
; CHECK: dead %16:vreg_1024 = COPY renamable $sgpr44_sgpr45_sgpr46_sgpr47_sgpr48_sgpr49_sgpr50_sgpr51_sgpr52_sgpr53_sgpr54_sgpr55_sgpr56_sgpr57_sgpr58_sgpr59_sgpr60_sgpr61_sgpr62_sgpr63_sgpr64_sgpr65_sgpr66_sgpr67_sgpr68_sgpr69_sgpr70_sgpr71_sgpr72_sgpr73_sgpr74_sgpr75
; CHECK: bb.5:
bb.0:
liveins: $vgpr0, $vgpr1, $vgpr2, $sgpr4_sgpr5, $sgpr6_sgpr7, $sgpr8_sgpr9, $sgpr10_sgpr11, $sgpr14, $sgpr15
%0:sgpr_64 = COPY $sgpr8_sgpr9
%1:sgpr_32 = COPY $sgpr15
%2:sgpr_32 = COPY $sgpr14
%3:sgpr_64 = COPY $sgpr10_sgpr11
%4:sgpr_64 = COPY $sgpr6_sgpr7
%5:sgpr_64 = COPY $sgpr4_sgpr5
%6:sreg_64_xexec = S_LOAD_DWORDX2_IMM %0, 0, 0 :: (dereferenceable invariant load (s64), align 16, addrspace 4)
undef %7.sub0:sgpr_1024 = S_MOV_B32 0
%7.sub1:sgpr_1024 = S_MOV_B32 0
%7.sub2:sgpr_1024 = S_MOV_B32 0
%7.sub3:sgpr_1024 = S_MOV_B32 0
%7.sub4:sgpr_1024 = S_MOV_B32 0
%7.sub5:sgpr_1024 = S_MOV_B32 0
%7.sub6:sgpr_1024 = S_MOV_B32 0
%7.sub7:sgpr_1024 = S_MOV_B32 0
%7.sub8:sgpr_1024 = S_MOV_B32 0
%7.sub9:sgpr_1024 = S_MOV_B32 0
%7.sub10:sgpr_1024 = S_MOV_B32 0
%7.sub11:sgpr_1024 = S_MOV_B32 0
%7.sub12:sgpr_1024 = S_MOV_B32 0
%7.sub13:sgpr_1024 = S_MOV_B32 0
%7.sub14:sgpr_1024 = S_MOV_B32 0
%7.sub15:sgpr_1024 = S_MOV_B32 0
%7.sub16:sgpr_1024 = S_MOV_B32 0
%7.sub17:sgpr_1024 = S_MOV_B32 0
%7.sub18:sgpr_1024 = S_MOV_B32 0
%7.sub19:sgpr_1024 = S_MOV_B32 0
%7.sub20:sgpr_1024 = S_MOV_B32 0
%8:sreg_64 = IMPLICIT_DEF
ADJCALLSTACKUP 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
dead $sgpr30_sgpr31 = SI_CALL %8, 0, csr_amdgpu_highregs, implicit $sgpr0_sgpr1_sgpr2_sgpr3
ADJCALLSTACKDOWN 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
ADJCALLSTACKUP 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
$sgpr4_sgpr5 = COPY %5
$sgpr6_sgpr7 = COPY %4
$sgpr8_sgpr9 = COPY %0
$sgpr10_sgpr11 = COPY %3
$sgpr12 = COPY %2
$sgpr13 = COPY %1
dead $sgpr30_sgpr31 = SI_CALL %8, 0, csr_amdgpu_highregs, implicit $sgpr4_sgpr5, implicit $sgpr6_sgpr7, implicit $sgpr8_sgpr9, implicit $sgpr10_sgpr11, implicit killed $sgpr12, implicit killed $sgpr13, implicit $sgpr0_sgpr1_sgpr2_sgpr3
ADJCALLSTACKDOWN 0, 0, implicit-def dead $scc, implicit-def $sgpr32, implicit $sgpr32
%9:sreg_64 = COPY $exec, implicit-def $exec
%10:sreg_64 = IMPLICIT_DEF
bb.1:
successors: %bb.2, %bb.4
%11:sreg_64 = COPY $exec, implicit-def $exec
S_CBRANCH_EXECZ %bb.4, implicit $exec
bb.2:
%12:vreg_1024 = COPY %7
%13:sreg_32 = S_LSHL_B32 %6.sub1, 1, implicit-def dead $scc
%12:vreg_1024 = V_INDIRECT_REG_WRITE_GPR_IDX_B32_V32 %12, 0, %13, 3, implicit-def $m0, implicit $m0, implicit $exec
bb.3:
%14:sreg_64 = S_OR_SAVEEXEC_B64 %9, implicit-def $exec, implicit-def $scc, implicit $exec
undef %15.sub0:sgpr_1024 = COPY %7.sub0
%15.sub1:sgpr_1024 = COPY %7.sub0
%15.sub2:sgpr_1024 = COPY %7.sub0
%15.sub3:sgpr_1024 = COPY %7.sub0
%15.sub4:sgpr_1024 = COPY %7.sub0
%15.sub5:sgpr_1024 = COPY %7.sub0
%15.sub6:sgpr_1024 = COPY %7.sub0
%15.sub7:sgpr_1024 = COPY %7.sub0
%15.sub8:sgpr_1024 = COPY %7.sub0
%15.sub9:sgpr_1024 = COPY %7.sub0
%15.sub10:sgpr_1024 = COPY %7.sub0
%15.sub11:sgpr_1024 = COPY %7.sub0
%15.sub12:sgpr_1024 = COPY %7.sub0
%15.sub13:sgpr_1024 = COPY %7.sub0
%15.sub14:sgpr_1024 = COPY %7.sub0
%15.sub15:sgpr_1024 = COPY %7.sub0
%15.sub16:sgpr_1024 = COPY %7.sub0
%15.sub17:sgpr_1024 = COPY %7.sub0
%15.sub18:sgpr_1024 = COPY %7.sub0
%15.sub19:sgpr_1024 = COPY %7.sub0
%15.sub20:sgpr_1024 = COPY %7.sub0
%15.sub21:sgpr_1024 = COPY %7.sub0
; Spill code ends up getting inserted here, and we end up with many unspillable sgpr1024 ranges
%16:vreg_1024 = COPY %15, implicit $exec
$exec = S_XOR_B64_term $exec, %14, implicit-def $scc
S_CBRANCH_EXECZ %bb.5, implicit $exec
S_BRANCH %bb.1
bb.4:
$exec = S_OR_B64 $exec, %11, implicit-def $scc
%17:sreg_32 = S_LSHL_B32 %6.sub0, 1, implicit-def dead $scc
%16:vreg_1024 = COPY %7
bb.5:
...