# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -o - %s -mtriple=amdgcn-amd-amdhsa -mcpu=gfx90a -run-pass=si-fold-operands -verify-machineinstrs | FileCheck %s
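# Check that si-fold-operands folds the S_MOV_B32 shift amount into
# V_LSHLREV_B64_e64 as an inline immediate and folds away the intermediate
# COPYs feeding the 64-bit add, while the add itself remains a
# V_ADD_CO_U32/V_ADDC_U32 pair.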

---
name:            lshl_add_u64_gep
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3

    ; CHECK-LABEL: name: lshl_add_u64_gep
    ; CHECK: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr3
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr2
    ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr1
    ; CHECK-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; CHECK-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64_align2 = REG_SEQUENCE [[COPY1]], %subreg.sub0, [[COPY]], %subreg.sub1
    ; CHECK-NEXT: [[V_LSHLREV_B64_e64_:%[0-9]+]]:vreg_64_align2 = V_LSHLREV_B64_e64 2, [[REG_SEQUENCE]], implicit $exec
    ; CHECK-NEXT: [[V_ADD_CO_U32_e64_:%[0-9]+]]:vgpr_32, [[V_ADD_CO_U32_e64_1:%[0-9]+]]:sreg_64_xexec = V_ADD_CO_U32_e64 [[COPY3]], [[V_LSHLREV_B64_e64_]].sub0, 0, implicit $exec
    ; CHECK-NEXT: [[V_ADDC_U32_e64_:%[0-9]+]]:vgpr_32, dead [[V_ADDC_U32_e64_1:%[0-9]+]]:sreg_64_xexec = V_ADDC_U32_e64 [[COPY2]], [[V_LSHLREV_B64_e64_]].sub1, killed [[V_ADD_CO_U32_e64_1]], 0, implicit $exec
    ; CHECK-NEXT: [[REG_SEQUENCE1:%[0-9]+]]:vreg_64_align2 = REG_SEQUENCE [[V_ADD_CO_U32_e64_]], %subreg.sub0, [[V_ADDC_U32_e64_]], %subreg.sub1
    ; CHECK-NEXT: [[FLAT_LOAD_DWORD:%[0-9]+]]:vgpr_32 = FLAT_LOAD_DWORD killed [[REG_SEQUENCE1]], 0, 0, implicit $exec, implicit $flat_scr
    ; CHECK-NEXT: $vgpr0 = COPY [[FLAT_LOAD_DWORD]]
    ; CHECK-NEXT: SI_RETURN implicit $vgpr0
    %0:vgpr_32 = COPY $vgpr3
    %1:vgpr_32 = COPY $vgpr2
    %2:vgpr_32 = COPY $vgpr1
    %3:vgpr_32 = COPY $vgpr0
    %4:vreg_64_align2 = REG_SEQUENCE %1, %subreg.sub0, %0, %subreg.sub1
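    ; si-fold-operands should fold the immediate shift amount below directly
    ; into V_LSHLREV_B64_e64, making the S_MOV_B32 dead.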
    %5:sreg_32 = S_MOV_B32 2
    %6:vreg_64_align2 = V_LSHLREV_B64_e64 killed %5, %4, implicit $exec
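    ; These COPYs should also be folded away, leaving the add pair reading
    ; %3, %2, and the shift result subregisters directly.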
    %7:vgpr_32 = COPY %3
    %8:vgpr_32 = COPY %6.sub0
    %9:vgpr_32 = COPY %2
    %10:vgpr_32 = COPY %6.sub1
    %11:vgpr_32, %12:sreg_64_xexec = V_ADD_CO_U32_e64 %7, %8, 0, implicit $exec
    %13:vgpr_32, dead %14:sreg_64_xexec = V_ADDC_U32_e64 %9, %10, killed %12, 0, implicit $exec
    %15:vreg_64_align2 = REG_SEQUENCE %11, %subreg.sub0, %13, %subreg.sub1
    %16:vgpr_32 = FLAT_LOAD_DWORD killed %15, 0, 0, implicit $exec, implicit $flat_scr
    $vgpr0 = COPY %16
    SI_RETURN implicit $vgpr0

...