; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=aarch64 -mattr=+sve -verify-machineinstrs < %s | FileCheck %s

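; The divisor (vscale >> 3) is a power of two (vscale is assumed to be a power
; of two on this target), so the urem lowers to an AND with (divisor - 1)
; rather than a division. rdvl x8, #1 reads the vector length in bytes, and the
; first lsr by 4 converts that to vscale.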
define i64 @vscale_lshr(i64 %TC) {
; CHECK-LABEL: vscale_lshr:
; CHECK:       // %bb.0:
; CHECK-NEXT:    rdvl x8, #1
; CHECK-NEXT:    lsr x8, x8, #4
; CHECK-NEXT:    lsr x8, x8, #3
; CHECK-NEXT:    sub x8, x8, #1
; CHECK-NEXT:    and x0, x0, x8
; CHECK-NEXT:    ret
  %vscale = call i64 @llvm.vscale.i64()
  %shifted = lshr i64 %vscale, 3
  %urem = urem i64 %TC, %shifted
  ret i64 %urem
}

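; urem directly by vscale: the same AND-based lowering, with vscale again
; computed as the vector length in bytes (rdvl x8, #1) shifted right by 4.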
define i64 @vscale(i64 %TC) {
; CHECK-LABEL: vscale:
; CHECK:       // %bb.0:
; CHECK-NEXT:    rdvl x8, #1
; CHECK-NEXT:    lsr x8, x8, #4
; CHECK-NEXT:    sub x8, x8, #1
; CHECK-NEXT:    and x0, x0, x8
; CHECK-NEXT:    ret
  %vscale = call i64 @llvm.vscale.i64()
  %urem = urem i64 %TC, %vscale
  ret i64 %urem
}

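; urem by (vscale << 3): cnth materialises vscale * 8 directly (the number of
; halfword elements in a vector), so no shift is needed before the AND.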
define i64 @vscale_shl(i64 %TC) {
; CHECK-LABEL: vscale_shl:
; CHECK:       // %bb.0:
; CHECK-NEXT:    cnth x8
; CHECK-NEXT:    sub x8, x8, #1
; CHECK-NEXT:    and x0, x0, x8
; CHECK-NEXT:    ret
  %vscale = call i64 @llvm.vscale.i64()
  %shifted = shl i64 %vscale, 3
  %urem = urem i64 %TC, %shifted
  ret i64 %urem
}

declare i64 @llvm.vscale.i64()