; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 4
; RUN: llc -mtriple=riscv32 -mattr=+v -o - %s | FileCheck %s
; RUN: llc -mtriple=riscv64 -mattr=+v -o - %s | FileCheck %s
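; Lowering of llvm.experimental.vector.partial.reduce.add on RV32 and RV64 with
; the V extension, for fixed-length and scalable vectors at several
; input-to-accumulator element ratios.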
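; 1:1 element ratio: the partial reduction is a single vector add.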
define <4 x i32> @partial_reduce_add_v4i32_v4i32(<4 x i32> %accumulator, <4 x i32> %0) {
; CHECK-LABEL: partial_reduce_add_v4i32_v4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v9
; CHECK-NEXT:    ret
entry:
  %partial.reduce = call <4 x i32> @llvm.experimental.vector.partial.reduce.add(<4 x i32> %accumulator, <4 x i32> %0)
  ret <4 x i32> %partial.reduce
}
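; Fixed-length input with twice the accumulator's element count: the high half
; is extracted with vslidedown.vi and both halves are added to the accumulator.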
define <4 x i32> @partial_reduce_add_v4i32_v8i32(<4 x i32> %accumulator, <8 x i32> %0) {
; CHECK-LABEL: partial_reduce_add_v4i32_v8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vadd.vv v12, v8, v10
; CHECK-NEXT:    vsetivli zero, 4, e32, m2, ta, ma
; CHECK-NEXT:    vslidedown.vi v8, v10, 4
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v12
; CHECK-NEXT:    ret
entry:
  %partial.reduce = call <4 x i32> @llvm.experimental.vector.partial.reduce.add(<4 x i32> %accumulator, <8 x i32> %0)
  ret <4 x i32> %partial.reduce
}
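; Scalable 1:1 case, again a single vector add (at LMUL=2).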
define <vscale x 4 x i32> @partial_reduce_add_nvx4i32_nvx4i32(<vscale x 4 x i32> %accumulator, <vscale x 4 x i32> %0) {
; CHECK-LABEL: partial_reduce_add_nvx4i32_nvx4i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v10
; CHECK-NEXT:    ret
entry:
  %partial.reduce = call <vscale x 4 x i32> @llvm.experimental.vector.partial.reduce.add(<vscale x 4 x i32> %accumulator, <vscale x 4 x i32> %0)
  ret <vscale x 4 x i32> %partial.reduce
}
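; Scalable 2:1 case: both halves of the input register group are added to the
; accumulator.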
define <vscale x 4 x i32> @partial_reduce_add_nvx4i32_nvx8i32(<vscale x 4 x i32> %accumulator, <vscale x 8 x i32> %0) {
; CHECK-LABEL: partial_reduce_add_nvx4i32_nvx8i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v12
; CHECK-NEXT:    vadd.vv v8, v14, v8
; CHECK-NEXT:    ret
entry:
  %partial.reduce = call <vscale x 4 x i32> @llvm.experimental.vector.partial.reduce.add(<vscale x 4 x i32> %accumulator, <vscale x 8 x i32> %0)
  ret <vscale x 4 x i32> %partial.reduce
}
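; Scalable 4:1 case: all four quarters of the input register group are summed
; into the accumulator.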
define <vscale x 4 x i32> @partial_reduce_add_nvx4i32_nvx16i32(<vscale x 4 x i32> %accumulator, <vscale x 16 x i32> %0) {
; CHECK-LABEL: partial_reduce_add_nvx4i32_nvx16i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a0, zero, e32, m2, ta, ma
; CHECK-NEXT:    vadd.vv v10, v18, v20
; CHECK-NEXT:    vadd.vv v8, v8, v16
; CHECK-NEXT:    vadd.vv v8, v22, v8
; CHECK-NEXT:    vadd.vv v8, v10, v8
; CHECK-NEXT:    ret
entry:
  %partial.reduce = call <vscale x 4 x i32> @llvm.experimental.vector.partial.reduce.add(<vscale x 4 x i32> %accumulator, <vscale x 16 x i32> %0)
  ret <vscale x 4 x i32> %partial.reduce
}
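; Scalable 2:1 case with an LMUL=4 accumulator.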
define <vscale x 8 x i32> @partial_reduce_add_nvx8i32_nvx16i32(<vscale x 8 x i32> %accumulator, <vscale x 16 x i32> %0) {
; CHECK-LABEL: partial_reduce_add_nvx8i32_nvx16i32:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    vsetvli a0, zero, e32, m4, ta, ma
; CHECK-NEXT:    vadd.vv v8, v8, v16
; CHECK-NEXT:    vadd.vv v8, v20, v8
; CHECK-NEXT:    ret
entry:
  %partial.reduce = call <vscale x 8 x i32> @llvm.experimental.vector.partial.reduce.add(<vscale x 8 x i32> %accumulator, <vscale x 16 x i32> %0)
  ret <vscale x 8 x i32> %partial.reduce
}