; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+d,+experimental-zvlsseg,+experimental-zfh \
; RUN: -verify-machineinstrs < %s | FileCheck %s
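; This file exercises codegen for the vsuxseg<nf> (unordered indexed segment
; store) intrinsics. Each test passes the same value for every segment field,
; so the expected assembly builds a consecutive register tuple rooted at the
; value register before issuing the store. For each value type the tests
; cover index EEWs smaller than, equal to, and larger than the value SEW,
; both unmasked and masked (mask in v0).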
declare void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, i16*, <vscale x 16 x i16>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16>,<vscale x 16 x i16>, i16*, <vscale x 16 x i16>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg2_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v28, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e16,m4,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v28
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv16i16_nxv16i16(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v28, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e16,m4,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v28, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i16(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i8(<vscale x 16 x i16>,<vscale x 16 x i16>, i16*, <vscale x 16 x i8>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8(<vscale x 16 x i16>,<vscale x 16 x i16>, i16*, <vscale x 16 x i8>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg2_nxv16i16_nxv16i8(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v26, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e16,m4,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v26
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i8(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv16i16_nxv16i8(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 killed $v8m4_v12m4 def $v8m4_v12m4
; CHECK-NEXT: vmv2r.v v26, v12
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e16,m4,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v26, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i8(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i32(<vscale x 16 x i16>,<vscale x 16 x i16>, i16*, <vscale x 16 x i32>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32(<vscale x 16 x i16>,<vscale x 16 x i16>, i16*, <vscale x 16 x i32>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg2_nxv16i16_nxv16i32(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i16_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e16,m4,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv16i16.nxv16i32(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv16i16_nxv16i32(<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i16_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m4 killed $v8m4 def $v8m4_v12m4
; CHECK-NEXT: vmv4r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e16,m4,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv16i16.nxv16i32(<vscale x 16 x i16> %val,<vscale x 16 x i16> %val, i16* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
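; Note on the copies above: for nxv16i16 (e16,m4) values the store needs the
; tuple v8m4_v12m4. An m4 or m2 index arrives in v12 and overlaps the tuple,
; so it is first moved to a scratch register (v28/v26) and the value is then
; duplicated into v12. An m8 index (the ei32 case) arrives in v16, clear of
; the tuple, so only the value duplication is required.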
declare void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i32>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i32>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg2_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v26, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v26
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v26, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v26, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i8>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i8>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg2_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v25, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v25
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v25, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v25, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i64(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i64>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i64(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i64>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg2_nxv4i32_nxv4i64(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i64(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv4i32_nxv4i64(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i64(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i16>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i16>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg2_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v25, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v25
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv1r.v v25, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v25, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
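; With three or four fields at m2, the register allocator in these tests
; roots the tuple at v0 (copies into v0/v2/v4, plus v6 for four fields)
; rather than moving the index aside; the masked variants root it at v2
; instead, since v0 carries the mask.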
declare void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i32>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i32>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg3_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei32.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei32.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i8>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i8>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg3_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei8.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei8.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i64(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i64>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i64(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i64>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg3_nxv4i32_nxv4i64(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei64.v v0, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i64(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv4i32_nxv4i64(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei64.v v2, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i64(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i16>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i16>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg3_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei16.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei16.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i32>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i32(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i32>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg4_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vmv2r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei32.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv4i32_nxv4i32(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vmv2r.v v8, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei32.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i32(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i32> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i8>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i8(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i8>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg4_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vmv2r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei8.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv4i32_nxv4i8(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vmv2r.v v8, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei8.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i8(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i8> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i64(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i64>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i64(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i64>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg4_nxv4i32_nxv4i64(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vmv2r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei64.v v0, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i64(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv4i32_nxv4i64(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vmv2r.v v8, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei64.v v2, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i64(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i64> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i16>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i16(<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>,<vscale x 4 x i32>, i32*, <vscale x 4 x i16>, <vscale x 4 x i1>, i64)
define void @test_vsuxseg4_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vmv2r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei16.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv4i32_nxv4i16(<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv4i32_nxv4i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vmv2r.v v8, v2
; CHECK-NEXT: vsetvli a1, a1, e32,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei16.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv4i32.nxv4i16(<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val,<vscale x 4 x i32> %val, i32* %base, <vscale x 4 x i16> %index, <vscale x 4 x i1> %mask, i64 %vl)
ret void
}
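; The following group repeats the pattern for nxv16i8 (e8,m2) values. Note
; that an i32 index here is nxv16i32, an m8 register group, so it arrives in
; v16 clear of any tuple rooted at v8 and no index copy is needed.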
declare void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i16>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i16>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg2_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i8>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i8>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg2_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v26, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v26
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 killed $v8m2_v10m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v26, v10
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v26, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i32>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i32>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg2_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v16, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i16>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i16>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg3_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei16.v v0, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei16.v v2, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i8>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i8>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg3_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei8.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei8.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i32>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i32>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg3_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg3ei32.v v8, (a0), v16, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i16>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i16(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i16>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg4_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vmv2r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei16.v v0, (a0), v12
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv16i8_nxv16i16(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vmv2r.v v8, v2
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei16.v v2, (a0), v12, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i16(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i16> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i8>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i8(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i8>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg4_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v0, v8
; CHECK-NEXT: vmv2r.v v2, v0
; CHECK-NEXT: vmv2r.v v4, v0
; CHECK-NEXT: vmv2r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei8.v v0, (a0), v10
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv16i8_nxv16i8(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv2r.v v2, v8
; CHECK-NEXT: vmv2r.v v4, v2
; CHECK-NEXT: vmv2r.v v6, v2
; CHECK-NEXT: vmv2r.v v8, v2
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei8.v v2, (a0), v10, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i8(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i8> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i32>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32(<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>,<vscale x 16 x i8>, i8*, <vscale x 16 x i32>, <vscale x 16 x i1>, i64)
define void @test_vsuxseg4_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vmv2r.v v14, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei32.v v8, (a0), v16
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv16i8_nxv16i32(<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv16i8_nxv16i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8m2 killed $v8m2 def $v8m2_v10m2_v12m2_v14m2
; CHECK-NEXT: vmv2r.v v10, v8
; CHECK-NEXT: vmv2r.v v12, v8
; CHECK-NEXT: vmv2r.v v14, v8
; CHECK-NEXT: vsetvli a1, a1, e8,m2,ta,mu
; CHECK-NEXT: vsuxseg4ei32.v v8, (a0), v16, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv16i8.nxv16i32(<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val,<vscale x 16 x i8> %val, i8* %base, <vscale x 16 x i32> %index, <vscale x 16 x i1> %mask, i64 %vl)
ret void
}
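; The nxv1i64 tests below run at LMUL=1 (e64,m1): seg2 uses the v8_v9 tuple
; with the index moved to v25, while seg3 and larger root the tuple at v0
; when unmasked and at v1 when masked.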
declare void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg2_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v25
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei64.v v8, (a0), v25, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg2_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v25
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei32.v v8, (a0), v25, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg2_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v25
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei16.v v8, (a0), v25, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, i64)
declare void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg2_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v25
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg2_mask_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg2_mask_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: # kill: def $v8 killed $v8 killed $v8_v9 def $v8_v9
; CHECK-NEXT: vmv1r.v v25, v9
; CHECK-NEXT: vmv1r.v v9, v8
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg2ei8.v v8, (a0), v25, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg2.mask.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg3_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei64.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei64.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg3_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei32.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei32.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg3_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei16.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei16.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, i64)
declare void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg3_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei8.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg3_mask_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg3_mask_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg3ei8.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg3.mask.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg4_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei64.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei64.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg4_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei32.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei32.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg4_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei16.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei16.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, i64)
declare void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg4_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei8.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg4_mask_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg4_mask_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg4ei8.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg4.mask.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
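; Segment counts of five and up appear only for LMUL=1 value types such as
; nxv1i64, since the RVV spec requires NFIELDS * EMUL <= 8.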
declare void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, i64)
declare void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg5_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei64.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg5_mask_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei64.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, i64)
declare void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg5_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei32.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg5_mask_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei32.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, i64)
declare void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg5_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei16.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg5_mask_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei16.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, i64)
declare void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg5_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei8.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg5_mask_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg5_mask_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg5ei8.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg5.mask.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
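; vsuxseg6: as above, with a six-register segment group (v0..v5 unmasked,
; v1..v6 masked).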
declare void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, i64)
declare void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg6_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei64.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg6_mask_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei64.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, i64)
declare void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg6_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei32.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg6_mask_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei32.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, i64)
declare void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg6_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei16.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg6_mask_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei16.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, i64)
declare void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg6_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei8.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg6_mask_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg6_mask_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg6ei8.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg6.mask.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
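; vsuxseg7: seven-register segment group (v0..v6 unmasked, v1..v7 masked).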
declare void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, i64)
declare void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg7_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vmv1r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei64.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg7_mask_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vmv1r.v v7, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei64.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, i64)
declare void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg7_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vmv1r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei32.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg7_mask_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vmv1r.v v7, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei32.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, i64)
declare void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg7_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vmv1r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei16.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg7_mask_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vmv1r.v v7, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei16.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, i64)
declare void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg7_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vmv1r.v v6, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei8.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, i64 %vl)
ret void
}
define void @test_vsuxseg7_mask_nxv1i64_nxv1i8(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg7_mask_nxv1i64_nxv1i8:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vmv1r.v v7, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg7ei8.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg7.mask.nxv1i64.nxv1i8(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i8> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
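; vsuxseg8: NFIELDS reaches its architectural maximum of eight; the segment
; group spans v0..v7 unmasked and v1..v8 masked.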
declare void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, i64)
declare void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i64(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i64>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg8_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vmv1r.v v6, v0
; CHECK-NEXT: vmv1r.v v7, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg8ei64.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, i64 %vl)
ret void
}
define void @test_vsuxseg8_mask_nxv1i64_nxv1i64(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i64_nxv1i64:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vmv1r.v v7, v1
; CHECK-NEXT: vmv1r.v v8, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg8ei64.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i64(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i64> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, i64)
declare void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i32(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i32>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg8_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vmv1r.v v6, v0
; CHECK-NEXT: vmv1r.v v7, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg8ei32.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, i64 %vl)
ret void
}
define void @test_vsuxseg8_mask_nxv1i64_nxv1i32(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i64_nxv1i32:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vmv1r.v v7, v1
; CHECK-NEXT: vmv1r.v v8, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg8ei32.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i32(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i32> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, i64)
declare void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i16(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i16>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg8_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl) {
; CHECK-LABEL: test_vsuxseg8_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v0, v8
; CHECK-NEXT: vmv1r.v v1, v0
; CHECK-NEXT: vmv1r.v v2, v0
; CHECK-NEXT: vmv1r.v v3, v0
; CHECK-NEXT: vmv1r.v v4, v0
; CHECK-NEXT: vmv1r.v v5, v0
; CHECK-NEXT: vmv1r.v v6, v0
; CHECK-NEXT: vmv1r.v v7, v0
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg8ei16.v v0, (a0), v9
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, i64 %vl)
ret void
}
define void @test_vsuxseg8_mask_nxv1i64_nxv1i16(<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl) {
; CHECK-LABEL: test_vsuxseg8_mask_nxv1i64_nxv1i16:
; CHECK: # %bb.0: # %entry
; CHECK-NEXT: vmv1r.v v1, v8
; CHECK-NEXT: vmv1r.v v2, v1
; CHECK-NEXT: vmv1r.v v3, v1
; CHECK-NEXT: vmv1r.v v4, v1
; CHECK-NEXT: vmv1r.v v5, v1
; CHECK-NEXT: vmv1r.v v6, v1
; CHECK-NEXT: vmv1r.v v7, v1
; CHECK-NEXT: vmv1r.v v8, v1
; CHECK-NEXT: vsetvli a1, a1, e64,m1,ta,mu
; CHECK-NEXT: vsuxseg8ei16.v v1, (a0), v9, v0.t
; CHECK-NEXT: ret
entry:
tail call void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i16(<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val,<vscale x 1 x i64> %val, i64* %base, <vscale x 1 x i16> %index, <vscale x 1 x i1> %mask, i64 %vl)
ret void
}
declare void @llvm.riscv.vsuxseg8.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, i64)
declare void @llvm.riscv.vsuxseg8.mask.nxv1i64.nxv1i8(<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>,<vscale x 1 x i64>, i64*, <vscale x 1 x i8>, <vscale x 1 x i1>, i64)
define void @test_vsuxseg8_nxv1i64_nxv1i8