; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=x86_64-linux -mcpu=skylake-avx512 < %s | FileCheck %s
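
; Check how the element stride implied by the GEP's array element type is
; folded into the memory operand of AVX-512 masked gathers and scatters.
; Strides of 1, 4 and 8 are valid VSIB scales and are encoded directly;
; power-of-two strides larger than 8 (16, 512) are folded into a left shift
; of the index vector; the non-power-of-two stride 3 is materialized with
; vpaddq, and the operation then uses scale 1.

; Stride 512 is not a valid VSIB scale, so the index is pre-scaled with a
; left shift by 9 (512 = 2^9) and the scatter uses scale 1.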
define void @scatter_scale_512(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: scatter_scale_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpsllq $9, %ymm0, %ymm0
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vscatterqpd %ymm1, (%rdi,%ymm0) {%k1}
; CHECK-NEXT: vzeroupper
; CHECK-NEXT: retq
%gep = getelementptr inbounds [512 x i8], ptr %result, <4 x i64> %idx
call void @llvm.masked.scatter.v4f64.v4p0(<4 x double> zeroinitializer, <4 x ptr> %gep, i32 0, <4 x i1> %mask)
ret void
}
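
; Stride 16 is not a valid VSIB scale, so the index is pre-scaled with a
; left shift by 4 (16 = 2^4).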
define void @scatter_scale_16(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: scatter_scale_16:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpsllq $4, %ymm0, %ymm0
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vscatterqpd %ymm1, (%rdi,%ymm0) {%k1}
; CHECK-NEXT: vzeroupper
; CHECK-NEXT: retq
%gep = getelementptr inbounds [16 x i8], ptr %result, <4 x i64> %idx
call void @llvm.masked.scatter.v4f64.v4p0(<4 x double> zeroinitializer, <4 x ptr> %gep, i32 0, <4 x i1> %mask)
ret void
}
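
; Stride 8 is a valid VSIB scale and is encoded directly in the memory operand.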
define void @scatter_scale_8(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: scatter_scale_8:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vscatterqpd %ymm1, (%rdi,%ymm0,8) {%k1}
; CHECK-NEXT: vzeroupper
; CHECK-NEXT: retq
%gep = getelementptr inbounds [8 x i8], ptr %result, <4 x i64> %idx
call void @llvm.masked.scatter.v4f64.v4p0(<4 x double> zeroinitializer, <4 x ptr> %gep, i32 0, <4 x i1> %mask)
ret void
}
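
; Stride 4 is a valid VSIB scale and is encoded directly in the memory operand.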
define void @scatter_scale_4(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: scatter_scale_4:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vscatterqpd %ymm1, (%rdi,%ymm0,4) {%k1}
; CHECK-NEXT: vzeroupper
; CHECK-NEXT: retq
%gep = getelementptr inbounds [4 x i8], ptr %result, <4 x i64> %idx
call void @llvm.masked.scatter.v4f64.v4p0(<4 x double> zeroinitializer, <4 x ptr> %gep, i32 0, <4 x i1> %mask)
ret void
}
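
; Stride 3 is not a power of two; 3*idx is built with two vpaddq and the
; scatter uses scale 1.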
define void @scatter_scale_3(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: scatter_scale_3:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpaddq %ymm0, %ymm0, %ymm1
; CHECK-NEXT: vpaddq %ymm0, %ymm1, %ymm0
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vscatterqpd %ymm1, (%rdi,%ymm0) {%k1}
; CHECK-NEXT: vzeroupper
; CHECK-NEXT: retq
%gep = getelementptr inbounds [3 x i8], ptr %result, <4 x i64> %idx
call void @llvm.masked.scatter.v4f64.v4p0(<4 x double> zeroinitializer, <4 x ptr> %gep, i32 0, <4 x i1> %mask)
ret void
}
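
; Stride 1 maps directly to scale 1 with no index adjustment.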
define void @scatter_scale_1(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: scatter_scale_1:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vscatterqpd %ymm1, (%rdi,%ymm0) {%k1}
; CHECK-NEXT: vzeroupper
; CHECK-NEXT: retq
%gep = getelementptr inbounds [1 x i8], ptr %result, <4 x i64> %idx
call void @llvm.masked.scatter.v4f64.v4p0(<4 x double> zeroinitializer, <4 x ptr> %gep, i32 0, <4 x i1> %mask)
ret void
}
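
; The gather cases below mirror the scatter cases above.

; Stride 512: index pre-shifted left by 9, gather uses scale 1.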
define <4 x double> @gather_scale_512(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: gather_scale_512:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpsllq $9, %ymm0, %ymm1
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0
; CHECK-NEXT: vgatherqpd (%rdi,%ymm1), %ymm0 {%k1}
; CHECK-NEXT: retq
%gep = getelementptr inbounds [512 x i8], ptr %result, <4 x i64> %idx
%res = call <4 x double> @llvm.masked.gather.v4f64.v4p0(<4 x ptr> %gep, i32 0, <4 x i1> %mask, <4 x double> zeroinitializer)
ret <4 x double> %res
}
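
; Stride 16: index pre-shifted left by 4.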
define <4 x double> @gather_scale_16(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: gather_scale_16:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpsllq $4, %ymm0, %ymm1
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0
; CHECK-NEXT: vgatherqpd (%rdi,%ymm1), %ymm0 {%k1}
; CHECK-NEXT: retq
%gep = getelementptr inbounds [16 x i8], ptr %result, <4 x i64> %idx
%res = call <4 x double> @llvm.masked.gather.v4f64.v4p0(<4 x ptr> %gep, i32 0, <4 x i1> %mask, <4 x double> zeroinitializer)
ret <4 x double> %res
}
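
; Stride 8: encoded directly as the VSIB scale.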
define <4 x double> @gather_scale_8(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: gather_scale_8:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vgatherqpd (%rdi,%ymm0,8), %ymm1 {%k1}
; CHECK-NEXT: vmovapd %ymm1, %ymm0
; CHECK-NEXT: retq
%gep = getelementptr inbounds [8 x i8], ptr %result, <4 x i64> %idx
%res = call <4 x double> @llvm.masked.gather.v4f64.v4p0(<4 x ptr> %gep, i32 0, <4 x i1> %mask, <4 x double> zeroinitializer)
ret <4 x double> %res
}
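
; Stride 4: encoded directly as the VSIB scale.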
define <4 x double> @gather_scale_4(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: gather_scale_4:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vgatherqpd (%rdi,%ymm0,4), %ymm1 {%k1}
; CHECK-NEXT: vmovapd %ymm1, %ymm0
; CHECK-NEXT: retq
%gep = getelementptr inbounds [4 x i8], ptr %result, <4 x i64> %idx
%res = call <4 x double> @llvm.masked.gather.v4f64.v4p0(<4 x ptr> %gep, i32 0, <4 x i1> %mask, <4 x double> zeroinitializer)
ret <4 x double> %res
}
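
; Stride 3: 3*idx materialized with two vpaddq, gather uses scale 1.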
define <4 x double> @gather_scale_3(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: gather_scale_3:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpaddq %ymm0, %ymm0, %ymm1
; CHECK-NEXT: vpaddq %ymm0, %ymm1, %ymm1
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0
; CHECK-NEXT: vgatherqpd (%rdi,%ymm1), %ymm0 {%k1}
; CHECK-NEXT: retq
%gep = getelementptr inbounds [3 x i8], ptr %result, <4 x i64> %idx
%res = call <4 x double> @llvm.masked.gather.v4f64.v4p0(<4 x ptr> %gep, i32 0, <4 x i1> %mask, <4 x double> zeroinitializer)
ret <4 x double> %res
}
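
; Stride 1: scale 1 with no index adjustment.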
define <4 x double> @gather_scale_1(ptr %result, <4 x i64> %idx, <4 x i1> %mask) {
; CHECK-LABEL: gather_scale_1:
; CHECK: # %bb.0:
; CHECK-NEXT: vpslld $31, %xmm1, %xmm1
; CHECK-NEXT: vpmovd2m %xmm1, %k1
; CHECK-NEXT: vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT: vgatherqpd (%rdi,%ymm0), %ymm1 {%k1}
; CHECK-NEXT: vmovapd %ymm1, %ymm0
; CHECK-NEXT: retq
%gep = getelementptr inbounds [1 x i8], ptr %result, <4 x i64> %idx
%res = call <4 x double> @llvm.masked.gather.v4f64.v4p0(<4 x ptr> %gep, i32 0, <4 x i1> %mask, <4 x double> zeroinitializer)
ret <4 x double> %res
}

declare void @llvm.masked.scatter.v4f64.v4p0(<4 x double>, <4 x ptr>, i32 immarg, <4 x i1>)
declare <4 x double> @llvm.masked.gather.v4f64.v4p0(<4 x ptr>, i32 immarg, <4 x i1>, <4 x double>)