; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=avx512fp16,avx512vl -O3 | FileCheck %s --check-prefixes=CHECK
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=avx512fp16,avx512vl -O3 | FileCheck %s --check-prefixes=CHECK
| |
| |
| declare <4 x i64> @llvm.experimental.constrained.fptosi.v4i64.v4f16(<4 x half>, metadata) |
| declare <4 x i64> @llvm.experimental.constrained.fptoui.v4i64.v4f16(<4 x half>, metadata) |
| declare <8 x i32> @llvm.experimental.constrained.fptosi.v8i32.v8f16(<8 x half>, metadata) |
| declare <8 x i32> @llvm.experimental.constrained.fptoui.v8i32.v8f16(<8 x half>, metadata) |
| declare <16 x i16> @llvm.experimental.constrained.fptosi.v16i16.v16f16(<16 x half>, metadata) |
| declare <16 x i16> @llvm.experimental.constrained.fptoui.v16i16.v16f16(<16 x half>, metadata) |
| declare <16 x i8> @llvm.experimental.constrained.fptosi.v16i8.v16f16(<16 x half>, metadata) |
| declare <16 x i8> @llvm.experimental.constrained.fptoui.v16i8.v16f16(<16 x half>, metadata) |
| declare <16 x i1> @llvm.experimental.constrained.fptosi.v16i1.v16f16(<16 x half>, metadata) |
| declare <16 x i1> @llvm.experimental.constrained.fptoui.v16i1.v16f16(<16 x half>, metadata) |
| |
| define <4 x i64> @strict_vector_fptosi_v4f16_to_v4i64(<4 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptosi_v4f16_to_v4i64: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vmovq {{.*#+}} xmm0 = xmm0[0],zero |
| ; CHECK-NEXT: vcvttph2qq %xmm0, %ymm0 |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <4 x i64> @llvm.experimental.constrained.fptosi.v4i64.v4f16(<4 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <4 x i64> %ret |
| } |
| |
| define <4 x i64> @strict_vector_fptoui_v4f16_to_v4i64(<4 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptoui_v4f16_to_v4i64: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vmovq {{.*#+}} xmm0 = xmm0[0],zero |
| ; CHECK-NEXT: vcvttph2uqq %xmm0, %ymm0 |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <4 x i64> @llvm.experimental.constrained.fptoui.v4i64.v4f16(<4 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <4 x i64> %ret |
| } |
| |
| define <8 x i32> @strict_vector_fptosi_v8f16_to_v8i32(<8 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptosi_v8f16_to_v8i32: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2dq %xmm0, %ymm0 |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <8 x i32> @llvm.experimental.constrained.fptosi.v8i32.v8f16(<8 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <8 x i32> %ret |
| } |
| |
| define <8 x i32> @strict_vector_fptoui_v8f16_to_v8i32(<8 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptoui_v8f16_to_v8i32: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2udq %xmm0, %ymm0 |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <8 x i32> @llvm.experimental.constrained.fptoui.v8i32.v8f16(<8 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <8 x i32> %ret |
| } |
| |
| define <16 x i16> @strict_vector_fptosi_v16f16_to_v16i16(<16 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptosi_v16f16_to_v16i16: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2dq %ymm0, %zmm0 |
| ; CHECK-NEXT: vpmovdw %zmm0, %ymm0 |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <16 x i16> @llvm.experimental.constrained.fptosi.v16i16.v16f16(<16 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <16 x i16> %ret |
| } |
| |
| define <16 x i16> @strict_vector_fptoui_v16f16_to_v16i16(<16 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptoui_v16f16_to_v16i16: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2dq %ymm0, %zmm0 |
| ; CHECK-NEXT: vpmovdw %zmm0, %ymm0 |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <16 x i16> @llvm.experimental.constrained.fptoui.v16i16.v16f16(<16 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <16 x i16> %ret |
| } |
| |
| define <16 x i8> @strict_vector_fptosi_v16f16_to_v16i8(<16 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptosi_v16f16_to_v16i8: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2dq %ymm0, %zmm0 |
| ; CHECK-NEXT: vpmovdb %zmm0, %xmm0 |
| ; CHECK-NEXT: vzeroupper |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <16 x i8> @llvm.experimental.constrained.fptosi.v16i8.v16f16(<16 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <16 x i8> %ret |
| } |
| |
| define <16 x i8> @strict_vector_fptoui_v16f16_to_v16i8(<16 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptoui_v16f16_to_v16i8: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2dq %ymm0, %zmm0 |
| ; CHECK-NEXT: vpmovdb %zmm0, %xmm0 |
| ; CHECK-NEXT: vzeroupper |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <16 x i8> @llvm.experimental.constrained.fptoui.v16i8.v16f16(<16 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <16 x i8> %ret |
| } |
| |
| define <16 x i1> @strict_vector_fptosi_v16f16_to_v16i1(<16 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptosi_v16f16_to_v16i1: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2dq %ymm0, %zmm0 |
| ; CHECK-NEXT: vpmovd2m %zmm0, %k0 |
| ; CHECK-NEXT: vpmovm2b %k0, %xmm0 |
| ; CHECK-NEXT: vzeroupper |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <16 x i1> @llvm.experimental.constrained.fptosi.v16i1.v16f16(<16 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <16 x i1> %ret |
| } |
| |
| define <16 x i1> @strict_vector_fptoui_v16f16_to_v16i1(<16 x half> %a) #0 { |
| ; CHECK-LABEL: strict_vector_fptoui_v16f16_to_v16i1: |
| ; CHECK: # %bb.0: |
| ; CHECK-NEXT: vcvttph2dq %ymm0, %zmm0 |
| ; CHECK-NEXT: vpslld $31, %zmm0, %zmm0 |
| ; CHECK-NEXT: vpmovd2m %zmm0, %k0 |
| ; CHECK-NEXT: vpmovm2b %k0, %xmm0 |
| ; CHECK-NEXT: vzeroupper |
| ; CHECK-NEXT: ret{{[l|q]}} |
| %ret = call <16 x i1> @llvm.experimental.constrained.fptoui.v16i1.v16f16(<16 x half> %a, |
| metadata !"fpexcept.strict") #0 |
| ret <16 x i1> %ret |
| } |
| |
| attributes #0 = { strictfp } |