; blob: 131e279aa645ce7824deca5c2614a1298ad857f9 (git web-view residue; kept as a comment so the file parses as LLVM IR)
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-- -mattr=+sse2 | FileCheck %s --check-prefixes=SSE
; RUN: llc < %s -mtriple=i686-- -mattr=+avx | FileCheck %s --check-prefixes=AVX
; RUN: llc < %s -mtriple=i686-- -mcpu=x86-64-v4 | FileCheck %s --check-prefixes=AVX512
define zeroext i8 @oeq_f64_i32(double %x) nounwind readnone {
; Returns 1 iff %x survives an exact round-trip through i32
; (fptosi then sitofp reproduces %x). fcmp oeq is false for NaN.
; SSE-LABEL: oeq_f64_i32:
; SSE: # %bb.0: # %entry
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: cvttpd2dq %xmm0, %xmm1
; SSE-NEXT: cvtdq2pd %xmm1, %xmm1
; SSE-NEXT: cmpeqsd %xmm0, %xmm1
; SSE-NEXT: movd %xmm1, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: retl
;
; AVX-LABEL: oeq_f64_i32:
; AVX: # %bb.0: # %entry
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vcvttpd2dq %xmm0, %xmm1
; AVX-NEXT: vcvtdq2pd %xmm1, %xmm1
; AVX-NEXT: vcmpeqsd %xmm0, %xmm1, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: retl
;
; AVX512-LABEL: oeq_f64_i32:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttpd2dq %xmm0, %xmm1
; AVX512-NEXT: vcvtdq2pd %xmm1, %xmm1
; AVX512-NEXT: vcmpeqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: retl
entry:
%0 = fptosi double %x to i32 ; truncate toward zero
%1 = sitofp i32 %0 to double ; convert back
%2 = fcmp oeq double %1, %x ; ordered-equal: true iff round-trip is exact
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}
define zeroext i8 @oeq_f64_u32(double %x) nounwind readnone {
; Returns 1 iff %x survives an exact round-trip through unsigned i32
; (fptoui then uitofp reproduces %x). fcmp oeq is false for NaN.
; SSE-LABEL: oeq_f64_u32:
; SSE: # %bb.0: # %entry
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: cvttsd2si %xmm0, %eax
; SSE-NEXT: movl %eax, %ecx
; SSE-NEXT: sarl $31, %ecx
; SSE-NEXT: movapd %xmm0, %xmm1
; SSE-NEXT: subsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
; SSE-NEXT: cvttsd2si %xmm1, %edx
; SSE-NEXT: andl %ecx, %edx
; SSE-NEXT: orl %eax, %edx
; SSE-NEXT: movd %edx, %xmm1
; SSE-NEXT: por {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
; SSE-NEXT: subsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
; SSE-NEXT: cmpeqsd %xmm0, %xmm1
; SSE-NEXT: movd %xmm1, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: retl
;
; AVX-LABEL: oeq_f64_u32:
; AVX: # %bb.0: # %entry
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vcvttsd2si %xmm0, %eax
; AVX-NEXT: movl %eax, %ecx
; AVX-NEXT: sarl $31, %ecx
; AVX-NEXT: vsubsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm1
; AVX-NEXT: vcvttsd2si %xmm1, %edx
; AVX-NEXT: andl %ecx, %edx
; AVX-NEXT: orl %eax, %edx
; AVX-NEXT: vmovd %edx, %xmm1
; AVX-NEXT: vpor {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %xmm1
; AVX-NEXT: vsubsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %xmm1
; AVX-NEXT: vcmpeqsd %xmm0, %xmm1, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: retl
;
; AVX512-LABEL: oeq_f64_u32:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttsd2usi %xmm0, %eax
; AVX512-NEXT: vcvtusi2sd %eax, %xmm7, %xmm1
; AVX512-NEXT: vcmpeqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: retl
entry:
%0 = fptoui double %x to i32 ; truncate toward zero (unsigned)
%1 = uitofp i32 %0 to double ; convert back
%2 = fcmp oeq double %1, %x ; ordered-equal: true iff round-trip is exact
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}
define zeroext i8 @oeq_f64_i64(double %x) nounwind readnone {
; Returns 1 iff %x survives an exact round-trip through i64
; (fptosi then sitofp reproduces %x). fcmp oeq is false for NaN.
; On i686 without AVX512DQ this forces x87 fist/fild sequences.
; SSE-LABEL: oeq_f64_i64:
; SSE: # %bb.0: # %entry
; SSE-NEXT: pushl %ebp
; SSE-NEXT: movl %esp, %ebp
; SSE-NEXT: andl $-8, %esp
; SSE-NEXT: subl $32, %esp
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movsd %xmm0, {{[0-9]+}}(%esp)
; SSE-NEXT: fldl {{[0-9]+}}(%esp)
; SSE-NEXT: fnstcw {{[0-9]+}}(%esp)
; SSE-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; SSE-NEXT: orl $3072, %eax # imm = 0xC00
; SSE-NEXT: movw %ax, {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: fistpll {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: movsd {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: movlps %xmm1, {{[0-9]+}}(%esp)
; SSE-NEXT: fildll {{[0-9]+}}(%esp)
; SSE-NEXT: fstpl {{[0-9]+}}(%esp)
; SSE-NEXT: cmpeqsd {{[0-9]+}}(%esp), %xmm0
; SSE-NEXT: movd %xmm0, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: movl %ebp, %esp
; SSE-NEXT: popl %ebp
; SSE-NEXT: retl
;
; AVX-LABEL: oeq_f64_i64:
; AVX: # %bb.0: # %entry
; AVX-NEXT: pushl %ebp
; AVX-NEXT: movl %esp, %ebp
; AVX-NEXT: andl $-8, %esp
; AVX-NEXT: subl $24, %esp
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vmovsd %xmm0, (%esp)
; AVX-NEXT: fldl (%esp)
; AVX-NEXT: fisttpll (%esp)
; AVX-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
; AVX-NEXT: vmovlps %xmm1, {{[0-9]+}}(%esp)
; AVX-NEXT: fildll {{[0-9]+}}(%esp)
; AVX-NEXT: fstpl {{[0-9]+}}(%esp)
; AVX-NEXT: vcmpeqsd {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: movl %ebp, %esp
; AVX-NEXT: popl %ebp
; AVX-NEXT: retl
;
; AVX512-LABEL: oeq_f64_i64:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttpd2qq %xmm0, %xmm1
; AVX512-NEXT: vcvtqq2pd %ymm1, %ymm1
; AVX512-NEXT: vcmpeqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retl
entry:
%0 = fptosi double %x to i64 ; truncate toward zero
%1 = sitofp i64 %0 to double ; convert back
%2 = fcmp oeq double %1, %x ; ordered-equal: true iff round-trip is exact
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}
define zeroext i8 @oeq_f64_u64(double %x) nounwind readnone {
; Returns 1 iff %x survives an exact round-trip through unsigned i64
; (fptoui then uitofp reproduces %x). fcmp oeq is false for NaN.
; Without AVX512DQ the u64 conversions expand to branchy compare/subtract
; sequences around 2^63; AVX512 lowers directly to vcvttpd2uqq/vcvtuqq2pd.
; SSE-LABEL: oeq_f64_u64:
; SSE: # %bb.0: # %entry
; SSE-NEXT: pushl %ebp
; SSE-NEXT: movl %esp, %ebp
; SSE-NEXT: andl $-8, %esp
; SSE-NEXT: subl $16, %esp
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; SSE-NEXT: ucomisd %xmm0, %xmm1
; SSE-NEXT: jbe .LBB3_2
; SSE-NEXT: # %bb.1: # %entry
; SSE-NEXT: xorpd %xmm1, %xmm1
; SSE-NEXT: .LBB3_2: # %entry
; SSE-NEXT: movapd %xmm0, %xmm2
; SSE-NEXT: subsd %xmm1, %xmm2
; SSE-NEXT: movsd %xmm2, {{[0-9]+}}(%esp)
; SSE-NEXT: setbe %al
; SSE-NEXT: fldl {{[0-9]+}}(%esp)
; SSE-NEXT: fnstcw {{[0-9]+}}(%esp)
; SSE-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; SSE-NEXT: orl $3072, %ecx # imm = 0xC00
; SSE-NEXT: movw %cx, {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: fistpll {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: movzbl %al, %eax
; SSE-NEXT: shll $31, %eax
; SSE-NEXT: xorl {{[0-9]+}}(%esp), %eax
; SSE-NEXT: movd %eax, %xmm1
; SSE-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; SSE-NEXT: subpd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm2
; SSE-NEXT: movapd %xmm2, %xmm1
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; SSE-NEXT: addsd %xmm2, %xmm1
; SSE-NEXT: cmpeqsd %xmm0, %xmm1
; SSE-NEXT: movd %xmm1, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: movl %ebp, %esp
; SSE-NEXT: popl %ebp
; SSE-NEXT: retl
;
; AVX-LABEL: oeq_f64_u64:
; AVX: # %bb.0: # %entry
; AVX-NEXT: pushl %ebp
; AVX-NEXT: movl %esp, %ebp
; AVX-NEXT: andl $-8, %esp
; AVX-NEXT: subl $8, %esp
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vmovsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; AVX-NEXT: vucomisd %xmm0, %xmm1
; AVX-NEXT: jbe .LBB3_2
; AVX-NEXT: # %bb.1: # %entry
; AVX-NEXT: vxorpd %xmm1, %xmm1, %xmm1
; AVX-NEXT: .LBB3_2: # %entry
; AVX-NEXT: vsubsd %xmm1, %xmm0, %xmm1
; AVX-NEXT: vmovsd %xmm1, (%esp)
; AVX-NEXT: fldl (%esp)
; AVX-NEXT: fisttpll (%esp)
; AVX-NEXT: setbe %al
; AVX-NEXT: movzbl %al, %eax
; AVX-NEXT: shll $31, %eax
; AVX-NEXT: xorl {{[0-9]+}}(%esp), %eax
; AVX-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX-NEXT: vsubpd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %xmm1
; AVX-NEXT: vshufpd {{.*#+}} xmm2 = xmm1[1,0]
; AVX-NEXT: vaddsd %xmm1, %xmm2, %xmm1
; AVX-NEXT: vcmpeqsd %xmm0, %xmm1, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: movl %ebp, %esp
; AVX-NEXT: popl %ebp
; AVX-NEXT: retl
;
; AVX512-LABEL: oeq_f64_u64:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttpd2uqq %xmm0, %xmm1
; AVX512-NEXT: vcvtuqq2pd %ymm1, %ymm1
; AVX512-NEXT: vcmpeqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retl
entry:
%0 = fptoui double %x to i64 ; truncate toward zero (unsigned)
%1 = uitofp i64 %0 to double ; convert back
%2 = fcmp oeq double %1, %x ; ordered-equal: true iff round-trip is exact
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}
define zeroext i8 @une_f64_i32(double %x) nounwind readnone {
; Inverse of @oeq_f64_i32: returns 1 iff the i32 round-trip does NOT
; reproduce %x. fcmp une is unordered-not-equal, so it is true for NaN.
; SSE-LABEL: une_f64_i32:
; SSE: # %bb.0: # %entry
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: cvttpd2dq %xmm0, %xmm1
; SSE-NEXT: cvtdq2pd %xmm1, %xmm1
; SSE-NEXT: cmpneqsd %xmm0, %xmm1
; SSE-NEXT: movd %xmm1, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: retl
;
; AVX-LABEL: une_f64_i32:
; AVX: # %bb.0: # %entry
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vcvttpd2dq %xmm0, %xmm1
; AVX-NEXT: vcvtdq2pd %xmm1, %xmm1
; AVX-NEXT: vcmpneqsd %xmm0, %xmm1, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: retl
;
; AVX512-LABEL: une_f64_i32:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttpd2dq %xmm0, %xmm1
; AVX512-NEXT: vcvtdq2pd %xmm1, %xmm1
; AVX512-NEXT: vcmpneqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: retl
entry:
%0 = fptosi double %x to i32 ; truncate toward zero
%1 = sitofp i32 %0 to double ; convert back
%2 = fcmp une double %1, %x ; unordered-not-equal: true iff inexact or NaN
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}
define zeroext i8 @une_f64_u32(double %x) nounwind readnone {
; Inverse of @oeq_f64_u32: returns 1 iff the unsigned-i32 round-trip does
; NOT reproduce %x. fcmp une is unordered-not-equal, so it is true for NaN.
; SSE-LABEL: une_f64_u32:
; SSE: # %bb.0: # %entry
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: cvttsd2si %xmm0, %eax
; SSE-NEXT: movl %eax, %ecx
; SSE-NEXT: sarl $31, %ecx
; SSE-NEXT: movapd %xmm0, %xmm1
; SSE-NEXT: subsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
; SSE-NEXT: cvttsd2si %xmm1, %edx
; SSE-NEXT: andl %ecx, %edx
; SSE-NEXT: orl %eax, %edx
; SSE-NEXT: movd %edx, %xmm1
; SSE-NEXT: por {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
; SSE-NEXT: subsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
; SSE-NEXT: cmpneqsd %xmm0, %xmm1
; SSE-NEXT: movd %xmm1, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: retl
;
; AVX-LABEL: une_f64_u32:
; AVX: # %bb.0: # %entry
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vcvttsd2si %xmm0, %eax
; AVX-NEXT: movl %eax, %ecx
; AVX-NEXT: sarl $31, %ecx
; AVX-NEXT: vsubsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm1
; AVX-NEXT: vcvttsd2si %xmm1, %edx
; AVX-NEXT: andl %ecx, %edx
; AVX-NEXT: orl %eax, %edx
; AVX-NEXT: vmovd %edx, %xmm1
; AVX-NEXT: vpor {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %xmm1
; AVX-NEXT: vsubsd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %xmm1
; AVX-NEXT: vcmpneqsd %xmm0, %xmm1, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: retl
;
; AVX512-LABEL: une_f64_u32:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttsd2usi %xmm0, %eax
; AVX512-NEXT: vcvtusi2sd %eax, %xmm7, %xmm1
; AVX512-NEXT: vcmpneqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: retl
entry:
%0 = fptoui double %x to i32 ; truncate toward zero (unsigned)
%1 = uitofp i32 %0 to double ; convert back
%2 = fcmp une double %1, %x ; unordered-not-equal: true iff inexact or NaN
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}
define zeroext i8 @une_f64_i64(double %x) nounwind readnone {
; Inverse of @oeq_f64_i64: returns 1 iff the i64 round-trip does NOT
; reproduce %x. fcmp une is unordered-not-equal, so it is true for NaN.
; On i686 without AVX512DQ this forces x87 fist/fild sequences.
; SSE-LABEL: une_f64_i64:
; SSE: # %bb.0: # %entry
; SSE-NEXT: pushl %ebp
; SSE-NEXT: movl %esp, %ebp
; SSE-NEXT: andl $-8, %esp
; SSE-NEXT: subl $32, %esp
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movsd %xmm0, {{[0-9]+}}(%esp)
; SSE-NEXT: fldl {{[0-9]+}}(%esp)
; SSE-NEXT: fnstcw {{[0-9]+}}(%esp)
; SSE-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; SSE-NEXT: orl $3072, %eax # imm = 0xC00
; SSE-NEXT: movw %ax, {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: fistpll {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: movsd {{.*#+}} xmm1 = mem[0],zero
; SSE-NEXT: movlps %xmm1, {{[0-9]+}}(%esp)
; SSE-NEXT: fildll {{[0-9]+}}(%esp)
; SSE-NEXT: fstpl {{[0-9]+}}(%esp)
; SSE-NEXT: cmpneqsd {{[0-9]+}}(%esp), %xmm0
; SSE-NEXT: movd %xmm0, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: movl %ebp, %esp
; SSE-NEXT: popl %ebp
; SSE-NEXT: retl
;
; AVX-LABEL: une_f64_i64:
; AVX: # %bb.0: # %entry
; AVX-NEXT: pushl %ebp
; AVX-NEXT: movl %esp, %ebp
; AVX-NEXT: andl $-8, %esp
; AVX-NEXT: subl $24, %esp
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vmovsd %xmm0, (%esp)
; AVX-NEXT: fldl (%esp)
; AVX-NEXT: fisttpll (%esp)
; AVX-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
; AVX-NEXT: vmovlps %xmm1, {{[0-9]+}}(%esp)
; AVX-NEXT: fildll {{[0-9]+}}(%esp)
; AVX-NEXT: fstpl {{[0-9]+}}(%esp)
; AVX-NEXT: vcmpneqsd {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: movl %ebp, %esp
; AVX-NEXT: popl %ebp
; AVX-NEXT: retl
;
; AVX512-LABEL: une_f64_i64:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttpd2qq %xmm0, %xmm1
; AVX512-NEXT: vcvtqq2pd %ymm1, %ymm1
; AVX512-NEXT: vcmpneqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retl
entry:
%0 = fptosi double %x to i64 ; truncate toward zero
%1 = sitofp i64 %0 to double ; convert back
%2 = fcmp une double %1, %x ; unordered-not-equal: true iff inexact or NaN
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}
define zeroext i8 @une_f64_u64(double %x) nounwind readnone {
; Inverse of @oeq_f64_u64: returns 1 iff the unsigned-i64 round-trip does
; NOT reproduce %x. fcmp une is unordered-not-equal, so it is true for NaN.
; Without AVX512DQ the u64 conversions expand to branchy compare/subtract
; sequences around 2^63; AVX512 lowers directly to vcvttpd2uqq/vcvtuqq2pd.
; SSE-LABEL: une_f64_u64:
; SSE: # %bb.0: # %entry
; SSE-NEXT: pushl %ebp
; SSE-NEXT: movl %esp, %ebp
; SSE-NEXT: andl $-8, %esp
; SSE-NEXT: subl $16, %esp
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: movsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; SSE-NEXT: ucomisd %xmm0, %xmm1
; SSE-NEXT: jbe .LBB7_2
; SSE-NEXT: # %bb.1: # %entry
; SSE-NEXT: xorpd %xmm1, %xmm1
; SSE-NEXT: .LBB7_2: # %entry
; SSE-NEXT: movapd %xmm0, %xmm2
; SSE-NEXT: subsd %xmm1, %xmm2
; SSE-NEXT: movsd %xmm2, {{[0-9]+}}(%esp)
; SSE-NEXT: setbe %al
; SSE-NEXT: fldl {{[0-9]+}}(%esp)
; SSE-NEXT: fnstcw {{[0-9]+}}(%esp)
; SSE-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; SSE-NEXT: orl $3072, %ecx # imm = 0xC00
; SSE-NEXT: movw %cx, {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: fistpll {{[0-9]+}}(%esp)
; SSE-NEXT: fldcw {{[0-9]+}}(%esp)
; SSE-NEXT: movzbl %al, %eax
; SSE-NEXT: shll $31, %eax
; SSE-NEXT: xorl {{[0-9]+}}(%esp), %eax
; SSE-NEXT: movd %eax, %xmm1
; SSE-NEXT: movd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; SSE-NEXT: punpckldq {{.*#+}} xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
; SSE-NEXT: subpd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm2
; SSE-NEXT: movapd %xmm2, %xmm1
; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1],xmm2[1]
; SSE-NEXT: addsd %xmm2, %xmm1
; SSE-NEXT: cmpneqsd %xmm0, %xmm1
; SSE-NEXT: movd %xmm1, %eax
; SSE-NEXT: andl $1, %eax
; SSE-NEXT: # kill: def $al killed $al killed $eax
; SSE-NEXT: movl %ebp, %esp
; SSE-NEXT: popl %ebp
; SSE-NEXT: retl
;
; AVX-LABEL: une_f64_u64:
; AVX: # %bb.0: # %entry
; AVX-NEXT: pushl %ebp
; AVX-NEXT: movl %esp, %ebp
; AVX-NEXT: andl $-8, %esp
; AVX-NEXT: subl $8, %esp
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: vmovsd {{.*#+}} xmm1 = [9.2233720368547758E+18,0.0E+0]
; AVX-NEXT: vucomisd %xmm0, %xmm1
; AVX-NEXT: jbe .LBB7_2
; AVX-NEXT: # %bb.1: # %entry
; AVX-NEXT: vxorpd %xmm1, %xmm1, %xmm1
; AVX-NEXT: .LBB7_2: # %entry
; AVX-NEXT: vsubsd %xmm1, %xmm0, %xmm1
; AVX-NEXT: vmovsd %xmm1, (%esp)
; AVX-NEXT: fldl (%esp)
; AVX-NEXT: fisttpll (%esp)
; AVX-NEXT: setbe %al
; AVX-NEXT: movzbl %al, %eax
; AVX-NEXT: shll $31, %eax
; AVX-NEXT: xorl {{[0-9]+}}(%esp), %eax
; AVX-NEXT: vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX-NEXT: vpinsrd $1, %eax, %xmm1, %xmm1
; AVX-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm1[0],mem[0],xmm1[1],mem[1]
; AVX-NEXT: vsubpd {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %xmm1
; AVX-NEXT: vshufpd {{.*#+}} xmm2 = xmm1[1,0]
; AVX-NEXT: vaddsd %xmm1, %xmm2, %xmm1
; AVX-NEXT: vcmpneqsd %xmm0, %xmm1, %xmm0
; AVX-NEXT: vmovd %xmm0, %eax
; AVX-NEXT: andl $1, %eax
; AVX-NEXT: # kill: def $al killed $al killed $eax
; AVX-NEXT: movl %ebp, %esp
; AVX-NEXT: popl %ebp
; AVX-NEXT: retl
;
; AVX512-LABEL: une_f64_u64:
; AVX512: # %bb.0: # %entry
; AVX512-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX512-NEXT: vcvttpd2uqq %xmm0, %xmm1
; AVX512-NEXT: vcvtuqq2pd %ymm1, %ymm1
; AVX512-NEXT: vcmpneqsd %xmm0, %xmm1, %k0
; AVX512-NEXT: kmovd %k0, %eax
; AVX512-NEXT: # kill: def $al killed $al killed $eax
; AVX512-NEXT: vzeroupper
; AVX512-NEXT: retl
entry:
%0 = fptoui double %x to i64 ; truncate toward zero (unsigned)
%1 = uitofp i64 %0 to double ; convert back
%2 = fcmp une double %1, %x ; unordered-not-equal: true iff inexact or NaN
%retval12 = zext i1 %2 to i8
ret i8 %retval12
}