; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc < %s -mtriple=i686-- -mattr=-bmi,+sse2 | FileCheck %s --check-prefixes=X86,X86-NOBMI
; RUN: llc < %s -mtriple=i686-- -mattr=+bmi,+sse2 | FileCheck %s --check-prefixes=X86,X86-BMI
; RUN: llc < %s -mtriple=x86_64-- -mattr=-bmi | FileCheck %s --check-prefixes=X64,X64-NOBMI
; RUN: llc < %s -mtriple=x86_64-- -mattr=+bmi | FileCheck %s --check-prefixes=X64,X64-BMI
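
; Check that (and (xor x, (sub 0, x)), y) lowers to BLSMSK + ANDN when BMI is
; available: since -x == ~(x - 1), x ^ -x == ~(x ^ (x - 1)) == ~blsmsk(x), so
; the whole expression is andn(blsmsk(x), y).
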
declare void @use(i32)
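
; Base pattern: and (xor x, (sub 0, x)), y.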
define i32 @fold_and_xor_neg_v1_32(i32 %x, i32 %y) nounwind {
; X86-NOBMI-LABEL: fold_and_xor_neg_v1_32:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT: movl %ecx, %eax
; X86-NOBMI-NEXT: negl %eax
; X86-NOBMI-NEXT: xorl %ecx, %eax
; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: retl
;
; X86-BMI-LABEL: fold_and_xor_neg_v1_32:
; X86-BMI: # %bb.0:
; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT: retl
;
; X64-NOBMI-LABEL: fold_and_xor_neg_v1_32:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %edi, %eax
; X64-NOBMI-NEXT: negl %eax
; X64-NOBMI-NEXT: xorl %edi, %eax
; X64-NOBMI-NEXT: andl %esi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI-LABEL: fold_and_xor_neg_v1_32:
; X64-BMI: # %bb.0:
; X64-BMI-NEXT: blsmskl %edi, %eax
; X64-BMI-NEXT: andnl %esi, %eax, %eax
; X64-BMI-NEXT: retq
%neg = sub i32 0, %x
%xor = xor i32 %x, %neg
%and = and i32 %xor, %y
ret i32 %and
}
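
; Same fold with the operands of the and commuted.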
define i32 @fold_and_xor_neg_v2_32(i32 %x, i32 %y) nounwind {
; X86-NOBMI-LABEL: fold_and_xor_neg_v2_32:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT: movl %ecx, %eax
; X86-NOBMI-NEXT: negl %eax
; X86-NOBMI-NEXT: xorl %ecx, %eax
; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: retl
;
; X86-BMI-LABEL: fold_and_xor_neg_v2_32:
; X86-BMI: # %bb.0:
; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT: retl
;
; X64-NOBMI-LABEL: fold_and_xor_neg_v2_32:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %edi, %eax
; X64-NOBMI-NEXT: negl %eax
; X64-NOBMI-NEXT: xorl %edi, %eax
; X64-NOBMI-NEXT: andl %esi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI-LABEL: fold_and_xor_neg_v2_32:
; X64-BMI: # %bb.0:
; X64-BMI-NEXT: blsmskl %edi, %eax
; X64-BMI-NEXT: andnl %esi, %eax, %eax
; X64-BMI-NEXT: retq
%neg = sub i32 0, %x
%xor = xor i32 %x, %neg
%and = and i32 %y, %xor
ret i32 %and
}
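
; Same fold with the operands of the xor commuted.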
define i32 @fold_and_xor_neg_v3_32(i32 %x, i32 %y) nounwind {
; X86-NOBMI-LABEL: fold_and_xor_neg_v3_32:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT: movl %ecx, %eax
; X86-NOBMI-NEXT: negl %eax
; X86-NOBMI-NEXT: xorl %ecx, %eax
; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: retl
;
; X86-BMI-LABEL: fold_and_xor_neg_v3_32:
; X86-BMI: # %bb.0:
; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT: retl
;
; X64-NOBMI-LABEL: fold_and_xor_neg_v3_32:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %edi, %eax
; X64-NOBMI-NEXT: negl %eax
; X64-NOBMI-NEXT: xorl %edi, %eax
; X64-NOBMI-NEXT: andl %esi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI-LABEL: fold_and_xor_neg_v3_32:
; X64-BMI: # %bb.0:
; X64-BMI-NEXT: blsmskl %edi, %eax
; X64-BMI-NEXT: andnl %esi, %eax, %eax
; X64-BMI-NEXT: retq
%neg = sub i32 0, %x
%xor = xor i32 %neg, %x
%and = and i32 %xor, %y
ret i32 %and
}
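
; Same fold with the operands of both the xor and the and commuted.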
define i32 @fold_and_xor_neg_v4_32(i32 %x, i32 %y) nounwind {
; X86-NOBMI-LABEL: fold_and_xor_neg_v4_32:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT: movl %ecx, %eax
; X86-NOBMI-NEXT: negl %eax
; X86-NOBMI-NEXT: xorl %ecx, %eax
; X86-NOBMI-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: retl
;
; X86-BMI-LABEL: fold_and_xor_neg_v4_32:
; X86-BMI: # %bb.0:
; X86-BMI-NEXT: blsmskl {{[0-9]+}}(%esp), %eax
; X86-BMI-NEXT: andnl {{[0-9]+}}(%esp), %eax, %eax
; X86-BMI-NEXT: retl
;
; X64-NOBMI-LABEL: fold_and_xor_neg_v4_32:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %edi, %eax
; X64-NOBMI-NEXT: negl %eax
; X64-NOBMI-NEXT: xorl %edi, %eax
; X64-NOBMI-NEXT: andl %esi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI-LABEL: fold_and_xor_neg_v4_32:
; X64-BMI: # %bb.0:
; X64-BMI-NEXT: blsmskl %edi, %eax
; X64-BMI-NEXT: andnl %esi, %eax, %eax
; X64-BMI-NEXT: retq
%neg = sub i32 0, %x
%xor = xor i32 %neg, %x
%and = and i32 %y, %xor
ret i32 %and
}
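
; 64-bit variant. On i686 the i64 is legalized as two 32-bit halves, so there
; is no single blsmsk; only x86-64 with BMI folds to blsmskq + andnq.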
define i64 @fold_and_xor_neg_v1_64(i64 %x, i64 %y) nounwind {
; X86-LABEL: fold_and_xor_neg_v1_64:
; X86: # %bb.0:
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: xorl %edx, %edx
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: negl %eax
; X86-NEXT: sbbl %esi, %edx
; X86-NEXT: xorl %esi, %edx
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %edx
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: popl %esi
; X86-NEXT: retl
;
; X64-NOBMI-LABEL: fold_and_xor_neg_v1_64:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movq %rdi, %rax
; X64-NOBMI-NEXT: negq %rax
; X64-NOBMI-NEXT: xorq %rdi, %rax
; X64-NOBMI-NEXT: andq %rsi, %rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI-LABEL: fold_and_xor_neg_v1_64:
; X64-BMI: # %bb.0:
; X64-BMI-NEXT: blsmskq %rdi, %rax
; X64-BMI-NEXT: andnq %rsi, %rax, %rax
; X64-BMI-NEXT: retq
%neg = sub i64 0, %x
%xor = xor i64 %x, %neg
%and = and i64 %xor, %y
ret i64 %and
}

; Negative test: BLSMSK has only 32-bit and 64-bit forms, so an i16 mask is
; not folded.
define i16 @fold_and_xor_neg_v1_16_negative(i16 %x, i16 %y) nounwind {
; X86-LABEL: fold_and_xor_neg_v1_16_negative:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %eax
; X86-NEXT: negl %eax
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: andw {{[0-9]+}}(%esp), %ax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: fold_and_xor_neg_v1_16_negative:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: negl %eax
; X64-NEXT: xorl %edi, %eax
; X64-NEXT: andl %esi, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
%neg = sub i16 0, %x
%xor = xor i16 %x, %neg
%and = and i16 %xor, %y
ret i16 %and
}

; Negative test: BLSMSK is a scalar-only instruction; the vector pattern is
; left as psubd/pxor/pand.
define <4 x i32> @fold_and_xor_neg_v1_v4x32_negative(<4 x i32> %x, <4 x i32> %y) nounwind {
; X86-LABEL: fold_and_xor_neg_v1_v4x32_negative:
; X86: # %bb.0:
; X86-NEXT: pxor %xmm2, %xmm2
; X86-NEXT: psubd %xmm0, %xmm2
; X86-NEXT: pxor %xmm2, %xmm0
; X86-NEXT: pand %xmm1, %xmm0
; X86-NEXT: retl
;
; X64-LABEL: fold_and_xor_neg_v1_v4x32_negative:
; X64: # %bb.0:
; X64-NEXT: pxor %xmm2, %xmm2
; X64-NEXT: psubd %xmm0, %xmm2
; X64-NEXT: pxor %xmm2, %xmm0
; X64-NEXT: pand %xmm1, %xmm0
; X64-NEXT: retq
%neg = sub <4 x i32> zeroinitializer, %x
%xor = xor <4 x i32> %x, %neg
%and = and <4 x i32> %xor, %y
ret <4 x i32> %and
}

; Negative test: the xor has a second use, so it must be kept and the fold is
; not profitable.
define i32 @fold_and_xor_neg_v1_32_two_uses_xor_negative(i32 %x, i32 %y) nounwind {
; X86-LABEL: fold_and_xor_neg_v1_32_two_uses_xor_negative:
; X86: # %bb.0:
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, %ecx
; X86-NEXT: negl %ecx
; X86-NEXT: xorl %eax, %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: andl %ecx, %esi
; X86-NEXT: pushl %ecx
; X86-NEXT: calll use@PLT
; X86-NEXT: addl $4, %esp
; X86-NEXT: movl %esi, %eax
; X86-NEXT: popl %esi
; X86-NEXT: retl
;
; X64-LABEL: fold_and_xor_neg_v1_32_two_uses_xor_negative:
; X64: # %bb.0:
; X64-NEXT: pushq %rbx
; X64-NEXT: movl %esi, %ebx
; X64-NEXT: movl %edi, %eax
; X64-NEXT: negl %eax
; X64-NEXT: xorl %eax, %edi
; X64-NEXT: andl %edi, %ebx
; X64-NEXT: callq use@PLT
; X64-NEXT: movl %ebx, %eax
; X64-NEXT: popq %rbx
; X64-NEXT: retq
%neg = sub i32 0, %x
%xor = xor i32 %x, %neg
%and = and i32 %xor, %y
call void @use(i32 %xor)
ret i32 %and
}

; Negative test: the neg has a second use, so it must be kept and the fold is
; not profitable.
define i32 @fold_and_xor_neg_v1_32_two_uses_sub_negative(i32 %x, i32 %y) nounwind {
; X86-LABEL: fold_and_xor_neg_v1_32_two_uses_sub_negative:
; X86: # %bb.0:
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl %esi, %eax
; X86-NEXT: negl %eax
; X86-NEXT: xorl %eax, %esi
; X86-NEXT: andl {{[0-9]+}}(%esp), %esi
; X86-NEXT: pushl %eax
; X86-NEXT: calll use@PLT
; X86-NEXT: addl $4, %esp
; X86-NEXT: movl %esi, %eax
; X86-NEXT: popl %esi
; X86-NEXT: retl
;
; X64-LABEL: fold_and_xor_neg_v1_32_two_uses_sub_negative:
; X64: # %bb.0:
; X64-NEXT: pushq %rbx
; X64-NEXT: movl %edi, %ebx
; X64-NEXT: negl %edi
; X64-NEXT: xorl %edi, %ebx
; X64-NEXT: andl %esi, %ebx
; X64-NEXT: callq use@PLT
; X64-NEXT: movl %ebx, %eax
; X64-NEXT: popq %rbx
; X64-NEXT: retq
%neg = sub i32 0, %x
%xor = xor i32 %x, %neg
%and = and i32 %xor, %y
call void @use(i32 %neg)
ret i32 %and
}

; Negative test: the neg and the xor take different operands (-z vs. x), so
; this is not x ^ -x and no blsmsk can be formed.
define i32 @fold_and_xor_neg_v1_32_no_blsmsk_negative(i32 %x, i32 %y, i32 %z) nounwind {
; X86-LABEL: fold_and_xor_neg_v1_32_no_blsmsk_negative:
; X86: # %bb.0:
; X86-NEXT: xorl %eax, %eax
; X86-NEXT: subl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax
; X86-NEXT: andl {{[0-9]+}}(%esp), %eax
; X86-NEXT: retl
;
; X64-LABEL: fold_and_xor_neg_v1_32_no_blsmsk_negative:
; X64: # %bb.0:
; X64-NEXT: movl %edx, %eax
; X64-NEXT: negl %eax
; X64-NEXT: xorl %edi, %eax
; X64-NEXT: andl %esi, %eax
; X64-NEXT: retq
%neg = sub i32 0, %z
%xor = xor i32 %x, %neg
%and = and i32 %xor, %y
ret i32 %and
}