| ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py |
| ; RUN: llc < %s -mtriple=i686-unknown | FileCheck %s --check-prefix=X86 |
| ; RUN: llc < %s -mtriple=x86_64-unknown | FileCheck %s --check-prefix=X64 |
| ; rdar://7367229 |
| |
; Checks that (a & 0x4000) != 0  XOR  (b & 0x4000) != 0 is folded so the two
; bit-tests become a single xor of the inputs followed by one test of bit 14
; (testb $64 on the high byte for X86, testl $16384 for X64), driving a
; conditional tail call to @foo or @bar.
define i32 @t(i32 %a, i32 %b) nounwind ssp {
; X86-LABEL: t:
; X86: # %bb.0: # %entry
; X86-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-NEXT: xorb {{[0-9]+}}(%esp), %al
; X86-NEXT: testb $64, %al
; X86-NEXT: je .LBB0_1
; X86-NEXT: # %bb.2: # %bb1
; X86-NEXT: jmp bar # TAILCALL
; X86-NEXT: .LBB0_1: # %bb
; X86-NEXT: jmp foo # TAILCALL
;
; X64-LABEL: t:
; X64: # %bb.0: # %entry
; X64-NEXT: xorl %esi, %edi
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: testl $16384, %edi # imm = 0x4000
; X64-NEXT: je .LBB0_1
; X64-NEXT: # %bb.2: # %bb1
; X64-NEXT: jmp bar # TAILCALL
; X64-NEXT: .LBB0_1: # %bb
; X64-NEXT: jmp foo # TAILCALL
entry:
  ; 16384 = 1 << 14: each operand tests bit 14 of one argument.
%0 = and i32 %a, 16384
%1 = icmp ne i32 %0, 0
%2 = and i32 %b, 16384
%3 = icmp ne i32 %2, 0
  ; xor of the two i1 bit-tests; branch target picks foo vs bar.
%4 = xor i1 %1, %3
br i1 %4, label %bb1, label %bb

bb: ; preds = %entry
%5 = tail call i32 (...) @foo() nounwind ; <i32> [#uses=1]
ret i32 %5

bb1: ; preds = %entry
%6 = tail call i32 (...) @bar() nounwind ; <i32> [#uses=1]
ret i32 %6
}
| |
| declare dso_local i32 @foo(...) |
| |
| declare dso_local i32 @bar(...) |
| |
; Checks codegen for xor of two (icmp eq x, 0) results: each compare is
; materialized with sete and the i1 xor becomes a cmpb of the two flag bytes,
; branching to a conditional tail call of @foo.
define i32 @t2(i32 %x, i32 %y) nounwind ssp {
; X86-LABEL: t2:
; X86: # %bb.0: # %entry
; X86-NEXT: cmpl $0, {{[0-9]+}}(%esp)
; X86-NEXT: sete %al
; X86-NEXT: cmpl $0, {{[0-9]+}}(%esp)
; X86-NEXT: sete %cl
; X86-NEXT: cmpb %al, %cl
; X86-NEXT: je .LBB1_1
; X86-NEXT: # %bb.2: # %bb
; X86-NEXT: jmp foo # TAILCALL
; X86-NEXT: .LBB1_1: # %return
; X86-NEXT: retl
;
; X64-LABEL: t2:
; X64: # %bb.0: # %entry
; X64-NEXT: testl %edi, %edi
; X64-NEXT: sete %al
; X64-NEXT: testl %esi, %esi
; X64-NEXT: sete %cl
; X64-NEXT: cmpb %al, %cl
; X64-NEXT: je .LBB1_1
; X64-NEXT: # %bb.2: # %bb
; X64-NEXT: xorl %eax, %eax
; X64-NEXT: jmp foo # TAILCALL
; X64-NEXT: .LBB1_1: # %return
; X64-NEXT: retq

entry:
%0 = icmp eq i32 %x, 0 ; <i1> [#uses=1]
%1 = icmp eq i32 %y, 0 ; <i1> [#uses=1]
  ; xor of the two zero-tests selects between the tail call and plain return.
%2 = xor i1 %1, %0 ; <i1> [#uses=1]
br i1 %2, label %bb, label %return

bb: ; preds = %entry
%3 = tail call i32 (...) @foo() nounwind ; <i32> [#uses=0]
ret i32 undef

return: ; preds = %entry
ret i32 undef
}
| |
| ; PR45703 |
| ; https://bugs.llvm.org/show_bug.cgi?id=45703 |
| |
; PR45703 pattern: not(xor(x, y)) on zero-extended i1 values. Expected
; codegen is a single byte/word xor of the inputs followed by xorb $1 to
; realize the logical not.
define i1 @xor_not_bools(i1 zeroext %x, i1 zeroext %y) nounwind {
; X86-LABEL: xor_not_bools:
; X86: # %bb.0:
; X86-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-NEXT: xorb {{[0-9]+}}(%esp), %al
; X86-NEXT: xorb $1, %al
; X86-NEXT: retl
;
; X64-LABEL: xor_not_bools:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: xorl %esi, %eax
; X64-NEXT: xorb $1, %al
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  ; not(x ^ y), expressed as xor with true.
%xor = xor i1 %x, %y
%not = xor i1 %xor, true
ret i1 %not
}
| |
| ; This is probably not canonical IR; just testing another possible pattern. |
| |
; Same not(xor) pattern but fed by two icmp results (one ne, one eq), a
; less-canonical IR shape. Both compares are materialized with setcc and the
; not is a trailing xorb $1.
define zeroext i1 @xor_not_cmps(i32 %x, i32 %y) nounwind {
; X86-LABEL: xor_not_cmps:
; X86: # %bb.0:
; X86-NEXT: cmpl $42, {{[0-9]+}}(%esp)
; X86-NEXT: setne %cl
; X86-NEXT: cmpl $235, {{[0-9]+}}(%esp)
; X86-NEXT: sete %al
; X86-NEXT: xorb %cl, %al
; X86-NEXT: xorb $1, %al
; X86-NEXT: retl
;
; X64-LABEL: xor_not_cmps:
; X64: # %bb.0:
; X64-NEXT: cmpl $42, %edi
; X64-NEXT: setne %cl
; X64-NEXT: cmpl $235, %esi
; X64-NEXT: sete %al
; X64-NEXT: xorb %cl, %al
; X64-NEXT: xorb $1, %al
; X64-NEXT: retq
%cmpx = icmp ne i32 %x, 42
%cmpy = icmp eq i32 %y, 235
  ; not(cmpx ^ cmpy), with the not written as "xor ..., 1".
%xor = xor i1 %cmpx, %cmpy
%not = xor i1 %xor, 1
ret i1 %not
}
| |
; Variant of xor_not_cmps where the intermediate xor has an extra use (its
; zext is stored to %p), so the xor result must be materialized (movzbl +
; store) before the final xorb $1 produces the returned not.
define zeroext i1 @xor_not_cmps_extra_use(i32 %x, i32 %y, i32* %p) nounwind {
; X86-LABEL: xor_not_cmps_extra_use:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: cmpl $42, {{[0-9]+}}(%esp)
; X86-NEXT: setne %dl
; X86-NEXT: cmpl $235, {{[0-9]+}}(%esp)
; X86-NEXT: sete %al
; X86-NEXT: xorb %dl, %al
; X86-NEXT: movzbl %al, %edx
; X86-NEXT: movl %edx, (%ecx)
; X86-NEXT: xorb $1, %al
; X86-NEXT: retl
;
; X64-LABEL: xor_not_cmps_extra_use:
; X64: # %bb.0:
; X64-NEXT: cmpl $42, %edi
; X64-NEXT: setne %al
; X64-NEXT: cmpl $235, %esi
; X64-NEXT: sete %cl
; X64-NEXT: xorb %al, %cl
; X64-NEXT: movzbl %cl, %eax
; X64-NEXT: movl %eax, (%rdx)
; X64-NEXT: xorb $1, %al
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
%cmpx = icmp ne i32 %x, 42
%cmpy = icmp eq i32 %y, 235
%xor = xor i1 %cmpx, %cmpy
  ; Extra use of %xor: stored (zero-extended) through %p.
%z = zext i1 %xor to i32
store i32 %z, i32* %p
%not = xor i1 %xor, 1
ret i1 %not
}