| ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py |
| ; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+cmov,+bmi | FileCheck %s --check-prefix=X86 |
| ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+cmov,+bmi | FileCheck %s --check-prefix=X64 |
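
; These tests cover recognition of the BMI BLSMSK (x ^ (x - 1)), BLSI (x & -x)
; and BLSR (x & (x - 1)) patterns when the operation completing the pattern has
; been reassociated through other xors/ands, and when intermediate values have
; additional uses.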
| |
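; BLSMSK pattern x ^ (x - 1) where the decremented value has an extra use in
; the multiply.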
| define i32 @blsmsk_used2(i32 %a) nounwind { |
| ; X86-LABEL: blsmsk_used2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: leal -1(%eax), %ecx |
| ; X86-NEXT: xorl %ecx, %eax |
| ; X86-NEXT: imull %ecx, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsmsk_used2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: # kill: def $edi killed $edi def $rdi |
| ; X64-NEXT: leal -1(%rdi), %eax |
| ; X64-NEXT: xorl %eax, %edi |
| ; X64-NEXT: imull %edi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i32 %a, -1 |
| %xor = xor i32 %sub, %a |
| %mul = mul i32 %xor, %sub |
| ret i32 %mul |
| } |
| |
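; The xor with b is reassociated past the xor with a, giving a ^ BLSMSK(b);
; the 64-bit target selects BLSMSKQ.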
| define i64 @blsmask_through1(i64 %a, i64 %b) nounwind { |
| ; X86-LABEL: blsmask_through1: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: movl %ecx, %eax |
| ; X86-NEXT: addl $-1, %eax |
| ; X86-NEXT: movl %esi, %edx |
| ; X86-NEXT: adcl $-1, %edx |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl %ecx, %eax |
| ; X86-NEXT: xorl %esi, %edx |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsmask_through1: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: blsmskq %rsi, %rax |
| ; X64-NEXT: xorq %rdi, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i64 %b, -1 |
| %0 = xor i64 %sub, %a |
| %xor1 = xor i64 %0, %b |
| ret i64 %xor1 |
| } |
| |
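; The completing xor with b is separated from b - 1 by one additional xor
; (with c).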
| define i32 @blsmask_through2(i32 %a, i32 %b, i32 %c) nounwind { |
| ; X86-LABEL: blsmask_through2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: blsmskl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsmask_through2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: blsmskl %esi, %eax |
| ; X64-NEXT: xorl %edx, %edi |
| ; X64-NEXT: xorl %edi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add nsw i32 %b, -1 |
| %0 = xor i32 %sub, %a |
| %1 = xor i32 %0, %c |
| %xor2 = xor i32 %1, %b |
| ret i32 %xor2 |
| } |
| |
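; The completing xor with b is separated from b - 1 by two additional xors
; (with c and d).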
| define i64 @blsmask_through3(i64 %a, i64 %b, i64 %c, i64 %d) nounwind { |
| ; X86-LABEL: blsmask_through3: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl %esi, %eax |
| ; X86-NEXT: addl $-1, %eax |
| ; X86-NEXT: movl %ecx, %edx |
| ; X86-NEXT: adcl $-1, %edx |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl %esi, %eax |
| ; X86-NEXT: xorl %ecx, %edx |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsmask_through3: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: leaq -1(%rsi), %rax |
| ; X64-NEXT: xorq %rdx, %rdi |
| ; X64-NEXT: xorq %rdi, %rax |
| ; X64-NEXT: xorq %rsi, %rcx |
| ; X64-NEXT: xorq %rcx, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add nsw i64 %b, -1 |
| %0 = xor i64 %sub, %a |
| %1 = xor i64 %0, %c |
| %2 = xor i64 %1, %d |
| %xor3 = xor i64 %2, %b |
| ret i64 %xor3 |
| } |
| |
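; The intermediate a ^ (b - 1) value has an additional use (the increment).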
| define i32 @blsmask_through1_used1(i32 %a, i32 %b) nounwind { |
| ; X86-LABEL: blsmask_through1_used1: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: leal -1(%ecx), %eax |
| ; X86-NEXT: xorl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl %eax, %ecx |
| ; X86-NEXT: incl %eax |
| ; X86-NEXT: orl %ecx, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsmask_through1_used1: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: # kill: def $esi killed $esi def $rsi |
| ; X64-NEXT: leal -1(%rsi), %eax |
| ; X64-NEXT: xorl %edi, %eax |
| ; X64-NEXT: xorl %eax, %esi |
| ; X64-NEXT: incl %eax |
| ; X64-NEXT: orl %esi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i32 %b, -1 |
| %xor = xor i32 %sub, %a |
| %xor1 = xor i32 %xor, %b |
| %add = add i32 %xor, 1 |
| %or = or i32 %add, %xor1 |
| ret i32 %or |
| } |
| |
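; Both b - 1 and the intermediate xor have additional uses in the multiply.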
| define i64 @blsmask_through1_used2(i64 %a, i64 %b) nounwind { |
| ; X86-LABEL: blsmask_through1_used2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %ebp |
| ; X86-NEXT: pushl %ebx |
| ; X86-NEXT: pushl %edi |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: movl %ecx, %edi |
| ; X86-NEXT: addl $-1, %edi |
| ; X86-NEXT: movl %esi, %ebp |
| ; X86-NEXT: adcl $-1, %ebp |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx |
| ; X86-NEXT: xorl %ebp, %ebx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: xorl %edi, %eax |
| ; X86-NEXT: xorl %ebx, %esi |
| ; X86-NEXT: xorl %eax, %ecx |
| ; X86-NEXT: imull %eax, %ebp |
| ; X86-NEXT: mull %edi |
| ; X86-NEXT: addl %ebp, %edx |
| ; X86-NEXT: imull %edi, %ebx |
| ; X86-NEXT: addl %ebx, %edx |
| ; X86-NEXT: orl %esi, %edx |
| ; X86-NEXT: orl %ecx, %eax |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: popl %edi |
| ; X86-NEXT: popl %ebx |
| ; X86-NEXT: popl %ebp |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsmask_through1_used2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: leaq -1(%rsi), %rax |
| ; X64-NEXT: xorq %rax, %rdi |
| ; X64-NEXT: xorq %rdi, %rsi |
| ; X64-NEXT: imulq %rdi, %rax |
| ; X64-NEXT: orq %rsi, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i64 %b, -1 |
| %xor = xor i64 %sub, %a |
| %xor1 = xor i64 %xor, %b |
| %mul = mul i64 %xor, %sub |
| %or = or i64 %mul, %xor1 |
| ret i64 %or |
| } |
| |
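; BLSI pattern x & -x where the negated value has an extra use in the multiply.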
| define i32 @blsi_used2(i32 %a) nounwind { |
| ; X86-LABEL: blsi_used2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: movl %eax, %ecx |
| ; X86-NEXT: negl %ecx |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: imull %ecx, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsi_used2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: movl %edi, %eax |
| ; X64-NEXT: negl %eax |
| ; X64-NEXT: andl %eax, %edi |
| ; X64-NEXT: imull %edi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = sub nsw i32 0, %a |
| %and = and i32 %sub, %a |
| %mul = mul nsw i32 %and, %sub |
| ret i32 %mul |
| } |
| |
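; The and with b is reassociated past the and with a, giving a & BLSI(b);
; the 64-bit target selects BLSIQ.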
| define i64 @blsi_through1(i64 %a, i64 %b) nounwind { |
| ; X86-LABEL: blsi_through1: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: xorl %edx, %edx |
| ; X86-NEXT: movl %ecx, %eax |
| ; X86-NEXT: negl %eax |
| ; X86-NEXT: sbbl %esi, %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %esi, %edx |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsi_through1: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: blsiq %rsi, %rax |
| ; X64-NEXT: andq %rdi, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = sub nsw i64 0, %b |
| %and = and i64 %sub, %a |
| %and1 = and i64 %and, %b |
| ret i64 %and1 |
| } |
| |
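; The completing and with b is separated from -b by one additional and
; (with c).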
| define i32 @blsi_through2(i32 %a, i32 %b, i32 %c) nounwind { |
| ; X86-LABEL: blsi_through2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: blsil {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsi_through2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: blsil %esi, %eax |
| ; X64-NEXT: andl %edx, %edi |
| ; X64-NEXT: andl %edi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = sub i32 0, %b |
| %and = and i32 %sub, %a |
| %0 = and i32 %and, %c |
| %and2 = and i32 %0, %b |
| ret i32 %and2 |
| } |
| |
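; 64-bit variant where the completing and with b is separated from -b by an
; additional and (with c).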
| define i64 @blsi_through3(i64 %a, i64 %b, i64 %c) nounwind { |
| ; X86-LABEL: blsi_through3: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: xorl %edx, %edx |
| ; X86-NEXT: movl %ecx, %eax |
| ; X86-NEXT: negl %eax |
| ; X86-NEXT: sbbl %esi, %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %esi, %edx |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsi_through3: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: blsiq %rsi, %rax |
| ; X64-NEXT: andq %rdx, %rdi |
| ; X64-NEXT: andq %rdi, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = sub i64 0, %b |
| %and = and i64 %sub, %a |
| %0 = and i64 %and, %c |
| %and3 = and i64 %0, %b |
| ret i64 %and3 |
| } |
| |
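; The intermediate a & -b value has an additional use (the increment).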
| define i32 @blsi_through1_used1(i32 %a, i32 %b) nounwind { |
| ; X86-LABEL: blsi_through1_used1: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl %ecx, %eax |
| ; X86-NEXT: negl %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %eax, %ecx |
| ; X86-NEXT: incl %eax |
| ; X86-NEXT: orl %ecx, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsi_through1_used1: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: movl %esi, %eax |
| ; X64-NEXT: negl %eax |
| ; X64-NEXT: andl %edi, %eax |
| ; X64-NEXT: andl %eax, %esi |
| ; X64-NEXT: incl %eax |
| ; X64-NEXT: orl %esi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = sub nsw i32 0, %b |
| %and = and i32 %sub, %a |
| %and1 = and i32 %and, %b |
| %add = add nsw i32 %and, 1 |
| %or = or i32 %add, %and1 |
| ret i32 %or |
| } |
| |
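; Both -b and the intermediate and have additional uses in the multiply.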
| define i64 @blsi_through1_used2(i64 %a, i64 %b) nounwind { |
| ; X86-LABEL: blsi_through1_used2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %ebx |
| ; X86-NEXT: pushl %edi |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: xorl %edi, %edi |
| ; X86-NEXT: movl %ecx, %edx |
| ; X86-NEXT: negl %edx |
| ; X86-NEXT: sbbl %esi, %edi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx |
| ; X86-NEXT: andl %edi, %ebx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %edx, %eax |
| ; X86-NEXT: andl %ebx, %esi |
| ; X86-NEXT: andl %eax, %ecx |
| ; X86-NEXT: imull %edx, %ebx |
| ; X86-NEXT: imull %eax, %edi |
| ; X86-NEXT: mull %edx |
| ; X86-NEXT: addl %edi, %edx |
| ; X86-NEXT: addl %ebx, %edx |
| ; X86-NEXT: orl %esi, %edx |
| ; X86-NEXT: orl %ecx, %eax |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: popl %edi |
| ; X86-NEXT: popl %ebx |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsi_through1_used2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: movq %rsi, %rax |
| ; X64-NEXT: negq %rax |
| ; X64-NEXT: andq %rax, %rdi |
| ; X64-NEXT: andq %rdi, %rsi |
| ; X64-NEXT: imulq %rdi, %rax |
| ; X64-NEXT: orq %rsi, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = sub nsw i64 0, %b |
| %and = and i64 %sub, %a |
| %and1 = and i64 %and, %b |
| %mul = mul nsw i64 %and, %sub |
| %or = or i64 %mul, %and1 |
| ret i64 %or |
| } |
| |
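; BLSR pattern x & (x - 1) where the decremented value has an extra use in
; the multiply.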
| define i32 @blsr_used2(i32 %a) nounwind { |
| ; X86-LABEL: blsr_used2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: leal -1(%eax), %ecx |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: imull %ecx, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsr_used2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: # kill: def $edi killed $edi def $rdi |
| ; X64-NEXT: leal -1(%rdi), %eax |
| ; X64-NEXT: andl %eax, %edi |
| ; X64-NEXT: imull %edi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i32 %a, -1 |
| %and = and i32 %sub, %a |
| %mul = mul i32 %and, %sub |
| ret i32 %mul |
| } |
| |
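; The and with b is reassociated past the and with a, giving a & BLSR(b);
; the 64-bit target selects BLSRQ.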
| define i64 @blsr_through1(i64 %a, i64 %b) nounwind { |
| ; X86-LABEL: blsr_through1: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: movl %ecx, %eax |
| ; X86-NEXT: addl $-1, %eax |
| ; X86-NEXT: movl %esi, %edx |
| ; X86-NEXT: adcl $-1, %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: andl %esi, %edx |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsr_through1: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: blsrq %rsi, %rax |
| ; X64-NEXT: andq %rdi, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i64 %b, -1 |
| %0 = and i64 %sub, %a |
| %and1 = and i64 %0, %b |
| ret i64 %and1 |
| } |
| |
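; The completing and with b is separated from b - 1 by one additional and
; (with c).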
| define i32 @blsr_through2(i32 %a, i32 %b, i32 %c) nounwind { |
| ; X86-LABEL: blsr_through2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: blsrl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsr_through2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: blsrl %esi, %eax |
| ; X64-NEXT: andl %edx, %edi |
| ; X64-NEXT: andl %edi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add nsw i32 %b, -1 |
| %0 = and i32 %sub, %a |
| %1 = and i32 %0, %c |
| %and2 = and i32 %1, %b |
| ret i32 %and2 |
| } |
| |
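; The completing and with b is separated from the negated value by two
; additional ands (with c and d).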
| define i64 @blsr_through3(i64 %a, i64 %b, i64 %c, i64 %d) nounwind { |
| ; X86-LABEL: blsr_through3: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: xorl %edx, %edx |
| ; X86-NEXT: movl %ecx, %eax |
| ; X86-NEXT: negl %eax |
| ; X86-NEXT: sbbl %esi, %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %edx |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %esi, %edx |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsr_through3: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: movq %rdi, %rax |
| ; X64-NEXT: andq %rsi, %rcx |
| ; X64-NEXT: negq %rsi |
| ; X64-NEXT: andq %rdx, %rax |
| ; X64-NEXT: andq %rsi, %rax |
| ; X64-NEXT: andq %rcx, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = sub nsw i64 0, %b |
| %and = and i64 %sub, %a |
| %0 = and i64 %and, %c |
| %1 = and i64 %0, %d |
| %and4 = and i64 %1, %b |
| ret i64 %and4 |
| } |
| |
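; The intermediate a & (b - 1) value has an additional use (the increment).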
| define i32 @blsr_through1_used1(i32 %a, i32 %b) nounwind { |
| ; X86-LABEL: blsr_through1_used1: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: leal -1(%ecx), %eax |
| ; X86-NEXT: andl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %eax, %ecx |
| ; X86-NEXT: incl %eax |
| ; X86-NEXT: orl %ecx, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsr_through1_used1: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: # kill: def $esi killed $esi def $rsi |
| ; X64-NEXT: leal -1(%rsi), %eax |
| ; X64-NEXT: andl %edi, %eax |
| ; X64-NEXT: andl %eax, %esi |
| ; X64-NEXT: incl %eax |
| ; X64-NEXT: orl %esi, %eax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i32 %b, -1 |
| %and = and i32 %sub, %a |
| %and1 = and i32 %and, %b |
| %add = add i32 %and, 1 |
| %or = or i32 %add, %and1 |
| ret i32 %or |
| } |
| |
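; Both b - 1 and the intermediate and have additional uses in the multiply.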
| define i64 @blsr_through1_used2(i64 %a, i64 %b) nounwind { |
| ; X86-LABEL: blsr_through1_used2: |
| ; X86: # %bb.0: # %entry |
| ; X86-NEXT: pushl %ebp |
| ; X86-NEXT: pushl %ebx |
| ; X86-NEXT: pushl %edi |
| ; X86-NEXT: pushl %esi |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NEXT: movl %ecx, %edi |
| ; X86-NEXT: addl $-1, %edi |
| ; X86-NEXT: movl %esi, %ebp |
| ; X86-NEXT: adcl $-1, %ebp |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ebx |
| ; X86-NEXT: andl %ebp, %ebx |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: andl %edi, %eax |
| ; X86-NEXT: andl %ebx, %esi |
| ; X86-NEXT: andl %eax, %ecx |
| ; X86-NEXT: imull %eax, %ebp |
| ; X86-NEXT: mull %edi |
| ; X86-NEXT: addl %ebp, %edx |
| ; X86-NEXT: imull %edi, %ebx |
| ; X86-NEXT: addl %ebx, %edx |
| ; X86-NEXT: orl %esi, %edx |
| ; X86-NEXT: orl %ecx, %eax |
| ; X86-NEXT: popl %esi |
| ; X86-NEXT: popl %edi |
| ; X86-NEXT: popl %ebx |
| ; X86-NEXT: popl %ebp |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: blsr_through1_used2: |
| ; X64: # %bb.0: # %entry |
| ; X64-NEXT: leaq -1(%rsi), %rax |
| ; X64-NEXT: andq %rax, %rdi |
| ; X64-NEXT: andq %rdi, %rsi |
| ; X64-NEXT: imulq %rdi, %rax |
| ; X64-NEXT: orq %rsi, %rax |
| ; X64-NEXT: retq |
| entry: |
| %sub = add i64 %b, -1 |
| %and = and i64 %sub, %a |
| %and1 = and i64 %and, %b |
| %mul = mul i64 %and, %sub |
| %or = or i64 %mul, %and1 |
| ret i64 %or |
| } |