| ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py |
| ; RUN: llc < %s -mtriple=i686-- | FileCheck %s --check-prefixes=X86 |
| ; RUN: llc < %s -mtriple=x86_64-- | FileCheck %s --check-prefixes=X64 |
| |
| ; Fold and(sextinreg(v0,i5),sextinreg(v1,i5)) -> sextinreg(and(v0,v1),i5) |
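; The fold is sound because sextinreg(x,i5) replicates bit 4 into bits 5..31,
; and bit 4 of and(v0,v1) is and(bit4(v0),bit4(v1)), so both forms agree on
; every bit. A quick check with hand-picked values (illustrative only, not
; part of the checked output):
;   sextinreg(0x13,i5) = 0xFFFFFFF3, sextinreg(0x0B,i5) = 0x0000000B
;   and(0xFFFFFFF3,0x0000000B)      = 0x00000003
;   and(0x13,0x0B) = 0x03, and sextinreg(0x03,i5) = 0x00000003 as well.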
| define i32 @sextinreg_i32(ptr %p0, ptr %p1) { |
| ; X86-LABEL: sextinreg_i32: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movzbl (%ecx), %ecx |
| ; X86-NEXT: movzbl (%eax), %eax |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: shll $27, %eax |
| ; X86-NEXT: sarl $27, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: sextinreg_i32: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movzbl (%rdi), %ecx |
| ; X64-NEXT: movzbl (%rsi), %eax |
| ; X64-NEXT: andl %ecx, %eax |
| ; X64-NEXT: shll $27, %eax |
| ; X64-NEXT: sarl $27, %eax |
| ; X64-NEXT: retq |
| %v0 = load i8, ptr %p0, align 1 |
| %v1 = load i8, ptr %p1, align 1 |
| %x0 = zext i8 %v0 to i32 |
| %x1 = zext i8 %v1 to i32 |
| %l0 = shl i32 %x0, 27 |
| %l1 = shl i32 %x1, 27 |
| %a0 = ashr exact i32 %l0, 27 |
| %a1 = ashr exact i32 %l1, 27 |
| %and = and i32 %a0, %a1 |
| ret i32 %and |
| } |
| |
; Negative test: the sign-extension widths differ (i2 vs i5), so
; and(sextinreg(v0,i2),sextinreg(v1,i5)) != sextinreg(and(v0,v1),i2)
; and the fold must not fire; each sextinreg is lowered separately.
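; Counterexample with hand-picked values (illustrative only): for v0 = 0x03,
; v1 = 0x02:
;   and(sextinreg(0x03,i2), sextinreg(0x02,i5))
;     = and(0xFFFFFFFF, 0x00000002) = 0x00000002
; whereas the (wrong) folded form would give
;   sextinreg(and(0x03,0x02),i2) = sextinreg(0x02,i2) = 0xFFFFFFFE.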
| define i32 @sextinreg_i32_mismatch(ptr %p0, ptr %p1) { |
| ; X86-LABEL: sextinreg_i32_mismatch: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: movzbl (%ecx), %ecx |
| ; X86-NEXT: movzbl (%eax), %eax |
| ; X86-NEXT: shll $30, %ecx |
| ; X86-NEXT: sarl $30, %ecx |
| ; X86-NEXT: shll $27, %eax |
| ; X86-NEXT: sarl $27, %eax |
| ; X86-NEXT: andl %ecx, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: sextinreg_i32_mismatch: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movzbl (%rdi), %ecx |
| ; X64-NEXT: movzbl (%rsi), %eax |
| ; X64-NEXT: shll $30, %ecx |
| ; X64-NEXT: sarl $30, %ecx |
| ; X64-NEXT: shll $27, %eax |
| ; X64-NEXT: sarl $27, %eax |
| ; X64-NEXT: andl %ecx, %eax |
| ; X64-NEXT: retq |
| %v0 = load i8, ptr %p0, align 1 |
| %v1 = load i8, ptr %p1, align 1 |
| %x0 = zext i8 %v0 to i32 |
| %x1 = zext i8 %v1 to i32 |
| %l0 = shl i32 %x0, 30 |
| %l1 = shl i32 %x1, 27 |
| %a0 = ashr exact i32 %l0, 30 |
| %a1 = ashr exact i32 %l1, 27 |
| %and = and i32 %a0, %a1 |
| ret i32 %and |
| } |