; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefixes=X64

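; Tests for matching rotate and funnel-shift patterns in which the two shifted
; halves are combined with 'add' rather than 'or'.

; shl by 7 plus lshr by 25 is rotl(x, 7); expect a single rol.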
define i32 @test_simple_rotl(i32 %x) {
; X86-LABEL: test_simple_rotl:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    roll $7, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_simple_rotl:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    roll $7, %eax
; X64-NEXT:    retq
  %shl = shl i32 %x, 7
  %shr = lshr i32 %x, 25
  %add = add i32 %shl, %shr
  ret i32 %add
}

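; lshr by 7 plus shl by 25 is rotr(x, 7), i.e. rotl(x, 25); expect a single rol.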
define i32 @test_simple_rotr(i32 %x) {
; X86-LABEL: test_simple_rotr:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    roll $25, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_simple_rotr:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    roll $25, %eax
; X64-NEXT:    retq
  %shr = lshr i32 %x, 7
  %shl = shl i32 %x, 25
  %add = add i32 %shr, %shl
  ret i32 %add
}

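; Variable rotate left: shl by %y plus lshr by (32 - %y). The lshr is poison
; for %y == 0, so the add form can still fold to a single rol.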
define i32 @test_rotl_var(i32 %x, i32 %y) {
; X86-LABEL: test_rotl_var:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    roll %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_rotl_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    roll %cl, %eax
; X64-NEXT:    retq
  %shl = shl i32 %x, %y
  %sub = sub i32 32, %y
  %shr = lshr i32 %x, %sub
  %add = add i32 %shl, %shr
  ret i32 %add
}

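; Variable rotate right: lshr by %y plus shl by (32 - %y); expect a single ror.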
define i32 @test_rotr_var(i32 %x, i32 %y) {
; X86-LABEL: test_rotr_var:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    rorl %cl, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_rotr_var:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    rorl %cl, %eax
; X64-NEXT:    retq
  %shr = lshr i32 %x, %y
  %sub = sub i32 32, %y
  %shl = shl i32 %x, %sub
  %add = add i32 %shr, %shl
  ret i32 %add
}

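; Negative test: with the amount masked by 'and 31', %y == 0 gives x + x
; rather than x, so the add form must not be folded to a rotate.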
define i32 @test_invalid_rotl_var_and(i32 %x, i32 %y) {
; X86-LABEL: test_invalid_rotl_var_and:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %edx
; X86-NEXT:    shll %cl, %edx
; X86-NEXT:    negb %cl
; X86-NEXT:    shrl %cl, %eax
; X86-NEXT:    addl %edx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_invalid_rotl_var_and:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shll %cl, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrl %cl, %edi
; X64-NEXT:    addl %edi, %eax
; X64-NEXT:    retq
  %shl = shl i32 %x, %y
  %sub = sub nsw i32 0, %y
  %and = and i32 %sub, 31
  %shr = lshr i32 %x, %and
  %add = add i32 %shr, %shl
  ret i32 %add
}

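; Negative test: the rotate-right form with a masked amount has the same
; %y == 0 problem, so the shifts stay separate here too.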
define i32 @test_invalid_rotr_var_and(i32 %x, i32 %y) {
; X86-LABEL: test_invalid_rotr_var_and:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl %eax, %edx
; X86-NEXT:    shrl %cl, %edx
; X86-NEXT:    negb %cl
; X86-NEXT:    shll %cl, %eax
; X86-NEXT:    addl %edx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_invalid_rotr_var_and:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    shrl %cl, %eax
; X64-NEXT:    negb %cl
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %edi
; X64-NEXT:    addl %edi, %eax
; X64-NEXT:    retq
  %shr = lshr i32 %x, %y
  %sub = sub nsw i32 0, %y
  %and = and i32 %sub, 31
  %shl = shl i32 %x, %and
  %add = add i32 %shr, %shl
  ret i32 %add
}

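; fshl(%x0, %x1, %y): pre-shifting %x1 right by 1 makes the lshr by
; (%y xor 31) a well-defined shift by (32 - %y); this matches shld.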
define i32 @test_fshl_special_case(i32 %x0, i32 %x1, i32 %y) {
; X86-LABEL: test_fshl_special_case:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shldl %cl, %edx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_fshl_special_case:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shldl %cl, %esi, %eax
; X64-NEXT:    retq
  %shl = shl i32 %x0, %y
  %srli = lshr i32 %x1, 1
  %x = xor i32 %y, 31
  %srlo = lshr i32 %srli, %x
  %o = add i32 %shl, %srlo
  ret i32 %o
}

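; Mirrored fshr pattern: %x0 is pre-shifted left by 1 and then by (%y xor 31),
; added to lshr of %x1 by %y; this matches shrd.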
define i32 @test_fshr_special_case(i32 %x0, i32 %x1, i32 %y) {
; X86-LABEL: test_fshr_special_case:
; X86:       # %bb.0:
; X86-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    shrdl %cl, %edx, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_fshr_special_case:
; X64:       # %bb.0:
; X64-NEXT:    movl %edx, %ecx
; X64-NEXT:    movl %esi, %eax
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shrdl %cl, %edi, %eax
; X64-NEXT:    retq
  %shr = lshr i32 %x1, %y
  %slli = shl i32 %x0, 1
  %x = xor i32 %y, 31
  %sllo = shl i32 %slli, %x
  %o = add i32 %shr, %sllo
  ret i32 %o
}

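; udiv by 48 is udiv by 3 followed by lshr by 4, so this is rotl(i / 3, 60);
; on x86-64 this folds to a single rol after the multiply-based division.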
define i64 @test_rotl_udiv_special_case(i64 %i) {
; X86-LABEL: test_rotl_udiv_special_case:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebx
; X86-NEXT:    .cfi_def_cfa_offset 8
; X86-NEXT:    pushl %edi
; X86-NEXT:    .cfi_def_cfa_offset 12
; X86-NEXT:    pushl %esi
; X86-NEXT:    .cfi_def_cfa_offset 16
; X86-NEXT:    .cfi_offset %esi, -16
; X86-NEXT:    .cfi_offset %edi, -12
; X86-NEXT:    .cfi_offset %ebx, -8
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    movl %ecx, %esi
; X86-NEXT:    addl %edi, %esi
; X86-NEXT:    adcl $0, %esi
; X86-NEXT:    movl $-1431655765, %ebx # imm = 0xAAAAAAAB
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    mull %ebx
; X86-NEXT:    shrl %edx
; X86-NEXT:    leal (%edx,%edx,2), %eax
; X86-NEXT:    subl %eax, %esi
; X86-NEXT:    subl %esi, %ecx
; X86-NEXT:    sbbl $0, %edi
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:    mull %ebx
; X86-NEXT:    imull $-1431655766, %ecx, %ecx # imm = 0xAAAAAAAA
; X86-NEXT:    addl %ecx, %edx
; X86-NEXT:    imull $-1431655765, %edi, %ecx # imm = 0xAAAAAAAB
; X86-NEXT:    addl %ecx, %edx
; X86-NEXT:    movl %edx, %ecx
; X86-NEXT:    shldl $28, %eax, %ecx
; X86-NEXT:    shrdl $4, %eax, %edx
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:    popl %esi
; X86-NEXT:    .cfi_def_cfa_offset 12
; X86-NEXT:    popl %edi
; X86-NEXT:    .cfi_def_cfa_offset 8
; X86-NEXT:    popl %ebx
; X86-NEXT:    .cfi_def_cfa_offset 4
; X86-NEXT:    retl
;
; X64-LABEL: test_rotl_udiv_special_case:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    movabsq $-6148914691236517205, %rcx # imm = 0xAAAAAAAAAAAAAAAB
; X64-NEXT:    mulq %rcx
; X64-NEXT:    movq %rdx, %rax
; X64-NEXT:    shrq %rax
; X64-NEXT:    rolq $60, %rax
; X64-NEXT:    retq
  %lhs_div = udiv i64 %i, 3
  %rhs_div = udiv i64 %i, 48
  %lhs_shift = shl i64 %lhs_div, 60
  %out = add i64 %lhs_shift, %rhs_div
  ret i64 %out
}

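; mul by 1152 is (9 * i) << 7, so this is rotl(9 * i, 7); expect lea plus rol.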
define i32 @test_rotl_mul_special_case(i32 %i) {
; X86-LABEL: test_rotl_mul_special_case:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    leal (%eax,%eax,8), %eax
; X86-NEXT:    roll $7, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_rotl_mul_special_case:
; X64:       # %bb.0:
; X64-NEXT:    # kill: def $edi killed $edi def $rdi
; X64-NEXT:    leal (%rdi,%rdi,8), %eax
; X64-NEXT:    roll $7, %eax
; X64-NEXT:    retq
  %lhs_mul = mul i32 %i, 9
  %rhs_mul = mul i32 %i, 1152
  %lhs_shift = lshr i32 %lhs_mul, 25
  %out = add i32 %lhs_shift, %rhs_mul
  ret i32 %out
}

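; The shl half of the would-be rotate is masked with 160, so only a partial
; rotate pattern is present and the mask must be preserved.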
define i64 @test_rotl_mul_with_mask_special_case(i64 %i) {
; X86-LABEL: test_rotl_mul_with_mask_special_case:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    leal (%eax,%eax,8), %ecx
; X86-NEXT:    movl $9, %eax
; X86-NEXT:    mull {{[0-9]+}}(%esp)
; X86-NEXT:    addl %ecx, %edx
; X86-NEXT:    shrdl $25, %eax, %edx
; X86-NEXT:    movzbl %dl, %eax
; X86-NEXT:    xorl %edx, %edx
; X86-NEXT:    retl
;
; X64-LABEL: test_rotl_mul_with_mask_special_case:
; X64:       # %bb.0:
; X64-NEXT:    leaq (%rdi,%rdi,8), %rax
; X64-NEXT:    # kill: def $edi killed $edi killed $rdi def $rdi
; X64-NEXT:    shll $7, %edi
; X64-NEXT:    leal (%rdi,%rdi,8), %ecx
; X64-NEXT:    movzbl %cl, %ecx
; X64-NEXT:    shrq $57, %rax
; X64-NEXT:    orq %rcx, %rax
; X64-NEXT:    retq
  %lhs_mul = mul i64 %i, 1152
  %rhs_mul = mul i64 %i, 9
  %lhs_and = and i64 %lhs_mul, 160
  %rhs_shift = lshr i64 %rhs_mul, 57
  %out = add i64 %lhs_and, %rhs_shift
  ret i64 %out
}

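; fshl of (%x | 1) and %x by 5 where only bit 0 of the shifted-in half is
; kept; folds to shld plus an and that clears bits 1-4.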
define i32 @test_fshl_with_mask_special_case(i32 %x) {
; X86-LABEL: test_fshl_with_mask_special_case:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl %ecx, %eax
; X86-NEXT:    orl $1, %eax
; X86-NEXT:    shldl $5, %ecx, %eax
; X86-NEXT:    andl $-31, %eax
; X86-NEXT:    retl
;
; X64-LABEL: test_fshl_with_mask_special_case:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %eax
; X64-NEXT:    orl $1, %eax
; X64-NEXT:    shldl $5, %edi, %eax
; X64-NEXT:    andl $-31, %eax
; X64-NEXT:    retq
  %or1 = or i32 %x, 1
  %sh1 = shl i32 %or1, 5
  %sh2 = lshr i32 %x, 27
  %1 = and i32 %sh2, 1
  %r = add i32 %sh1, %1
  ret i32 %r
}