; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=-bmi,-tbm,-bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X86,X86-NOBMI
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,-tbm,-bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X86,X86-BMINOTBM,X86-BMI1
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,+tbm,-bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X86,X86-BMITBM,X86-BMI1
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,+tbm,+bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X86,X86-BMITBM,X86-BMI2
; RUN: llc -mtriple=i686-unknown-linux-gnu -mattr=+bmi,-tbm,+bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X86,X86-BMINOTBM,X86-BMI2
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=-bmi,-tbm,-bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X64,X64-NOBMI
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,-tbm,-bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X64,X64-BMINOTBM,X64-BMI1
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,+tbm,-bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X64,X64-BMITBM,X64-BMI1
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,+tbm,+bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X64,X64-BMITBM,X64-BMI2
; RUN: llc -mtriple=x86_64-unknown-linux-gnu -mattr=+bmi,-tbm,+bmi2,+fast-bextr < %s | FileCheck %s --check-prefixes=X64,X64-BMINOTBM,X64-BMI2

; *Please* keep in sync with test/CodeGen/AArch64/extract-bits.ll

; https://bugs.llvm.org/show_bug.cgi?id=36419
; https://bugs.llvm.org/show_bug.cgi?id=37603
; https://bugs.llvm.org/show_bug.cgi?id=37610

; Patterns:
; a) (x >> start) & (1 << nbits) - 1
; b) (x >> start) & ~(-1 << nbits)
; c) (x >> start) & (-1 >> (32 - y))
; d) (x >> start) << (32 - y) >> (32 - y)
; are equivalent.

; External sinks used by the *_skipextrauses tests to keep a value alive.
declare void @use32(i32)
declare void @use64(i64)

; ---------------------------------------------------------------------------- ;
; Pattern a. 32-bit
; ---------------------------------------------------------------------------- ;

; Pattern a, i32: (val >> numskipbits) & ((1 << numlowbits) - 1)
define i32 @bextr32_a0(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_a0:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: movl %edx, %ecx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: decl %eax
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_a0:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI1-NEXT: shll $8, %eax
; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-BMI1-NEXT: orl %eax, %ecx
; X86-BMI1-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_a0:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shrxl %ecx, {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_a0:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrl %cl, %edi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: decl %eax
; X64-NOBMI-NEXT: andl %edi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_a0:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrl %eax, %edi, %eax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_a0:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxl %esi, %edi, %eax
; X64-BMI2-NEXT: bzhil %edx, %eax, %eax
; X64-BMI2-NEXT: retq
  %shifted = lshr i32 %val, %numskipbits
  %onebit = shl i32 1, %numlowbits
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %mask, %shifted
  ret i32 %masked
}

; Pattern a, i32, with an arithmetic (ashr) outer shift instead of lshr.
define i32 @bextr32_a0_arithmetic(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_a0_arithmetic:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: sarl %cl, %esi
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: movl %edx, %ecx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: decl %eax
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_a0_arithmetic:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI1-NEXT: sarl %cl, %edx
; X86-BMI1-NEXT: shll $8, %eax
; X86-BMI1-NEXT: bextrl %eax, %edx, %eax
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_a0_arithmetic:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: sarxl %ecx, {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_a0_arithmetic:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: sarl %cl, %edi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: decl %eax
; X64-NOBMI-NEXT: andl %edi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_a0_arithmetic:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: movl %esi, %ecx
; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-BMI1-NEXT: sarl %cl, %edi
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: bextrl %edx, %edi, %eax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_a0_arithmetic:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: sarxl %esi, %edi, %eax
; X64-BMI2-NEXT: bzhil %edx, %eax, %eax
; X64-BMI2-NEXT: retq
  %shifted = ashr i32 %val, %numskipbits
  %onebit = shl i32 1, %numlowbits
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %mask, %shifted
  ret i32 %masked
}

; Pattern a, i32, with both shift amounts zero-extended from i8.
define i32 @bextr32_a1_indexzext(i32 %val, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_a1_indexzext:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: movl %edx, %ecx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: decl %eax
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_a1_indexzext:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI1-NEXT: shll $8, %eax
; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-BMI1-NEXT: orl %eax, %ecx
; X86-BMI1-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_a1_indexzext:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shrxl %ecx, {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_a1_indexzext:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrl %cl, %edi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: decl %eax
; X64-NOBMI-NEXT: andl %edi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_a1_indexzext:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrl %eax, %edi, %eax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_a1_indexzext:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxl %esi, %edi, %eax
; X64-BMI2-NEXT: bzhil %edx, %eax, %eax
; X64-BMI2-NEXT: retq
  %skip = zext i8 %numskipbits to i32
  %shifted = lshr i32 %val, %skip
  %conv = zext i8 %numlowbits to i32
  %onebit = shl i32 1, %conv
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %mask, %shifted
  ret i32 %masked
}

; Pattern a, i32, with the extracted value loaded from memory.
define i32 @bextr32_a2_load(i32* %w, i32 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_a2_load:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl (%eax), %esi
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: movl %edx, %ecx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: decl %eax
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_a2_load:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: shll $8, %ecx
; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %edx
; X86-BMI1-NEXT: orl %ecx, %edx
; X86-BMI1-NEXT: bextrl %edx, (%eax), %eax
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_a2_load:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-BMI2-NEXT: shrxl %edx, (%ecx), %ecx
; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_a2_load:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: movl (%rdi), %esi
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrl %cl, %esi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: decl %eax
; X64-NOBMI-NEXT: andl %esi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_a2_load:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrl %eax, (%rdi), %eax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_a2_load:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxl %esi, (%rdi), %eax
; X64-BMI2-NEXT: bzhil %edx, %eax, %eax
; X64-BMI2-NEXT: retq
  %val = load i32, i32* %w
  %shifted = lshr i32 %val, %numskipbits
  %onebit = shl i32 1, %numlowbits
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %mask, %shifted
  ret i32 %masked
}

; Pattern a, i32: loaded value plus i8-zero-extended shift amounts combined.
define i32 @bextr32_a3_load_indexzext(i32* %w, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_a3_load_indexzext:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl (%eax), %esi
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: movl %edx, %ecx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: decl %eax
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_a3_load_indexzext:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: shll $8, %ecx
; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %edx
; X86-BMI1-NEXT: orl %ecx, %edx
; X86-BMI1-NEXT: bextrl %edx, (%eax), %eax
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_a3_load_indexzext:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-BMI2-NEXT: shrxl %edx, (%ecx), %ecx
; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_a3_load_indexzext:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: movl (%rdi), %esi
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrl %cl, %esi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: decl %eax
; X64-NOBMI-NEXT: andl %esi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_a3_load_indexzext:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrl %eax, (%rdi), %eax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_a3_load_indexzext:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxl %esi, (%rdi), %eax
; X64-BMI2-NEXT: bzhil %edx, %eax, %eax
; X64-BMI2-NEXT: retq
  %val = load i32, i32* %w
  %skip = zext i8 %numskipbits to i32
  %shifted = lshr i32 %val, %skip
  %conv = zext i8 %numlowbits to i32
  %onebit = shl i32 1, %conv
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %mask, %shifted
  ret i32 %masked
}

; Pattern a, i32, with the operands of the final 'and' swapped.
define i32 @bextr32_a4_commutative(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_a4_commutative:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: movl %edx, %ecx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: decl %eax
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_a4_commutative:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI1-NEXT: shll $8, %eax
; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-BMI1-NEXT: orl %eax, %ecx
; X86-BMI1-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_a4_commutative:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shrxl %ecx, {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_a4_commutative:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrl %cl, %edi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: decl %eax
; X64-NOBMI-NEXT: andl %edi, %eax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_a4_commutative:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrl %eax, %edi, %eax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_a4_commutative:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxl %esi, %edi, %eax
; X64-BMI2-NEXT: bzhil %edx, %eax, %eax
; X64-BMI2-NEXT: retq
  %shifted = lshr i32 %val, %numskipbits
  %onebit = shl i32 1, %numlowbits
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %shifted, %mask ; swapped order
  ret i32 %masked
}

; Pattern a, i32, where %numskipbits has an extra use (passed to @use32),
; so it must stay live across the extraction.
define i32 @bextr32_a5_skipextrauses(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_a5_skipextrauses:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: pushl %eax
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl %eax, %ecx
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: movl $1, %esi
; X86-NOBMI-NEXT: movl %edx, %ecx
; X86-NOBMI-NEXT: shll %cl, %esi
; X86-NOBMI-NEXT: decl %esi
; X86-NOBMI-NEXT: andl %edi, %esi
; X86-NOBMI-NEXT: movl %eax, (%esp)
; X86-NOBMI-NEXT: calll use32@PLT
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: addl $4, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_a5_skipextrauses:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: subl $8, %esp
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: shll $8, %ecx
; X86-BMI1-NEXT: movzbl %al, %edx
; X86-BMI1-NEXT: orl %ecx, %edx
; X86-BMI1-NEXT: bextrl %edx, {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT: movl %eax, (%esp)
; X86-BMI1-NEXT: calll use32@PLT
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: addl $8, %esp
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_a5_skipextrauses:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: subl $8, %esp
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: shrxl %ecx, {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT: bzhil %eax, %edx, %esi
; X86-BMI2-NEXT: movl %ecx, (%esp)
; X86-BMI2-NEXT: calll use32@PLT
; X86-BMI2-NEXT: movl %esi, %eax
; X86-BMI2-NEXT: addl $8, %esp
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_a5_skipextrauses:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: shrl %cl, %edi
; X64-NOBMI-NEXT: movl $1, %ebx
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %ebx
; X64-NOBMI-NEXT: decl %ebx
; X64-NOBMI-NEXT: andl %edi, %ebx
; X64-NOBMI-NEXT: movl %esi, %edi
; X64-NOBMI-NEXT: callq use32@PLT
; X64-NOBMI-NEXT: movl %ebx, %eax
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_a5_skipextrauses:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: pushq %rbx
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrl %eax, %edi, %ebx
; X64-BMI1-NEXT: movl %esi, %edi
; X64-BMI1-NEXT: callq use32@PLT
; X64-BMI1-NEXT: movl %ebx, %eax
; X64-BMI1-NEXT: popq %rbx
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_a5_skipextrauses:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: pushq %rbx
; X64-BMI2-NEXT: shrxl %esi, %edi, %eax
; X64-BMI2-NEXT: bzhil %edx, %eax, %ebx
; X64-BMI2-NEXT: movl %esi, %edi
; X64-BMI2-NEXT: callq use32@PLT
; X64-BMI2-NEXT: movl %ebx, %eax
; X64-BMI2-NEXT: popq %rbx
; X64-BMI2-NEXT: retq
  %shifted = lshr i32 %val, %numskipbits
  %onebit = shl i32 1, %numlowbits
  %mask = add nsw i32 %onebit, -1
  %masked = and i32 %mask, %shifted
  call void @use32(i32 %numskipbits)
  ret i32 %masked
}

; 64-bit

; Pattern a, i64: (val >> numskipbits) & ((1 << numlowbits) - 1)
define i64 @bextr64_a0(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_a0:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl %eax, %edi
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB7_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: xorl %edi, %edi
; X86-NOBMI-NEXT: .LBB7_2:
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: xorl %edx, %edx
; X86-NOBMI-NEXT: movb %ch, %cl
; X86-NOBMI-NEXT: shldl %cl, %eax, %edx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: testb $32, %ch
; X86-NOBMI-NEXT: je .LBB7_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
; X86-NOBMI-NEXT: .LBB7_4:
; X86-NOBMI-NEXT: addl $-1, %eax
; X86-NOBMI-NEXT: adcl $-1, %edx
; X86-NOBMI-NEXT: andl %esi, %eax
; X86-NOBMI-NEXT: andl %edi, %edx
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_a0:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %ch
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl %eax, %edi
; X86-BMI1-NEXT: shrl %cl, %edi
; X86-BMI1-NEXT: shrdl %cl, %eax, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB7_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %edi, %esi
; X86-BMI1-NEXT: xorl %edi, %edi
; X86-BMI1-NEXT: .LBB7_2:
; X86-BMI1-NEXT: movl $1, %eax
; X86-BMI1-NEXT: xorl %edx, %edx
; X86-BMI1-NEXT: movb %ch, %cl
; X86-BMI1-NEXT: shldl %cl, %eax, %edx
; X86-BMI1-NEXT: shll %cl, %eax
; X86-BMI1-NEXT: testb $32, %ch
; X86-BMI1-NEXT: je .LBB7_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %eax, %edx
; X86-BMI1-NEXT: xorl %eax, %eax
; X86-BMI1-NEXT: .LBB7_4:
; X86-BMI1-NEXT: addl $-1, %eax
; X86-BMI1-NEXT: adcl $-1, %edx
; X86-BMI1-NEXT: andl %esi, %eax
; X86-BMI1-NEXT: andl %edi, %edx
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_a0:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: shrdl %cl, %eax, %esi
; X86-BMI2-NEXT: shrxl %ecx, %eax, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB7_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: xorl %edi, %edi
; X86-BMI2-NEXT: .LBB7_2:
; X86-BMI2-NEXT: movl $1, %eax
; X86-BMI2-NEXT: xorl %edx, %edx
; X86-BMI2-NEXT: movl %ebx, %ecx
; X86-BMI2-NEXT: shldl %cl, %eax, %edx
; X86-BMI2-NEXT: shlxl %ebx, %eax, %eax
; X86-BMI2-NEXT: testb $32, %bl
; X86-BMI2-NEXT: je .LBB7_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %eax, %edx
; X86-BMI2-NEXT: xorl %eax, %eax
; X86-BMI2-NEXT: .LBB7_4:
; X86-BMI2-NEXT: addl $-1, %eax
; X86-BMI2-NEXT: adcl $-1, %edx
; X86-BMI2-NEXT: andl %esi, %eax
; X86-BMI2-NEXT: andl %edi, %edx
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_a0:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movq %rsi, %rcx
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT: shrq %cl, %rdi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shlq %cl, %rax
; X64-NOBMI-NEXT: decq %rax
; X64-NOBMI-NEXT: andq %rdi, %rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_a0:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_a0:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT: retq
  %shifted = lshr i64 %val, %numskipbits
  %onebit = shl i64 1, %numlowbits
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %mask, %shifted
  ret i64 %masked
}

; Pattern a, i64, with an arithmetic (ashr) outer shift instead of lshr.
define i64 @bextr64_a0_arithmetic(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_a0_arithmetic:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl %eax, %esi
; X86-NOBMI-NEXT: sarl %cl, %esi
; X86-NOBMI-NEXT: shrdl %cl, %eax, %edi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB8_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: sarl $31, %eax
; X86-NOBMI-NEXT: movl %esi, %edi
; X86-NOBMI-NEXT: movl %eax, %esi
; X86-NOBMI-NEXT: .LBB8_2:
; X86-NOBMI-NEXT: movl $1, %eax
; X86-NOBMI-NEXT: xorl %edx, %edx
; X86-NOBMI-NEXT: movb %ch, %cl
; X86-NOBMI-NEXT: shldl %cl, %eax, %edx
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: testb $32, %ch
; X86-NOBMI-NEXT: je .LBB8_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
; X86-NOBMI-NEXT: .LBB8_4:
; X86-NOBMI-NEXT: addl $-1, %eax
; X86-NOBMI-NEXT: adcl $-1, %edx
; X86-NOBMI-NEXT: andl %edi, %eax
; X86-NOBMI-NEXT: andl %esi, %edx
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_a0_arithmetic:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %ch
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl %eax, %esi
; X86-BMI1-NEXT: sarl %cl, %esi
; X86-BMI1-NEXT: shrdl %cl, %eax, %edi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB8_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: sarl $31, %eax
; X86-BMI1-NEXT: movl %esi, %edi
; X86-BMI1-NEXT: movl %eax, %esi
; X86-BMI1-NEXT: .LBB8_2:
; X86-BMI1-NEXT: movl $1, %eax
; X86-BMI1-NEXT: xorl %edx, %edx
; X86-BMI1-NEXT: movb %ch, %cl
; X86-BMI1-NEXT: shldl %cl, %eax, %edx
; X86-BMI1-NEXT: shll %cl, %eax
; X86-BMI1-NEXT: testb $32, %ch
; X86-BMI1-NEXT: je .LBB8_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %eax, %edx
; X86-BMI1-NEXT: xorl %eax, %eax
; X86-BMI1-NEXT: .LBB8_4:
; X86-BMI1-NEXT: addl $-1, %eax
; X86-BMI1-NEXT: adcl $-1, %edx
; X86-BMI1-NEXT: andl %edi, %eax
; X86-BMI1-NEXT: andl %esi, %edx
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_a0_arithmetic:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: shrdl %cl, %eax, %esi
; X86-BMI2-NEXT: sarxl %ecx, %eax, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB8_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: sarl $31, %eax
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: movl %eax, %edi
; X86-BMI2-NEXT: .LBB8_2:
; X86-BMI2-NEXT: movl $1, %eax
; X86-BMI2-NEXT: xorl %edx, %edx
; X86-BMI2-NEXT: movl %ebx, %ecx
; X86-BMI2-NEXT: shldl %cl, %eax, %edx
; X86-BMI2-NEXT: shlxl %ebx, %eax, %eax
; X86-BMI2-NEXT: testb $32, %bl
; X86-BMI2-NEXT: je .LBB8_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %eax, %edx
; X86-BMI2-NEXT: xorl %eax, %eax
; X86-BMI2-NEXT: .LBB8_4:
; X86-BMI2-NEXT: addl $-1, %eax
; X86-BMI2-NEXT: adcl $-1, %edx
; X86-BMI2-NEXT: andl %esi, %eax
; X86-BMI2-NEXT: andl %edi, %edx
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_a0_arithmetic:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movq %rsi, %rcx
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT: sarq %cl, %rdi
; X64-NOBMI-NEXT: movl $1, %eax
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shlq %cl, %rax
; X64-NOBMI-NEXT: decq %rax
; X64-NOBMI-NEXT: andq %rdi, %rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_a0_arithmetic:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: movq %rsi, %rcx
; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-BMI1-NEXT: sarq %cl, %rdi
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: bextrq %rdx, %rdi, %rax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_a0_arithmetic:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: sarxq %rsi, %rdi, %rax
; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT: retq
  %shifted = ashr i64 %val, %numskipbits
  %onebit = shl i64 1, %numlowbits
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %mask, %shifted
  ret i64 %masked
}

define i64 @bextr64_a1_indexzext(i64 %val, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind {
; Pattern a) with i8-zext'd bit counts: (val >> zext(skip)) & ((1 << zext(len)) - 1).
; Should select to a single BEXTR (BMI1) / SHRX+BZHI (BMI2) on x86-64.
; X86-NOBMI-LABEL: bextr64_a1_indexzext:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %edi
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl %eax, %edi
; X86-NOBMI-NEXT:    shrl %cl, %edi
; X86-NOBMI-NEXT:    shrdl %cl, %eax, %esi
; X86-NOBMI-NEXT:    testb $32, %cl
; X86-NOBMI-NEXT:    je .LBB9_2
; X86-NOBMI-NEXT:  # %bb.1:
; X86-NOBMI-NEXT:    movl %edi, %esi
; X86-NOBMI-NEXT:    xorl %edi, %edi
; X86-NOBMI-NEXT:  .LBB9_2:
; X86-NOBMI-NEXT:    movl $1, %eax
; X86-NOBMI-NEXT:    xorl %edx, %edx
; X86-NOBMI-NEXT:    movb %ch, %cl
; X86-NOBMI-NEXT:    shldl %cl, %eax, %edx
; X86-NOBMI-NEXT:    shll %cl, %eax
; X86-NOBMI-NEXT:    testb $32, %ch
; X86-NOBMI-NEXT:    je .LBB9_4
; X86-NOBMI-NEXT:  # %bb.3:
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    xorl %eax, %eax
; X86-NOBMI-NEXT:  .LBB9_4:
; X86-NOBMI-NEXT:    addl $-1, %eax
; X86-NOBMI-NEXT:    adcl $-1, %edx
; X86-NOBMI-NEXT:    andl %esi, %eax
; X86-NOBMI-NEXT:    andl %edi, %edx
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    popl %edi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI1-LABEL: bextr64_a1_indexzext:
; X86-BMI1:       # %bb.0:
; X86-BMI1-NEXT:    pushl %edi
; X86-BMI1-NEXT:    pushl %esi
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT:    movl %eax, %edi
; X86-BMI1-NEXT:    shrl %cl, %edi
; X86-BMI1-NEXT:    shrdl %cl, %eax, %esi
; X86-BMI1-NEXT:    testb $32, %cl
; X86-BMI1-NEXT:    je .LBB9_2
; X86-BMI1-NEXT:  # %bb.1:
; X86-BMI1-NEXT:    movl %edi, %esi
; X86-BMI1-NEXT:    xorl %edi, %edi
; X86-BMI1-NEXT:  .LBB9_2:
; X86-BMI1-NEXT:    movl $1, %eax
; X86-BMI1-NEXT:    xorl %edx, %edx
; X86-BMI1-NEXT:    movb %ch, %cl
; X86-BMI1-NEXT:    shldl %cl, %eax, %edx
; X86-BMI1-NEXT:    shll %cl, %eax
; X86-BMI1-NEXT:    testb $32, %ch
; X86-BMI1-NEXT:    je .LBB9_4
; X86-BMI1-NEXT:  # %bb.3:
; X86-BMI1-NEXT:    movl %eax, %edx
; X86-BMI1-NEXT:    xorl %eax, %eax
; X86-BMI1-NEXT:  .LBB9_4:
; X86-BMI1-NEXT:    addl $-1, %eax
; X86-BMI1-NEXT:    adcl $-1, %edx
; X86-BMI1-NEXT:    andl %esi, %eax
; X86-BMI1-NEXT:    andl %edi, %edx
; X86-BMI1-NEXT:    popl %esi
; X86-BMI1-NEXT:    popl %edi
; X86-BMI1-NEXT:    retl
;
; X86-BMI2-LABEL: bextr64_a1_indexzext:
; X86-BMI2:       # %bb.0:
; X86-BMI2-NEXT:    pushl %ebx
; X86-BMI2-NEXT:    pushl %edi
; X86-BMI2-NEXT:    pushl %esi
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT:    shrdl %cl, %eax, %esi
; X86-BMI2-NEXT:    shrxl %ecx, %eax, %edi
; X86-BMI2-NEXT:    testb $32, %cl
; X86-BMI2-NEXT:    je .LBB9_2
; X86-BMI2-NEXT:  # %bb.1:
; X86-BMI2-NEXT:    movl %edi, %esi
; X86-BMI2-NEXT:    xorl %edi, %edi
; X86-BMI2-NEXT:  .LBB9_2:
; X86-BMI2-NEXT:    movl $1, %eax
; X86-BMI2-NEXT:    xorl %edx, %edx
; X86-BMI2-NEXT:    movl %ebx, %ecx
; X86-BMI2-NEXT:    shldl %cl, %eax, %edx
; X86-BMI2-NEXT:    shlxl %ebx, %eax, %eax
; X86-BMI2-NEXT:    testb $32, %bl
; X86-BMI2-NEXT:    je .LBB9_4
; X86-BMI2-NEXT:  # %bb.3:
; X86-BMI2-NEXT:    movl %eax, %edx
; X86-BMI2-NEXT:    xorl %eax, %eax
; X86-BMI2-NEXT:  .LBB9_4:
; X86-BMI2-NEXT:    addl $-1, %eax
; X86-BMI2-NEXT:    adcl $-1, %edx
; X86-BMI2-NEXT:    andl %esi, %eax
; X86-BMI2-NEXT:    andl %edi, %edx
; X86-BMI2-NEXT:    popl %esi
; X86-BMI2-NEXT:    popl %edi
; X86-BMI2-NEXT:    popl %ebx
; X86-BMI2-NEXT:    retl
;
; X64-NOBMI-LABEL: bextr64_a1_indexzext:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %esi, %ecx
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT:    shrq %cl, %rdi
; X64-NOBMI-NEXT:    movl $1, %eax
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    shlq %cl, %rax
; X64-NOBMI-NEXT:    decq %rax
; X64-NOBMI-NEXT:    andq %rdi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI1-LABEL: bextr64_a1_indexzext:
; X64-BMI1:       # %bb.0:
; X64-BMI1-NEXT:    shll $8, %edx
; X64-BMI1-NEXT:    movzbl %sil, %eax
; X64-BMI1-NEXT:    orl %edx, %eax
; X64-BMI1-NEXT:    bextrq %rax, %rdi, %rax
; X64-BMI1-NEXT:    retq
;
; X64-BMI2-LABEL: bextr64_a1_indexzext:
; X64-BMI2:       # %bb.0:
; X64-BMI2-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-BMI2-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT:    bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT:    retq
  ; IR under test: pattern a) with zero-extended i8 shift/width amounts.
  %skip = zext i8 %numskipbits to i64
  %shifted = lshr i64 %val, %skip
  %conv = zext i8 %numlowbits to i64
  %onebit = shl i64 1, %conv
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %mask, %shifted
  ret i64 %masked
}
| |
define i64 @bextr64_a2_load(i64* %w, i64 %numskipbits, i64 %numlowbits) nounwind {
; Pattern a) where the value is loaded from memory: (*w >> skip) & ((1 << len) - 1).
; Checks that BEXTR/SHRX can fold the load on x86-64.
; X86-NOBMI-LABEL: bextr64_a2_load:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %edi
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl (%eax), %esi
; X86-NOBMI-NEXT:    movl 4(%eax), %eax
; X86-NOBMI-NEXT:    movl %eax, %edi
; X86-NOBMI-NEXT:    shrl %cl, %edi
; X86-NOBMI-NEXT:    shrdl %cl, %eax, %esi
; X86-NOBMI-NEXT:    testb $32, %cl
; X86-NOBMI-NEXT:    je .LBB10_2
; X86-NOBMI-NEXT:  # %bb.1:
; X86-NOBMI-NEXT:    movl %edi, %esi
; X86-NOBMI-NEXT:    xorl %edi, %edi
; X86-NOBMI-NEXT:  .LBB10_2:
; X86-NOBMI-NEXT:    movl $1, %eax
; X86-NOBMI-NEXT:    xorl %edx, %edx
; X86-NOBMI-NEXT:    movb %ch, %cl
; X86-NOBMI-NEXT:    shldl %cl, %eax, %edx
; X86-NOBMI-NEXT:    shll %cl, %eax
; X86-NOBMI-NEXT:    testb $32, %ch
; X86-NOBMI-NEXT:    je .LBB10_4
; X86-NOBMI-NEXT:  # %bb.3:
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    xorl %eax, %eax
; X86-NOBMI-NEXT:  .LBB10_4:
; X86-NOBMI-NEXT:    addl $-1, %eax
; X86-NOBMI-NEXT:    adcl $-1, %edx
; X86-NOBMI-NEXT:    andl %esi, %eax
; X86-NOBMI-NEXT:    andl %edi, %edx
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    popl %edi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI1-LABEL: bextr64_a2_load:
; X86-BMI1:       # %bb.0:
; X86-BMI1-NEXT:    pushl %edi
; X86-BMI1-NEXT:    pushl %esi
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT:    movl (%eax), %esi
; X86-BMI1-NEXT:    movl 4(%eax), %eax
; X86-BMI1-NEXT:    movl %eax, %edi
; X86-BMI1-NEXT:    shrl %cl, %edi
; X86-BMI1-NEXT:    shrdl %cl, %eax, %esi
; X86-BMI1-NEXT:    testb $32, %cl
; X86-BMI1-NEXT:    je .LBB10_2
; X86-BMI1-NEXT:  # %bb.1:
; X86-BMI1-NEXT:    movl %edi, %esi
; X86-BMI1-NEXT:    xorl %edi, %edi
; X86-BMI1-NEXT:  .LBB10_2:
; X86-BMI1-NEXT:    movl $1, %eax
; X86-BMI1-NEXT:    xorl %edx, %edx
; X86-BMI1-NEXT:    movb %ch, %cl
; X86-BMI1-NEXT:    shldl %cl, %eax, %edx
; X86-BMI1-NEXT:    shll %cl, %eax
; X86-BMI1-NEXT:    testb $32, %ch
; X86-BMI1-NEXT:    je .LBB10_4
; X86-BMI1-NEXT:  # %bb.3:
; X86-BMI1-NEXT:    movl %eax, %edx
; X86-BMI1-NEXT:    xorl %eax, %eax
; X86-BMI1-NEXT:  .LBB10_4:
; X86-BMI1-NEXT:    addl $-1, %eax
; X86-BMI1-NEXT:    adcl $-1, %edx
; X86-BMI1-NEXT:    andl %esi, %eax
; X86-BMI1-NEXT:    andl %edi, %edx
; X86-BMI1-NEXT:    popl %esi
; X86-BMI1-NEXT:    popl %edi
; X86-BMI1-NEXT:    retl
;
; X86-BMI2-LABEL: bextr64_a2_load:
; X86-BMI2:       # %bb.0:
; X86-BMI2-NEXT:    pushl %ebx
; X86-BMI2-NEXT:    pushl %edi
; X86-BMI2-NEXT:    pushl %esi
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT:    movl (%eax), %esi
; X86-BMI2-NEXT:    movl 4(%eax), %eax
; X86-BMI2-NEXT:    shrxl %ecx, %eax, %edi
; X86-BMI2-NEXT:    shrdl %cl, %eax, %esi
; X86-BMI2-NEXT:    testb $32, %cl
; X86-BMI2-NEXT:    je .LBB10_2
; X86-BMI2-NEXT:  # %bb.1:
; X86-BMI2-NEXT:    movl %edi, %esi
; X86-BMI2-NEXT:    xorl %edi, %edi
; X86-BMI2-NEXT:  .LBB10_2:
; X86-BMI2-NEXT:    movl $1, %eax
; X86-BMI2-NEXT:    xorl %edx, %edx
; X86-BMI2-NEXT:    movl %ebx, %ecx
; X86-BMI2-NEXT:    shldl %cl, %eax, %edx
; X86-BMI2-NEXT:    shlxl %ebx, %eax, %eax
; X86-BMI2-NEXT:    testb $32, %bl
; X86-BMI2-NEXT:    je .LBB10_4
; X86-BMI2-NEXT:  # %bb.3:
; X86-BMI2-NEXT:    movl %eax, %edx
; X86-BMI2-NEXT:    xorl %eax, %eax
; X86-BMI2-NEXT:  .LBB10_4:
; X86-BMI2-NEXT:    addl $-1, %eax
; X86-BMI2-NEXT:    adcl $-1, %edx
; X86-BMI2-NEXT:    andl %esi, %eax
; X86-BMI2-NEXT:    andl %edi, %edx
; X86-BMI2-NEXT:    popl %esi
; X86-BMI2-NEXT:    popl %edi
; X86-BMI2-NEXT:    popl %ebx
; X86-BMI2-NEXT:    retl
;
; X64-NOBMI-LABEL: bextr64_a2_load:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movq %rsi, %rcx
; X64-NOBMI-NEXT:    movq (%rdi), %rsi
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT:    shrq %cl, %rsi
; X64-NOBMI-NEXT:    movl $1, %eax
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    shlq %cl, %rax
; X64-NOBMI-NEXT:    decq %rax
; X64-NOBMI-NEXT:    andq %rsi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI1-LABEL: bextr64_a2_load:
; X64-BMI1:       # %bb.0:
; X64-BMI1-NEXT:    shll $8, %edx
; X64-BMI1-NEXT:    movzbl %sil, %eax
; X64-BMI1-NEXT:    orl %edx, %eax
; X64-BMI1-NEXT:    bextrq %rax, (%rdi), %rax
; X64-BMI1-NEXT:    retq
;
; X64-BMI2-LABEL: bextr64_a2_load:
; X64-BMI2:       # %bb.0:
; X64-BMI2-NEXT:    shrxq %rsi, (%rdi), %rax
; X64-BMI2-NEXT:    bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT:    retq
  ; IR under test: pattern a) applied to a loaded value.
  %val = load i64, i64* %w
  %shifted = lshr i64 %val, %numskipbits
  %onebit = shl i64 1, %numlowbits
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %mask, %shifted
  ret i64 %masked
}
| |
define i64 @bextr64_a3_load_indexzext(i64* %w, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind {
; Pattern a) combining a loaded value with i8-zext'd bit counts:
; (*w >> zext(skip)) & ((1 << zext(len)) - 1).
; X86-NOBMI-LABEL: bextr64_a3_load_indexzext:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %edi
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl (%eax), %esi
; X86-NOBMI-NEXT:    movl 4(%eax), %eax
; X86-NOBMI-NEXT:    movl %eax, %edi
; X86-NOBMI-NEXT:    shrl %cl, %edi
; X86-NOBMI-NEXT:    shrdl %cl, %eax, %esi
; X86-NOBMI-NEXT:    testb $32, %cl
; X86-NOBMI-NEXT:    je .LBB11_2
; X86-NOBMI-NEXT:  # %bb.1:
; X86-NOBMI-NEXT:    movl %edi, %esi
; X86-NOBMI-NEXT:    xorl %edi, %edi
; X86-NOBMI-NEXT:  .LBB11_2:
; X86-NOBMI-NEXT:    movl $1, %eax
; X86-NOBMI-NEXT:    xorl %edx, %edx
; X86-NOBMI-NEXT:    movb %ch, %cl
; X86-NOBMI-NEXT:    shldl %cl, %eax, %edx
; X86-NOBMI-NEXT:    shll %cl, %eax
; X86-NOBMI-NEXT:    testb $32, %ch
; X86-NOBMI-NEXT:    je .LBB11_4
; X86-NOBMI-NEXT:  # %bb.3:
; X86-NOBMI-NEXT:    movl %eax, %edx
; X86-NOBMI-NEXT:    xorl %eax, %eax
; X86-NOBMI-NEXT:  .LBB11_4:
; X86-NOBMI-NEXT:    addl $-1, %eax
; X86-NOBMI-NEXT:    adcl $-1, %edx
; X86-NOBMI-NEXT:    andl %esi, %eax
; X86-NOBMI-NEXT:    andl %edi, %edx
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    popl %edi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI1-LABEL: bextr64_a3_load_indexzext:
; X86-BMI1:       # %bb.0:
; X86-BMI1-NEXT:    pushl %edi
; X86-BMI1-NEXT:    pushl %esi
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT:    movl (%eax), %esi
; X86-BMI1-NEXT:    movl 4(%eax), %eax
; X86-BMI1-NEXT:    movl %eax, %edi
; X86-BMI1-NEXT:    shrl %cl, %edi
; X86-BMI1-NEXT:    shrdl %cl, %eax, %esi
; X86-BMI1-NEXT:    testb $32, %cl
; X86-BMI1-NEXT:    je .LBB11_2
; X86-BMI1-NEXT:  # %bb.1:
; X86-BMI1-NEXT:    movl %edi, %esi
; X86-BMI1-NEXT:    xorl %edi, %edi
; X86-BMI1-NEXT:  .LBB11_2:
; X86-BMI1-NEXT:    movl $1, %eax
; X86-BMI1-NEXT:    xorl %edx, %edx
; X86-BMI1-NEXT:    movb %ch, %cl
; X86-BMI1-NEXT:    shldl %cl, %eax, %edx
; X86-BMI1-NEXT:    shll %cl, %eax
; X86-BMI1-NEXT:    testb $32, %ch
; X86-BMI1-NEXT:    je .LBB11_4
; X86-BMI1-NEXT:  # %bb.3:
; X86-BMI1-NEXT:    movl %eax, %edx
; X86-BMI1-NEXT:    xorl %eax, %eax
; X86-BMI1-NEXT:  .LBB11_4:
; X86-BMI1-NEXT:    addl $-1, %eax
; X86-BMI1-NEXT:    adcl $-1, %edx
; X86-BMI1-NEXT:    andl %esi, %eax
; X86-BMI1-NEXT:    andl %edi, %edx
; X86-BMI1-NEXT:    popl %esi
; X86-BMI1-NEXT:    popl %edi
; X86-BMI1-NEXT:    retl
;
; X86-BMI2-LABEL: bextr64_a3_load_indexzext:
; X86-BMI2:       # %bb.0:
; X86-BMI2-NEXT:    pushl %ebx
; X86-BMI2-NEXT:    pushl %edi
; X86-BMI2-NEXT:    pushl %esi
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT:    movl (%eax), %esi
; X86-BMI2-NEXT:    movl 4(%eax), %eax
; X86-BMI2-NEXT:    shrxl %ecx, %eax, %edi
; X86-BMI2-NEXT:    shrdl %cl, %eax, %esi
; X86-BMI2-NEXT:    testb $32, %cl
; X86-BMI2-NEXT:    je .LBB11_2
; X86-BMI2-NEXT:  # %bb.1:
; X86-BMI2-NEXT:    movl %edi, %esi
; X86-BMI2-NEXT:    xorl %edi, %edi
; X86-BMI2-NEXT:  .LBB11_2:
; X86-BMI2-NEXT:    movl $1, %eax
; X86-BMI2-NEXT:    xorl %edx, %edx
; X86-BMI2-NEXT:    movl %ebx, %ecx
; X86-BMI2-NEXT:    shldl %cl, %eax, %edx
; X86-BMI2-NEXT:    shlxl %ebx, %eax, %eax
; X86-BMI2-NEXT:    testb $32, %bl
; X86-BMI2-NEXT:    je .LBB11_4
; X86-BMI2-NEXT:  # %bb.3:
; X86-BMI2-NEXT:    movl %eax, %edx
; X86-BMI2-NEXT:    xorl %eax, %eax
; X86-BMI2-NEXT:  .LBB11_4:
; X86-BMI2-NEXT:    addl $-1, %eax
; X86-BMI2-NEXT:    adcl $-1, %edx
; X86-BMI2-NEXT:    andl %esi, %eax
; X86-BMI2-NEXT:    andl %edi, %edx
; X86-BMI2-NEXT:    popl %esi
; X86-BMI2-NEXT:    popl %edi
; X86-BMI2-NEXT:    popl %ebx
; X86-BMI2-NEXT:    retl
;
; X64-NOBMI-LABEL: bextr64_a3_load_indexzext:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movl %esi, %ecx
; X64-NOBMI-NEXT:    movq (%rdi), %rsi
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT:    shrq %cl, %rsi
; X64-NOBMI-NEXT:    movl $1, %eax
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    shlq %cl, %rax
; X64-NOBMI-NEXT:    decq %rax
; X64-NOBMI-NEXT:    andq %rsi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI1-LABEL: bextr64_a3_load_indexzext:
; X64-BMI1:       # %bb.0:
; X64-BMI1-NEXT:    shll $8, %edx
; X64-BMI1-NEXT:    movzbl %sil, %eax
; X64-BMI1-NEXT:    orl %edx, %eax
; X64-BMI1-NEXT:    bextrq %rax, (%rdi), %rax
; X64-BMI1-NEXT:    retq
;
; X64-BMI2-LABEL: bextr64_a3_load_indexzext:
; X64-BMI2:       # %bb.0:
; X64-BMI2-NEXT:    # kill: def $edx killed $edx def $rdx
; X64-BMI2-NEXT:    # kill: def $esi killed $esi def $rsi
; X64-BMI2-NEXT:    shrxq %rsi, (%rdi), %rax
; X64-BMI2-NEXT:    bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT:    retq
  ; IR under test: pattern a) on a loaded value with zero-extended i8 counts.
  %val = load i64, i64* %w
  %skip = zext i8 %numskipbits to i64
  %shifted = lshr i64 %val, %skip
  %conv = zext i8 %numlowbits to i64
  %onebit = shl i64 1, %conv
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %mask, %shifted
  ret i64 %masked
}
| |
define i64 @bextr64_a4_commutative(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; Pattern a) with the operands of the final 'and' swapped
; ((shifted & mask) instead of (mask & shifted)) - the match must be commutative.
; X86-NOBMI-LABEL: bextr64_a4_commutative:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %edi
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT:    movl %esi, %edx
; X86-NOBMI-NEXT:    shrl %cl, %edx
; X86-NOBMI-NEXT:    shrdl %cl, %esi, %eax
; X86-NOBMI-NEXT:    testb $32, %cl
; X86-NOBMI-NEXT:    je .LBB12_2
; X86-NOBMI-NEXT:  # %bb.1:
; X86-NOBMI-NEXT:    movl %edx, %eax
; X86-NOBMI-NEXT:    xorl %edx, %edx
; X86-NOBMI-NEXT:  .LBB12_2:
; X86-NOBMI-NEXT:    movl $1, %esi
; X86-NOBMI-NEXT:    xorl %edi, %edi
; X86-NOBMI-NEXT:    movb %ch, %cl
; X86-NOBMI-NEXT:    shldl %cl, %esi, %edi
; X86-NOBMI-NEXT:    shll %cl, %esi
; X86-NOBMI-NEXT:    testb $32, %ch
; X86-NOBMI-NEXT:    je .LBB12_4
; X86-NOBMI-NEXT:  # %bb.3:
; X86-NOBMI-NEXT:    movl %esi, %edi
; X86-NOBMI-NEXT:    xorl %esi, %esi
; X86-NOBMI-NEXT:  .LBB12_4:
; X86-NOBMI-NEXT:    addl $-1, %esi
; X86-NOBMI-NEXT:    adcl $-1, %edi
; X86-NOBMI-NEXT:    andl %esi, %eax
; X86-NOBMI-NEXT:    andl %edi, %edx
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    popl %edi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI1-LABEL: bextr64_a4_commutative:
; X86-BMI1:       # %bb.0:
; X86-BMI1-NEXT:    pushl %edi
; X86-BMI1-NEXT:    pushl %esi
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %ch
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT:    movl %esi, %edx
; X86-BMI1-NEXT:    shrl %cl, %edx
; X86-BMI1-NEXT:    shrdl %cl, %esi, %eax
; X86-BMI1-NEXT:    testb $32, %cl
; X86-BMI1-NEXT:    je .LBB12_2
; X86-BMI1-NEXT:  # %bb.1:
; X86-BMI1-NEXT:    movl %edx, %eax
; X86-BMI1-NEXT:    xorl %edx, %edx
; X86-BMI1-NEXT:  .LBB12_2:
; X86-BMI1-NEXT:    movl $1, %esi
; X86-BMI1-NEXT:    xorl %edi, %edi
; X86-BMI1-NEXT:    movb %ch, %cl
; X86-BMI1-NEXT:    shldl %cl, %esi, %edi
; X86-BMI1-NEXT:    shll %cl, %esi
; X86-BMI1-NEXT:    testb $32, %ch
; X86-BMI1-NEXT:    je .LBB12_4
; X86-BMI1-NEXT:  # %bb.3:
; X86-BMI1-NEXT:    movl %esi, %edi
; X86-BMI1-NEXT:    xorl %esi, %esi
; X86-BMI1-NEXT:  .LBB12_4:
; X86-BMI1-NEXT:    addl $-1, %esi
; X86-BMI1-NEXT:    adcl $-1, %edi
; X86-BMI1-NEXT:    andl %esi, %eax
; X86-BMI1-NEXT:    andl %edi, %edx
; X86-BMI1-NEXT:    popl %esi
; X86-BMI1-NEXT:    popl %edi
; X86-BMI1-NEXT:    retl
;
; X86-BMI2-LABEL: bextr64_a4_commutative:
; X86-BMI2:       # %bb.0:
; X86-BMI2-NEXT:    pushl %ebx
; X86-BMI2-NEXT:    pushl %edi
; X86-BMI2-NEXT:    pushl %esi
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT:    shrdl %cl, %edx, %eax
; X86-BMI2-NEXT:    shrxl %ecx, %edx, %edx
; X86-BMI2-NEXT:    testb $32, %cl
; X86-BMI2-NEXT:    je .LBB12_2
; X86-BMI2-NEXT:  # %bb.1:
; X86-BMI2-NEXT:    movl %edx, %eax
; X86-BMI2-NEXT:    xorl %edx, %edx
; X86-BMI2-NEXT:  .LBB12_2:
; X86-BMI2-NEXT:    movl $1, %edi
; X86-BMI2-NEXT:    xorl %esi, %esi
; X86-BMI2-NEXT:    movl %ebx, %ecx
; X86-BMI2-NEXT:    shldl %cl, %edi, %esi
; X86-BMI2-NEXT:    shlxl %ebx, %edi, %ecx
; X86-BMI2-NEXT:    testb $32, %bl
; X86-BMI2-NEXT:    je .LBB12_4
; X86-BMI2-NEXT:  # %bb.3:
; X86-BMI2-NEXT:    movl %ecx, %esi
; X86-BMI2-NEXT:    xorl %ecx, %ecx
; X86-BMI2-NEXT:  .LBB12_4:
; X86-BMI2-NEXT:    addl $-1, %ecx
; X86-BMI2-NEXT:    adcl $-1, %esi
; X86-BMI2-NEXT:    andl %ecx, %eax
; X86-BMI2-NEXT:    andl %esi, %edx
; X86-BMI2-NEXT:    popl %esi
; X86-BMI2-NEXT:    popl %edi
; X86-BMI2-NEXT:    popl %ebx
; X86-BMI2-NEXT:    retl
;
; X64-NOBMI-LABEL: bextr64_a4_commutative:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movq %rsi, %rcx
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT:    shrq %cl, %rdi
; X64-NOBMI-NEXT:    movl $1, %eax
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    shlq %cl, %rax
; X64-NOBMI-NEXT:    decq %rax
; X64-NOBMI-NEXT:    andq %rdi, %rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI1-LABEL: bextr64_a4_commutative:
; X64-BMI1:       # %bb.0:
; X64-BMI1-NEXT:    shll $8, %edx
; X64-BMI1-NEXT:    movzbl %sil, %eax
; X64-BMI1-NEXT:    orl %edx, %eax
; X64-BMI1-NEXT:    bextrq %rax, %rdi, %rax
; X64-BMI1-NEXT:    retq
;
; X64-BMI2-LABEL: bextr64_a4_commutative:
; X64-BMI2:       # %bb.0:
; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT:    bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT:    retq
  ; IR under test: pattern a) with commuted 'and' operands.
  %shifted = lshr i64 %val, %numskipbits
  %onebit = shl i64 1, %numlowbits
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %shifted, %mask ; swapped order
  ret i64 %masked
}
| |
define i64 @bextr64_a5_skipextrauses(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; Pattern a) where %numskipbits has an extra use (passed to @use64), so the
; shift amount must stay live across the extraction; the fold must still fire.
; X86-NOBMI-LABEL: bextr64_a5_skipextrauses:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %ebp
; X86-NOBMI-NEXT:    pushl %ebx
; X86-NOBMI-NEXT:    pushl %edi
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    subl $12, %esp
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl %esi, %ebp
; X86-NOBMI-NEXT:    movl %eax, %ecx
; X86-NOBMI-NEXT:    shrl %cl, %ebp
; X86-NOBMI-NEXT:    shrdl %cl, %esi, %ebx
; X86-NOBMI-NEXT:    testb $32, %al
; X86-NOBMI-NEXT:    je .LBB13_2
; X86-NOBMI-NEXT:  # %bb.1:
; X86-NOBMI-NEXT:    movl %ebp, %ebx
; X86-NOBMI-NEXT:    xorl %ebp, %ebp
; X86-NOBMI-NEXT:  .LBB13_2:
; X86-NOBMI-NEXT:    movl $1, %esi
; X86-NOBMI-NEXT:    xorl %edi, %edi
; X86-NOBMI-NEXT:    movl %edx, %ecx
; X86-NOBMI-NEXT:    shldl %cl, %esi, %edi
; X86-NOBMI-NEXT:    shll %cl, %esi
; X86-NOBMI-NEXT:    testb $32, %dl
; X86-NOBMI-NEXT:    je .LBB13_4
; X86-NOBMI-NEXT:  # %bb.3:
; X86-NOBMI-NEXT:    movl %esi, %edi
; X86-NOBMI-NEXT:    xorl %esi, %esi
; X86-NOBMI-NEXT:  .LBB13_4:
; X86-NOBMI-NEXT:    addl $-1, %esi
; X86-NOBMI-NEXT:    adcl $-1, %edi
; X86-NOBMI-NEXT:    andl %ebx, %esi
; X86-NOBMI-NEXT:    andl %ebp, %edi
; X86-NOBMI-NEXT:    subl $8, %esp
; X86-NOBMI-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-NOBMI-NEXT:    pushl %eax
; X86-NOBMI-NEXT:    calll use64@PLT
; X86-NOBMI-NEXT:    addl $16, %esp
; X86-NOBMI-NEXT:    movl %esi, %eax
; X86-NOBMI-NEXT:    movl %edi, %edx
; X86-NOBMI-NEXT:    addl $12, %esp
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    popl %edi
; X86-NOBMI-NEXT:    popl %ebx
; X86-NOBMI-NEXT:    popl %ebp
; X86-NOBMI-NEXT:    retl
;
; X86-BMI1-LABEL: bextr64_a5_skipextrauses:
; X86-BMI1:       # %bb.0:
; X86-BMI1-NEXT:    pushl %ebp
; X86-BMI1-NEXT:    pushl %ebx
; X86-BMI1-NEXT:    pushl %edi
; X86-BMI1-NEXT:    pushl %esi
; X86-BMI1-NEXT:    subl $12, %esp
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %dl
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT:    movl %esi, %ebp
; X86-BMI1-NEXT:    movl %eax, %ecx
; X86-BMI1-NEXT:    shrl %cl, %ebp
; X86-BMI1-NEXT:    shrdl %cl, %esi, %ebx
; X86-BMI1-NEXT:    testb $32, %al
; X86-BMI1-NEXT:    je .LBB13_2
; X86-BMI1-NEXT:  # %bb.1:
; X86-BMI1-NEXT:    movl %ebp, %ebx
; X86-BMI1-NEXT:    xorl %ebp, %ebp
; X86-BMI1-NEXT:  .LBB13_2:
; X86-BMI1-NEXT:    movl $1, %esi
; X86-BMI1-NEXT:    xorl %edi, %edi
; X86-BMI1-NEXT:    movl %edx, %ecx
; X86-BMI1-NEXT:    shldl %cl, %esi, %edi
; X86-BMI1-NEXT:    shll %cl, %esi
; X86-BMI1-NEXT:    testb $32, %dl
; X86-BMI1-NEXT:    je .LBB13_4
; X86-BMI1-NEXT:  # %bb.3:
; X86-BMI1-NEXT:    movl %esi, %edi
; X86-BMI1-NEXT:    xorl %esi, %esi
; X86-BMI1-NEXT:  .LBB13_4:
; X86-BMI1-NEXT:    addl $-1, %esi
; X86-BMI1-NEXT:    adcl $-1, %edi
; X86-BMI1-NEXT:    andl %ebx, %esi
; X86-BMI1-NEXT:    andl %ebp, %edi
; X86-BMI1-NEXT:    subl $8, %esp
; X86-BMI1-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-BMI1-NEXT:    pushl %eax
; X86-BMI1-NEXT:    calll use64@PLT
; X86-BMI1-NEXT:    addl $16, %esp
; X86-BMI1-NEXT:    movl %esi, %eax
; X86-BMI1-NEXT:    movl %edi, %edx
; X86-BMI1-NEXT:    addl $12, %esp
; X86-BMI1-NEXT:    popl %esi
; X86-BMI1-NEXT:    popl %edi
; X86-BMI1-NEXT:    popl %ebx
; X86-BMI1-NEXT:    popl %ebp
; X86-BMI1-NEXT:    retl
;
; X86-BMI2-LABEL: bextr64_a5_skipextrauses:
; X86-BMI2:       # %bb.0:
; X86-BMI2-NEXT:    pushl %ebp
; X86-BMI2-NEXT:    pushl %ebx
; X86-BMI2-NEXT:    pushl %edi
; X86-BMI2-NEXT:    pushl %esi
; X86-BMI2-NEXT:    subl $12, %esp
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %dl
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %ebx
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT:    movl %eax, %ecx
; X86-BMI2-NEXT:    shrdl %cl, %esi, %ebx
; X86-BMI2-NEXT:    shrxl %eax, %esi, %ebp
; X86-BMI2-NEXT:    testb $32, %al
; X86-BMI2-NEXT:    je .LBB13_2
; X86-BMI2-NEXT:  # %bb.1:
; X86-BMI2-NEXT:    movl %ebp, %ebx
; X86-BMI2-NEXT:    xorl %ebp, %ebp
; X86-BMI2-NEXT:  .LBB13_2:
; X86-BMI2-NEXT:    movl $1, %edi
; X86-BMI2-NEXT:    xorl %esi, %esi
; X86-BMI2-NEXT:    movl %edx, %ecx
; X86-BMI2-NEXT:    shldl %cl, %edi, %esi
; X86-BMI2-NEXT:    shlxl %edx, %edi, %edi
; X86-BMI2-NEXT:    testb $32, %dl
; X86-BMI2-NEXT:    je .LBB13_4
; X86-BMI2-NEXT:  # %bb.3:
; X86-BMI2-NEXT:    movl %edi, %esi
; X86-BMI2-NEXT:    xorl %edi, %edi
; X86-BMI2-NEXT:  .LBB13_4:
; X86-BMI2-NEXT:    addl $-1, %edi
; X86-BMI2-NEXT:    adcl $-1, %esi
; X86-BMI2-NEXT:    andl %ebx, %edi
; X86-BMI2-NEXT:    andl %ebp, %esi
; X86-BMI2-NEXT:    subl $8, %esp
; X86-BMI2-NEXT:    pushl {{[0-9]+}}(%esp)
; X86-BMI2-NEXT:    pushl %eax
; X86-BMI2-NEXT:    calll use64@PLT
; X86-BMI2-NEXT:    addl $16, %esp
; X86-BMI2-NEXT:    movl %edi, %eax
; X86-BMI2-NEXT:    movl %esi, %edx
; X86-BMI2-NEXT:    addl $12, %esp
; X86-BMI2-NEXT:    popl %esi
; X86-BMI2-NEXT:    popl %edi
; X86-BMI2-NEXT:    popl %ebx
; X86-BMI2-NEXT:    popl %ebp
; X86-BMI2-NEXT:    retl
;
; X64-NOBMI-LABEL: bextr64_a5_skipextrauses:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    pushq %rbx
; X64-NOBMI-NEXT:    movl %esi, %ecx
; X64-NOBMI-NEXT:    shrq %cl, %rdi
; X64-NOBMI-NEXT:    movl $1, %ebx
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    shlq %cl, %rbx
; X64-NOBMI-NEXT:    decq %rbx
; X64-NOBMI-NEXT:    andq %rdi, %rbx
; X64-NOBMI-NEXT:    movq %rsi, %rdi
; X64-NOBMI-NEXT:    callq use64@PLT
; X64-NOBMI-NEXT:    movq %rbx, %rax
; X64-NOBMI-NEXT:    popq %rbx
; X64-NOBMI-NEXT:    retq
;
; X64-BMI1-LABEL: bextr64_a5_skipextrauses:
; X64-BMI1:       # %bb.0:
; X64-BMI1-NEXT:    pushq %rbx
; X64-BMI1-NEXT:    shll $8, %edx
; X64-BMI1-NEXT:    movzbl %sil, %eax
; X64-BMI1-NEXT:    orl %edx, %eax
; X64-BMI1-NEXT:    bextrq %rax, %rdi, %rbx
; X64-BMI1-NEXT:    movq %rsi, %rdi
; X64-BMI1-NEXT:    callq use64@PLT
; X64-BMI1-NEXT:    movq %rbx, %rax
; X64-BMI1-NEXT:    popq %rbx
; X64-BMI1-NEXT:    retq
;
; X64-BMI2-LABEL: bextr64_a5_skipextrauses:
; X64-BMI2:       # %bb.0:
; X64-BMI2-NEXT:    pushq %rbx
; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT:    bzhiq %rdx, %rax, %rbx
; X64-BMI2-NEXT:    movq %rsi, %rdi
; X64-BMI2-NEXT:    callq use64@PLT
; X64-BMI2-NEXT:    movq %rbx, %rax
; X64-BMI2-NEXT:    popq %rbx
; X64-BMI2-NEXT:    retq
  ; IR under test: pattern a) with an extra use of the skip amount.
  %shifted = lshr i64 %val, %numskipbits
  %onebit = shl i64 1, %numlowbits
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %mask, %shifted
  call void @use64(i64 %numskipbits)
  ret i64 %masked
}
| |
| ; 64-bit, but with 32-bit output |
| |
| ; Everything done in 64-bit, truncation happens last. |
define i32 @bextr64_32_a0(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; Pattern a) computed entirely in 64 bits, with the result truncated to i32
; last; x86-64 BMI2 should narrow the mask step to a 32-bit bzhil.
; X86-NOBMI-LABEL: bextr64_32_a0:
; X86-NOBMI:       # %bb.0:
; X86-NOBMI-NEXT:    pushl %edi
; X86-NOBMI-NEXT:    pushl %esi
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl
; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NOBMI-NEXT:    movl %edi, %esi
; X86-NOBMI-NEXT:    shrl %cl, %esi
; X86-NOBMI-NEXT:    shrdl %cl, %edi, %eax
; X86-NOBMI-NEXT:    testb $32, %cl
; X86-NOBMI-NEXT:    jne .LBB14_2
; X86-NOBMI-NEXT:  # %bb.1:
; X86-NOBMI-NEXT:    movl %eax, %esi
; X86-NOBMI-NEXT:  .LBB14_2:
; X86-NOBMI-NEXT:    movl $1, %edi
; X86-NOBMI-NEXT:    movl %edx, %ecx
; X86-NOBMI-NEXT:    shll %cl, %edi
; X86-NOBMI-NEXT:    xorl %eax, %eax
; X86-NOBMI-NEXT:    testb $32, %dl
; X86-NOBMI-NEXT:    jne .LBB14_4
; X86-NOBMI-NEXT:  # %bb.3:
; X86-NOBMI-NEXT:    movl %edi, %eax
; X86-NOBMI-NEXT:  .LBB14_4:
; X86-NOBMI-NEXT:    decl %eax
; X86-NOBMI-NEXT:    andl %esi, %eax
; X86-NOBMI-NEXT:    popl %esi
; X86-NOBMI-NEXT:    popl %edi
; X86-NOBMI-NEXT:    retl
;
; X86-BMI1-LABEL: bextr64_32_a0:
; X86-BMI1:       # %bb.0:
; X86-BMI1-NEXT:    pushl %edi
; X86-BMI1-NEXT:    pushl %esi
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %dl
; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-BMI1-NEXT:    movl %edi, %esi
; X86-BMI1-NEXT:    shrl %cl, %esi
; X86-BMI1-NEXT:    shrdl %cl, %edi, %eax
; X86-BMI1-NEXT:    testb $32, %cl
; X86-BMI1-NEXT:    jne .LBB14_2
; X86-BMI1-NEXT:  # %bb.1:
; X86-BMI1-NEXT:    movl %eax, %esi
; X86-BMI1-NEXT:  .LBB14_2:
; X86-BMI1-NEXT:    movl $1, %edi
; X86-BMI1-NEXT:    movl %edx, %ecx
; X86-BMI1-NEXT:    shll %cl, %edi
; X86-BMI1-NEXT:    xorl %eax, %eax
; X86-BMI1-NEXT:    testb $32, %dl
; X86-BMI1-NEXT:    jne .LBB14_4
; X86-BMI1-NEXT:  # %bb.3:
; X86-BMI1-NEXT:    movl %edi, %eax
; X86-BMI1-NEXT:  .LBB14_4:
; X86-BMI1-NEXT:    decl %eax
; X86-BMI1-NEXT:    andl %esi, %eax
; X86-BMI1-NEXT:    popl %esi
; X86-BMI1-NEXT:    popl %edi
; X86-BMI1-NEXT:    retl
;
; X86-BMI2-LABEL: bextr64_32_a0:
; X86-BMI2:       # %bb.0:
; X86-BMI2-NEXT:    pushl %ebx
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT:    shrdl %cl, %eax, %edx
; X86-BMI2-NEXT:    testb $32, %cl
; X86-BMI2-NEXT:    je .LBB14_2
; X86-BMI2-NEXT:  # %bb.1:
; X86-BMI2-NEXT:    shrxl %ecx, %eax, %edx
; X86-BMI2-NEXT:  .LBB14_2:
; X86-BMI2-NEXT:    xorl %eax, %eax
; X86-BMI2-NEXT:    testb $32, %bl
; X86-BMI2-NEXT:    jne .LBB14_4
; X86-BMI2-NEXT:  # %bb.3:
; X86-BMI2-NEXT:    movl $1, %eax
; X86-BMI2-NEXT:    shlxl %ebx, %eax, %eax
; X86-BMI2-NEXT:  .LBB14_4:
; X86-BMI2-NEXT:    decl %eax
; X86-BMI2-NEXT:    andl %edx, %eax
; X86-BMI2-NEXT:    popl %ebx
; X86-BMI2-NEXT:    retl
;
; X64-NOBMI-LABEL: bextr64_32_a0:
; X64-NOBMI:       # %bb.0:
; X64-NOBMI-NEXT:    movq %rsi, %rcx
; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT:    shrq %cl, %rdi
; X64-NOBMI-NEXT:    movl $1, %eax
; X64-NOBMI-NEXT:    movl %edx, %ecx
; X64-NOBMI-NEXT:    shlq %cl, %rax
; X64-NOBMI-NEXT:    decl %eax
; X64-NOBMI-NEXT:    andl %edi, %eax
; X64-NOBMI-NEXT:    # kill: def $eax killed $eax killed $rax
; X64-NOBMI-NEXT:    retq
;
; X64-BMI1-LABEL: bextr64_32_a0:
; X64-BMI1:       # %bb.0:
; X64-BMI1-NEXT:    shll $8, %edx
; X64-BMI1-NEXT:    movzbl %sil, %eax
; X64-BMI1-NEXT:    orl %edx, %eax
; X64-BMI1-NEXT:    bextrq %rax, %rdi, %rax
; X64-BMI1-NEXT:    # kill: def $eax killed $eax killed $rax
; X64-BMI1-NEXT:    retq
;
; X64-BMI2-LABEL: bextr64_32_a0:
; X64-BMI2:       # %bb.0:
; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax
; X64-BMI2-NEXT:    retq
  ; IR under test: 64-bit pattern a), truncated to i32 at the very end.
  %shifted = lshr i64 %val, %numskipbits
  %onebit = shl i64 1, %numlowbits
  %mask = add nsw i64 %onebit, -1
  %masked = and i64 %mask, %shifted
  %res = trunc i64 %masked to i32
  ret i32 %res
}
| |
| ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. |
| define i32 @bextr64_32_a1(i64 %val, i64 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_a1: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %edi |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT:    movl %edi, %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    shrdl %cl, %edi, %eax |
| ; X86-NOBMI-NEXT:    testb $32, %cl |
| ; X86-NOBMI-NEXT:    jne .LBB15_2 |
| ; X86-NOBMI-NEXT:  # %bb.1: |
| ; X86-NOBMI-NEXT:    movl %eax, %esi |
| ; X86-NOBMI-NEXT:  .LBB15_2: |
| ; X86-NOBMI-NEXT:    movl $1, %eax |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    decl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    popl %edi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_a1: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    pushl %edi |
| ; X86-BMI1-NEXT:    pushl %esi |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT:    movl %edi, %edx |
| ; X86-BMI1-NEXT:    shrl %cl, %edx |
| ; X86-BMI1-NEXT:    shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT:    testb $32, %cl |
| ; X86-BMI1-NEXT:    jne .LBB15_2 |
| ; X86-BMI1-NEXT:  # %bb.1: |
| ; X86-BMI1-NEXT:    movl %esi, %edx |
| ; X86-BMI1-NEXT:  .LBB15_2: |
| ; X86-BMI1-NEXT:    shll $8, %eax |
| ; X86-BMI1-NEXT:    bextrl %eax, %edx, %eax |
| ; X86-BMI1-NEXT:    popl %esi |
| ; X86-BMI1-NEXT:    popl %edi |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_a1: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    pushl %esi |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT:    shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT:    testb $32, %cl |
| ; X86-BMI2-NEXT:    je .LBB15_2 |
| ; X86-BMI2-NEXT:  # %bb.1: |
| ; X86-BMI2-NEXT:    shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT:  .LBB15_2: |
| ; X86-BMI2-NEXT:    bzhil %eax, %edx, %eax |
| ; X86-BMI2-NEXT:    popl %esi |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_a1: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movq %rsi, %rcx |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT:    shrq %cl, %rdi |
| ; X64-NOBMI-NEXT:    movl $1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    decl %eax |
| ; X64-NOBMI-NEXT:    andl %edi, %eax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_a1: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT:    # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_a1: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; Pattern a with the i64 shift result truncated to i32 BEFORE the 32-bit |
| ; mask ((1 << nbits) - 1) is applied; the extract should still be formed. |
|   %shifted = lshr i64 %val, %numskipbits |
|   %truncshifted = trunc i64 %shifted to i32 |
|   %onebit = shl i32 1, %numlowbits |
|   %mask = add nsw i32 %onebit, -1 |
|   %masked = and i32 %mask, %truncshifted |
|   ret i32 %masked |
| } |
| |
| ; Shifting happens in 64-bit, then truncation (with extra use). |
| ; Masking is 32-bit. |
| define i32 @bextr64_32_a1_trunc_extrause(i64 %val, i64 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_a1_trunc_extrause: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %ebx |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    pushl %eax |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %bl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %edx |
| ; X86-NOBMI-NEXT:    movl %edx, %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    shrdl %cl, %edx, %eax |
| ; X86-NOBMI-NEXT:    testb $32, %cl |
| ; X86-NOBMI-NEXT:    jne .LBB16_2 |
| ; X86-NOBMI-NEXT:  # %bb.1: |
| ; X86-NOBMI-NEXT:    movl %eax, %esi |
| ; X86-NOBMI-NEXT:  .LBB16_2: |
| ; X86-NOBMI-NEXT:    movl %esi, (%esp) |
| ; X86-NOBMI-NEXT:    calll use32@PLT |
| ; X86-NOBMI-NEXT:    movl $1, %eax |
| ; X86-NOBMI-NEXT:    movl %ebx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    decl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    addl $4, %esp |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    popl %ebx |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_a1_trunc_extrause: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    pushl %ebx |
| ; X86-BMI1-NEXT:    pushl %esi |
| ; X86-BMI1-NEXT:    pushl %eax |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT:    movl %edx, %esi |
| ; X86-BMI1-NEXT:    shrl %cl, %esi |
| ; X86-BMI1-NEXT:    shrdl %cl, %edx, %eax |
| ; X86-BMI1-NEXT:    testb $32, %cl |
| ; X86-BMI1-NEXT:    jne .LBB16_2 |
| ; X86-BMI1-NEXT:  # %bb.1: |
| ; X86-BMI1-NEXT:    movl %eax, %esi |
| ; X86-BMI1-NEXT:  .LBB16_2: |
| ; X86-BMI1-NEXT:    movl %esi, (%esp) |
| ; X86-BMI1-NEXT:    calll use32@PLT |
| ; X86-BMI1-NEXT:    shll $8, %ebx |
| ; X86-BMI1-NEXT:    bextrl %ebx, %esi, %eax |
| ; X86-BMI1-NEXT:    addl $4, %esp |
| ; X86-BMI1-NEXT:    popl %esi |
| ; X86-BMI1-NEXT:    popl %ebx |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_a1_trunc_extrause: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    pushl %ebx |
| ; X86-BMI2-NEXT:    pushl %esi |
| ; X86-BMI2-NEXT:    pushl %eax |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT:    shrdl %cl, %eax, %esi |
| ; X86-BMI2-NEXT:    testb $32, %cl |
| ; X86-BMI2-NEXT:    je .LBB16_2 |
| ; X86-BMI2-NEXT:  # %bb.1: |
| ; X86-BMI2-NEXT:    shrxl %ecx, %eax, %esi |
| ; X86-BMI2-NEXT:  .LBB16_2: |
| ; X86-BMI2-NEXT:    movl %esi, (%esp) |
| ; X86-BMI2-NEXT:    calll use32@PLT |
| ; X86-BMI2-NEXT:    bzhil %ebx, %esi, %eax |
| ; X86-BMI2-NEXT:    addl $4, %esp |
| ; X86-BMI2-NEXT:    popl %esi |
| ; X86-BMI2-NEXT:    popl %ebx |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_a1_trunc_extrause: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    pushq %rbp |
| ; X64-NOBMI-NEXT:    pushq %rbx |
| ; X64-NOBMI-NEXT:    pushq %rax |
| ; X64-NOBMI-NEXT:    movl %edx, %ebp |
| ; X64-NOBMI-NEXT:    movq %rsi, %rcx |
| ; X64-NOBMI-NEXT:    movq %rdi, %rbx |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT:    shrq %cl, %rbx |
| ; X64-NOBMI-NEXT:    movl %ebx, %edi |
| ; X64-NOBMI-NEXT:    callq use32@PLT |
| ; X64-NOBMI-NEXT:    movl $1, %eax |
| ; X64-NOBMI-NEXT:    movl %ebp, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    decl %eax |
| ; X64-NOBMI-NEXT:    andl %ebx, %eax |
| ; X64-NOBMI-NEXT:    addq $8, %rsp |
| ; X64-NOBMI-NEXT:    popq %rbx |
| ; X64-NOBMI-NEXT:    popq %rbp |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_a1_trunc_extrause: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    pushq %rbp |
| ; X64-BMI1-NEXT:    pushq %rbx |
| ; X64-BMI1-NEXT:    pushq %rax |
| ; X64-BMI1-NEXT:    movl %edx, %ebp |
| ; X64-BMI1-NEXT:    movq %rsi, %rcx |
| ; X64-BMI1-NEXT:    movq %rdi, %rbx |
| ; X64-BMI1-NEXT:    # kill: def $cl killed $cl killed $rcx |
| ; X64-BMI1-NEXT:    shrq %cl, %rbx |
| ; X64-BMI1-NEXT:    movl %ebx, %edi |
| ; X64-BMI1-NEXT:    callq use32@PLT |
| ; X64-BMI1-NEXT:    shll $8, %ebp |
| ; X64-BMI1-NEXT:    bextrl %ebp, %ebx, %eax |
| ; X64-BMI1-NEXT:    addq $8, %rsp |
| ; X64-BMI1-NEXT:    popq %rbx |
| ; X64-BMI1-NEXT:    popq %rbp |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_a1_trunc_extrause: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    pushq %rbp |
| ; X64-BMI2-NEXT:    pushq %rbx |
| ; X64-BMI2-NEXT:    pushq %rax |
| ; X64-BMI2-NEXT:    movl %edx, %ebp |
| ; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rbx |
| ; X64-BMI2-NEXT:    movl %ebx, %edi |
| ; X64-BMI2-NEXT:    callq use32@PLT |
| ; X64-BMI2-NEXT:    bzhil %ebp, %ebx, %eax |
| ; X64-BMI2-NEXT:    addq $8, %rsp |
| ; X64-BMI2-NEXT:    popq %rbx |
| ; X64-BMI2-NEXT:    popq %rbp |
| ; X64-BMI2-NEXT:    retq |
| ; Same as bextr64_32_a1, but the truncated shift result also escapes via |
| ; @use32, so the truncation has an extra use that must be preserved. |
|   %shifted = lshr i64 %val, %numskipbits |
|   %truncshifted = trunc i64 %shifted to i32 |
|   call void @use32(i32 %truncshifted) |
|   %onebit = shl i32 1, %numlowbits |
|   %mask = add nsw i32 %onebit, -1 |
|   %masked = and i32 %mask, %truncshifted |
|   ret i32 %masked |
| } |
| |
| ; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit. |
| ; Masking is 64-bit. Then truncation. |
| define i32 @bextr64_32_a2(i64 %val, i64 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_a2: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %edi |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT:    movl %edi, %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    shrdl %cl, %edi, %eax |
| ; X86-NOBMI-NEXT:    testb $32, %cl |
| ; X86-NOBMI-NEXT:    jne .LBB17_2 |
| ; X86-NOBMI-NEXT:  # %bb.1: |
| ; X86-NOBMI-NEXT:    movl %eax, %esi |
| ; X86-NOBMI-NEXT:  .LBB17_2: |
| ; X86-NOBMI-NEXT:    movl $1, %eax |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    decl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    popl %edi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_a2: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    pushl %edi |
| ; X86-BMI1-NEXT:    pushl %esi |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT:    movl %edi, %edx |
| ; X86-BMI1-NEXT:    shrl %cl, %edx |
| ; X86-BMI1-NEXT:    shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT:    testb $32, %cl |
| ; X86-BMI1-NEXT:    jne .LBB17_2 |
| ; X86-BMI1-NEXT:  # %bb.1: |
| ; X86-BMI1-NEXT:    movl %esi, %edx |
| ; X86-BMI1-NEXT:  .LBB17_2: |
| ; X86-BMI1-NEXT:    shll $8, %eax |
| ; X86-BMI1-NEXT:    bextrl %eax, %edx, %eax |
| ; X86-BMI1-NEXT:    popl %esi |
| ; X86-BMI1-NEXT:    popl %edi |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_a2: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    pushl %esi |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT:    shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT:    testb $32, %cl |
| ; X86-BMI2-NEXT:    je .LBB17_2 |
| ; X86-BMI2-NEXT:  # %bb.1: |
| ; X86-BMI2-NEXT:    shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT:  .LBB17_2: |
| ; X86-BMI2-NEXT:    bzhil %eax, %edx, %eax |
| ; X86-BMI2-NEXT:    popl %esi |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_a2: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movq %rsi, %rcx |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT:    shrq %cl, %rdi |
| ; X64-NOBMI-NEXT:    movl $1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    decl %eax |
| ; X64-NOBMI-NEXT:    andl %edi, %eax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_a2: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT:    # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_a2: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; Here the mask is built in i32, zero-extended to i64, applied with a |
| ; 64-bit 'and', and only then truncated back to i32. |
|   %shifted = lshr i64 %val, %numskipbits |
|   %onebit = shl i32 1, %numlowbits |
|   %mask = add nsw i32 %onebit, -1 |
|   %zextmask = zext i32 %mask to i64 |
|   %masked = and i64 %zextmask, %shifted |
|   %truncmasked = trunc i64 %masked to i32 |
|   ret i32 %truncmasked |
| } |
| |
| ; Shifting happens in 64-bit. Mask is 32-bit, but calculated in 64-bit. |
| ; Masking is 64-bit. Then truncation. |
| define i32 @bextr64_32_a3(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_a3: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %edi |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT:    movl %edi, %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    shrdl %cl, %edi, %eax |
| ; X86-NOBMI-NEXT:    testb $32, %cl |
| ; X86-NOBMI-NEXT:    jne .LBB18_2 |
| ; X86-NOBMI-NEXT:  # %bb.1: |
| ; X86-NOBMI-NEXT:    movl %eax, %esi |
| ; X86-NOBMI-NEXT:  .LBB18_2: |
| ; X86-NOBMI-NEXT:    movl $1, %edi |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %edi |
| ; X86-NOBMI-NEXT:    xorl %eax, %eax |
| ; X86-NOBMI-NEXT:    testb $32, %dl |
| ; X86-NOBMI-NEXT:    jne .LBB18_4 |
| ; X86-NOBMI-NEXT:  # %bb.3: |
| ; X86-NOBMI-NEXT:    movl %edi, %eax |
| ; X86-NOBMI-NEXT:  .LBB18_4: |
| ; X86-NOBMI-NEXT:    decl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    popl %edi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_a3: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    pushl %edi |
| ; X86-BMI1-NEXT:    pushl %esi |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT:    movl %edi, %esi |
| ; X86-BMI1-NEXT:    shrl %cl, %esi |
| ; X86-BMI1-NEXT:    shrdl %cl, %edi, %eax |
| ; X86-BMI1-NEXT:    testb $32, %cl |
| ; X86-BMI1-NEXT:    jne .LBB18_2 |
| ; X86-BMI1-NEXT:  # %bb.1: |
| ; X86-BMI1-NEXT:    movl %eax, %esi |
| ; X86-BMI1-NEXT:  .LBB18_2: |
| ; X86-BMI1-NEXT:    movl $1, %edi |
| ; X86-BMI1-NEXT:    movl %edx, %ecx |
| ; X86-BMI1-NEXT:    shll %cl, %edi |
| ; X86-BMI1-NEXT:    xorl %eax, %eax |
| ; X86-BMI1-NEXT:    testb $32, %dl |
| ; X86-BMI1-NEXT:    jne .LBB18_4 |
| ; X86-BMI1-NEXT:  # %bb.3: |
| ; X86-BMI1-NEXT:    movl %edi, %eax |
| ; X86-BMI1-NEXT:  .LBB18_4: |
| ; X86-BMI1-NEXT:    decl %eax |
| ; X86-BMI1-NEXT:    andl %esi, %eax |
| ; X86-BMI1-NEXT:    popl %esi |
| ; X86-BMI1-NEXT:    popl %edi |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_a3: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    pushl %ebx |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT:    shrdl %cl, %eax, %edx |
| ; X86-BMI2-NEXT:    testb $32, %cl |
| ; X86-BMI2-NEXT:    je .LBB18_2 |
| ; X86-BMI2-NEXT:  # %bb.1: |
| ; X86-BMI2-NEXT:    shrxl %ecx, %eax, %edx |
| ; X86-BMI2-NEXT:  .LBB18_2: |
| ; X86-BMI2-NEXT:    xorl %eax, %eax |
| ; X86-BMI2-NEXT:    testb $32, %bl |
| ; X86-BMI2-NEXT:    jne .LBB18_4 |
| ; X86-BMI2-NEXT:  # %bb.3: |
| ; X86-BMI2-NEXT:    movl $1, %eax |
| ; X86-BMI2-NEXT:    shlxl %ebx, %eax, %eax |
| ; X86-BMI2-NEXT:  .LBB18_4: |
| ; X86-BMI2-NEXT:    decl %eax |
| ; X86-BMI2-NEXT:    andl %edx, %eax |
| ; X86-BMI2-NEXT:    popl %ebx |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_a3: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movq %rsi, %rcx |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT:    shrq %cl, %rdi |
| ; X64-NOBMI-NEXT:    movl $1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shlq %cl, %rax |
| ; X64-NOBMI-NEXT:    decl %eax |
| ; X64-NOBMI-NEXT:    andl %edi, %eax |
| ; X64-NOBMI-NEXT:    # kill: def $eax killed $eax killed $rax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_a3: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT:    # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_a3: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; The add constant 4294967295 is 0xFFFFFFFF, i.e. i32 -1 zero-extended to |
| ; i64 -- the 32-bit mask computed entirely in 64-bit arithmetic. |
|   %shifted = lshr i64 %val, %numskipbits |
|   %onebit = shl i64 1, %numlowbits |
|   %mask = add nsw i64 %onebit, 4294967295 |
|   %masked = and i64 %mask, %shifted |
|   %truncmasked = trunc i64 %masked to i32 |
|   ret i32 %truncmasked |
| } |
| |
| ; ---------------------------------------------------------------------------- ; |
| ; Pattern b. 32-bit |
| ; ---------------------------------------------------------------------------- ; |
| |
| define i32 @bextr32_b0(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_b0: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    movl $-1, %eax |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    notl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_b0: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT:    shll $8, %eax |
| ; X86-BMI1-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI1-NEXT:    orl %eax, %ecx |
| ; X86-BMI1-NEXT:    bextrl %ecx, {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_b0: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT:    shrxl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT:    bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_b0: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movl %esi, %ecx |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT:    shrl %cl, %edi |
| ; X64-NOBMI-NEXT:    movl $-1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    notl %eax |
| ; X64-NOBMI-NEXT:    andl %edi, %eax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_b0: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrl %eax, %edi, %eax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_b0: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxl %esi, %edi, %eax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; Baseline for pattern b in i32: (x >> start) & ~(-1 << nbits). |
|   %shifted = lshr i32 %val, %numskipbits |
|   %notmask = shl i32 -1, %numlowbits |
|   %mask = xor i32 %notmask, -1 |
|   %masked = and i32 %mask, %shifted |
|   ret i32 %masked |
| } |
| |
| define i32 @bextr32_b1_indexzext(i32 %val, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_b1_indexzext: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    movl $-1, %eax |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    notl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_b1_indexzext: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT:    shll $8, %eax |
| ; X86-BMI1-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI1-NEXT:    orl %eax, %ecx |
| ; X86-BMI1-NEXT:    bextrl %ecx, {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_b1_indexzext: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT:    shrxl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT:    bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_b1_indexzext: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movl %esi, %ecx |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT:    shrl %cl, %edi |
| ; X64-NOBMI-NEXT:    movl $-1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    notl %eax |
| ; X64-NOBMI-NEXT:    andl %edi, %eax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_b1_indexzext: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrl %eax, %edi, %eax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_b1_indexzext: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxl %esi, %edi, %eax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; Pattern b where both shift amounts arrive as zeroext i8 and are widened |
| ; to i32 in IR before use. |
|   %skip = zext i8 %numskipbits to i32 |
|   %shifted = lshr i32 %val, %skip |
|   %conv = zext i8 %numlowbits to i32 |
|   %notmask = shl i32 -1, %conv |
|   %mask = xor i32 %notmask, -1 |
|   %masked = and i32 %mask, %shifted |
|   ret i32 %masked |
| } |
| |
| define i32 @bextr32_b2_load(i32* %w, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_b2_load: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT:    movl (%eax), %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    movl $-1, %eax |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    notl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_b2_load: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT:    shll $8, %ecx |
| ; X86-BMI1-NEXT:    movzbl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT:    orl %ecx, %edx |
| ; X86-BMI1-NEXT:    bextrl %edx, (%eax), %eax |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_b2_load: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-BMI2-NEXT:    shrxl %edx, (%ecx), %ecx |
| ; X86-BMI2-NEXT:    bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_b2_load: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movl %esi, %ecx |
| ; X64-NOBMI-NEXT:    movl (%rdi), %esi |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT:    shrl %cl, %esi |
| ; X64-NOBMI-NEXT:    movl $-1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    notl %eax |
| ; X64-NOBMI-NEXT:    andl %esi, %eax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_b2_load: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrl %eax, (%rdi), %eax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_b2_load: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxl %esi, (%rdi), %eax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; Pattern b where the extracted value comes from memory, so bextr/shrx can |
| ; fold the load into their memory operand. |
|   %val = load i32, i32* %w |
|   %shifted = lshr i32 %val, %numskipbits |
|   %notmask = shl i32 -1, %numlowbits |
|   %mask = xor i32 %notmask, -1 |
|   %masked = and i32 %mask, %shifted |
|   ret i32 %masked |
| } |
| |
| define i32 @bextr32_b3_load_indexzext(i32* %w, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_b3_load_indexzext: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT:    movl (%eax), %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    movl $-1, %eax |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    notl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_b3_load_indexzext: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT:    shll $8, %ecx |
| ; X86-BMI1-NEXT:    movzbl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT:    orl %ecx, %edx |
| ; X86-BMI1-NEXT:    bextrl %edx, (%eax), %eax |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_b3_load_indexzext: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-BMI2-NEXT:    shrxl %edx, (%ecx), %ecx |
| ; X86-BMI2-NEXT:    bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_b3_load_indexzext: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movl %esi, %ecx |
| ; X64-NOBMI-NEXT:    movl (%rdi), %esi |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT:    shrl %cl, %esi |
| ; X64-NOBMI-NEXT:    movl $-1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    notl %eax |
| ; X64-NOBMI-NEXT:    andl %esi, %eax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_b3_load_indexzext: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrl %eax, (%rdi), %eax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_b3_load_indexzext: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxl %esi, (%rdi), %eax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; Combines the load-folding case (b2) with zeroext i8 shift amounts (b1). |
|   %val = load i32, i32* %w |
|   %skip = zext i8 %numskipbits to i32 |
|   %shifted = lshr i32 %val, %skip |
|   %conv = zext i8 %numlowbits to i32 |
|   %notmask = shl i32 -1, %conv |
|   %mask = xor i32 %notmask, -1 |
|   %masked = and i32 %mask, %shifted |
|   ret i32 %masked |
| } |
| |
| define i32 @bextr32_b4_commutative(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_b4_commutative: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT:    shrl %cl, %esi |
| ; X86-NOBMI-NEXT:    movl $-1, %eax |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %eax |
| ; X86-NOBMI-NEXT:    notl %eax |
| ; X86-NOBMI-NEXT:    andl %esi, %eax |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_b4_commutative: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT:    shll $8, %eax |
| ; X86-BMI1-NEXT:    movzbl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI1-NEXT:    orl %eax, %ecx |
| ; X86-BMI1-NEXT:    bextrl %ecx, {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_b4_commutative: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT:    shrxl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT:    bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_b4_commutative: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    movl %esi, %ecx |
| ; X64-NOBMI-NEXT:    # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT:    shrl %cl, %edi |
| ; X64-NOBMI-NEXT:    movl $-1, %eax |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %eax |
| ; X64-NOBMI-NEXT:    notl %eax |
| ; X64-NOBMI-NEXT:    andl %edi, %eax |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_b4_commutative: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrl %eax, %edi, %eax |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_b4_commutative: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    shrxl %esi, %edi, %eax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT:    retq |
| ; Same as bextr32_b0 but with the 'and' operands commuted, checking the |
| ; pattern match handles both operand orders. |
|   %shifted = lshr i32 %val, %numskipbits |
|   %notmask = shl i32 -1, %numlowbits |
|   %mask = xor i32 %notmask, -1 |
|   %masked = and i32 %shifted, %mask ; swapped order |
|   ret i32 %masked |
| } |
| |
| define i32 @bextr32_b5_skipextrauses(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_b5_skipextrauses: |
| ; X86-NOBMI:       # %bb.0: |
| ; X86-NOBMI-NEXT:    pushl %edi |
| ; X86-NOBMI-NEXT:    pushl %esi |
| ; X86-NOBMI-NEXT:    pushl %eax |
| ; X86-NOBMI-NEXT:    movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT:    movl %eax, %ecx |
| ; X86-NOBMI-NEXT:    shrl %cl, %edi |
| ; X86-NOBMI-NEXT:    movl $-1, %esi |
| ; X86-NOBMI-NEXT:    movl %edx, %ecx |
| ; X86-NOBMI-NEXT:    shll %cl, %esi |
| ; X86-NOBMI-NEXT:    notl %esi |
| ; X86-NOBMI-NEXT:    andl %edi, %esi |
| ; X86-NOBMI-NEXT:    movl %eax, (%esp) |
| ; X86-NOBMI-NEXT:    calll use32@PLT |
| ; X86-NOBMI-NEXT:    movl %esi, %eax |
| ; X86-NOBMI-NEXT:    addl $4, %esp |
| ; X86-NOBMI-NEXT:    popl %esi |
| ; X86-NOBMI-NEXT:    popl %edi |
| ; X86-NOBMI-NEXT:    retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_b5_skipextrauses: |
| ; X86-BMI1:       # %bb.0: |
| ; X86-BMI1-NEXT:    pushl %esi |
| ; X86-BMI1-NEXT:    subl $8, %esp |
| ; X86-BMI1-NEXT:    movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT:    movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT:    shll $8, %ecx |
| ; X86-BMI1-NEXT:    movzbl %al, %edx |
| ; X86-BMI1-NEXT:    orl %ecx, %edx |
| ; X86-BMI1-NEXT:    bextrl %edx, {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT:    movl %eax, (%esp) |
| ; X86-BMI1-NEXT:    calll use32@PLT |
| ; X86-BMI1-NEXT:    movl %esi, %eax |
| ; X86-BMI1-NEXT:    addl $8, %esp |
| ; X86-BMI1-NEXT:    popl %esi |
| ; X86-BMI1-NEXT:    retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_b5_skipextrauses: |
| ; X86-BMI2:       # %bb.0: |
| ; X86-BMI2-NEXT:    pushl %esi |
| ; X86-BMI2-NEXT:    subl $8, %esp |
| ; X86-BMI2-NEXT:    movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT:    movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT:    shrxl %ecx, {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT:    bzhil %eax, %edx, %esi |
| ; X86-BMI2-NEXT:    movl %ecx, (%esp) |
| ; X86-BMI2-NEXT:    calll use32@PLT |
| ; X86-BMI2-NEXT:    movl %esi, %eax |
| ; X86-BMI2-NEXT:    addl $8, %esp |
| ; X86-BMI2-NEXT:    popl %esi |
| ; X86-BMI2-NEXT:    retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_b5_skipextrauses: |
| ; X64-NOBMI:       # %bb.0: |
| ; X64-NOBMI-NEXT:    pushq %rbx |
| ; X64-NOBMI-NEXT:    movl %esi, %ecx |
| ; X64-NOBMI-NEXT:    shrl %cl, %edi |
| ; X64-NOBMI-NEXT:    movl $-1, %ebx |
| ; X64-NOBMI-NEXT:    movl %edx, %ecx |
| ; X64-NOBMI-NEXT:    shll %cl, %ebx |
| ; X64-NOBMI-NEXT:    notl %ebx |
| ; X64-NOBMI-NEXT:    andl %edi, %ebx |
| ; X64-NOBMI-NEXT:    movl %esi, %edi |
| ; X64-NOBMI-NEXT:    callq use32@PLT |
| ; X64-NOBMI-NEXT:    movl %ebx, %eax |
| ; X64-NOBMI-NEXT:    popq %rbx |
| ; X64-NOBMI-NEXT:    retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_b5_skipextrauses: |
| ; X64-BMI1:       # %bb.0: |
| ; X64-BMI1-NEXT:    pushq %rbx |
| ; X64-BMI1-NEXT:    shll $8, %edx |
| ; X64-BMI1-NEXT:    movzbl %sil, %eax |
| ; X64-BMI1-NEXT:    orl %edx, %eax |
| ; X64-BMI1-NEXT:    bextrl %eax, %edi, %ebx |
| ; X64-BMI1-NEXT:    movl %esi, %edi |
| ; X64-BMI1-NEXT:    callq use32@PLT |
| ; X64-BMI1-NEXT:    movl %ebx, %eax |
| ; X64-BMI1-NEXT:    popq %rbx |
| ; X64-BMI1-NEXT:    retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_b5_skipextrauses: |
| ; X64-BMI2:       # %bb.0: |
| ; X64-BMI2-NEXT:    pushq %rbx |
| ; X64-BMI2-NEXT:    shrxl %esi, %edi, %eax |
| ; X64-BMI2-NEXT:    bzhil %edx, %eax, %ebx |
| ; X64-BMI2-NEXT:    movl %esi, %edi |
| ; X64-BMI2-NEXT:    callq use32@PLT |
| ; X64-BMI2-NEXT:    movl %ebx, %eax |
| ; X64-BMI2-NEXT:    popq %rbx |
| ; X64-BMI2-NEXT:    retq |
| ; Pattern b where %numskipbits has an extra use (escapes via @use32); the |
| ; extract must still be formed even though the shift amount is kept alive. |
|   %shifted = lshr i32 %val, %numskipbits |
|   %notmask = shl i32 -1, %numlowbits |
|   %mask = xor i32 %notmask, -1 |
|   %masked = and i32 %mask, %shifted |
|   call void @use32(i32 %numskipbits) |
|   ret i32 %masked |
| } |
| |
| ; 64-bit |
| |
| define i64 @bextr64_b0(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; Baseline i64 test for pattern (b): (val >> numskipbits) & ~(-1 << numlowbits). |
| ; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not edit by hand. |
| ; X86-NOBMI-LABEL: bextr64_b0: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: xorl %eax, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB25_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB25_2: |
| ; X86-NOBMI-NEXT: movl $-1, %edx |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: movb %ch, %cl |
| ; X86-NOBMI-NEXT: shll %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %ch |
| ; X86-NOBMI-NEXT: jne .LBB25_3 |
| ; X86-NOBMI-NEXT: # %bb.4: |
| ; X86-NOBMI-NEXT: movl %ebx, %eax |
| ; X86-NOBMI-NEXT: jmp .LBB25_5 |
| ; X86-NOBMI-NEXT: .LBB25_3: |
| ; X86-NOBMI-NEXT: movl %ebx, %edx |
| ; X86-NOBMI-NEXT: .LBB25_5: |
| ; X86-NOBMI-NEXT: notl %edx |
| ; X86-NOBMI-NEXT: andl %edi, %edx |
| ; X86-NOBMI-NEXT: notl %eax |
| ; X86-NOBMI-NEXT: andl %esi, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_b0: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB25_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edx, %esi |
| ; X86-BMI1-NEXT: xorl %edx, %edx |
| ; X86-BMI1-NEXT: .LBB25_2: |
| ; X86-BMI1-NEXT: movl $-1, %edi |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: je .LBB25_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %edi |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB25_4: |
| ; X86-BMI1-NEXT: andnl %edx, %edi, %edx |
| ; X86-BMI1-NEXT: andnl %esi, %ebx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_b0: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: shrdl %cl, %edx, %eax |
| ; X86-BMI2-NEXT: shrxl %ecx, %edx, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB25_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edx, %eax |
| ; X86-BMI2-NEXT: xorl %edx, %edx |
| ; X86-BMI2-NEXT: .LBB25_2: |
| ; X86-BMI2-NEXT: movl $-1, %esi |
| ; X86-BMI2-NEXT: shlxl %ebx, %esi, %ecx |
| ; X86-BMI2-NEXT: testb $32, %bl |
| ; X86-BMI2-NEXT: je .LBB25_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ecx, %esi |
| ; X86-BMI2-NEXT: xorl %ecx, %ecx |
| ; X86-BMI2-NEXT: .LBB25_4: |
| ; X86-BMI2-NEXT: andnl %edx, %esi, %edx |
| ; X86-BMI2-NEXT: andnl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_b0: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movq $-1, %rax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rax |
| ; X64-NOBMI-NEXT: notq %rax |
| ; X64-NOBMI-NEXT: andq %rdi, %rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_b0: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_b0: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test: pattern (b) should lower to BEXTR (BMI1) / SHRX+BZHI (BMI2). |
| %shifted = lshr i64 %val, %numskipbits |
| %notmask = shl i64 -1, %numlowbits |
| %mask = xor i64 %notmask, -1 |
| %masked = and i64 %mask, %shifted |
| ret i64 %masked |
| } |
| |
| define i64 @bextr64_b1_indexzext(i64 %val, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind { |
| ; Same pattern (b) as bextr64_b0, but the shift amounts arrive as zero-extended i8. |
| ; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not edit by hand. |
| ; X86-NOBMI-LABEL: bextr64_b1_indexzext: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: xorl %eax, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB26_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB26_2: |
| ; X86-NOBMI-NEXT: movl $-1, %edx |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: movb %ch, %cl |
| ; X86-NOBMI-NEXT: shll %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %ch |
| ; X86-NOBMI-NEXT: jne .LBB26_3 |
| ; X86-NOBMI-NEXT: # %bb.4: |
| ; X86-NOBMI-NEXT: movl %ebx, %eax |
| ; X86-NOBMI-NEXT: jmp .LBB26_5 |
| ; X86-NOBMI-NEXT: .LBB26_3: |
| ; X86-NOBMI-NEXT: movl %ebx, %edx |
| ; X86-NOBMI-NEXT: .LBB26_5: |
| ; X86-NOBMI-NEXT: notl %edx |
| ; X86-NOBMI-NEXT: andl %edi, %edx |
| ; X86-NOBMI-NEXT: notl %eax |
| ; X86-NOBMI-NEXT: andl %esi, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_b1_indexzext: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB26_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edx, %esi |
| ; X86-BMI1-NEXT: xorl %edx, %edx |
| ; X86-BMI1-NEXT: .LBB26_2: |
| ; X86-BMI1-NEXT: movl $-1, %edi |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: je .LBB26_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %edi |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB26_4: |
| ; X86-BMI1-NEXT: andnl %edx, %edi, %edx |
| ; X86-BMI1-NEXT: andnl %esi, %ebx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_b1_indexzext: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: shrdl %cl, %edx, %eax |
| ; X86-BMI2-NEXT: shrxl %ecx, %edx, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB26_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edx, %eax |
| ; X86-BMI2-NEXT: xorl %edx, %edx |
| ; X86-BMI2-NEXT: .LBB26_2: |
| ; X86-BMI2-NEXT: movl $-1, %esi |
| ; X86-BMI2-NEXT: shlxl %ebx, %esi, %ecx |
| ; X86-BMI2-NEXT: testb $32, %bl |
| ; X86-BMI2-NEXT: je .LBB26_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ecx, %esi |
| ; X86-BMI2-NEXT: xorl %ecx, %ecx |
| ; X86-BMI2-NEXT: .LBB26_4: |
| ; X86-BMI2-NEXT: andnl %edx, %esi, %edx |
| ; X86-BMI2-NEXT: andnl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_b1_indexzext: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movq $-1, %rax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rax |
| ; X64-NOBMI-NEXT: notq %rax |
| ; X64-NOBMI-NEXT: andq %rdi, %rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_b1_indexzext: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_b1_indexzext: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: # kill: def $edx killed $edx def $rdx |
| ; X64-BMI2-NEXT: # kill: def $esi killed $esi def $rsi |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test: the zexts must not block the BEXTR/BZHI match. |
| %skip = zext i8 %numskipbits to i64 |
| %shifted = lshr i64 %val, %skip |
| %conv = zext i8 %numlowbits to i64 |
| %notmask = shl i64 -1, %conv |
| %mask = xor i64 %notmask, -1 |
| %masked = and i64 %mask, %shifted |
| ret i64 %masked |
| } |
| |
| define i64 @bextr64_b2_load(i64* %w, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; Pattern (b) with the value loaded from memory; on x64-BMI1 the load should fold |
| ; into bextrq's memory operand. |
| ; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not edit by hand. |
| ; X86-NOBMI-LABEL: bextr64_b2_load: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl (%eax), %esi |
| ; X86-NOBMI-NEXT: movl 4(%eax), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: xorl %eax, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB27_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB27_2: |
| ; X86-NOBMI-NEXT: movl $-1, %edx |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: movb %ch, %cl |
| ; X86-NOBMI-NEXT: shll %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %ch |
| ; X86-NOBMI-NEXT: jne .LBB27_3 |
| ; X86-NOBMI-NEXT: # %bb.4: |
| ; X86-NOBMI-NEXT: movl %ebx, %eax |
| ; X86-NOBMI-NEXT: jmp .LBB27_5 |
| ; X86-NOBMI-NEXT: .LBB27_3: |
| ; X86-NOBMI-NEXT: movl %ebx, %edx |
| ; X86-NOBMI-NEXT: .LBB27_5: |
| ; X86-NOBMI-NEXT: notl %edx |
| ; X86-NOBMI-NEXT: andl %edi, %edx |
| ; X86-NOBMI-NEXT: notl %eax |
| ; X86-NOBMI-NEXT: andl %esi, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_b2_load: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT: movl (%edx), %esi |
| ; X86-BMI1-NEXT: movl 4(%edx), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB27_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edx, %esi |
| ; X86-BMI1-NEXT: xorl %edx, %edx |
| ; X86-BMI1-NEXT: .LBB27_2: |
| ; X86-BMI1-NEXT: movl $-1, %edi |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: je .LBB27_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %edi |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB27_4: |
| ; X86-BMI1-NEXT: andnl %edx, %edi, %edx |
| ; X86-BMI1-NEXT: andnl %esi, %ebx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_b2_load: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl (%edx), %eax |
| ; X86-BMI2-NEXT: movl 4(%edx), %esi |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %eax |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB27_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edx, %eax |
| ; X86-BMI2-NEXT: xorl %edx, %edx |
| ; X86-BMI2-NEXT: .LBB27_2: |
| ; X86-BMI2-NEXT: movl $-1, %esi |
| ; X86-BMI2-NEXT: shlxl %ebx, %esi, %ecx |
| ; X86-BMI2-NEXT: testb $32, %bl |
| ; X86-BMI2-NEXT: je .LBB27_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ecx, %esi |
| ; X86-BMI2-NEXT: xorl %ecx, %ecx |
| ; X86-BMI2-NEXT: .LBB27_4: |
| ; X86-BMI2-NEXT: andnl %edx, %esi, %edx |
| ; X86-BMI2-NEXT: andnl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_b2_load: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: movq (%rdi), %rsi |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rsi |
| ; X64-NOBMI-NEXT: movq $-1, %rax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rax |
| ; X64-NOBMI-NEXT: notq %rax |
| ; X64-NOBMI-NEXT: andq %rsi, %rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_b2_load: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, (%rdi), %rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_b2_load: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, (%rdi), %rax |
| ; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test: load feeding pattern (b). |
| %val = load i64, i64* %w |
| %shifted = lshr i64 %val, %numskipbits |
| %notmask = shl i64 -1, %numlowbits |
| %mask = xor i64 %notmask, -1 |
| %masked = and i64 %mask, %shifted |
| ret i64 %masked |
| } |
| |
| define i64 @bextr64_b3_load_indexzext(i64* %w, i8 zeroext %numskipbits, i8 zeroext %numlowbits) nounwind { |
| ; Pattern (b) combining both twists above: value loaded from memory AND i8 zext |
| ; shift amounts; the load should still fold into bextrq/shrxq on x64. |
| ; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not edit by hand. |
| ; X86-NOBMI-LABEL: bextr64_b3_load_indexzext: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl (%eax), %esi |
| ; X86-NOBMI-NEXT: movl 4(%eax), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: xorl %eax, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB28_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB28_2: |
| ; X86-NOBMI-NEXT: movl $-1, %edx |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: movb %ch, %cl |
| ; X86-NOBMI-NEXT: shll %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %ch |
| ; X86-NOBMI-NEXT: jne .LBB28_3 |
| ; X86-NOBMI-NEXT: # %bb.4: |
| ; X86-NOBMI-NEXT: movl %ebx, %eax |
| ; X86-NOBMI-NEXT: jmp .LBB28_5 |
| ; X86-NOBMI-NEXT: .LBB28_3: |
| ; X86-NOBMI-NEXT: movl %ebx, %edx |
| ; X86-NOBMI-NEXT: .LBB28_5: |
| ; X86-NOBMI-NEXT: notl %edx |
| ; X86-NOBMI-NEXT: andl %edi, %edx |
| ; X86-NOBMI-NEXT: notl %eax |
| ; X86-NOBMI-NEXT: andl %esi, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_b3_load_indexzext: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT: movl (%edx), %esi |
| ; X86-BMI1-NEXT: movl 4(%edx), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB28_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edx, %esi |
| ; X86-BMI1-NEXT: xorl %edx, %edx |
| ; X86-BMI1-NEXT: .LBB28_2: |
| ; X86-BMI1-NEXT: movl $-1, %edi |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: je .LBB28_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %edi |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB28_4: |
| ; X86-BMI1-NEXT: andnl %edx, %edi, %edx |
| ; X86-BMI1-NEXT: andnl %esi, %ebx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_b3_load_indexzext: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl (%edx), %eax |
| ; X86-BMI2-NEXT: movl 4(%edx), %esi |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %eax |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB28_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edx, %eax |
| ; X86-BMI2-NEXT: xorl %edx, %edx |
| ; X86-BMI2-NEXT: .LBB28_2: |
| ; X86-BMI2-NEXT: movl $-1, %esi |
| ; X86-BMI2-NEXT: shlxl %ebx, %esi, %ecx |
| ; X86-BMI2-NEXT: testb $32, %bl |
| ; X86-BMI2-NEXT: je .LBB28_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ecx, %esi |
| ; X86-BMI2-NEXT: xorl %ecx, %ecx |
| ; X86-BMI2-NEXT: .LBB28_4: |
| ; X86-BMI2-NEXT: andnl %edx, %esi, %edx |
| ; X86-BMI2-NEXT: andnl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_b3_load_indexzext: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movq (%rdi), %rsi |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rsi |
| ; X64-NOBMI-NEXT: movq $-1, %rax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rax |
| ; X64-NOBMI-NEXT: notq %rax |
| ; X64-NOBMI-NEXT: andq %rsi, %rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_b3_load_indexzext: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, (%rdi), %rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_b3_load_indexzext: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: # kill: def $edx killed $edx def $rdx |
| ; X64-BMI2-NEXT: # kill: def $esi killed $esi def $rsi |
| ; X64-BMI2-NEXT: shrxq %rsi, (%rdi), %rax |
| ; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test: load + zext shift amounts feeding pattern (b). |
| %val = load i64, i64* %w |
| %skip = zext i8 %numskipbits to i64 |
| %shifted = lshr i64 %val, %skip |
| %conv = zext i8 %numlowbits to i64 |
| %notmask = shl i64 -1, %conv |
| %mask = xor i64 %notmask, -1 |
| %masked = and i64 %mask, %shifted |
| ret i64 %masked |
| } |
| |
| define i64 @bextr64_b4_commutative(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; Same as bextr64_b0 but with the final 'and' operands swapped, checking the |
| ; match is commutative. |
| ; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not edit by hand. |
| ; X86-NOBMI-LABEL: bextr64_b4_commutative: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl %esi, %edx |
| ; X86-NOBMI-NEXT: shrl %cl, %edx |
| ; X86-NOBMI-NEXT: shrdl %cl, %esi, %eax |
| ; X86-NOBMI-NEXT: xorl %esi, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB29_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edx, %eax |
| ; X86-NOBMI-NEXT: xorl %edx, %edx |
| ; X86-NOBMI-NEXT: .LBB29_2: |
| ; X86-NOBMI-NEXT: movl $-1, %edi |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: movb %ch, %cl |
| ; X86-NOBMI-NEXT: shll %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %ch |
| ; X86-NOBMI-NEXT: jne .LBB29_3 |
| ; X86-NOBMI-NEXT: # %bb.4: |
| ; X86-NOBMI-NEXT: movl %ebx, %esi |
| ; X86-NOBMI-NEXT: jmp .LBB29_5 |
| ; X86-NOBMI-NEXT: .LBB29_3: |
| ; X86-NOBMI-NEXT: movl %ebx, %edi |
| ; X86-NOBMI-NEXT: .LBB29_5: |
| ; X86-NOBMI-NEXT: notl %edi |
| ; X86-NOBMI-NEXT: andl %edi, %edx |
| ; X86-NOBMI-NEXT: notl %esi |
| ; X86-NOBMI-NEXT: andl %esi, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_b4_commutative: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB29_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edx, %esi |
| ; X86-BMI1-NEXT: xorl %edx, %edx |
| ; X86-BMI1-NEXT: .LBB29_2: |
| ; X86-BMI1-NEXT: movl $-1, %edi |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: je .LBB29_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %edi |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB29_4: |
| ; X86-BMI1-NEXT: andnl %edx, %edi, %edx |
| ; X86-BMI1-NEXT: andnl %esi, %ebx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_b4_commutative: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: shrdl %cl, %edx, %eax |
| ; X86-BMI2-NEXT: shrxl %ecx, %edx, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB29_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edx, %eax |
| ; X86-BMI2-NEXT: xorl %edx, %edx |
| ; X86-BMI2-NEXT: .LBB29_2: |
| ; X86-BMI2-NEXT: movl $-1, %esi |
| ; X86-BMI2-NEXT: shlxl %ebx, %esi, %ecx |
| ; X86-BMI2-NEXT: testb $32, %bl |
| ; X86-BMI2-NEXT: je .LBB29_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ecx, %esi |
| ; X86-BMI2-NEXT: xorl %ecx, %ecx |
| ; X86-BMI2-NEXT: .LBB29_4: |
| ; X86-BMI2-NEXT: andnl %edx, %esi, %edx |
| ; X86-BMI2-NEXT: andnl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_b4_commutative: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movq $-1, %rax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rax |
| ; X64-NOBMI-NEXT: notq %rax |
| ; X64-NOBMI-NEXT: andq %rdi, %rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_b4_commutative: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_b4_commutative: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test: pattern (b) with commuted 'and' operands. |
| %shifted = lshr i64 %val, %numskipbits |
| %notmask = shl i64 -1, %numlowbits |
| %mask = xor i64 %notmask, -1 |
| %masked = and i64 %shifted, %mask ; swapped order |
| ret i64 %masked |
| } |
| |
| define i64 @bextr64_b5_skipextrauses(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; Same as bextr64_b0, but %numskipbits has an extra use (passed to @use64), so |
| ; the shift amount must stay live across the extraction and the call. |
| ; CHECK lines below are autogenerated by update_llc_test_checks.py -- do not edit by hand. |
| ; X86-NOBMI-LABEL: bextr64_b5_skipextrauses: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: subl $12, %esp |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %ch |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl %esi, %ebp |
| ; X86-NOBMI-NEXT: movb %al, %cl |
| ; X86-NOBMI-NEXT: shrl %cl, %ebp |
| ; X86-NOBMI-NEXT: shrdl %cl, %esi, %edx |
| ; X86-NOBMI-NEXT: xorl %ebx, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %al |
| ; X86-NOBMI-NEXT: je .LBB30_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %ebp, %edx |
| ; X86-NOBMI-NEXT: xorl %ebp, %ebp |
| ; X86-NOBMI-NEXT: .LBB30_2: |
| ; X86-NOBMI-NEXT: movl $-1, %edi |
| ; X86-NOBMI-NEXT: movl $-1, %esi |
| ; X86-NOBMI-NEXT: movb %ch, %cl |
| ; X86-NOBMI-NEXT: shll %cl, %esi |
| ; X86-NOBMI-NEXT: testb $32, %ch |
| ; X86-NOBMI-NEXT: jne .LBB30_3 |
| ; X86-NOBMI-NEXT: # %bb.4: |
| ; X86-NOBMI-NEXT: movl %esi, %ebx |
| ; X86-NOBMI-NEXT: jmp .LBB30_5 |
| ; X86-NOBMI-NEXT: .LBB30_3: |
| ; X86-NOBMI-NEXT: movl %esi, %edi |
| ; X86-NOBMI-NEXT: .LBB30_5: |
| ; X86-NOBMI-NEXT: notl %edi |
| ; X86-NOBMI-NEXT: andl %ebp, %edi |
| ; X86-NOBMI-NEXT: notl %ebx |
| ; X86-NOBMI-NEXT: andl %edx, %ebx |
| ; X86-NOBMI-NEXT: subl $8, %esp |
| ; X86-NOBMI-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-NOBMI-NEXT: pushl %eax |
| ; X86-NOBMI-NEXT: calll use64@PLT |
| ; X86-NOBMI-NEXT: addl $16, %esp |
| ; X86-NOBMI-NEXT: movl %ebx, %eax |
| ; X86-NOBMI-NEXT: movl %edi, %edx |
| ; X86-NOBMI-NEXT: addl $12, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: popl %ebp |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_b5_skipextrauses: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: subl $12, %esp |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %dl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %ebx |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl %ebx, %esi |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shrl %cl, %esi |
| ; X86-BMI1-NEXT: shrdl %cl, %ebx, %edi |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: je .LBB30_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %esi, %edi |
| ; X86-BMI1-NEXT: xorl %esi, %esi |
| ; X86-BMI1-NEXT: .LBB30_2: |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: movl $-1, %ebp |
| ; X86-BMI1-NEXT: movl %edx, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %ebp |
| ; X86-BMI1-NEXT: testb $32, %dl |
| ; X86-BMI1-NEXT: je .LBB30_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebp, %ebx |
| ; X86-BMI1-NEXT: xorl %ebp, %ebp |
| ; X86-BMI1-NEXT: .LBB30_4: |
| ; X86-BMI1-NEXT: andnl %esi, %ebx, %esi |
| ; X86-BMI1-NEXT: andnl %edi, %ebp, %edi |
| ; X86-BMI1-NEXT: subl $8, %esp |
| ; X86-BMI1-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-BMI1-NEXT: pushl %eax |
| ; X86-BMI1-NEXT: calll use64@PLT |
| ; X86-BMI1-NEXT: addl $16, %esp |
| ; X86-BMI1-NEXT: movl %edi, %eax |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: addl $12, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: popl %ebp |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_b5_skipextrauses: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %edi |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: subl $12, %esp |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT: shrdl %cl, %edx, %eax |
| ; X86-BMI2-NEXT: shrxl %ecx, %edx, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB30_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edx, %eax |
| ; X86-BMI2-NEXT: xorl %edx, %edx |
| ; X86-BMI2-NEXT: .LBB30_2: |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ebp |
| ; X86-BMI2-NEXT: movl $-1, %esi |
| ; X86-BMI2-NEXT: shlxl %ebx, %esi, %edi |
| ; X86-BMI2-NEXT: testb $32, %bl |
| ; X86-BMI2-NEXT: je .LBB30_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %edi, %esi |
| ; X86-BMI2-NEXT: xorl %edi, %edi |
| ; X86-BMI2-NEXT: .LBB30_4: |
| ; X86-BMI2-NEXT: andnl %edx, %esi, %esi |
| ; X86-BMI2-NEXT: andnl %eax, %edi, %edi |
| ; X86-BMI2-NEXT: subl $8, %esp |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: pushl %ecx |
| ; X86-BMI2-NEXT: calll use64@PLT |
| ; X86-BMI2-NEXT: addl $16, %esp |
| ; X86-BMI2-NEXT: movl %edi, %eax |
| ; X86-BMI2-NEXT: movl %esi, %edx |
| ; X86-BMI2-NEXT: addl $12, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %edi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: popl %ebp |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_b5_skipextrauses: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movq $-1, %rbx |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rbx |
| ; X64-NOBMI-NEXT: notq %rbx |
| ; X64-NOBMI-NEXT: andq %rdi, %rbx |
| ; X64-NOBMI-NEXT: movq %rsi, %rdi |
| ; X64-NOBMI-NEXT: callq use64@PLT |
| ; X64-NOBMI-NEXT: movq %rbx, %rax |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_b5_skipextrauses: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rbx |
| ; X64-BMI1-NEXT: movq %rsi, %rdi |
| ; X64-BMI1-NEXT: callq use64@PLT |
| ; X64-BMI1-NEXT: movq %rbx, %rax |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_b5_skipextrauses: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rbx |
| ; X64-BMI2-NEXT: movq %rsi, %rdi |
| ; X64-BMI2-NEXT: callq use64@PLT |
| ; X64-BMI2-NEXT: movq %rbx, %rax |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: retq |
| ; IR under test: pattern (b) plus an extra use of the shift amount. |
| %shifted = lshr i64 %val, %numskipbits |
| %notmask = shl i64 -1, %numlowbits |
| %mask = xor i64 %notmask, -1 |
| %masked = and i64 %mask, %shifted |
| call void @use64(i64 %numskipbits) |
| ret i64 %masked |
| } |
| |
| ; 64-bit, but with 32-bit output |
| |
| ; Everything done in 64-bit, truncation happens last. |
| define i32 @bextr64_32_b0(i64 %val, i64 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_b0: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT: movl %edi, %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: shrdl %cl, %edi, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB31_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: .LBB31_2: |
| ; X86-NOBMI-NEXT: movl $-1, %esi |
| ; X86-NOBMI-NEXT: movl %edx, %ecx |
| ; X86-NOBMI-NEXT: shll %cl, %esi |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: testb $32, %dl |
| ; X86-NOBMI-NEXT: jne .LBB31_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %esi, %ecx |
| ; X86-NOBMI-NEXT: .LBB31_4: |
| ; X86-NOBMI-NEXT: notl %ecx |
| ; X86-NOBMI-NEXT: andl %ecx, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_b0: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB31_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: .LBB31_2: |
| ; X86-BMI1-NEXT: movl $-1, %esi |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %esi |
| ; X86-BMI1-NEXT: xorl %ecx, %ecx |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: jne .LBB31_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %esi, %ecx |
| ; X86-BMI1-NEXT: .LBB31_4: |
| ; X86-BMI1-NEXT: andnl %edx, %ecx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_b0: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB31_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: .LBB31_2: |
| ; X86-BMI2-NEXT: xorl %ecx, %ecx |
| ; X86-BMI2-NEXT: testb $32, %al |
| ; X86-BMI2-NEXT: jne .LBB31_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl $-1, %ecx |
| ; X86-BMI2-NEXT: shlxl %eax, %ecx, %ecx |
| ; X86-BMI2-NEXT: .LBB31_4: |
| ; X86-BMI2-NEXT: andnl %edx, %ecx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_b0: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movq $-1, %rax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rax |
| ; X64-NOBMI-NEXT: notl %eax |
| ; X64-NOBMI-NEXT: andl %edi, %eax |
| ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_b0: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_b0: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test (pattern b): res = trunc to i32 of ((val >> skip) & ~(-1 << zext(numlowbits))); |
| ; every operation, including the mask construction, is performed in 64 bits. |
| %shiftedval = lshr i64 %val, %numskipbits |
| %widenumlowbits = zext i8 %numlowbits to i64 |
| %notmask = shl nsw i64 -1, %widenumlowbits |
| %mask = xor i64 %notmask, -1 |
| %wideres = and i64 %shiftedval, %mask |
| %res = trunc i64 %wideres to i32 |
| ret i32 %res |
| } |
| |
| ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. |
| define i32 @bextr64_32_b1(i64 %val, i64 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_b1: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: shrl %cl, %esi |
| ; X86-NOBMI-NEXT: shrdl %cl, %edi, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB32_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %eax, %esi |
| ; X86-NOBMI-NEXT: .LBB32_2: |
| ; X86-NOBMI-NEXT: movl $-1, %eax |
| ; X86-NOBMI-NEXT: movl %edx, %ecx |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: notl %eax |
| ; X86-NOBMI-NEXT: andl %esi, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_b1: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB32_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: .LBB32_2: |
| ; X86-BMI1-NEXT: shll $8, %eax |
| ; X86-BMI1-NEXT: bextrl %eax, %edx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_b1: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB32_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: .LBB32_2: |
| ; X86-BMI2-NEXT: bzhil %eax, %edx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_b1: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movl $-1, %eax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: notl %eax |
| ; X64-NOBMI-NEXT: andl %edi, %eax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_b1: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_b1: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test (pattern b): the 64-bit shift result is truncated to i32 first, |
| ; then the ~(-1 << numlowbits) mask is built and applied entirely in 32 bits. |
| %shiftedval = lshr i64 %val, %numskipbits |
| %truncshiftedval = trunc i64 %shiftedval to i32 |
| %widenumlowbits = zext i8 %numlowbits to i32 |
| %notmask = shl nsw i32 -1, %widenumlowbits |
| %mask = xor i32 %notmask, -1 |
| %res = and i32 %truncshiftedval, %mask |
| ret i32 %res |
| } |
| |
| ; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit. |
| ; Masking is 64-bit. Then truncation. |
| define i32 @bextr64_32_b2(i64 %val, i64 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_b2: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: shrl %cl, %esi |
| ; X86-NOBMI-NEXT: shrdl %cl, %edi, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB33_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %eax, %esi |
| ; X86-NOBMI-NEXT: .LBB33_2: |
| ; X86-NOBMI-NEXT: movl $-1, %eax |
| ; X86-NOBMI-NEXT: movl %edx, %ecx |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: notl %eax |
| ; X86-NOBMI-NEXT: andl %esi, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_b2: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB33_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: .LBB33_2: |
| ; X86-BMI1-NEXT: shll $8, %eax |
| ; X86-BMI1-NEXT: bextrl %eax, %edx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_b2: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB33_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: .LBB33_2: |
| ; X86-BMI2-NEXT: bzhil %eax, %edx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_b2: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movl $-1, %eax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: notl %eax |
| ; X64-NOBMI-NEXT: andl %edi, %eax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_b2: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_b2: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test (pattern b): the mask is built in 32 bits, zero-extended to i64, |
| ; applied to the 64-bit shifted value, and the result truncated back to i32. |
| %shiftedval = lshr i64 %val, %numskipbits |
| %widenumlowbits = zext i8 %numlowbits to i32 |
| %notmask = shl nsw i32 -1, %widenumlowbits |
| %mask = xor i32 %notmask, -1 |
| %zextmask = zext i32 %mask to i64 |
| %wideres = and i64 %shiftedval, %zextmask |
| %res = trunc i64 %wideres to i32 |
| ret i32 %res |
| } |
| |
| ; Shifting happens in 64-bit. Mask is 32-bit, but calculated in 64-bit. |
| ; Masking is 64-bit. Then truncation. |
| define i32 @bextr64_32_b3(i64 %val, i64 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_b3: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %dl |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT: movl %edi, %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: shrdl %cl, %edi, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB34_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: .LBB34_2: |
| ; X86-NOBMI-NEXT: movl $-1, %esi |
| ; X86-NOBMI-NEXT: movl %edx, %ecx |
| ; X86-NOBMI-NEXT: shll %cl, %esi |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: testb $32, %dl |
| ; X86-NOBMI-NEXT: jne .LBB34_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %esi, %ecx |
| ; X86-NOBMI-NEXT: .LBB34_4: |
| ; X86-NOBMI-NEXT: notl %ecx |
| ; X86-NOBMI-NEXT: andl %ecx, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_b3: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB34_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: .LBB34_2: |
| ; X86-BMI1-NEXT: movl $-1, %esi |
| ; X86-BMI1-NEXT: movl %eax, %ecx |
| ; X86-BMI1-NEXT: shll %cl, %esi |
| ; X86-BMI1-NEXT: xorl %ecx, %ecx |
| ; X86-BMI1-NEXT: testb $32, %al |
| ; X86-BMI1-NEXT: jne .LBB34_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %esi, %ecx |
| ; X86-BMI1-NEXT: .LBB34_4: |
| ; X86-BMI1-NEXT: andnl %edx, %ecx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_b3: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB34_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: .LBB34_2: |
| ; X86-BMI2-NEXT: xorl %ecx, %ecx |
| ; X86-BMI2-NEXT: testb $32, %al |
| ; X86-BMI2-NEXT: jne .LBB34_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl $-1, %ecx |
| ; X86-BMI2-NEXT: shlxl %eax, %ecx, %ecx |
| ; X86-BMI2-NEXT: .LBB34_4: |
| ; X86-BMI2-NEXT: andnl %edx, %ecx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_b3: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF |
| ; X64-NOBMI-NEXT: movl $4294967295, %esi # imm = 0xFFFFFFFF |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rsi |
| ; X64-NOBMI-NEXT: xorl %esi, %eax |
| ; X64-NOBMI-NEXT: andl %edi, %eax |
| ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_b3: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_b3: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; IR under test (pattern b): like b2, but the 32-bit all-ones mask constant |
| ; (4294967295 = 0xFFFFFFFF) is written directly as an i64, so the whole mask |
| ; computation happens in 64-bit arithmetic. |
| %shiftedval = lshr i64 %val, %numskipbits |
| %widenumlowbits = zext i8 %numlowbits to i64 |
| %notmask = shl nsw i64 4294967295, %widenumlowbits |
| %mask = xor i64 %notmask, 4294967295 |
| %wideres = and i64 %shiftedval, %mask |
| %res = trunc i64 %wideres to i32 |
| ret i32 %res |
| } |
| |
| ; ---------------------------------------------------------------------------- ; |
| ; Pattern c. 32-bit |
| ; ---------------------------------------------------------------------------- ; |
| |
| define i32 @bextr32_c0(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_c0: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: pushl %eax |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %esi |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %esi |
| ; X86-NOBMI-NEXT: movl %esi, (%esp) |
| ; X86-NOBMI-NEXT: calll use32@PLT |
| ; X86-NOBMI-NEXT: andl %edi, %esi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: addl $4, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_c0: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: pushl %eax |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: shrl %cl, %edi |
| ; X86-BMI1-NEXT: xorl %ecx, %ecx |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %esi |
| ; X86-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-BMI1-NEXT: shrl %cl, %esi |
| ; X86-BMI1-NEXT: movl %esi, (%esp) |
| ; X86-BMI1-NEXT: calll use32@PLT |
| ; X86-BMI1-NEXT: andl %edi, %esi |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: addl $4, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_c0: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: pushl %eax |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: shrxl %eax, {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: movl %ebx, %eax |
| ; X86-BMI2-NEXT: negb %al |
| ; X86-BMI2-NEXT: movl $-1, %ecx |
| ; X86-BMI2-NEXT: shrxl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: movl %eax, (%esp) |
| ; X86-BMI2-NEXT: calll use32@PLT |
| ; X86-BMI2-NEXT: bzhil %ebx, %esi, %eax |
| ; X86-BMI2-NEXT: addl $4, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_c0: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %rbp |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: pushq %rax |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movl %edi, %ebx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebx |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl $-1, %ebp |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebp |
| ; X64-NOBMI-NEXT: movl %ebp, %edi |
| ; X64-NOBMI-NEXT: callq use32@PLT |
| ; X64-NOBMI-NEXT: andl %ebx, %ebp |
| ; X64-NOBMI-NEXT: movl %ebp, %eax |
| ; X64-NOBMI-NEXT: addq $8, %rsp |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: popq %rbp |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_c0: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %rbp |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: pushq %rax |
| ; X64-BMI1-NEXT: movl %esi, %ecx |
| ; X64-BMI1-NEXT: movl %edi, %ebx |
| ; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-BMI1-NEXT: shrl %cl, %ebx |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movl $-1, %ebp |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrl %cl, %ebp |
| ; X64-BMI1-NEXT: movl %ebp, %edi |
| ; X64-BMI1-NEXT: callq use32@PLT |
| ; X64-BMI1-NEXT: andl %ebx, %ebp |
| ; X64-BMI1-NEXT: movl %ebp, %eax |
| ; X64-BMI1-NEXT: addq $8, %rsp |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: popq %rbp |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_c0: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %rbp |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: pushq %rax |
| ; X64-BMI2-NEXT: movl %edx, %ebx |
| ; X64-BMI2-NEXT: shrxl %esi, %edi, %ebp |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: negb %al |
| ; X64-BMI2-NEXT: movl $-1, %ecx |
| ; X64-BMI2-NEXT: shrxl %eax, %ecx, %edi |
| ; X64-BMI2-NEXT: callq use32@PLT |
| ; X64-BMI2-NEXT: bzhil %ebx, %esi, %eax |
| ; X64-BMI2-NEXT: addq $8, %rsp |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: popq %rbp |
| ; X64-BMI2-NEXT: retq |
| ; IR under test (pattern c): masked = (val >> skip) & (-1 >> (32 - numlowbits)); |
| ; the mask itself has an extra use (passed to @use32), so it must be materialized. |
| %shifted = lshr i32 %val, %numskipbits |
| %numhighbits = sub i32 32, %numlowbits |
| %mask = lshr i32 -1, %numhighbits |
| call void @use32(i32 %mask) |
| %masked = and i32 %mask, %shifted |
| ret i32 %masked |
| } |
| |
| define i32 @bextr32_c1_indexzext(i32 %val, i8 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_c1_indexzext: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: pushl %eax |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %esi |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %esi |
| ; X86-NOBMI-NEXT: movl %esi, (%esp) |
| ; X86-NOBMI-NEXT: calll use32@PLT |
| ; X86-NOBMI-NEXT: andl %edi, %esi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: addl $4, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_c1_indexzext: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: pushl %eax |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: shrl %cl, %edi |
| ; X86-BMI1-NEXT: xorl %ecx, %ecx |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %esi |
| ; X86-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-BMI1-NEXT: shrl %cl, %esi |
| ; X86-BMI1-NEXT: movl %esi, (%esp) |
| ; X86-BMI1-NEXT: calll use32@PLT |
| ; X86-BMI1-NEXT: andl %edi, %esi |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: addl $4, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_c1_indexzext: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: pushl %eax |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: shrxl %eax, {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: movl %ebx, %eax |
| ; X86-BMI2-NEXT: negb %al |
| ; X86-BMI2-NEXT: movl $-1, %ecx |
| ; X86-BMI2-NEXT: shrxl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: movl %eax, (%esp) |
| ; X86-BMI2-NEXT: calll use32@PLT |
| ; X86-BMI2-NEXT: bzhil %ebx, %esi, %eax |
| ; X86-BMI2-NEXT: addl $4, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_c1_indexzext: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %rbp |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: pushq %rax |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movl %edi, %ebx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebx |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl $-1, %ebp |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebp |
| ; X64-NOBMI-NEXT: movl %ebp, %edi |
| ; X64-NOBMI-NEXT: callq use32@PLT |
| ; X64-NOBMI-NEXT: andl %ebx, %ebp |
| ; X64-NOBMI-NEXT: movl %ebp, %eax |
| ; X64-NOBMI-NEXT: addq $8, %rsp |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: popq %rbp |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_c1_indexzext: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %rbp |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: pushq %rax |
| ; X64-BMI1-NEXT: movl %esi, %ecx |
| ; X64-BMI1-NEXT: movl %edi, %ebx |
| ; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-BMI1-NEXT: shrl %cl, %ebx |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movl $-1, %ebp |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrl %cl, %ebp |
| ; X64-BMI1-NEXT: movl %ebp, %edi |
| ; X64-BMI1-NEXT: callq use32@PLT |
| ; X64-BMI1-NEXT: andl %ebx, %ebp |
| ; X64-BMI1-NEXT: movl %ebp, %eax |
| ; X64-BMI1-NEXT: addq $8, %rsp |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: popq %rbp |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_c1_indexzext: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %rbp |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: pushq %rax |
| ; X64-BMI2-NEXT: movl %edx, %ebx |
| ; X64-BMI2-NEXT: shrxl %esi, %edi, %ebp |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: negb %al |
| ; X64-BMI2-NEXT: movl $-1, %ecx |
| ; X64-BMI2-NEXT: shrxl %eax, %ecx, %edi |
| ; X64-BMI2-NEXT: callq use32@PLT |
| ; X64-BMI2-NEXT: bzhil %ebx, %ebp, %eax |
| ; X64-BMI2-NEXT: addq $8, %rsp |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: popq %rbp |
| ; X64-BMI2-NEXT: retq |
| ; IR under test (pattern c): same as bextr32_c0, but both bit counts arrive as i8 |
| ; and are zero-extended; (32 - numlowbits) is computed in i8 before the zext. |
| ; The mask has an extra use via @use32. |
| %skip = zext i8 %numskipbits to i32 |
| %shifted = lshr i32 %val, %skip |
| %numhighbits = sub i8 32, %numlowbits |
| %sh_prom = zext i8 %numhighbits to i32 |
| %mask = lshr i32 -1, %sh_prom |
| call void @use32(i32 %mask) |
| %masked = and i32 %mask, %shifted |
| ret i32 %masked |
| } |
| |
| define i32 @bextr32_c2_load(i32* %w, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_c2_load: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: pushl %eax |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl (%eax), %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %esi |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %esi |
| ; X86-NOBMI-NEXT: movl %esi, (%esp) |
| ; X86-NOBMI-NEXT: calll use32@PLT |
| ; X86-NOBMI-NEXT: andl %edi, %esi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: addl $4, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_c2_load: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: pushl %eax |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl (%eax), %edi |
| ; X86-BMI1-NEXT: shrl %cl, %edi |
| ; X86-BMI1-NEXT: xorl %ecx, %ecx |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %esi |
| ; X86-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-BMI1-NEXT: shrl %cl, %esi |
| ; X86-BMI1-NEXT: movl %esi, (%esp) |
| ; X86-BMI1-NEXT: calll use32@PLT |
| ; X86-BMI1-NEXT: andl %edi, %esi |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: addl $4, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_c2_load: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: pushl %eax |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: shrxl %ecx, (%eax), %esi |
| ; X86-BMI2-NEXT: movl %ebx, %eax |
| ; X86-BMI2-NEXT: negb %al |
| ; X86-BMI2-NEXT: movl $-1, %ecx |
| ; X86-BMI2-NEXT: shrxl %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: movl %eax, (%esp) |
| ; X86-BMI2-NEXT: calll use32@PLT |
| ; X86-BMI2-NEXT: bzhil %ebx, %esi, %eax |
| ; X86-BMI2-NEXT: addl $4, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_c2_load: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %rbp |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: pushq %rax |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movl (%rdi), %ebp |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebp |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl $-1, %ebx |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebx |
| ; X64-NOBMI-NEXT: movl %ebx, %edi |
| ; X64-NOBMI-NEXT: callq use32@PLT |
| ; X64-NOBMI-NEXT: andl %ebp, %ebx |
| ; X64-NOBMI-NEXT: movl %ebx, %eax |
| ; X64-NOBMI-NEXT: addq $8, %rsp |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: popq %rbp |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_c2_load: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %rbp |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: pushq %rax |
| ; X64-BMI1-NEXT: movl %esi, %ecx |
| ; X64-BMI1-NEXT: movl (%rdi), %ebp |
| ; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-BMI1-NEXT: shrl %cl, %ebp |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movl $-1, %ebx |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrl %cl, %ebx |
| ; X64-BMI1-NEXT: movl %ebx, %edi |
| ; X64-BMI1-NEXT: callq use32@PLT |
| ; X64-BMI1-NEXT: andl %ebp, %ebx |
| ; X64-BMI1-NEXT: movl %ebx, %eax |
| ; X64-BMI1-NEXT: addq $8, %rsp |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: popq %rbp |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_c2_load: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %rbp |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: pushq %rax |
| ; X64-BMI2-NEXT: movl %edx, %ebx |
| ; X64-BMI2-NEXT: shrxl %esi, (%rdi), %ebp |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: negb %al |
| ; X64-BMI2-NEXT: movl $-1, %ecx |
| ; X64-BMI2-NEXT: shrxl %eax, %ecx, %edi |
| ; X64-BMI2-NEXT: callq use32@PLT |
| ; X64-BMI2-NEXT: bzhil %ebx, %ebp, %eax |
| ; X64-BMI2-NEXT: addq $8, %rsp |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: popq %rbp |
| ; X64-BMI2-NEXT: retq |
| ; IR under test (pattern c): same as bextr32_c0, but the value is loaded from |
| ; memory (checks the load can be folded / the pattern still matches). |
| ; The mask has an extra use via @use32. |
| %val = load i32, i32* %w |
| %shifted = lshr i32 %val, %numskipbits |
| %numhighbits = sub i32 32, %numlowbits |
| %mask = lshr i32 -1, %numhighbits |
| call void @use32(i32 %mask) |
| %masked = and i32 %mask, %shifted |
| ret i32 %masked |
| } |
| |
define i32 @bextr32_c3_load_indexzext(i32* %w, i8 %numskipbits, i8 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_c3_load_indexzext:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: pushl %eax
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl (%eax), %edi
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: xorl %ecx, %ecx
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl $-1, %esi
; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl %esi, (%esp)
; X86-NOBMI-NEXT: calll use32@PLT
; X86-NOBMI-NEXT: andl %edi, %esi
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: addl $4, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_c3_load_indexzext:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: pushl %eax
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl (%eax), %edi
; X86-BMI1-NEXT: shrl %cl, %edi
; X86-BMI1-NEXT: xorl %ecx, %ecx
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl $-1, %esi
; X86-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx
; X86-BMI1-NEXT: shrl %cl, %esi
; X86-BMI1-NEXT: movl %esi, (%esp)
; X86-BMI1-NEXT: calll use32@PLT
; X86-BMI1-NEXT: andl %edi, %esi
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: addl $4, %esp
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_c3_load_indexzext:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: pushl %eax
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shrxl %ecx, (%eax), %esi
; X86-BMI2-NEXT: movl %ebx, %eax
; X86-BMI2-NEXT: negb %al
; X86-BMI2-NEXT: movl $-1, %ecx
; X86-BMI2-NEXT: shrxl %eax, %ecx, %eax
; X86-BMI2-NEXT: movl %eax, (%esp)
; X86-BMI2-NEXT: calll use32@PLT
; X86-BMI2-NEXT: bzhil %ebx, %esi, %eax
; X86-BMI2-NEXT: addl $4, %esp
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_c3_load_indexzext:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %rbp
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: pushq %rax
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: movl (%rdi), %ebp
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrl %cl, %ebp
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl $-1, %ebx
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shrl %cl, %ebx
; X64-NOBMI-NEXT: movl %ebx, %edi
; X64-NOBMI-NEXT: callq use32@PLT
; X64-NOBMI-NEXT: andl %ebp, %ebx
; X64-NOBMI-NEXT: movl %ebx, %eax
; X64-NOBMI-NEXT: addq $8, %rsp
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: popq %rbp
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_c3_load_indexzext:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: pushq %rbp
; X64-BMI1-NEXT: pushq %rbx
; X64-BMI1-NEXT: pushq %rax
; X64-BMI1-NEXT: movl %esi, %ecx
; X64-BMI1-NEXT: movl (%rdi), %ebp
; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-BMI1-NEXT: shrl %cl, %ebp
; X64-BMI1-NEXT: negb %dl
; X64-BMI1-NEXT: movl $-1, %ebx
; X64-BMI1-NEXT: movl %edx, %ecx
; X64-BMI1-NEXT: shrl %cl, %ebx
; X64-BMI1-NEXT: movl %ebx, %edi
; X64-BMI1-NEXT: callq use32@PLT
; X64-BMI1-NEXT: andl %ebp, %ebx
; X64-BMI1-NEXT: movl %ebx, %eax
; X64-BMI1-NEXT: addq $8, %rsp
; X64-BMI1-NEXT: popq %rbx
; X64-BMI1-NEXT: popq %rbp
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_c3_load_indexzext:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: pushq %rbp
; X64-BMI2-NEXT: pushq %rbx
; X64-BMI2-NEXT: pushq %rax
; X64-BMI2-NEXT: movl %edx, %ebx
; X64-BMI2-NEXT: shrxl %esi, (%rdi), %ebp
; X64-BMI2-NEXT: movl %ebx, %eax
; X64-BMI2-NEXT: negb %al
; X64-BMI2-NEXT: movl $-1, %ecx
; X64-BMI2-NEXT: shrxl %eax, %ecx, %edi
; X64-BMI2-NEXT: callq use32@PLT
; X64-BMI2-NEXT: bzhil %ebx, %ebp, %eax
; X64-BMI2-NEXT: addq $8, %rsp
; X64-BMI2-NEXT: popq %rbx
; X64-BMI2-NEXT: popq %rbp
; X64-BMI2-NEXT: retq
; Pattern c with a loaded value and i8 (zero-extended) shift amounts:
; (load(w) >> zext(numskipbits)) & (-1 >> zext(32 - numlowbits)).
; The mask escapes via @use32, so the and must stay materialized around the call.
%val = load i32, i32* %w
%skip = zext i8 %numskipbits to i32
%shifted = lshr i32 %val, %skip
%numhighbits = sub i8 32, %numlowbits
%sh_prom = zext i8 %numhighbits to i32
%mask = lshr i32 -1, %sh_prom
call void @use32(i32 %mask)
%masked = and i32 %mask, %shifted
ret i32 %masked
}
| |
define i32 @bextr32_c4_commutative(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_c4_commutative:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: pushl %eax
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: xorl %ecx, %ecx
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl $-1, %esi
; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl %esi, (%esp)
; X86-NOBMI-NEXT: calll use32@PLT
; X86-NOBMI-NEXT: andl %edi, %esi
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: addl $4, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_c4_commutative:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: pushl %eax
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-BMI1-NEXT: shrl %cl, %edi
; X86-BMI1-NEXT: xorl %ecx, %ecx
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl $-1, %esi
; X86-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx
; X86-BMI1-NEXT: shrl %cl, %esi
; X86-BMI1-NEXT: movl %esi, (%esp)
; X86-BMI1-NEXT: calll use32@PLT
; X86-BMI1-NEXT: andl %edi, %esi
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: addl $4, %esp
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_c4_commutative:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: pushl %eax
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: shrxl %eax, {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT: movl %ebx, %eax
; X86-BMI2-NEXT: negb %al
; X86-BMI2-NEXT: movl $-1, %ecx
; X86-BMI2-NEXT: shrxl %eax, %ecx, %eax
; X86-BMI2-NEXT: movl %eax, (%esp)
; X86-BMI2-NEXT: calll use32@PLT
; X86-BMI2-NEXT: bzhil %ebx, %esi, %eax
; X86-BMI2-NEXT: addl $4, %esp
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_c4_commutative:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %rbp
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: pushq %rax
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: movl %edi, %ebx
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrl %cl, %ebx
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl $-1, %ebp
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shrl %cl, %ebp
; X64-NOBMI-NEXT: movl %ebp, %edi
; X64-NOBMI-NEXT: callq use32@PLT
; X64-NOBMI-NEXT: andl %ebx, %ebp
; X64-NOBMI-NEXT: movl %ebp, %eax
; X64-NOBMI-NEXT: addq $8, %rsp
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: popq %rbp
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_c4_commutative:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: pushq %rbp
; X64-BMI1-NEXT: pushq %rbx
; X64-BMI1-NEXT: pushq %rax
; X64-BMI1-NEXT: movl %esi, %ecx
; X64-BMI1-NEXT: movl %edi, %ebx
; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-BMI1-NEXT: shrl %cl, %ebx
; X64-BMI1-NEXT: negb %dl
; X64-BMI1-NEXT: movl $-1, %ebp
; X64-BMI1-NEXT: movl %edx, %ecx
; X64-BMI1-NEXT: shrl %cl, %ebp
; X64-BMI1-NEXT: movl %ebp, %edi
; X64-BMI1-NEXT: callq use32@PLT
; X64-BMI1-NEXT: andl %ebx, %ebp
; X64-BMI1-NEXT: movl %ebp, %eax
; X64-BMI1-NEXT: addq $8, %rsp
; X64-BMI1-NEXT: popq %rbx
; X64-BMI1-NEXT: popq %rbp
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_c4_commutative:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: pushq %rbp
; X64-BMI2-NEXT: pushq %rbx
; X64-BMI2-NEXT: pushq %rax
; X64-BMI2-NEXT: movl %edx, %ebx
; X64-BMI2-NEXT: shrxl %esi, %edi, %ebp
; X64-BMI2-NEXT: movl %ebx, %eax
; X64-BMI2-NEXT: negb %al
; X64-BMI2-NEXT: movl $-1, %ecx
; X64-BMI2-NEXT: shrxl %eax, %ecx, %edi
; X64-BMI2-NEXT: callq use32@PLT
; X64-BMI2-NEXT: bzhil %ebx, %ebp, %eax
; X64-BMI2-NEXT: addq $8, %rsp
; X64-BMI2-NEXT: popq %rbx
; X64-BMI2-NEXT: popq %rbp
; X64-BMI2-NEXT: retq
; Same as pattern c, but the operands of the final 'and' are commuted
; (shifted & mask instead of mask & shifted); codegen must match either order.
%shifted = lshr i32 %val, %numskipbits
%numhighbits = sub i32 32, %numlowbits
%mask = lshr i32 -1, %numhighbits
call void @use32(i32 %mask)
%masked = and i32 %shifted, %mask ; swapped order
ret i32 %masked
}
| |
define i32 @bextr32_c5_skipextrauses(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr32_c5_skipextrauses:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: subl $16, %esp
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NOBMI-NEXT: movl %ebx, %ecx
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: xorl %ecx, %ecx
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl $-1, %esi
; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: movl %esi, (%esp)
; X86-NOBMI-NEXT: calll use32@PLT
; X86-NOBMI-NEXT: andl %edi, %esi
; X86-NOBMI-NEXT: movl %ebx, (%esp)
; X86-NOBMI-NEXT: calll use32@PLT
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: addl $16, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr32_c5_skipextrauses:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: subl $16, %esp
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-BMI1-NEXT: movl %ebx, %ecx
; X86-BMI1-NEXT: shrl %cl, %edi
; X86-BMI1-NEXT: xorl %ecx, %ecx
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl $-1, %esi
; X86-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx
; X86-BMI1-NEXT: shrl %cl, %esi
; X86-BMI1-NEXT: movl %esi, (%esp)
; X86-BMI1-NEXT: calll use32@PLT
; X86-BMI1-NEXT: andl %edi, %esi
; X86-BMI1-NEXT: movl %ebx, (%esp)
; X86-BMI1-NEXT: calll use32@PLT
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: addl $16, %esp
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: popl %ebx
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr32_c5_skipextrauses:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: subl $16, %esp
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %bl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-BMI2-NEXT: shrxl %edi, {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT: movl %ebx, %eax
; X86-BMI2-NEXT: negb %al
; X86-BMI2-NEXT: movl $-1, %ecx
; X86-BMI2-NEXT: shrxl %eax, %ecx, %eax
; X86-BMI2-NEXT: movl %eax, (%esp)
; X86-BMI2-NEXT: calll use32@PLT
; X86-BMI2-NEXT: bzhil %ebx, %esi, %esi
; X86-BMI2-NEXT: movl %edi, (%esp)
; X86-BMI2-NEXT: calll use32@PLT
; X86-BMI2-NEXT: movl %esi, %eax
; X86-BMI2-NEXT: addl $16, %esp
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr32_c5_skipextrauses:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %rbp
; X64-NOBMI-NEXT: pushq %r14
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: movl %esi, %r14d
; X64-NOBMI-NEXT: movl %edi, %ebp
; X64-NOBMI-NEXT: movl %r14d, %ecx
; X64-NOBMI-NEXT: shrl %cl, %ebp
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl $-1, %ebx
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shrl %cl, %ebx
; X64-NOBMI-NEXT: movl %ebx, %edi
; X64-NOBMI-NEXT: callq use32@PLT
; X64-NOBMI-NEXT: andl %ebp, %ebx
; X64-NOBMI-NEXT: movl %r14d, %edi
; X64-NOBMI-NEXT: callq use32@PLT
; X64-NOBMI-NEXT: movl %ebx, %eax
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: popq %r14
; X64-NOBMI-NEXT: popq %rbp
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr32_c5_skipextrauses:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: pushq %rbp
; X64-BMI1-NEXT: pushq %r14
; X64-BMI1-NEXT: pushq %rbx
; X64-BMI1-NEXT: movl %esi, %r14d
; X64-BMI1-NEXT: movl %edi, %ebp
; X64-BMI1-NEXT: movl %r14d, %ecx
; X64-BMI1-NEXT: shrl %cl, %ebp
; X64-BMI1-NEXT: negb %dl
; X64-BMI1-NEXT: movl $-1, %ebx
; X64-BMI1-NEXT: movl %edx, %ecx
; X64-BMI1-NEXT: shrl %cl, %ebx
; X64-BMI1-NEXT: movl %ebx, %edi
; X64-BMI1-NEXT: callq use32@PLT
; X64-BMI1-NEXT: andl %ebp, %ebx
; X64-BMI1-NEXT: movl %r14d, %edi
; X64-BMI1-NEXT: callq use32@PLT
; X64-BMI1-NEXT: movl %ebx, %eax
; X64-BMI1-NEXT: popq %rbx
; X64-BMI1-NEXT: popq %r14
; X64-BMI1-NEXT: popq %rbp
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr32_c5_skipextrauses:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: pushq %rbp
; X64-BMI2-NEXT: pushq %r14
; X64-BMI2-NEXT: pushq %rbx
; X64-BMI2-NEXT: movl %edx, %ebx
; X64-BMI2-NEXT: movl %esi, %ebp
; X64-BMI2-NEXT: shrxl %esi, %edi, %r14d
; X64-BMI2-NEXT: movl %ebx, %eax
; X64-BMI2-NEXT: negb %al
; X64-BMI2-NEXT: movl $-1, %ecx
; X64-BMI2-NEXT: shrxl %eax, %ecx, %edi
; X64-BMI2-NEXT: callq use32@PLT
; X64-BMI2-NEXT: bzhil %ebx, %r14d, %ebx
; X64-BMI2-NEXT: movl %ebp, %edi
; X64-BMI2-NEXT: callq use32@PLT
; X64-BMI2-NEXT: movl %ebx, %eax
; X64-BMI2-NEXT: popq %rbx
; X64-BMI2-NEXT: popq %r14
; X64-BMI2-NEXT: popq %rbp
; X64-BMI2-NEXT: retq
; Pattern c where, in addition to the mask, %numskipbits itself has an extra
; use (second @use32 call), so the shift amount must also stay live across it.
%shifted = lshr i32 %val, %numskipbits
%numhighbits = sub i32 32, %numlowbits
%mask = lshr i32 -1, %numhighbits
call void @use32(i32 %mask)
%masked = and i32 %mask, %shifted
call void @use32(i32 %numskipbits)
ret i32 %masked
}
| |
| ; 64-bit |
| |
define i64 @bextr64_c0(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_c0:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebp
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: subl $12, %esp
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl %eax, %edi
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB41_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: xorl %edi, %edi
; X86-NOBMI-NEXT: .LBB41_2:
; X86-NOBMI-NEXT: movb $64, %cl
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl $-1, %ebp
; X86-NOBMI-NEXT: movl $-1, %ebx
; X86-NOBMI-NEXT: shrl %cl, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB41_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %ebx, %ebp
; X86-NOBMI-NEXT: xorl %ebx, %ebx
; X86-NOBMI-NEXT: .LBB41_4:
; X86-NOBMI-NEXT: subl $8, %esp
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %ebp
; X86-NOBMI-NEXT: calll use64@PLT
; X86-NOBMI-NEXT: addl $16, %esp
; X86-NOBMI-NEXT: andl %ebp, %esi
; X86-NOBMI-NEXT: andl %ebx, %edi
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: movl %edi, %edx
; X86-NOBMI-NEXT: addl $12, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: popl %ebp
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_c0:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %ebp
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: subl $12, %esp
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl %eax, %edi
; X86-BMI1-NEXT: shrl %cl, %edi
; X86-BMI1-NEXT: shrdl %cl, %eax, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB41_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %edi, %esi
; X86-BMI1-NEXT: xorl %edi, %edi
; X86-BMI1-NEXT: .LBB41_2:
; X86-BMI1-NEXT: movb $64, %cl
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl $-1, %ebp
; X86-BMI1-NEXT: movl $-1, %ebx
; X86-BMI1-NEXT: shrl %cl, %ebx
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB41_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %ebx, %ebp
; X86-BMI1-NEXT: xorl %ebx, %ebx
; X86-BMI1-NEXT: .LBB41_4:
; X86-BMI1-NEXT: subl $8, %esp
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %ebp
; X86-BMI1-NEXT: calll use64@PLT
; X86-BMI1-NEXT: addl $16, %esp
; X86-BMI1-NEXT: andl %ebp, %esi
; X86-BMI1-NEXT: andl %ebx, %edi
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: movl %edi, %edx
; X86-BMI1-NEXT: addl $12, %esp
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: popl %ebx
; X86-BMI1-NEXT: popl %ebp
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_c0:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebp
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: subl $12, %esp
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: shrdl %cl, %eax, %esi
; X86-BMI2-NEXT: shrxl %ecx, %eax, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB41_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: xorl %edi, %edi
; X86-BMI2-NEXT: .LBB41_2:
; X86-BMI2-NEXT: movb $64, %al
; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movl $-1, %ebp
; X86-BMI2-NEXT: shrxl %eax, %ebp, %ebx
; X86-BMI2-NEXT: testb $32, %al
; X86-BMI2-NEXT: je .LBB41_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %ebx, %ebp
; X86-BMI2-NEXT: xorl %ebx, %ebx
; X86-BMI2-NEXT: .LBB41_4:
; X86-BMI2-NEXT: subl $8, %esp
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %ebp
; X86-BMI2-NEXT: calll use64@PLT
; X86-BMI2-NEXT: addl $16, %esp
; X86-BMI2-NEXT: andl %ebp, %esi
; X86-BMI2-NEXT: andl %ebx, %edi
; X86-BMI2-NEXT: movl %esi, %eax
; X86-BMI2-NEXT: movl %edi, %edx
; X86-BMI2-NEXT: addl $12, %esp
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: popl %ebp
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_c0:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %r14
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: pushq %rax
; X64-NOBMI-NEXT: movq %rsi, %rcx
; X64-NOBMI-NEXT: movq %rdi, %r14
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT: shrq %cl, %r14
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movq $-1, %rbx
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shrq %cl, %rbx
; X64-NOBMI-NEXT: movq %rbx, %rdi
; X64-NOBMI-NEXT: callq use64@PLT
; X64-NOBMI-NEXT: andq %r14, %rbx
; X64-NOBMI-NEXT: movq %rbx, %rax
; X64-NOBMI-NEXT: addq $8, %rsp
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: popq %r14
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_c0:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: pushq %r14
; X64-BMI1-NEXT: pushq %rbx
; X64-BMI1-NEXT: pushq %rax
; X64-BMI1-NEXT: movq %rsi, %rcx
; X64-BMI1-NEXT: movq %rdi, %r14
; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-BMI1-NEXT: shrq %cl, %r14
; X64-BMI1-NEXT: negb %dl
; X64-BMI1-NEXT: movq $-1, %rbx
; X64-BMI1-NEXT: movl %edx, %ecx
; X64-BMI1-NEXT: shrq %cl, %rbx
; X64-BMI1-NEXT: movq %rbx, %rdi
; X64-BMI1-NEXT: callq use64@PLT
; X64-BMI1-NEXT: andq %r14, %rbx
; X64-BMI1-NEXT: movq %rbx, %rax
; X64-BMI1-NEXT: addq $8, %rsp
; X64-BMI1-NEXT: popq %rbx
; X64-BMI1-NEXT: popq %r14
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_c0:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: pushq %r14
; X64-BMI2-NEXT: pushq %rbx
; X64-BMI2-NEXT: pushq %rax
; X64-BMI2-NEXT: movq %rdx, %rbx
; X64-BMI2-NEXT: shrxq %rsi, %rdi, %r14
; X64-BMI2-NEXT: movl %ebx, %eax
; X64-BMI2-NEXT: negb %al
; X64-BMI2-NEXT: movq $-1, %rcx
; X64-BMI2-NEXT: shrxq %rax, %rcx, %rdi
; X64-BMI2-NEXT: callq use64@PLT
; X64-BMI2-NEXT: bzhiq %rbx, %r14, %rax
; X64-BMI2-NEXT: addq $8, %rsp
; X64-BMI2-NEXT: popq %rbx
; X64-BMI2-NEXT: popq %r14
; X64-BMI2-NEXT: retq
; 64-bit baseline for pattern c: (val >> numskipbits) & (-1 >> (64 - numlowbits)),
; with the mask escaping via @use64. On 32-bit targets the i64 shifts expand into
; shrdl/shrl pairs with a testb $32 split for shift amounts >= 32.
%shifted = lshr i64 %val, %numskipbits
%numhighbits = sub i64 64, %numlowbits
%mask = lshr i64 -1, %numhighbits
call void @use64(i64 %mask)
%masked = and i64 %mask, %shifted
ret i64 %masked
}
| |
define i64 @bextr64_c1_indexzext(i64 %val, i8 %numskipbits, i8 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_c1_indexzext:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebp
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: subl $12, %esp
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl %eax, %edi
; X86-NOBMI-NEXT: shrl %cl, %edi
; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB42_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: xorl %edi, %edi
; X86-NOBMI-NEXT: .LBB42_2:
; X86-NOBMI-NEXT: movb $64, %cl
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl $-1, %ebp
; X86-NOBMI-NEXT: movl $-1, %ebx
; X86-NOBMI-NEXT: shrl %cl, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB42_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %ebx, %ebp
; X86-NOBMI-NEXT: xorl %ebx, %ebx
; X86-NOBMI-NEXT: .LBB42_4:
; X86-NOBMI-NEXT: subl $8, %esp
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %ebp
; X86-NOBMI-NEXT: calll use64@PLT
; X86-NOBMI-NEXT: addl $16, %esp
; X86-NOBMI-NEXT: andl %ebp, %esi
; X86-NOBMI-NEXT: andl %ebx, %edi
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: movl %edi, %edx
; X86-NOBMI-NEXT: addl $12, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: popl %ebp
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_c1_indexzext:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %ebp
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: subl $12, %esp
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl %eax, %edi
; X86-BMI1-NEXT: shrl %cl, %edi
; X86-BMI1-NEXT: shrdl %cl, %eax, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB42_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %edi, %esi
; X86-BMI1-NEXT: xorl %edi, %edi
; X86-BMI1-NEXT: .LBB42_2:
; X86-BMI1-NEXT: movb $64, %cl
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl $-1, %ebp
; X86-BMI1-NEXT: movl $-1, %ebx
; X86-BMI1-NEXT: shrl %cl, %ebx
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB42_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %ebx, %ebp
; X86-BMI1-NEXT: xorl %ebx, %ebx
; X86-BMI1-NEXT: .LBB42_4:
; X86-BMI1-NEXT: subl $8, %esp
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %ebp
; X86-BMI1-NEXT: calll use64@PLT
; X86-BMI1-NEXT: addl $16, %esp
; X86-BMI1-NEXT: andl %ebp, %esi
; X86-BMI1-NEXT: andl %ebx, %edi
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: movl %edi, %edx
; X86-BMI1-NEXT: addl $12, %esp
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: popl %ebx
; X86-BMI1-NEXT: popl %ebp
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_c1_indexzext:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebp
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: subl $12, %esp
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: shrdl %cl, %eax, %esi
; X86-BMI2-NEXT: shrxl %ecx, %eax, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB42_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: xorl %edi, %edi
; X86-BMI2-NEXT: .LBB42_2:
; X86-BMI2-NEXT: movb $64, %al
; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movl $-1, %ebp
; X86-BMI2-NEXT: shrxl %eax, %ebp, %ebx
; X86-BMI2-NEXT: testb $32, %al
; X86-BMI2-NEXT: je .LBB42_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %ebx, %ebp
; X86-BMI2-NEXT: xorl %ebx, %ebx
; X86-BMI2-NEXT: .LBB42_4:
; X86-BMI2-NEXT: subl $8, %esp
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %ebp
; X86-BMI2-NEXT: calll use64@PLT
; X86-BMI2-NEXT: addl $16, %esp
; X86-BMI2-NEXT: andl %ebp, %esi
; X86-BMI2-NEXT: andl %ebx, %edi
; X86-BMI2-NEXT: movl %esi, %eax
; X86-BMI2-NEXT: movl %edi, %edx
; X86-BMI2-NEXT: addl $12, %esp
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: popl %ebp
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_c1_indexzext:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %r14
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: pushq %rax
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: movq %rdi, %r14
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrq %cl, %r14
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movq $-1, %rbx
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shrq %cl, %rbx
; X64-NOBMI-NEXT: movq %rbx, %rdi
; X64-NOBMI-NEXT: callq use64@PLT
; X64-NOBMI-NEXT: andq %r14, %rbx
; X64-NOBMI-NEXT: movq %rbx, %rax
; X64-NOBMI-NEXT: addq $8, %rsp
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: popq %r14
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_c1_indexzext:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: pushq %r14
; X64-BMI1-NEXT: pushq %rbx
; X64-BMI1-NEXT: pushq %rax
; X64-BMI1-NEXT: movl %esi, %ecx
; X64-BMI1-NEXT: movq %rdi, %r14
; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-BMI1-NEXT: shrq %cl, %r14
; X64-BMI1-NEXT: negb %dl
; X64-BMI1-NEXT: movq $-1, %rbx
; X64-BMI1-NEXT: movl %edx, %ecx
; X64-BMI1-NEXT: shrq %cl, %rbx
; X64-BMI1-NEXT: movq %rbx, %rdi
; X64-BMI1-NEXT: callq use64@PLT
; X64-BMI1-NEXT: andq %r14, %rbx
; X64-BMI1-NEXT: movq %rbx, %rax
; X64-BMI1-NEXT: addq $8, %rsp
; X64-BMI1-NEXT: popq %rbx
; X64-BMI1-NEXT: popq %r14
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_c1_indexzext:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: pushq %r14
; X64-BMI2-NEXT: pushq %rbx
; X64-BMI2-NEXT: pushq %rax
; X64-BMI2-NEXT: movl %edx, %ebx
; X64-BMI2-NEXT: # kill: def $esi killed $esi def $rsi
; X64-BMI2-NEXT: shrxq %rsi, %rdi, %r14
; X64-BMI2-NEXT: movl %ebx, %eax
; X64-BMI2-NEXT: negb %al
; X64-BMI2-NEXT: movq $-1, %rcx
; X64-BMI2-NEXT: shrxq %rax, %rcx, %rdi
; X64-BMI2-NEXT: callq use64@PLT
; X64-BMI2-NEXT: bzhiq %rbx, %r14, %rax
; X64-BMI2-NEXT: addq $8, %rsp
; X64-BMI2-NEXT: popq %rbx
; X64-BMI2-NEXT: popq %r14
; X64-BMI2-NEXT: retq
; Pattern c on i64 with i8 bit counts: both the skip amount and (64 - numlowbits)
; are computed in i8 and zero-extended to i64 before shifting; the mask again
; escapes via @use64.
%skip = zext i8 %numskipbits to i64
%shifted = lshr i64 %val, %skip
%numhighbits = sub i8 64, %numlowbits
%sh_prom = zext i8 %numhighbits to i64
%mask = lshr i64 -1, %sh_prom
call void @use64(i64 %mask)
%masked = and i64 %mask, %shifted
ret i64 %masked
}
| |
| define i64 @bextr64_c2_load(i64* %w, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_c2_load: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: subl $12, %esp |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl (%eax), %esi |
| ; X86-NOBMI-NEXT: movl 4(%eax), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB43_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB43_2: |
| ; X86-NOBMI-NEXT: movb $64, %cl |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %ebp |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: shrl %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB43_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %ebx, %ebp |
| ; X86-NOBMI-NEXT: xorl %ebx, %ebx |
| ; X86-NOBMI-NEXT: .LBB43_4: |
| ; X86-NOBMI-NEXT: subl $8, %esp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: calll use64@PLT |
| ; X86-NOBMI-NEXT: addl $16, %esp |
| ; X86-NOBMI-NEXT: andl %ebp, %esi |
| ; X86-NOBMI-NEXT: andl %ebx, %edi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: movl %edi, %edx |
| ; X86-NOBMI-NEXT: addl $12, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: popl %ebp |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_c2_load: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: subl $12, %esp |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl (%eax), %esi |
| ; X86-BMI1-NEXT: movl 4(%eax), %eax |
| ; X86-BMI1-NEXT: movl %eax, %edi |
| ; X86-BMI1-NEXT: shrl %cl, %edi |
| ; X86-BMI1-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB43_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edi, %esi |
| ; X86-BMI1-NEXT: xorl %edi, %edi |
| ; X86-BMI1-NEXT: .LBB43_2: |
| ; X86-BMI1-NEXT: movb $64, %cl |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %ebp |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: shrl %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB43_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %ebp |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB43_4: |
| ; X86-BMI1-NEXT: subl $8, %esp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: calll use64@PLT |
| ; X86-BMI1-NEXT: addl $16, %esp |
| ; X86-BMI1-NEXT: andl %ebp, %esi |
| ; X86-BMI1-NEXT: andl %ebx, %edi |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: addl $12, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: popl %ebp |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_c2_load: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %edi |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: subl $12, %esp |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl (%eax), %esi |
| ; X86-BMI2-NEXT: movl 4(%eax), %eax |
| ; X86-BMI2-NEXT: shrxl %ecx, %eax, %edi |
| ; X86-BMI2-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB43_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edi, %esi |
| ; X86-BMI2-NEXT: xorl %edi, %edi |
| ; X86-BMI2-NEXT: .LBB43_2: |
| ; X86-BMI2-NEXT: movb $64, %al |
| ; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movl $-1, %ebp |
| ; X86-BMI2-NEXT: shrxl %eax, %ebp, %ebx |
| ; X86-BMI2-NEXT: testb $32, %al |
| ; X86-BMI2-NEXT: je .LBB43_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ebx, %ebp |
| ; X86-BMI2-NEXT: xorl %ebx, %ebx |
| ; X86-BMI2-NEXT: .LBB43_4: |
| ; X86-BMI2-NEXT: subl $8, %esp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: calll use64@PLT |
| ; X86-BMI2-NEXT: addl $16, %esp |
| ; X86-BMI2-NEXT: andl %ebp, %esi |
| ; X86-BMI2-NEXT: andl %ebx, %edi |
| ; X86-BMI2-NEXT: movl %esi, %eax |
| ; X86-BMI2-NEXT: movl %edi, %edx |
| ; X86-BMI2-NEXT: addl $12, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %edi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: popl %ebp |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_c2_load: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %r14 |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: pushq %rax |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: movq (%rdi), %r14 |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %r14 |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movq $-1, %rbx |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rbx |
| ; X64-NOBMI-NEXT: movq %rbx, %rdi |
| ; X64-NOBMI-NEXT: callq use64@PLT |
| ; X64-NOBMI-NEXT: andq %r14, %rbx |
| ; X64-NOBMI-NEXT: movq %rbx, %rax |
| ; X64-NOBMI-NEXT: addq $8, %rsp |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: popq %r14 |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_c2_load: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %r14 |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: pushq %rax |
| ; X64-BMI1-NEXT: movq %rsi, %rcx |
| ; X64-BMI1-NEXT: movq (%rdi), %r14 |
| ; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-BMI1-NEXT: shrq %cl, %r14 |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movq $-1, %rbx |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrq %cl, %rbx |
| ; X64-BMI1-NEXT: movq %rbx, %rdi |
| ; X64-BMI1-NEXT: callq use64@PLT |
| ; X64-BMI1-NEXT: andq %r14, %rbx |
| ; X64-BMI1-NEXT: movq %rbx, %rax |
| ; X64-BMI1-NEXT: addq $8, %rsp |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: popq %r14 |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_c2_load: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %r14 |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: pushq %rax |
| ; X64-BMI2-NEXT: movq %rdx, %rbx |
| ; X64-BMI2-NEXT: shrxq %rsi, (%rdi), %r14 |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: negb %al |
| ; X64-BMI2-NEXT: movq $-1, %rcx |
| ; X64-BMI2-NEXT: shrxq %rax, %rcx, %rdi |
| ; X64-BMI2-NEXT: callq use64@PLT |
| ; X64-BMI2-NEXT: bzhiq %rbx, %r14, %rax |
| ; X64-BMI2-NEXT: addq $8, %rsp |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: popq %r14 |
| ; X64-BMI2-NEXT: retq |
| %val = load i64, i64* %w |
| %shifted = lshr i64 %val, %numskipbits |
| %numhighbits = sub i64 64, %numlowbits |
| %mask = lshr i64 -1, %numhighbits |
| call void @use64(i64 %mask) |
| %masked = and i64 %mask, %shifted |
| ret i64 %masked |
| } |
| |
| ; Pattern c, load variant with i8 (zero-extended) shift amounts: |
| ;   (load %w >> zext(%numskipbits)) & (-1 >> zext(64 - %numlowbits)) |
| ; The call to @use64(%mask) gives the mask an extra use, so the mask |
| ; computation must be materialized and cannot fold into a plain bextr. |
| ; NOTE: CHECK lines are autogenerated by update_llc_test_checks.py -- |
| ; regenerate rather than hand-edit. |
| define i64 @bextr64_c3_load_indexzext(i64* %w, i8 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_c3_load_indexzext: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: subl $12, %esp |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl (%eax), %esi |
| ; X86-NOBMI-NEXT: movl 4(%eax), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB44_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB44_2: |
| ; X86-NOBMI-NEXT: movb $64, %cl |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %ebp |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: shrl %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB44_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %ebx, %ebp |
| ; X86-NOBMI-NEXT: xorl %ebx, %ebx |
| ; X86-NOBMI-NEXT: .LBB44_4: |
| ; X86-NOBMI-NEXT: subl $8, %esp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: calll use64@PLT |
| ; X86-NOBMI-NEXT: addl $16, %esp |
| ; X86-NOBMI-NEXT: andl %ebp, %esi |
| ; X86-NOBMI-NEXT: andl %ebx, %edi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: movl %edi, %edx |
| ; X86-NOBMI-NEXT: addl $12, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: popl %ebp |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_c3_load_indexzext: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: subl $12, %esp |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl (%eax), %esi |
| ; X86-BMI1-NEXT: movl 4(%eax), %eax |
| ; X86-BMI1-NEXT: movl %eax, %edi |
| ; X86-BMI1-NEXT: shrl %cl, %edi |
| ; X86-BMI1-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB44_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edi, %esi |
| ; X86-BMI1-NEXT: xorl %edi, %edi |
| ; X86-BMI1-NEXT: .LBB44_2: |
| ; X86-BMI1-NEXT: movb $64, %cl |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %ebp |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: shrl %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB44_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %ebp |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB44_4: |
| ; X86-BMI1-NEXT: subl $8, %esp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: calll use64@PLT |
| ; X86-BMI1-NEXT: addl $16, %esp |
| ; X86-BMI1-NEXT: andl %ebp, %esi |
| ; X86-BMI1-NEXT: andl %ebx, %edi |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: addl $12, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: popl %ebp |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_c3_load_indexzext: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %edi |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: subl $12, %esp |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl (%eax), %esi |
| ; X86-BMI2-NEXT: movl 4(%eax), %eax |
| ; X86-BMI2-NEXT: shrxl %ecx, %eax, %edi |
| ; X86-BMI2-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB44_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edi, %esi |
| ; X86-BMI2-NEXT: xorl %edi, %edi |
| ; X86-BMI2-NEXT: .LBB44_2: |
| ; X86-BMI2-NEXT: movb $64, %al |
| ; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movl $-1, %ebp |
| ; X86-BMI2-NEXT: shrxl %eax, %ebp, %ebx |
| ; X86-BMI2-NEXT: testb $32, %al |
| ; X86-BMI2-NEXT: je .LBB44_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ebx, %ebp |
| ; X86-BMI2-NEXT: xorl %ebx, %ebx |
| ; X86-BMI2-NEXT: .LBB44_4: |
| ; X86-BMI2-NEXT: subl $8, %esp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: calll use64@PLT |
| ; X86-BMI2-NEXT: addl $16, %esp |
| ; X86-BMI2-NEXT: andl %ebp, %esi |
| ; X86-BMI2-NEXT: andl %ebx, %edi |
| ; X86-BMI2-NEXT: movl %esi, %eax |
| ; X86-BMI2-NEXT: movl %edi, %edx |
| ; X86-BMI2-NEXT: addl $12, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %edi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: popl %ebp |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_c3_load_indexzext: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %r14 |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: pushq %rax |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movq (%rdi), %r14 |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %r14 |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movq $-1, %rbx |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rbx |
| ; X64-NOBMI-NEXT: movq %rbx, %rdi |
| ; X64-NOBMI-NEXT: callq use64@PLT |
| ; X64-NOBMI-NEXT: andq %r14, %rbx |
| ; X64-NOBMI-NEXT: movq %rbx, %rax |
| ; X64-NOBMI-NEXT: addq $8, %rsp |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: popq %r14 |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_c3_load_indexzext: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %r14 |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: pushq %rax |
| ; X64-BMI1-NEXT: movl %esi, %ecx |
| ; X64-BMI1-NEXT: movq (%rdi), %r14 |
| ; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-BMI1-NEXT: shrq %cl, %r14 |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movq $-1, %rbx |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrq %cl, %rbx |
| ; X64-BMI1-NEXT: movq %rbx, %rdi |
| ; X64-BMI1-NEXT: callq use64@PLT |
| ; X64-BMI1-NEXT: andq %r14, %rbx |
| ; X64-BMI1-NEXT: movq %rbx, %rax |
| ; X64-BMI1-NEXT: addq $8, %rsp |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: popq %r14 |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_c3_load_indexzext: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %r14 |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: pushq %rax |
| ; X64-BMI2-NEXT: movl %edx, %ebx |
| ; X64-BMI2-NEXT: # kill: def $esi killed $esi def $rsi |
| ; X64-BMI2-NEXT: shrxq %rsi, (%rdi), %r14 |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: negb %al |
| ; X64-BMI2-NEXT: movq $-1, %rcx |
| ; X64-BMI2-NEXT: shrxq %rax, %rcx, %rdi |
| ; X64-BMI2-NEXT: callq use64@PLT |
| ; X64-BMI2-NEXT: bzhiq %rbx, %r14, %rax |
| ; X64-BMI2-NEXT: addq $8, %rsp |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: popq %r14 |
| ; X64-BMI2-NEXT: retq |
| %val = load i64, i64* %w |
| %skip = zext i8 %numskipbits to i64 |
| %shifted = lshr i64 %val, %skip |
| %numhighbits = sub i8 64, %numlowbits |
| %sh_prom = zext i8 %numhighbits to i64 |
| %mask = lshr i64 -1, %sh_prom |
| call void @use64(i64 %mask) |
| %masked = and i64 %mask, %shifted |
| ret i64 %masked |
| } |
| |
| ; Pattern c with the final 'and' operands commuted: |
| ;   %masked = and i64 %shifted, %mask  (instead of %mask, %shifted). |
| ; Checks the bextr/bzhi matching is commutativity-agnostic. The mask |
| ; has an extra use via @use64, so it must still be materialized. |
| ; NOTE: CHECK lines are autogenerated by update_llc_test_checks.py -- |
| ; regenerate rather than hand-edit. |
| define i64 @bextr64_c4_commutative(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_c4_commutative: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: subl $12, %esp |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB45_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB45_2: |
| ; X86-NOBMI-NEXT: movb $64, %cl |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %ebp |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: shrl %cl, %ebx |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB45_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %ebx, %ebp |
| ; X86-NOBMI-NEXT: xorl %ebx, %ebx |
| ; X86-NOBMI-NEXT: .LBB45_4: |
| ; X86-NOBMI-NEXT: subl $8, %esp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: calll use64@PLT |
| ; X86-NOBMI-NEXT: addl $16, %esp |
| ; X86-NOBMI-NEXT: andl %ebp, %esi |
| ; X86-NOBMI-NEXT: andl %ebx, %edi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: movl %edi, %edx |
| ; X86-NOBMI-NEXT: addl $12, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: popl %ebp |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_c4_commutative: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: subl $12, %esp |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl %eax, %edi |
| ; X86-BMI1-NEXT: shrl %cl, %edi |
| ; X86-BMI1-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB45_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edi, %esi |
| ; X86-BMI1-NEXT: xorl %edi, %edi |
| ; X86-BMI1-NEXT: .LBB45_2: |
| ; X86-BMI1-NEXT: movb $64, %cl |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %ebp |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: shrl %cl, %ebx |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB45_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebx, %ebp |
| ; X86-BMI1-NEXT: xorl %ebx, %ebx |
| ; X86-BMI1-NEXT: .LBB45_4: |
| ; X86-BMI1-NEXT: subl $8, %esp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: calll use64@PLT |
| ; X86-BMI1-NEXT: addl $16, %esp |
| ; X86-BMI1-NEXT: andl %ebp, %esi |
| ; X86-BMI1-NEXT: andl %ebx, %edi |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: addl $12, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: popl %ebp |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_c4_commutative: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %edi |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: subl $12, %esp |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI2-NEXT: shrxl %ecx, %eax, %edi |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB45_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edi, %esi |
| ; X86-BMI2-NEXT: xorl %edi, %edi |
| ; X86-BMI2-NEXT: .LBB45_2: |
| ; X86-BMI2-NEXT: movb $64, %al |
| ; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movl $-1, %ebp |
| ; X86-BMI2-NEXT: shrxl %eax, %ebp, %ebx |
| ; X86-BMI2-NEXT: testb $32, %al |
| ; X86-BMI2-NEXT: je .LBB45_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ebx, %ebp |
| ; X86-BMI2-NEXT: xorl %ebx, %ebx |
| ; X86-BMI2-NEXT: .LBB45_4: |
| ; X86-BMI2-NEXT: subl $8, %esp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: calll use64@PLT |
| ; X86-BMI2-NEXT: addl $16, %esp |
| ; X86-BMI2-NEXT: andl %ebp, %esi |
| ; X86-BMI2-NEXT: andl %ebx, %edi |
| ; X86-BMI2-NEXT: movl %esi, %eax |
| ; X86-BMI2-NEXT: movl %edi, %edx |
| ; X86-BMI2-NEXT: addl $12, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %edi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: popl %ebp |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_c4_commutative: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %r14 |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: pushq %rax |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: movq %rdi, %r14 |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %r14 |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movq $-1, %rbx |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rbx |
| ; X64-NOBMI-NEXT: movq %rbx, %rdi |
| ; X64-NOBMI-NEXT: callq use64@PLT |
| ; X64-NOBMI-NEXT: andq %r14, %rbx |
| ; X64-NOBMI-NEXT: movq %rbx, %rax |
| ; X64-NOBMI-NEXT: addq $8, %rsp |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: popq %r14 |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_c4_commutative: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %r14 |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: pushq %rax |
| ; X64-BMI1-NEXT: movq %rsi, %rcx |
| ; X64-BMI1-NEXT: movq %rdi, %r14 |
| ; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-BMI1-NEXT: shrq %cl, %r14 |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movq $-1, %rbx |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrq %cl, %rbx |
| ; X64-BMI1-NEXT: movq %rbx, %rdi |
| ; X64-BMI1-NEXT: callq use64@PLT |
| ; X64-BMI1-NEXT: andq %r14, %rbx |
| ; X64-BMI1-NEXT: movq %rbx, %rax |
| ; X64-BMI1-NEXT: addq $8, %rsp |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: popq %r14 |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_c4_commutative: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %r14 |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: pushq %rax |
| ; X64-BMI2-NEXT: movq %rdx, %rbx |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %r14 |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: negb %al |
| ; X64-BMI2-NEXT: movq $-1, %rcx |
| ; X64-BMI2-NEXT: shrxq %rax, %rcx, %rdi |
| ; X64-BMI2-NEXT: callq use64@PLT |
| ; X64-BMI2-NEXT: bzhiq %rbx, %r14, %rax |
| ; X64-BMI2-NEXT: addq $8, %rsp |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: popq %r14 |
| ; X64-BMI2-NEXT: retq |
| %shifted = lshr i64 %val, %numskipbits |
| %numhighbits = sub i64 64, %numlowbits |
| %mask = lshr i64 -1, %numhighbits |
| call void @use64(i64 %mask) |
| %masked = and i64 %shifted, %mask ; swapped order |
| ret i64 %masked |
| } |
| |
| ; Pattern c where BOTH the mask and the skip amount have extra uses |
| ; (two @use64 calls): the mask and %numskipbits must each stay live |
| ; across the calls, so neither can be folded into a single bextr. |
| ; NOTE: CHECK lines are autogenerated by update_llc_test_checks.py -- |
| ; regenerate rather than hand-edit. |
| define i64 @bextr64_c5_skipextrauses(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_c5_skipextrauses: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: subl $12, %esp |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: shrl %cl, %edi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB46_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: xorl %edi, %edi |
| ; X86-NOBMI-NEXT: .LBB46_2: |
| ; X86-NOBMI-NEXT: movb $64, %cl |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %ebx |
| ; X86-NOBMI-NEXT: movl $-1, %ebp |
| ; X86-NOBMI-NEXT: shrl %cl, %ebp |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB46_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %ebp, %ebx |
| ; X86-NOBMI-NEXT: xorl %ebp, %ebp |
| ; X86-NOBMI-NEXT: .LBB46_4: |
| ; X86-NOBMI-NEXT: subl $8, %esp |
| ; X86-NOBMI-NEXT: pushl %ebp |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: calll use64@PLT |
| ; X86-NOBMI-NEXT: addl $16, %esp |
| ; X86-NOBMI-NEXT: andl %ebx, %esi |
| ; X86-NOBMI-NEXT: andl %ebp, %edi |
| ; X86-NOBMI-NEXT: subl $8, %esp |
| ; X86-NOBMI-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-NOBMI-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-NOBMI-NEXT: calll use64@PLT |
| ; X86-NOBMI-NEXT: addl $16, %esp |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: movl %edi, %edx |
| ; X86-NOBMI-NEXT: addl $12, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: popl %ebp |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_c5_skipextrauses: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: subl $12, %esp |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI1-NEXT: movl %eax, %edi |
| ; X86-BMI1-NEXT: shrl %cl, %edi |
| ; X86-BMI1-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB46_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %edi, %esi |
| ; X86-BMI1-NEXT: xorl %edi, %edi |
| ; X86-BMI1-NEXT: .LBB46_2: |
| ; X86-BMI1-NEXT: movb $64, %cl |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %ebx |
| ; X86-BMI1-NEXT: movl $-1, %ebp |
| ; X86-BMI1-NEXT: shrl %cl, %ebp |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB46_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %ebp, %ebx |
| ; X86-BMI1-NEXT: xorl %ebp, %ebp |
| ; X86-BMI1-NEXT: .LBB46_4: |
| ; X86-BMI1-NEXT: subl $8, %esp |
| ; X86-BMI1-NEXT: pushl %ebp |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: calll use64@PLT |
| ; X86-BMI1-NEXT: addl $16, %esp |
| ; X86-BMI1-NEXT: andl %ebx, %esi |
| ; X86-BMI1-NEXT: andl %ebp, %edi |
| ; X86-BMI1-NEXT: subl $8, %esp |
| ; X86-BMI1-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-BMI1-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-BMI1-NEXT: calll use64@PLT |
| ; X86-BMI1-NEXT: addl $16, %esp |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: addl $12, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: popl %ebp |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_c5_skipextrauses: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %edi |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: subl $12, %esp |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI2-NEXT: shrxl %ecx, %eax, %edi |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB46_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %edi, %esi |
| ; X86-BMI2-NEXT: xorl %edi, %edi |
| ; X86-BMI2-NEXT: .LBB46_2: |
| ; X86-BMI2-NEXT: movb $64, %al |
| ; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movl $-1, %ebp |
| ; X86-BMI2-NEXT: shrxl %eax, %ebp, %ebx |
| ; X86-BMI2-NEXT: testb $32, %al |
| ; X86-BMI2-NEXT: je .LBB46_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %ebx, %ebp |
| ; X86-BMI2-NEXT: xorl %ebx, %ebx |
| ; X86-BMI2-NEXT: .LBB46_4: |
| ; X86-BMI2-NEXT: subl $8, %esp |
| ; X86-BMI2-NEXT: pushl %ebx |
| ; X86-BMI2-NEXT: pushl %ebp |
| ; X86-BMI2-NEXT: calll use64@PLT |
| ; X86-BMI2-NEXT: addl $16, %esp |
| ; X86-BMI2-NEXT: andl %ebp, %esi |
| ; X86-BMI2-NEXT: andl %ebx, %edi |
| ; X86-BMI2-NEXT: subl $8, %esp |
| ; X86-BMI2-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-BMI2-NEXT: pushl {{[0-9]+}}(%esp) |
| ; X86-BMI2-NEXT: calll use64@PLT |
| ; X86-BMI2-NEXT: addl $16, %esp |
| ; X86-BMI2-NEXT: movl %esi, %eax |
| ; X86-BMI2-NEXT: movl %edi, %edx |
| ; X86-BMI2-NEXT: addl $12, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %edi |
| ; X86-BMI2-NEXT: popl %ebx |
| ; X86-BMI2-NEXT: popl %ebp |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_c5_skipextrauses: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %r15 |
| ; X64-NOBMI-NEXT: pushq %r14 |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: movq %rsi, %r14 |
| ; X64-NOBMI-NEXT: movq %rdi, %r15 |
| ; X64-NOBMI-NEXT: movl %r14d, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %r15 |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movq $-1, %rbx |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rbx |
| ; X64-NOBMI-NEXT: movq %rbx, %rdi |
| ; X64-NOBMI-NEXT: callq use64@PLT |
| ; X64-NOBMI-NEXT: andq %r15, %rbx |
| ; X64-NOBMI-NEXT: movq %r14, %rdi |
| ; X64-NOBMI-NEXT: callq use64@PLT |
| ; X64-NOBMI-NEXT: movq %rbx, %rax |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: popq %r14 |
| ; X64-NOBMI-NEXT: popq %r15 |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_c5_skipextrauses: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %r15 |
| ; X64-BMI1-NEXT: pushq %r14 |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: movq %rsi, %r14 |
| ; X64-BMI1-NEXT: movq %rdi, %r15 |
| ; X64-BMI1-NEXT: movl %r14d, %ecx |
| ; X64-BMI1-NEXT: shrq %cl, %r15 |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movq $-1, %rbx |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrq %cl, %rbx |
| ; X64-BMI1-NEXT: movq %rbx, %rdi |
| ; X64-BMI1-NEXT: callq use64@PLT |
| ; X64-BMI1-NEXT: andq %r15, %rbx |
| ; X64-BMI1-NEXT: movq %r14, %rdi |
| ; X64-BMI1-NEXT: callq use64@PLT |
| ; X64-BMI1-NEXT: movq %rbx, %rax |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: popq %r14 |
| ; X64-BMI1-NEXT: popq %r15 |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_c5_skipextrauses: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %r15 |
| ; X64-BMI2-NEXT: pushq %r14 |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: movq %rdx, %rbx |
| ; X64-BMI2-NEXT: movq %rsi, %r14 |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %r15 |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: negb %al |
| ; X64-BMI2-NEXT: movq $-1, %rcx |
| ; X64-BMI2-NEXT: shrxq %rax, %rcx, %rdi |
| ; X64-BMI2-NEXT: callq use64@PLT |
| ; X64-BMI2-NEXT: bzhiq %rbx, %r15, %rbx |
| ; X64-BMI2-NEXT: movq %r14, %rdi |
| ; X64-BMI2-NEXT: callq use64@PLT |
| ; X64-BMI2-NEXT: movq %rbx, %rax |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: popq %r14 |
| ; X64-BMI2-NEXT: popq %r15 |
| ; X64-BMI2-NEXT: retq |
| %shifted = lshr i64 %val, %numskipbits |
| %numhighbits = sub i64 64, %numlowbits |
| %mask = lshr i64 -1, %numhighbits |
| call void @use64(i64 %mask) |
| %masked = and i64 %mask, %shifted |
| call void @use64(i64 %numskipbits) |
| ret i64 %masked |
| } |
| |
| ; 64-bit, but with 32-bit output |
| |
| ; Everything done in 64-bit, truncation happens last. |
| ; Pattern c entirely in i64, truncated to i32 only at the very end. |
| ; The mask has no extra use here, so BMI1 can fold to a single bextrq |
| ; and BMI2 to shrxq+bzhil. |
| ; NOTE: CHECK lines are autogenerated by update_llc_test_checks.py -- |
| ; regenerate rather than hand-edit. |
| define i32 @bextr64_32_c0(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_c0: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl %esi, %edx |
| ; X86-NOBMI-NEXT: shrl %cl, %edx |
| ; X86-NOBMI-NEXT: shrdl %cl, %esi, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB47_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %eax, %edx |
| ; X86-NOBMI-NEXT: .LBB47_2: |
| ; X86-NOBMI-NEXT: movb $64, %cl |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl $-1, %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB47_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl $-1, %eax |
| ; X86-NOBMI-NEXT: .LBB47_4: |
| ; X86-NOBMI-NEXT: andl %edx, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_c0: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %esi, %eax |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB47_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %eax, %edx |
| ; X86-BMI1-NEXT: .LBB47_2: |
| ; X86-BMI1-NEXT: movb $64, %cl |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl $-1, %eax |
| ; X86-BMI1-NEXT: shrl %cl, %eax |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB47_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl $-1, %eax |
| ; X86-BMI1-NEXT: .LBB47_4: |
| ; X86-BMI1-NEXT: andl %edx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_c0: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: shrdl %cl, %eax, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB47_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %eax, %edx |
| ; X86-BMI2-NEXT: .LBB47_2: |
| ; X86-BMI2-NEXT: movb $64, %cl |
| ; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl $-1, %eax |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB47_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: shrxl %ecx, %eax, %eax |
| ; X86-BMI2-NEXT: .LBB47_4: |
| ; X86-BMI2-NEXT: andl %edx, %eax |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_c0: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movq $-1, %rax |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rax |
| ; X64-NOBMI-NEXT: andl %edi, %eax |
| ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_c0: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_c0: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| %shifted = lshr i64 %val, %numskipbits |
| %numhighbits = sub i64 64, %numlowbits |
| %mask = lshr i64 -1, %numhighbits |
| %masked = and i64 %mask, %shifted |
| %res = trunc i64 %masked to i32 |
| ret i32 %res |
| } |
| |
| ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. |
| define i32 @bextr64_32_c1(i64 %val, i64 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_c1: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: shrdl %cl, %esi, %edx |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB48_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edx, %eax |
| ; X86-NOBMI-NEXT: .LBB48_2: |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_c1: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB48_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: .LBB48_2: |
| ; X86-BMI1-NEXT: shll $8, %eax |
| ; X86-BMI1-NEXT: bextrl %eax, %edx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_c1: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB48_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: .LBB48_2: |
| ; X86-BMI2-NEXT: bzhil %eax, %edx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_c1: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: movq %rdi, %rax |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rax |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_c1: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_c1: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern c variant: the i64 shift result is truncated to i32 first, then masked |
| ; with a 32-bit (-1 >> (32 - %numlowbits)) mask. |
| %shifted = lshr i64 %val, %numskipbits |
| %truncshifted = trunc i64 %shifted to i32 |
| %numhighbits = sub i32 32, %numlowbits |
| %mask = lshr i32 -1, %numhighbits |
| %masked = and i32 %mask, %truncshifted |
| ret i32 %masked |
| } |
| |
| ; Shifting happens in 64-bit. Mask is 32-bit, but extended to 64-bit. |
| ; Masking is 64-bit. Then truncation. |
| define i32 @bextr64_32_c2(i64 %val, i64 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_c2: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: shrdl %cl, %esi, %edx |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB49_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %edx, %eax |
| ; X86-NOBMI-NEXT: .LBB49_2: |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_c2: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl %edi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %edi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB49_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: .LBB49_2: |
| ; X86-BMI1-NEXT: shll $8, %eax |
| ; X86-BMI1-NEXT: bextrl %eax, %edx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_c2: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB49_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx |
| ; X86-BMI2-NEXT: .LBB49_2: |
| ; X86-BMI2-NEXT: bzhil %eax, %edx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_c2: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: movq %rdi, %rax |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rax |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_c2: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_c2: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern c variant: the mask is computed in 32 bits, zero-extended to i64, |
| ; applied to the 64-bit shift result, and only then truncated to i32. |
| %shifted = lshr i64 %val, %numskipbits |
| %numhighbits = sub i32 32, %numlowbits |
| %mask = lshr i32 -1, %numhighbits |
| %zextmask = zext i32 %mask to i64 |
| %masked = and i64 %zextmask, %shifted |
| %truncmasked = trunc i64 %masked to i32 |
| ret i32 %truncmasked |
| } |
| |
| ; Shifting happens in 64-bit. Mask is 32-bit, but calculated in 64-bit. |
| ; Masking is 64-bit. Then truncation. |
| define i32 @bextr64_32_c3(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_32_c3: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl %esi, %edx |
| ; X86-NOBMI-NEXT: shrl %cl, %edx |
| ; X86-NOBMI-NEXT: shrdl %cl, %esi, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB50_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %eax, %edx |
| ; X86-NOBMI-NEXT: .LBB50_2: |
| ; X86-NOBMI-NEXT: movb $64, %cl |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: xorl %eax, %eax |
| ; X86-NOBMI-NEXT: movl $-1, %esi |
| ; X86-NOBMI-NEXT: shrdl %cl, %eax, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB50_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: .LBB50_4: |
| ; X86-NOBMI-NEXT: andl %edx, %eax |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_32_c3: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl %esi, %edx |
| ; X86-BMI1-NEXT: shrl %cl, %edx |
| ; X86-BMI1-NEXT: shrdl %cl, %esi, %eax |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB50_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %eax, %edx |
| ; X86-BMI1-NEXT: .LBB50_2: |
| ; X86-BMI1-NEXT: movb $64, %cl |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: xorl %eax, %eax |
| ; X86-BMI1-NEXT: movl $-1, %esi |
| ; X86-BMI1-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB50_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: .LBB50_4: |
| ; X86-BMI1-NEXT: andl %edx, %eax |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_32_c3: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: shrdl %cl, %eax, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB50_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: shrxl %ecx, %eax, %edx |
| ; X86-BMI2-NEXT: .LBB50_2: |
| ; X86-BMI2-NEXT: movb $64, %cl |
| ; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: xorl %eax, %eax |
| ; X86-BMI2-NEXT: movl $-1, %esi |
| ; X86-BMI2-NEXT: shrdl %cl, %eax, %esi |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: jne .LBB50_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %esi, %eax |
| ; X86-BMI2-NEXT: .LBB50_4: |
| ; X86-BMI2-NEXT: andl %edx, %eax |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_32_c3: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rdi |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shrq %cl, %rax |
| ; X64-NOBMI-NEXT: andl %edi, %eax |
| ; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_32_c3: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: movq %rsi, %rcx |
| ; X64-BMI1-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-BMI1-NEXT: shrq %cl, %rdi |
| ; X64-BMI1-NEXT: negb %dl |
| ; X64-BMI1-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF |
| ; X64-BMI1-NEXT: movl %edx, %ecx |
| ; X64-BMI1-NEXT: shrq %cl, %rax |
| ; X64-BMI1-NEXT: andl %edi, %eax |
| ; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_32_c3: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rcx |
| ; X64-BMI2-NEXT: negb %dl |
| ; X64-BMI2-NEXT: movl $4294967295, %eax # imm = 0xFFFFFFFF |
| ; X64-BMI2-NEXT: shrxq %rdx, %rax, %rax |
| ; X64-BMI2-NEXT: andl %ecx, %eax |
| ; X64-BMI2-NEXT: # kill: def $eax killed $eax killed $rax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern c variant: mask base is the i64 constant 4294967295 (0xFFFFFFFF, low |
| ; 32 bits set) rather than -1, shifted right by (64 - %numlowbits); masking is |
| ; done in 64 bits, then the result is truncated to i32. |
| %shifted = lshr i64 %val, %numskipbits |
| %numhighbits = sub i64 64, %numlowbits |
| %mask = lshr i64 4294967295, %numhighbits |
| %masked = and i64 %mask, %shifted |
| %truncmasked = trunc i64 %masked to i32 |
| ret i32 %truncmasked |
| } |
| |
| ; ---------------------------------------------------------------------------- ; |
| ; Pattern d. 32-bit. |
| ; ---------------------------------------------------------------------------- ; |
| |
| define i32 @bextr32_d0(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_d0: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_d0: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: shll $8, %eax |
| ; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI1-NEXT: orl %eax, %ecx |
| ; X86-BMI1-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_d0: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: shrxl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_d0: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movl %edi, %eax |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_d0: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrl %eax, %edi, %eax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_d0: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxl %esi, %edi, %eax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern d, 32-bit: (x >> start) << (32 - nbits) >> (32 - nbits) — the shl/lshr |
| ; pair clears the high bits instead of an explicit AND mask. |
| %shifted = lshr i32 %val, %numskipbits |
| %numhighbits = sub i32 32, %numlowbits |
| %highbitscleared = shl i32 %shifted, %numhighbits |
| %masked = lshr i32 %highbitscleared, %numhighbits |
| ret i32 %masked |
| } |
| |
| define i32 @bextr32_d1_indexzext(i32 %val, i8 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_d1_indexzext: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_d1_indexzext: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI1-NEXT: shll $8, %eax |
| ; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI1-NEXT: orl %eax, %ecx |
| ; X86-BMI1-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_d1_indexzext: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: shrxl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_d1_indexzext: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movl %edi, %eax |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_d1_indexzext: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrl %eax, %edi, %eax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_d1_indexzext: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxl %esi, %edi, %eax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern d with i8 bit counts: both shift amounts arrive as i8 and are |
| ; zero-extended to i32 before the shifts, exercising the zext-of-index form. |
| %skip = zext i8 %numskipbits to i32 |
| %shifted = lshr i32 %val, %skip |
| %numhighbits = sub i8 32, %numlowbits |
| %sh_prom = zext i8 %numhighbits to i32 |
| %highbitscleared = shl i32 %shifted, %sh_prom |
| %masked = lshr i32 %highbitscleared, %sh_prom |
| ret i32 %masked |
| } |
| |
| define i32 @bextr32_d2_load(i32* %w, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_d2_load: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl (%eax), %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_d2_load: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: shll $8, %ecx |
| ; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT: orl %ecx, %edx |
| ; X86-BMI1-NEXT: bextrl %edx, (%eax), %eax |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_d2_load: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %dl |
| ; X86-BMI2-NEXT: shrxl %edx, (%ecx), %ecx |
| ; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_d2_load: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movl (%rdi), %eax |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_d2_load: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrl %eax, (%rdi), %eax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_d2_load: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxl %esi, (%rdi), %eax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern d where the value is loaded from memory; checks that the load can be |
| ; folded into the bextrl/shrxl memory operand. |
| %val = load i32, i32* %w |
| %shifted = lshr i32 %val, %numskipbits |
| %numhighbits = sub i32 32, %numlowbits |
| %highbitscleared = shl i32 %shifted, %numhighbits |
| %masked = lshr i32 %highbitscleared, %numhighbits |
| ret i32 %masked |
| } |
| |
| define i32 @bextr32_d3_load_indexzext(i32* %w, i8 %numskipbits, i8 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_d3_load_indexzext: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl (%eax), %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shll %cl, %eax |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_d3_load_indexzext: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: shll $8, %ecx |
| ; X86-BMI1-NEXT: movzbl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT: orl %ecx, %edx |
| ; X86-BMI1-NEXT: bextrl %edx, (%eax), %eax |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_d3_load_indexzext: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %dl |
| ; X86-BMI2-NEXT: shrxl %edx, (%ecx), %ecx |
| ; X86-BMI2-NEXT: bzhil %eax, %ecx, %eax |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_d3_load_indexzext: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: movl (%rdi), %eax |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %eax |
| ; X64-NOBMI-NEXT: shrl %cl, %eax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_d3_load_indexzext: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrl %eax, (%rdi), %eax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_d3_load_indexzext: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxl %esi, (%rdi), %eax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %eax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern d combining the two previous variants: value loaded from memory AND |
| ; i8 bit counts zero-extended to i32. |
| %val = load i32, i32* %w |
| %skip = zext i8 %numskipbits to i32 |
| %shifted = lshr i32 %val, %skip |
| %numhighbits = sub i8 32, %numlowbits |
| %sh_prom = zext i8 %numhighbits to i32 |
| %highbitscleared = shl i32 %shifted, %sh_prom |
| %masked = lshr i32 %highbitscleared, %sh_prom |
| ret i32 %masked |
| } |
| |
| define i32 @bextr32_d5_skipextrauses(i32 %val, i32 %numskipbits, i32 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr32_d5_skipextrauses: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: subl $8, %esp |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl %eax, %ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %esi |
| ; X86-NOBMI-NEXT: xorl %ecx, %ecx |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shll %cl, %esi |
| ; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx |
| ; X86-NOBMI-NEXT: shrl %cl, %esi |
| ; X86-NOBMI-NEXT: movl %eax, (%esp) |
| ; X86-NOBMI-NEXT: calll use32@PLT |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: addl $8, %esp |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr32_d5_skipextrauses: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: subl $8, %esp |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI1-NEXT: shll $8, %ecx |
| ; X86-BMI1-NEXT: movzbl %al, %edx |
| ; X86-BMI1-NEXT: orl %ecx, %edx |
| ; X86-BMI1-NEXT: bextrl %edx, {{[0-9]+}}(%esp), %esi |
| ; X86-BMI1-NEXT: movl %eax, (%esp) |
| ; X86-BMI1-NEXT: calll use32@PLT |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: addl $8, %esp |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr32_d5_skipextrauses: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: subl $8, %esp |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMI2-NEXT: shrxl %ecx, {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: bzhil %eax, %edx, %esi |
| ; X86-BMI2-NEXT: movl %ecx, (%esp) |
| ; X86-BMI2-NEXT: calll use32@PLT |
| ; X86-BMI2-NEXT: movl %esi, %eax |
| ; X86-BMI2-NEXT: addl $8, %esp |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr32_d5_skipextrauses: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: pushq %rbx |
| ; X64-NOBMI-NEXT: movl %edi, %ebx |
| ; X64-NOBMI-NEXT: movl %esi, %ecx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebx |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shll %cl, %ebx |
| ; X64-NOBMI-NEXT: shrl %cl, %ebx |
| ; X64-NOBMI-NEXT: movl %esi, %edi |
| ; X64-NOBMI-NEXT: callq use32@PLT |
| ; X64-NOBMI-NEXT: movl %ebx, %eax |
| ; X64-NOBMI-NEXT: popq %rbx |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr32_d5_skipextrauses: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: pushq %rbx |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrl %eax, %edi, %ebx |
| ; X64-BMI1-NEXT: movl %esi, %edi |
| ; X64-BMI1-NEXT: callq use32@PLT |
| ; X64-BMI1-NEXT: movl %ebx, %eax |
| ; X64-BMI1-NEXT: popq %rbx |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr32_d5_skipextrauses: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: pushq %rbx |
| ; X64-BMI2-NEXT: shrxl %esi, %edi, %eax |
| ; X64-BMI2-NEXT: bzhil %edx, %eax, %ebx |
| ; X64-BMI2-NEXT: movl %esi, %edi |
| ; X64-BMI2-NEXT: callq use32@PLT |
| ; X64-BMI2-NEXT: movl %ebx, %eax |
| ; X64-BMI2-NEXT: popq %rbx |
| ; X64-BMI2-NEXT: retq |
| ; Pattern d where %numskipbits has an extra use (passed to @use32), so the |
| ; combine must still fire with a multi-use shift amount. |
| %shifted = lshr i32 %val, %numskipbits |
| %numhighbits = sub i32 32, %numlowbits |
| %highbitscleared = shl i32 %shifted, %numhighbits |
| %masked = lshr i32 %highbitscleared, %numhighbits |
| call void @use32(i32 %numskipbits) |
| ret i32 %masked |
| } |
| |
| ; 64-bit. |
| |
| define i64 @bextr64_d0(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind { |
| ; X86-NOBMI-LABEL: bextr64_d0: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: pushl %ebx |
| ; X86-NOBMI-NEXT: pushl %edi |
| ; X86-NOBMI-NEXT: pushl %esi |
| ; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-NOBMI-NEXT: movl %edx, %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: shrdl %cl, %edx, %edi |
| ; X86-NOBMI-NEXT: xorl %esi, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: je .LBB56_2 |
| ; X86-NOBMI-NEXT: # %bb.1: |
| ; X86-NOBMI-NEXT: movl %eax, %edi |
| ; X86-NOBMI-NEXT: xorl %eax, %eax |
| ; X86-NOBMI-NEXT: .LBB56_2: |
| ; X86-NOBMI-NEXT: movb $64, %cl |
| ; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-NOBMI-NEXT: shldl %cl, %edi, %eax |
| ; X86-NOBMI-NEXT: shll %cl, %edi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: movl %edi, %ebx |
| ; X86-NOBMI-NEXT: jne .LBB56_4 |
| ; X86-NOBMI-NEXT: # %bb.3: |
| ; X86-NOBMI-NEXT: movl %eax, %ebx |
| ; X86-NOBMI-NEXT: .LBB56_4: |
| ; X86-NOBMI-NEXT: movl %ebx, %eax |
| ; X86-NOBMI-NEXT: shrl %cl, %eax |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: movl $0, %edx |
| ; X86-NOBMI-NEXT: jne .LBB56_6 |
| ; X86-NOBMI-NEXT: # %bb.5: |
| ; X86-NOBMI-NEXT: movl %edi, %esi |
| ; X86-NOBMI-NEXT: movl %eax, %edx |
| ; X86-NOBMI-NEXT: .LBB56_6: |
| ; X86-NOBMI-NEXT: shrdl %cl, %ebx, %esi |
| ; X86-NOBMI-NEXT: testb $32, %cl |
| ; X86-NOBMI-NEXT: jne .LBB56_8 |
| ; X86-NOBMI-NEXT: # %bb.7: |
| ; X86-NOBMI-NEXT: movl %esi, %eax |
| ; X86-NOBMI-NEXT: .LBB56_8: |
| ; X86-NOBMI-NEXT: popl %esi |
| ; X86-NOBMI-NEXT: popl %edi |
| ; X86-NOBMI-NEXT: popl %ebx |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMI1-LABEL: bextr64_d0: |
| ; X86-BMI1: # %bb.0: |
| ; X86-BMI1-NEXT: pushl %ebx |
| ; X86-BMI1-NEXT: pushl %edi |
| ; X86-BMI1-NEXT: pushl %esi |
| ; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi |
| ; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI1-NEXT: movl %edx, %eax |
| ; X86-BMI1-NEXT: shrl %cl, %eax |
| ; X86-BMI1-NEXT: shrdl %cl, %edx, %edi |
| ; X86-BMI1-NEXT: xorl %esi, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: je .LBB56_2 |
| ; X86-BMI1-NEXT: # %bb.1: |
| ; X86-BMI1-NEXT: movl %eax, %edi |
| ; X86-BMI1-NEXT: xorl %eax, %eax |
| ; X86-BMI1-NEXT: .LBB56_2: |
| ; X86-BMI1-NEXT: movb $64, %cl |
| ; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI1-NEXT: shldl %cl, %edi, %eax |
| ; X86-BMI1-NEXT: shll %cl, %edi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: movl %edi, %ebx |
| ; X86-BMI1-NEXT: jne .LBB56_4 |
| ; X86-BMI1-NEXT: # %bb.3: |
| ; X86-BMI1-NEXT: movl %eax, %ebx |
| ; X86-BMI1-NEXT: .LBB56_4: |
| ; X86-BMI1-NEXT: movl %ebx, %eax |
| ; X86-BMI1-NEXT: shrl %cl, %eax |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: movl $0, %edx |
| ; X86-BMI1-NEXT: jne .LBB56_6 |
| ; X86-BMI1-NEXT: # %bb.5: |
| ; X86-BMI1-NEXT: movl %edi, %esi |
| ; X86-BMI1-NEXT: movl %eax, %edx |
| ; X86-BMI1-NEXT: .LBB56_6: |
| ; X86-BMI1-NEXT: shrdl %cl, %ebx, %esi |
| ; X86-BMI1-NEXT: testb $32, %cl |
| ; X86-BMI1-NEXT: jne .LBB56_8 |
| ; X86-BMI1-NEXT: # %bb.7: |
| ; X86-BMI1-NEXT: movl %esi, %eax |
| ; X86-BMI1-NEXT: .LBB56_8: |
| ; X86-BMI1-NEXT: popl %esi |
| ; X86-BMI1-NEXT: popl %edi |
| ; X86-BMI1-NEXT: popl %ebx |
| ; X86-BMI1-NEXT: retl |
| ; |
| ; X86-BMI2-LABEL: bextr64_d0: |
| ; X86-BMI2: # %bb.0: |
| ; X86-BMI2-NEXT: pushl %edi |
| ; X86-BMI2-NEXT: pushl %esi |
| ; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx |
| ; X86-BMI2-NEXT: shrdl %cl, %edx, %eax |
| ; X86-BMI2-NEXT: shrxl %ecx, %edx, %esi |
| ; X86-BMI2-NEXT: xorl %edx, %edx |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB56_2 |
| ; X86-BMI2-NEXT: # %bb.1: |
| ; X86-BMI2-NEXT: movl %esi, %eax |
| ; X86-BMI2-NEXT: xorl %esi, %esi |
| ; X86-BMI2-NEXT: .LBB56_2: |
| ; X86-BMI2-NEXT: movb $64, %cl |
| ; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl |
| ; X86-BMI2-NEXT: shldl %cl, %eax, %esi |
| ; X86-BMI2-NEXT: shlxl %ecx, %eax, %edi |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: je .LBB56_4 |
| ; X86-BMI2-NEXT: # %bb.3: |
| ; X86-BMI2-NEXT: movl %edi, %esi |
| ; X86-BMI2-NEXT: movl $0, %edi |
| ; X86-BMI2-NEXT: .LBB56_4: |
| ; X86-BMI2-NEXT: shrxl %ecx, %esi, %eax |
| ; X86-BMI2-NEXT: jne .LBB56_6 |
| ; X86-BMI2-NEXT: # %bb.5: |
| ; X86-BMI2-NEXT: movl %eax, %edx |
| ; X86-BMI2-NEXT: .LBB56_6: |
| ; X86-BMI2-NEXT: shrdl %cl, %esi, %edi |
| ; X86-BMI2-NEXT: testb $32, %cl |
| ; X86-BMI2-NEXT: jne .LBB56_8 |
| ; X86-BMI2-NEXT: # %bb.7: |
| ; X86-BMI2-NEXT: movl %edi, %eax |
| ; X86-BMI2-NEXT: .LBB56_8: |
| ; X86-BMI2-NEXT: popl %esi |
| ; X86-BMI2-NEXT: popl %edi |
| ; X86-BMI2-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: bextr64_d0: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rsi, %rcx |
| ; X64-NOBMI-NEXT: movq %rdi, %rax |
| ; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx |
| ; X64-NOBMI-NEXT: shrq %cl, %rax |
| ; X64-NOBMI-NEXT: negb %dl |
| ; X64-NOBMI-NEXT: movl %edx, %ecx |
| ; X64-NOBMI-NEXT: shlq %cl, %rax |
| ; X64-NOBMI-NEXT: shrq %cl, %rax |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMI1-LABEL: bextr64_d0: |
| ; X64-BMI1: # %bb.0: |
| ; X64-BMI1-NEXT: shll $8, %edx |
| ; X64-BMI1-NEXT: movzbl %sil, %eax |
| ; X64-BMI1-NEXT: orl %edx, %eax |
| ; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMI1-NEXT: retq |
| ; |
| ; X64-BMI2-LABEL: bextr64_d0: |
| ; X64-BMI2: # %bb.0: |
| ; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax |
| ; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax |
| ; X64-BMI2-NEXT: retq |
| ; Pattern d, 64-bit: (x >> start) << (64 - nbits) >> (64 - nbits); on 32-bit |
| ; x86 each i64 shift expands into a shld/shrd pair plus a testb $32 select. |
| %shifted = lshr i64 %val, %numskipbits |
| %numhighbits = sub i64 64, %numlowbits |
| %highbitscleared = shl i64 %shifted, %numhighbits |
| %masked = lshr i64 %highbitscleared, %numhighbits |
| ret i64 %masked |
| } |
| |
; Pattern d) on i64: ((val >> skip) << (64 - low)) >> (64 - low), with the
; skip/low bit counts passed as i8 and zero-extended to i64 (see IR below).
; CHECK lines are autogenerated by update_llc_test_checks.py -- do not edit.
define i64 @bextr64_d1_indexzext(i64 %val, i8 %numskipbits, i8 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_d1_indexzext:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT: movl %edx, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: shrdl %cl, %edx, %edi
; X86-NOBMI-NEXT: xorl %esi, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB57_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edi
; X86-NOBMI-NEXT: xorl %eax, %eax
; X86-NOBMI-NEXT: .LBB57_2:
; X86-NOBMI-NEXT: movb $64, %cl
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: shldl %cl, %edi, %eax
; X86-NOBMI-NEXT: shll %cl, %edi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %edi, %ebx
; X86-NOBMI-NEXT: jne .LBB57_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %eax, %ebx
; X86-NOBMI-NEXT: .LBB57_4:
; X86-NOBMI-NEXT: movl %ebx, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edx
; X86-NOBMI-NEXT: jne .LBB57_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: .LBB57_6:
; X86-NOBMI-NEXT: shrdl %cl, %ebx, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: jne .LBB57_8
; X86-NOBMI-NEXT: # %bb.7:
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: .LBB57_8:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_d1_indexzext:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI1-NEXT: movl %edx, %eax
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: shrdl %cl, %edx, %edi
; X86-BMI1-NEXT: xorl %esi, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB57_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %eax, %edi
; X86-BMI1-NEXT: xorl %eax, %eax
; X86-BMI1-NEXT: .LBB57_2:
; X86-BMI1-NEXT: movb $64, %cl
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: shldl %cl, %edi, %eax
; X86-BMI1-NEXT: shll %cl, %edi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl %edi, %ebx
; X86-BMI1-NEXT: jne .LBB57_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %eax, %ebx
; X86-BMI1-NEXT: .LBB57_4:
; X86-BMI1-NEXT: movl %ebx, %eax
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl $0, %edx
; X86-BMI1-NEXT: jne .LBB57_6
; X86-BMI1-NEXT: # %bb.5:
; X86-BMI1-NEXT: movl %edi, %esi
; X86-BMI1-NEXT: movl %eax, %edx
; X86-BMI1-NEXT: .LBB57_6:
; X86-BMI1-NEXT: shrdl %cl, %ebx, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: jne .LBB57_8
; X86-BMI1-NEXT: # %bb.7:
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: .LBB57_8:
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: popl %ebx
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_d1_indexzext:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT: shrdl %cl, %edx, %eax
; X86-BMI2-NEXT: shrxl %ecx, %edx, %esi
; X86-BMI2-NEXT: xorl %edx, %edx
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB57_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %esi, %eax
; X86-BMI2-NEXT: xorl %esi, %esi
; X86-BMI2-NEXT: .LBB57_2:
; X86-BMI2-NEXT: movb $64, %cl
; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shldl %cl, %eax, %esi
; X86-BMI2-NEXT: shlxl %ecx, %eax, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB57_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: movl $0, %edi
; X86-BMI2-NEXT: .LBB57_4:
; X86-BMI2-NEXT: shrxl %ecx, %esi, %eax
; X86-BMI2-NEXT: jne .LBB57_6
; X86-BMI2-NEXT: # %bb.5:
; X86-BMI2-NEXT: movl %eax, %edx
; X86-BMI2-NEXT: .LBB57_6:
; X86-BMI2-NEXT: shrdl %cl, %esi, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: jne .LBB57_8
; X86-BMI2-NEXT: # %bb.7:
; X86-BMI2-NEXT: movl %edi, %eax
; X86-BMI2-NEXT: .LBB57_8:
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_d1_indexzext:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: movq %rdi, %rax
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shlq %cl, %rax
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_d1_indexzext:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_d1_indexzext:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: # kill: def $edx killed $edx def $rdx
; X64-BMI2-NEXT: # kill: def $esi killed $esi def $rsi
; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT: retq
  %skip = zext i8 %numskipbits to i64
  %shifted = lshr i64 %val, %skip
  %numhighbits = sub i8 64, %numlowbits
  %sh_prom = zext i8 %numhighbits to i64
  %highbitscleared = shl i64 %shifted, %sh_prom
  %masked = lshr i64 %highbitscleared, %sh_prom
  ret i64 %masked
}
| |
; Pattern d) on i64 where the value is loaded from memory and both bit
; counts are full i64 operands: ((*w >> skip) << (64 - low)) >> (64 - low).
; CHECK lines are autogenerated by update_llc_test_checks.py -- do not edit.
define i64 @bextr64_d2_load(i64* %w, i64 %numskipbits, i64 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_d2_load:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl (%eax), %edi
; X86-NOBMI-NEXT: movl 4(%eax), %edx
; X86-NOBMI-NEXT: movl %edx, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: shrdl %cl, %edx, %edi
; X86-NOBMI-NEXT: xorl %esi, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB58_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edi
; X86-NOBMI-NEXT: xorl %eax, %eax
; X86-NOBMI-NEXT: .LBB58_2:
; X86-NOBMI-NEXT: movb $64, %cl
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: shldl %cl, %edi, %eax
; X86-NOBMI-NEXT: shll %cl, %edi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %edi, %ebx
; X86-NOBMI-NEXT: jne .LBB58_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %eax, %ebx
; X86-NOBMI-NEXT: .LBB58_4:
; X86-NOBMI-NEXT: movl %ebx, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edx
; X86-NOBMI-NEXT: jne .LBB58_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: .LBB58_6:
; X86-NOBMI-NEXT: shrdl %cl, %ebx, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: jne .LBB58_8
; X86-NOBMI-NEXT: # %bb.7:
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: .LBB58_8:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_d2_load:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl (%eax), %edi
; X86-BMI1-NEXT: movl 4(%eax), %edx
; X86-BMI1-NEXT: movl %edx, %eax
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: shrdl %cl, %edx, %edi
; X86-BMI1-NEXT: xorl %esi, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB58_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %eax, %edi
; X86-BMI1-NEXT: xorl %eax, %eax
; X86-BMI1-NEXT: .LBB58_2:
; X86-BMI1-NEXT: movb $64, %cl
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: shldl %cl, %edi, %eax
; X86-BMI1-NEXT: shll %cl, %edi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl %edi, %ebx
; X86-BMI1-NEXT: jne .LBB58_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %eax, %ebx
; X86-BMI1-NEXT: .LBB58_4:
; X86-BMI1-NEXT: movl %ebx, %eax
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl $0, %edx
; X86-BMI1-NEXT: jne .LBB58_6
; X86-BMI1-NEXT: # %bb.5:
; X86-BMI1-NEXT: movl %edi, %esi
; X86-BMI1-NEXT: movl %eax, %edx
; X86-BMI1-NEXT: .LBB58_6:
; X86-BMI1-NEXT: shrdl %cl, %ebx, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: jne .LBB58_8
; X86-BMI1-NEXT: # %bb.7:
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: .LBB58_8:
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: popl %ebx
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_d2_load:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT: movl (%edx), %eax
; X86-BMI2-NEXT: movl 4(%edx), %edx
; X86-BMI2-NEXT: shrxl %ecx, %edx, %esi
; X86-BMI2-NEXT: shrdl %cl, %edx, %eax
; X86-BMI2-NEXT: xorl %edx, %edx
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB58_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %esi, %eax
; X86-BMI2-NEXT: xorl %esi, %esi
; X86-BMI2-NEXT: .LBB58_2:
; X86-BMI2-NEXT: movb $64, %cl
; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shldl %cl, %eax, %esi
; X86-BMI2-NEXT: shlxl %ecx, %eax, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB58_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: movl $0, %edi
; X86-BMI2-NEXT: .LBB58_4:
; X86-BMI2-NEXT: shrxl %ecx, %esi, %eax
; X86-BMI2-NEXT: jne .LBB58_6
; X86-BMI2-NEXT: # %bb.5:
; X86-BMI2-NEXT: movl %eax, %edx
; X86-BMI2-NEXT: .LBB58_6:
; X86-BMI2-NEXT: shrdl %cl, %esi, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: jne .LBB58_8
; X86-BMI2-NEXT: # %bb.7:
; X86-BMI2-NEXT: movl %edi, %eax
; X86-BMI2-NEXT: .LBB58_8:
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_d2_load:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movq %rsi, %rcx
; X64-NOBMI-NEXT: movq (%rdi), %rax
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shlq %cl, %rax
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_d2_load:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrq %rax, (%rdi), %rax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_d2_load:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxq %rsi, (%rdi), %rax
; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT: retq
  %val = load i64, i64* %w
  %shifted = lshr i64 %val, %numskipbits
  %numhighbits = sub i64 64, %numlowbits
  %highbitscleared = shl i64 %shifted, %numhighbits
  %masked = lshr i64 %highbitscleared, %numhighbits
  ret i64 %masked
}
| |
; Pattern d) on i64: value loaded from memory, with the skip/low bit counts
; passed as i8 and zero-extended to i64 before shifting.
; CHECK lines are autogenerated by update_llc_test_checks.py -- do not edit.
define i64 @bextr64_d3_load_indexzext(i64* %w, i8 %numskipbits, i8 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_d3_load_indexzext:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl (%eax), %edi
; X86-NOBMI-NEXT: movl 4(%eax), %edx
; X86-NOBMI-NEXT: movl %edx, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: shrdl %cl, %edx, %edi
; X86-NOBMI-NEXT: xorl %esi, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB59_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edi
; X86-NOBMI-NEXT: xorl %eax, %eax
; X86-NOBMI-NEXT: .LBB59_2:
; X86-NOBMI-NEXT: movb $64, %cl
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: shldl %cl, %edi, %eax
; X86-NOBMI-NEXT: shll %cl, %edi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %edi, %ebx
; X86-NOBMI-NEXT: jne .LBB59_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %eax, %ebx
; X86-NOBMI-NEXT: .LBB59_4:
; X86-NOBMI-NEXT: movl %ebx, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edx
; X86-NOBMI-NEXT: jne .LBB59_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %edi, %esi
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: .LBB59_6:
; X86-NOBMI-NEXT: shrdl %cl, %ebx, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: jne .LBB59_8
; X86-NOBMI-NEXT: # %bb.7:
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: .LBB59_8:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_d3_load_indexzext:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl (%eax), %edi
; X86-BMI1-NEXT: movl 4(%eax), %edx
; X86-BMI1-NEXT: movl %edx, %eax
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: shrdl %cl, %edx, %edi
; X86-BMI1-NEXT: xorl %esi, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB59_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %eax, %edi
; X86-BMI1-NEXT: xorl %eax, %eax
; X86-BMI1-NEXT: .LBB59_2:
; X86-BMI1-NEXT: movb $64, %cl
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: shldl %cl, %edi, %eax
; X86-BMI1-NEXT: shll %cl, %edi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl %edi, %ebx
; X86-BMI1-NEXT: jne .LBB59_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %eax, %ebx
; X86-BMI1-NEXT: .LBB59_4:
; X86-BMI1-NEXT: movl %ebx, %eax
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl $0, %edx
; X86-BMI1-NEXT: jne .LBB59_6
; X86-BMI1-NEXT: # %bb.5:
; X86-BMI1-NEXT: movl %edi, %esi
; X86-BMI1-NEXT: movl %eax, %edx
; X86-BMI1-NEXT: .LBB59_6:
; X86-BMI1-NEXT: shrdl %cl, %ebx, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: jne .LBB59_8
; X86-BMI1-NEXT: # %bb.7:
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: .LBB59_8:
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: popl %ebx
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_d3_load_indexzext:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT: movl (%edx), %eax
; X86-BMI2-NEXT: movl 4(%edx), %edx
; X86-BMI2-NEXT: shrxl %ecx, %edx, %esi
; X86-BMI2-NEXT: shrdl %cl, %edx, %eax
; X86-BMI2-NEXT: xorl %edx, %edx
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB59_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %esi, %eax
; X86-BMI2-NEXT: xorl %esi, %esi
; X86-BMI2-NEXT: .LBB59_2:
; X86-BMI2-NEXT: movb $64, %cl
; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shldl %cl, %eax, %esi
; X86-BMI2-NEXT: shlxl %ecx, %eax, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB59_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: movl $0, %edi
; X86-BMI2-NEXT: .LBB59_4:
; X86-BMI2-NEXT: shrxl %ecx, %esi, %eax
; X86-BMI2-NEXT: jne .LBB59_6
; X86-BMI2-NEXT: # %bb.5:
; X86-BMI2-NEXT: movl %eax, %edx
; X86-BMI2-NEXT: .LBB59_6:
; X86-BMI2-NEXT: shrdl %cl, %esi, %edi
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: jne .LBB59_8
; X86-BMI2-NEXT: # %bb.7:
; X86-BMI2-NEXT: movl %edi, %eax
; X86-BMI2-NEXT: .LBB59_8:
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_d3_load_indexzext:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: movq (%rdi), %rax
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shlq %cl, %rax
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_d3_load_indexzext:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrq %rax, (%rdi), %rax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_d3_load_indexzext:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: # kill: def $edx killed $edx def $rdx
; X64-BMI2-NEXT: # kill: def $esi killed $esi def $rsi
; X64-BMI2-NEXT: shrxq %rsi, (%rdi), %rax
; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT: retq
  %val = load i64, i64* %w
  %skip = zext i8 %numskipbits to i64
  %shifted = lshr i64 %val, %skip
  %numhighbits = sub i8 64, %numlowbits
  %sh_prom = zext i8 %numhighbits to i64
  %highbitscleared = shl i64 %shifted, %sh_prom
  %masked = lshr i64 %highbitscleared, %sh_prom
  ret i64 %masked
}
| |
; Pattern d) on i64 where %numskipbits has an extra use (passed to @use64
; after the extraction), so it must stay live across the bextr/bzhi lowering.
; CHECK lines are autogenerated by update_llc_test_checks.py -- do not edit.
define i64 @bextr64_d5_skipextrauses(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_d5_skipextrauses:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %ebp
; X86-NOBMI-NEXT: pushl %ebx
; X86-NOBMI-NEXT: pushl %edi
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: subl $12, %esp
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NOBMI-NEXT: movl %edx, %esi
; X86-NOBMI-NEXT: movl %eax, %ecx
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: shrdl %cl, %edx, %ebx
; X86-NOBMI-NEXT: xorl %edx, %edx
; X86-NOBMI-NEXT: testb $32, %al
; X86-NOBMI-NEXT: je .LBB60_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %esi, %ebx
; X86-NOBMI-NEXT: xorl %esi, %esi
; X86-NOBMI-NEXT: .LBB60_2:
; X86-NOBMI-NEXT: movb $64, %cl
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: shldl %cl, %ebx, %esi
; X86-NOBMI-NEXT: shll %cl, %ebx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl %ebx, %ebp
; X86-NOBMI-NEXT: jne .LBB60_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %esi, %ebp
; X86-NOBMI-NEXT: .LBB60_4:
; X86-NOBMI-NEXT: movl %ebp, %esi
; X86-NOBMI-NEXT: shrl %cl, %esi
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl $0, %edi
; X86-NOBMI-NEXT: jne .LBB60_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %ebx, %edx
; X86-NOBMI-NEXT: movl %esi, %edi
; X86-NOBMI-NEXT: .LBB60_6:
; X86-NOBMI-NEXT: shrdl %cl, %ebp, %edx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NOBMI-NEXT: jne .LBB60_8
; X86-NOBMI-NEXT: # %bb.7:
; X86-NOBMI-NEXT: movl %edx, %esi
; X86-NOBMI-NEXT: .LBB60_8:
; X86-NOBMI-NEXT: subl $8, %esp
; X86-NOBMI-NEXT: pushl %ecx
; X86-NOBMI-NEXT: pushl %eax
; X86-NOBMI-NEXT: calll use64@PLT
; X86-NOBMI-NEXT: addl $16, %esp
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: movl %edi, %edx
; X86-NOBMI-NEXT: addl $12, %esp
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: popl %edi
; X86-NOBMI-NEXT: popl %ebx
; X86-NOBMI-NEXT: popl %ebp
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_d5_skipextrauses:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %ebp
; X86-BMI1-NEXT: pushl %ebx
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: subl $12, %esp
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI1-NEXT: movl %edx, %esi
; X86-BMI1-NEXT: movl %eax, %ecx
; X86-BMI1-NEXT: shrl %cl, %esi
; X86-BMI1-NEXT: shrdl %cl, %edx, %ebx
; X86-BMI1-NEXT: xorl %edx, %edx
; X86-BMI1-NEXT: testb $32, %al
; X86-BMI1-NEXT: je .LBB60_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %esi, %ebx
; X86-BMI1-NEXT: xorl %esi, %esi
; X86-BMI1-NEXT: .LBB60_2:
; X86-BMI1-NEXT: movb $64, %cl
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: shldl %cl, %ebx, %esi
; X86-BMI1-NEXT: shll %cl, %ebx
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl %ebx, %ebp
; X86-BMI1-NEXT: jne .LBB60_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %esi, %ebp
; X86-BMI1-NEXT: .LBB60_4:
; X86-BMI1-NEXT: movl %ebp, %esi
; X86-BMI1-NEXT: shrl %cl, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl $0, %edi
; X86-BMI1-NEXT: jne .LBB60_6
; X86-BMI1-NEXT: # %bb.5:
; X86-BMI1-NEXT: movl %ebx, %edx
; X86-BMI1-NEXT: movl %esi, %edi
; X86-BMI1-NEXT: .LBB60_6:
; X86-BMI1-NEXT: shrdl %cl, %ebp, %edx
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-BMI1-NEXT: jne .LBB60_8
; X86-BMI1-NEXT: # %bb.7:
; X86-BMI1-NEXT: movl %edx, %esi
; X86-BMI1-NEXT: .LBB60_8:
; X86-BMI1-NEXT: subl $8, %esp
; X86-BMI1-NEXT: pushl %ecx
; X86-BMI1-NEXT: pushl %eax
; X86-BMI1-NEXT: calll use64@PLT
; X86-BMI1-NEXT: addl $16, %esp
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: movl %edi, %edx
; X86-BMI1-NEXT: addl $12, %esp
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: popl %ebx
; X86-BMI1-NEXT: popl %ebp
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_d5_skipextrauses:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %ebx
; X86-BMI2-NEXT: pushl %edi
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: movl %eax, %ecx
; X86-BMI2-NEXT: shrdl %cl, %edx, %edi
; X86-BMI2-NEXT: shrxl %eax, %edx, %edx
; X86-BMI2-NEXT: xorl %esi, %esi
; X86-BMI2-NEXT: testb $32, %al
; X86-BMI2-NEXT: je .LBB60_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %edx, %edi
; X86-BMI2-NEXT: xorl %edx, %edx
; X86-BMI2-NEXT: .LBB60_2:
; X86-BMI2-NEXT: movb $64, %cl
; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shldl %cl, %edi, %edx
; X86-BMI2-NEXT: shlxl %ecx, %edi, %ebx
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB60_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %ebx, %edx
; X86-BMI2-NEXT: movl $0, %ebx
; X86-BMI2-NEXT: .LBB60_4:
; X86-BMI2-NEXT: shrxl %ecx, %edx, %edi
; X86-BMI2-NEXT: jne .LBB60_6
; X86-BMI2-NEXT: # %bb.5:
; X86-BMI2-NEXT: movl %edi, %esi
; X86-BMI2-NEXT: .LBB60_6:
; X86-BMI2-NEXT: shrdl %cl, %edx, %ebx
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-BMI2-NEXT: jne .LBB60_8
; X86-BMI2-NEXT: # %bb.7:
; X86-BMI2-NEXT: movl %ebx, %edi
; X86-BMI2-NEXT: .LBB60_8:
; X86-BMI2-NEXT: subl $8, %esp
; X86-BMI2-NEXT: pushl %ecx
; X86-BMI2-NEXT: pushl %eax
; X86-BMI2-NEXT: calll use64@PLT
; X86-BMI2-NEXT: addl $16, %esp
; X86-BMI2-NEXT: movl %edi, %eax
; X86-BMI2-NEXT: movl %esi, %edx
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: popl %edi
; X86-BMI2-NEXT: popl %ebx
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_d5_skipextrauses:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: pushq %rbx
; X64-NOBMI-NEXT: movq %rdi, %rbx
; X64-NOBMI-NEXT: movl %esi, %ecx
; X64-NOBMI-NEXT: shrq %cl, %rbx
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shlq %cl, %rbx
; X64-NOBMI-NEXT: shrq %cl, %rbx
; X64-NOBMI-NEXT: movq %rsi, %rdi
; X64-NOBMI-NEXT: callq use64@PLT
; X64-NOBMI-NEXT: movq %rbx, %rax
; X64-NOBMI-NEXT: popq %rbx
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_d5_skipextrauses:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: pushq %rbx
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrq %rax, %rdi, %rbx
; X64-BMI1-NEXT: movq %rsi, %rdi
; X64-BMI1-NEXT: callq use64@PLT
; X64-BMI1-NEXT: movq %rbx, %rax
; X64-BMI1-NEXT: popq %rbx
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_d5_skipextrauses:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: pushq %rbx
; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rbx
; X64-BMI2-NEXT: movq %rsi, %rdi
; X64-BMI2-NEXT: callq use64@PLT
; X64-BMI2-NEXT: movq %rbx, %rax
; X64-BMI2-NEXT: popq %rbx
; X64-BMI2-NEXT: retq
  %shifted = lshr i64 %val, %numskipbits
  %numhighbits = sub i64 64, %numlowbits
  %highbitscleared = shl i64 %shifted, %numhighbits
  %masked = lshr i64 %highbitscleared, %numhighbits
  call void @use64(i64 %numskipbits)
  ret i64 %masked
}
| |
| ; 64-bit, but with 32-bit output |
| |
| ; Everything done in 64-bit, truncation happens last. |
; Pattern d) done entirely in 64 bits, with the truncation to i32 happening
; last (after the shl/lshr clearing of the high bits).
; CHECK lines are autogenerated by update_llc_test_checks.py -- do not edit.
define i32 @bextr64_32_d0(i64 %val, i64 %numskipbits, i64 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_32_d0:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: shrdl %cl, %esi, %edx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB61_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %eax, %edx
; X86-NOBMI-NEXT: xorl %eax, %eax
; X86-NOBMI-NEXT: .LBB61_2:
; X86-NOBMI-NEXT: movb $64, %cl
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: shldl %cl, %edx, %eax
; X86-NOBMI-NEXT: shll %cl, %edx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: je .LBB61_4
; X86-NOBMI-NEXT: # %bb.3:
; X86-NOBMI-NEXT: movl %edx, %eax
; X86-NOBMI-NEXT: xorl %edx, %edx
; X86-NOBMI-NEXT: .LBB61_4:
; X86-NOBMI-NEXT: shrdl %cl, %eax, %edx
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: jne .LBB61_6
; X86-NOBMI-NEXT: # %bb.5:
; X86-NOBMI-NEXT: movl %edx, %eax
; X86-NOBMI-NEXT: .LBB61_6:
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_32_d0:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT: movl %esi, %eax
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: shrdl %cl, %esi, %edx
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB61_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %eax, %edx
; X86-BMI1-NEXT: xorl %eax, %eax
; X86-BMI1-NEXT: .LBB61_2:
; X86-BMI1-NEXT: movb $64, %cl
; X86-BMI1-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: shldl %cl, %edx, %eax
; X86-BMI1-NEXT: shll %cl, %edx
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: je .LBB61_4
; X86-BMI1-NEXT: # %bb.3:
; X86-BMI1-NEXT: movl %edx, %eax
; X86-BMI1-NEXT: xorl %edx, %edx
; X86-BMI1-NEXT: .LBB61_4:
; X86-BMI1-NEXT: shrdl %cl, %eax, %edx
; X86-BMI1-NEXT: shrl %cl, %eax
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: jne .LBB61_6
; X86-BMI1-NEXT: # %bb.5:
; X86-BMI1-NEXT: movl %edx, %eax
; X86-BMI1-NEXT: .LBB61_6:
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_32_d0:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT: shrdl %cl, %edx, %eax
; X86-BMI2-NEXT: shrxl %ecx, %edx, %edx
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB61_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: movl %edx, %eax
; X86-BMI2-NEXT: xorl %edx, %edx
; X86-BMI2-NEXT: .LBB61_2:
; X86-BMI2-NEXT: movb $64, %cl
; X86-BMI2-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: shldl %cl, %eax, %edx
; X86-BMI2-NEXT: shlxl %ecx, %eax, %eax
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB61_4
; X86-BMI2-NEXT: # %bb.3:
; X86-BMI2-NEXT: movl %eax, %edx
; X86-BMI2-NEXT: xorl %eax, %eax
; X86-BMI2-NEXT: .LBB61_4:
; X86-BMI2-NEXT: shrdl %cl, %edx, %eax
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB61_6
; X86-BMI2-NEXT: # %bb.5:
; X86-BMI2-NEXT: shrxl %ecx, %edx, %eax
; X86-BMI2-NEXT: .LBB61_6:
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_32_d0:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movq %rsi, %rcx
; X64-NOBMI-NEXT: movq %rdi, %rax
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shlq %cl, %rax
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_32_d0:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax
; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_32_d0:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT: bzhiq %rdx, %rax, %rax
; X64-BMI2-NEXT: # kill: def $eax killed $eax killed $rax
; X64-BMI2-NEXT: retq
  %shifted = lshr i64 %val, %numskipbits
  %numhighbits = sub i64 64, %numlowbits
  %highbitscleared = shl i64 %shifted, %numhighbits
  %masked = lshr i64 %highbitscleared, %numhighbits
  %res = trunc i64 %masked to i32
  ret i32 %res
}
| |
| ; Shifting happens in 64-bit, then truncation. Masking is 32-bit. |
; Pattern d) variant: the skip-shift happens in 64 bits, then the value is
; truncated to i32 and the high-bit clearing (shl/lshr by 32-low) is 32-bit.
; CHECK lines are autogenerated by update_llc_test_checks.py -- do not edit.
define i32 @bextr64_32_d1(i64 %val, i64 %numskipbits, i32 %numlowbits) nounwind {
; X86-NOBMI-LABEL: bextr64_32_d1:
; X86-NOBMI: # %bb.0:
; X86-NOBMI-NEXT: pushl %esi
; X86-NOBMI-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NOBMI-NEXT: movl %esi, %eax
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: shrdl %cl, %esi, %edx
; X86-NOBMI-NEXT: testb $32, %cl
; X86-NOBMI-NEXT: jne .LBB62_2
; X86-NOBMI-NEXT: # %bb.1:
; X86-NOBMI-NEXT: movl %edx, %eax
; X86-NOBMI-NEXT: .LBB62_2:
; X86-NOBMI-NEXT: xorl %ecx, %ecx
; X86-NOBMI-NEXT: subb {{[0-9]+}}(%esp), %cl
; X86-NOBMI-NEXT: shll %cl, %eax
; X86-NOBMI-NEXT: # kill: def $cl killed $cl killed $ecx
; X86-NOBMI-NEXT: shrl %cl, %eax
; X86-NOBMI-NEXT: popl %esi
; X86-NOBMI-NEXT: retl
;
; X86-BMI1-LABEL: bextr64_32_d1:
; X86-BMI1: # %bb.0:
; X86-BMI1-NEXT: pushl %edi
; X86-BMI1-NEXT: pushl %esi
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI1-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI1-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-BMI1-NEXT: movl %edi, %edx
; X86-BMI1-NEXT: shrl %cl, %edx
; X86-BMI1-NEXT: shrdl %cl, %edi, %esi
; X86-BMI1-NEXT: testb $32, %cl
; X86-BMI1-NEXT: jne .LBB62_2
; X86-BMI1-NEXT: # %bb.1:
; X86-BMI1-NEXT: movl %esi, %edx
; X86-BMI1-NEXT: .LBB62_2:
; X86-BMI1-NEXT: shll $8, %eax
; X86-BMI1-NEXT: bextrl %eax, %edx, %eax
; X86-BMI1-NEXT: popl %esi
; X86-BMI1-NEXT: popl %edi
; X86-BMI1-NEXT: retl
;
; X86-BMI2-LABEL: bextr64_32_d1:
; X86-BMI2: # %bb.0:
; X86-BMI2-NEXT: pushl %esi
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %al
; X86-BMI2-NEXT: movb {{[0-9]+}}(%esp), %cl
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-BMI2-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-BMI2-NEXT: shrdl %cl, %esi, %edx
; X86-BMI2-NEXT: testb $32, %cl
; X86-BMI2-NEXT: je .LBB62_2
; X86-BMI2-NEXT: # %bb.1:
; X86-BMI2-NEXT: shrxl %ecx, %esi, %edx
; X86-BMI2-NEXT: .LBB62_2:
; X86-BMI2-NEXT: bzhil %eax, %edx, %eax
; X86-BMI2-NEXT: popl %esi
; X86-BMI2-NEXT: retl
;
; X64-NOBMI-LABEL: bextr64_32_d1:
; X64-NOBMI: # %bb.0:
; X64-NOBMI-NEXT: movq %rsi, %rcx
; X64-NOBMI-NEXT: movq %rdi, %rax
; X64-NOBMI-NEXT: # kill: def $cl killed $cl killed $rcx
; X64-NOBMI-NEXT: shrq %cl, %rax
; X64-NOBMI-NEXT: negb %dl
; X64-NOBMI-NEXT: movl %edx, %ecx
; X64-NOBMI-NEXT: shll %cl, %eax
; X64-NOBMI-NEXT: shrl %cl, %eax
; X64-NOBMI-NEXT: # kill: def $eax killed $eax killed $rax
; X64-NOBMI-NEXT: retq
;
; X64-BMI1-LABEL: bextr64_32_d1:
; X64-BMI1: # %bb.0:
; X64-BMI1-NEXT: shll $8, %edx
; X64-BMI1-NEXT: movzbl %sil, %eax
; X64-BMI1-NEXT: orl %edx, %eax
; X64-BMI1-NEXT: bextrq %rax, %rdi, %rax
; X64-BMI1-NEXT: # kill: def $eax killed $eax killed $rax
; X64-BMI1-NEXT: retq
;
; X64-BMI2-LABEL: bextr64_32_d1:
; X64-BMI2: # %bb.0:
; X64-BMI2-NEXT: shrxq %rsi, %rdi, %rax
; X64-BMI2-NEXT: bzhil %edx, %eax, %eax
; X64-BMI2-NEXT: retq
  %shifted = lshr i64 %val, %numskipbits
  %truncshifted = trunc i64 %shifted to i32
  %numhighbits = sub i32 32, %numlowbits
  %highbitscleared = shl i32 %truncshifted, %numhighbits
  %masked = lshr i32 %highbitscleared, %numhighbits
  ret i32 %masked
}
| |
| ; ---------------------------------------------------------------------------- ; |
| ; Constant |
| ; ---------------------------------------------------------------------------- ; |
| |
| ; https://bugs.llvm.org/show_bug.cgi?id=38938 |
| define void @pr38938(i32* %a0, i64* %a1) nounwind { |
| ; X86-NOBMI-LABEL: pr38938: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NOBMI-NEXT: movl (%ecx), %ecx |
| ; X86-NOBMI-NEXT: shrl $19, %ecx |
| ; X86-NOBMI-NEXT: andl $4092, %ecx # imm = 0xFFC |
| ; X86-NOBMI-NEXT: incl (%eax,%ecx) |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMINOTBM-LABEL: pr38938: |
| ; X86-BMINOTBM: # %bb.0: |
| ; X86-BMINOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMINOTBM-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMINOTBM-NEXT: movl $2581, %edx # imm = 0xA15 |
| ; X86-BMINOTBM-NEXT: bextrl %edx, (%ecx), %ecx |
| ; X86-BMINOTBM-NEXT: incl (%eax,%ecx,4) |
| ; X86-BMINOTBM-NEXT: retl |
| ; |
| ; X86-BMITBM-LABEL: pr38938: |
| ; X86-BMITBM: # %bb.0: |
| ; X86-BMITBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMITBM-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-BMITBM-NEXT: bextrl $2581, (%ecx), %ecx # imm = 0xA15 |
| ; X86-BMITBM-NEXT: incl (%eax,%ecx,4) |
| ; X86-BMITBM-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: pr38938: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq (%rsi), %rax |
| ; X64-NOBMI-NEXT: shrq $19, %rax |
| ; X64-NOBMI-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X64-NOBMI-NEXT: incl (%rdi,%rax) |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMINOTBM-LABEL: pr38938: |
| ; X64-BMINOTBM: # %bb.0: |
| ; X64-BMINOTBM-NEXT: movl $2581, %eax # imm = 0xA15 |
| ; X64-BMINOTBM-NEXT: bextrq %rax, (%rsi), %rax |
| ; X64-BMINOTBM-NEXT: incl (%rdi,%rax,4) |
| ; X64-BMINOTBM-NEXT: retq |
| ; |
| ; X64-BMITBM-LABEL: pr38938: |
| ; X64-BMITBM: # %bb.0: |
| ; X64-BMITBM-NEXT: bextrq $2581, (%rsi), %rax # imm = 0xA15 |
| ; X64-BMITBM-NEXT: incl (%rdi,%rax,4) |
| ; X64-BMITBM-NEXT: retq |
| ; (x >> 21) & 1023 feeds an i32 GEP, so the index is scaled by 4. |
| ; Without BMI the scale folds into the extract (shr $19 / and $0xFFC); |
| ; with BMI the extract stays a plain bextr (control 0xA15: start=21 (0x15), |
| ; length=10 (0x0A)) and the *4 folds into the scaled addressing mode. |
| %tmp = load i64, i64* %a1, align 8 |
| %tmp1 = lshr i64 %tmp, 21 |
| %tmp2 = and i64 %tmp1, 1023 |
| %tmp3 = getelementptr inbounds i32, i32* %a0, i64 %tmp2 |
| %tmp4 = load i32, i32* %tmp3, align 4 |
| %tmp5 = add nsw i32 %tmp4, 1 |
| store i32 %tmp5, i32* %tmp3, align 4 |
| ret void |
| } |
| |
| ; The most canonical variant |
| define i32 @c0_i32(i32 %arg) nounwind { |
| ; X86-NOBMI-LABEL: c0_i32: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: shrl $19, %eax |
| ; X86-NOBMI-NEXT: andl $1023, %eax # imm = 0x3FF |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMINOTBM-LABEL: c0_i32: |
| ; X86-BMINOTBM: # %bb.0: |
| ; X86-BMINOTBM-NEXT: movl $2579, %eax # imm = 0xA13 |
| ; X86-BMINOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax |
| ; X86-BMINOTBM-NEXT: retl |
| ; |
| ; X86-BMITBM-LABEL: c0_i32: |
| ; X86-BMITBM: # %bb.0: |
| ; X86-BMITBM-NEXT: bextrl $2579, {{[0-9]+}}(%esp), %eax # imm = 0xA13 |
| ; X86-BMITBM-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: c0_i32: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movl %edi, %eax |
| ; X64-NOBMI-NEXT: shrl $19, %eax |
| ; X64-NOBMI-NEXT: andl $1023, %eax # imm = 0x3FF |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMINOTBM-LABEL: c0_i32: |
| ; X64-BMINOTBM: # %bb.0: |
| ; X64-BMINOTBM-NEXT: movl $2579, %eax # imm = 0xA13 |
| ; X64-BMINOTBM-NEXT: bextrl %eax, %edi, %eax |
| ; X64-BMINOTBM-NEXT: retq |
| ; |
| ; X64-BMITBM-LABEL: c0_i32: |
| ; X64-BMITBM: # %bb.0: |
| ; X64-BMITBM-NEXT: bextrl $2579, %edi, %eax # imm = 0xA13 |
| ; X64-BMITBM-NEXT: retq |
| ; BEXTR control 0xA13: start=19 (0x13), length=10 (0x0A) -> 10-bit mask 1023. |
| ; TBM targets get the immediate form; plain BMI materializes the control first. |
| %tmp0 = lshr i32 %arg, 19 |
| %tmp1 = and i32 %tmp0, 1023 |
| ret i32 %tmp1 |
| } |
| |
| ; Should be still fine, but the mask is shifted |
| define i32 @c1_i32(i32 %arg) nounwind { |
| ; X86-LABEL: c1_i32: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: shrl $19, %eax |
| ; X86-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c1_i32: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movl %edi, %eax |
| ; X64-NEXT: shrl $19, %eax |
| ; X64-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X64-NEXT: retq |
| ; Mask 0xFFC covers bits [11:2] of the shifted value, not a run starting at |
| ; bit 0, so no single BEXTR applies: all configs lower to plain shr+and. |
| %tmp0 = lshr i32 %arg, 19 |
| %tmp1 = and i32 %tmp0, 4092 |
| ret i32 %tmp1 |
| } |
| |
| ; Should be still fine, but the result is shifted left afterwards |
| define i32 @c2_i32(i32 %arg) nounwind { |
| ; X86-LABEL: c2_i32: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: shrl $17, %eax |
| ; X86-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c2_i32: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movl %edi, %eax |
| ; X64-NEXT: shrl $17, %eax |
| ; X64-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X64-NEXT: retq |
| ; The trailing shl 2 is folded into the extract: shr 19 + shl 2 becomes |
| ; shr 17, and the mask 1023 becomes 1023 << 2 = 0xFFC. |
| %tmp0 = lshr i32 %arg, 19 |
| %tmp1 = and i32 %tmp0, 1023 |
| %tmp2 = shl i32 %tmp1, 2 |
| ret i32 %tmp2 |
| } |
| |
| ; The mask covers newly shifted-in bit |
| define i32 @c4_i32_bad(i32 %arg) nounwind { |
| ; X86-LABEL: c4_i32_bad: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: shrl $19, %eax |
| ; X86-NEXT: andl $-2, %eax |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c4_i32_bad: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movl %edi, %eax |
| ; X64-NEXT: shrl $19, %eax |
| ; X64-NEXT: andl $-2, %eax |
| ; X64-NEXT: retq |
| ; After shr 19 only 13 bits can be set, so the 14-bit mask 16382 (0x3FFE) |
| ; simplifies to "clear bit 0" (andl $-2); this is not a BEXTR candidate. |
| %tmp0 = lshr i32 %arg, 19 |
| %tmp1 = and i32 %tmp0, 16382 |
| ret i32 %tmp1 |
| } |
| |
| ; i64 |
| |
| ; The most canonical variant |
| define i64 @c0_i64(i64 %arg) nounwind { |
| ; X86-NOBMI-LABEL: c0_i64: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: shrl $19, %eax |
| ; X86-NOBMI-NEXT: andl $1023, %eax # imm = 0x3FF |
| ; X86-NOBMI-NEXT: xorl %edx, %edx |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMINOTBM-LABEL: c0_i64: |
| ; X86-BMINOTBM: # %bb.0: |
| ; X86-BMINOTBM-NEXT: movl $2579, %eax # imm = 0xA13 |
| ; X86-BMINOTBM-NEXT: bextrl %eax, {{[0-9]+}}(%esp), %eax |
| ; X86-BMINOTBM-NEXT: xorl %edx, %edx |
| ; X86-BMINOTBM-NEXT: retl |
| ; |
| ; X86-BMITBM-LABEL: c0_i64: |
| ; X86-BMITBM: # %bb.0: |
| ; X86-BMITBM-NEXT: bextrl $2579, {{[0-9]+}}(%esp), %eax # imm = 0xA13 |
| ; X86-BMITBM-NEXT: xorl %edx, %edx |
| ; X86-BMITBM-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: c0_i64: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: movq %rdi, %rax |
| ; X64-NOBMI-NEXT: shrq $51, %rax |
| ; X64-NOBMI-NEXT: andl $1023, %eax # imm = 0x3FF |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMINOTBM-LABEL: c0_i64: |
| ; X64-BMINOTBM: # %bb.0: |
| ; X64-BMINOTBM-NEXT: movl $2611, %eax # imm = 0xA33 |
| ; X64-BMINOTBM-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMINOTBM-NEXT: retq |
| ; |
| ; X64-BMITBM-LABEL: c0_i64: |
| ; X64-BMITBM: # %bb.0: |
| ; X64-BMITBM-NEXT: bextrq $2611, %rdi, %rax # imm = 0xA33 |
| ; X64-BMITBM-NEXT: retq |
| ; Shift >= 32 means the field lives entirely in the high dword: 32-bit |
| ; targets extract from it with start 51-32=19 (control 0xA13) and zero %edx; |
| ; 64-bit targets use bextrq with control 0xA33 (start=51, length=10). |
| %tmp0 = lshr i64 %arg, 51 |
| %tmp1 = and i64 %tmp0, 1023 |
| ret i64 %tmp1 |
| } |
| |
| ; Should be still fine, but the mask is shifted |
| define i64 @c1_i64(i64 %arg) nounwind { |
| ; X86-LABEL: c1_i64: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: shrl $19, %eax |
| ; X86-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X86-NEXT: xorl %edx, %edx |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c1_i64: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movq %rdi, %rax |
| ; X64-NEXT: shrq $51, %rax |
| ; X64-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X64-NEXT: retq |
| ; Pre-shifted mask 0xFFC (bits [11:2]) blocks BEXTR, so every config lowers |
| ; to shr+and; on 32-bit targets only the high dword is touched (shrl $19). |
| %tmp0 = lshr i64 %arg, 51 |
| %tmp1 = and i64 %tmp0, 4092 |
| ret i64 %tmp1 |
| } |
| |
| ; Should be still fine, but the result is shifted left afterwards |
| define i64 @c2_i64(i64 %arg) nounwind { |
| ; X86-LABEL: c2_i64: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: shrl $17, %eax |
| ; X86-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X86-NEXT: xorl %edx, %edx |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c2_i64: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movq %rdi, %rax |
| ; X64-NEXT: shrq $49, %rax |
| ; X64-NEXT: andl $4092, %eax # imm = 0xFFC |
| ; X64-NEXT: retq |
| ; The shl 2 folds into the shift count (51-2 = 49, or 19-2 = 17 on the |
| ; high dword for 32-bit) with the mask scaled to 1023 << 2 = 0xFFC. |
| %tmp0 = lshr i64 %arg, 51 |
| %tmp1 = and i64 %tmp0, 1023 |
| %tmp2 = shl i64 %tmp1, 2 |
| ret i64 %tmp2 |
| } |
| |
| ; The mask covers newly shifted-in bit |
| define i64 @c4_i64_bad(i64 %arg) nounwind { |
| ; X86-LABEL: c4_i64_bad: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: shrl $19, %eax |
| ; X86-NEXT: andl $-2, %eax |
| ; X86-NEXT: xorl %edx, %edx |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c4_i64_bad: |
| ; X64: # %bb.0: |
| ; X64-NEXT: movq %rdi, %rax |
| ; X64-NEXT: shrq $51, %rax |
| ; X64-NEXT: andl $-2, %eax |
| ; X64-NEXT: retq |
| ; After shr 51 only 13 bits remain, so mask 16382 (0x3FFE) degenerates to |
| ; "clear bit 0" (andl $-2); no BEXTR form exists for this pattern. |
| %tmp0 = lshr i64 %arg, 51 |
| %tmp1 = and i64 %tmp0, 16382 |
| ret i64 %tmp1 |
| } |
| |
| ; ---------------------------------------------------------------------------- ; |
| ; Constant, storing the result afterwards. |
| ; ---------------------------------------------------------------------------- ; |
| |
| ; i32 |
| |
| ; The most canonical variant |
| define void @c5_i32(i32 %arg, i32* %ptr) nounwind { |
| ; X86-NOBMI-LABEL: c5_i32: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NOBMI-NEXT: shrl $19, %ecx |
| ; X86-NOBMI-NEXT: andl $1023, %ecx # imm = 0x3FF |
| ; X86-NOBMI-NEXT: movl %ecx, (%eax) |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMINOTBM-LABEL: c5_i32: |
| ; X86-BMINOTBM: # %bb.0: |
| ; X86-BMINOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMINOTBM-NEXT: movl $2579, %ecx # imm = 0xA13 |
| ; X86-BMINOTBM-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMINOTBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMINOTBM-NEXT: retl |
| ; |
| ; X86-BMITBM-LABEL: c5_i32: |
| ; X86-BMITBM: # %bb.0: |
| ; X86-BMITBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMITBM-NEXT: bextrl $2579, {{[0-9]+}}(%esp), %ecx # imm = 0xA13 |
| ; X86-BMITBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMITBM-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: c5_i32: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: shrl $19, %edi |
| ; X64-NOBMI-NEXT: andl $1023, %edi # imm = 0x3FF |
| ; X64-NOBMI-NEXT: movl %edi, (%rsi) |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMINOTBM-LABEL: c5_i32: |
| ; X64-BMINOTBM: # %bb.0: |
| ; X64-BMINOTBM-NEXT: movl $2579, %eax # imm = 0xA13 |
| ; X64-BMINOTBM-NEXT: bextrl %eax, %edi, %eax |
| ; X64-BMINOTBM-NEXT: movl %eax, (%rsi) |
| ; X64-BMINOTBM-NEXT: retq |
| ; |
| ; X64-BMITBM-LABEL: c5_i32: |
| ; X64-BMITBM: # %bb.0: |
| ; X64-BMITBM-NEXT: bextrl $2579, %edi, %eax # imm = 0xA13 |
| ; X64-BMITBM-NEXT: movl %eax, (%rsi) |
| ; X64-BMITBM-NEXT: retq |
| ; Store variant of c0_i32: same extract (control 0xA13, start=19, length=10) |
| ; but the result is stored through %ptr instead of returned. |
| %tmp0 = lshr i32 %arg, 19 |
| %tmp1 = and i32 %tmp0, 1023 |
| store i32 %tmp1, i32* %ptr |
| ret void |
| } |
| |
| ; Should be still fine, but the mask is shifted |
| define void @c6_i32(i32 %arg, i32* %ptr) nounwind { |
| ; X86-NOBMI-LABEL: c6_i32: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NOBMI-NEXT: shrl $19, %ecx |
| ; X86-NOBMI-NEXT: andl $4095, %ecx # imm = 0xFFF |
| ; X86-NOBMI-NEXT: movl %ecx, (%eax) |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMINOTBM-LABEL: c6_i32: |
| ; X86-BMINOTBM: # %bb.0: |
| ; X86-BMINOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMINOTBM-NEXT: movl $3091, %ecx # imm = 0xC13 |
| ; X86-BMINOTBM-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMINOTBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMINOTBM-NEXT: retl |
| ; |
| ; X86-BMITBM-LABEL: c6_i32: |
| ; X86-BMITBM: # %bb.0: |
| ; X86-BMITBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMITBM-NEXT: bextrl $3091, {{[0-9]+}}(%esp), %ecx # imm = 0xC13 |
| ; X86-BMITBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMITBM-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: c6_i32: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: shrl $19, %edi |
| ; X64-NOBMI-NEXT: andl $4095, %edi # imm = 0xFFF |
| ; X64-NOBMI-NEXT: movl %edi, (%rsi) |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMINOTBM-LABEL: c6_i32: |
| ; X64-BMINOTBM: # %bb.0: |
| ; X64-BMINOTBM-NEXT: movl $3091, %eax # imm = 0xC13 |
| ; X64-BMINOTBM-NEXT: bextrl %eax, %edi, %eax |
| ; X64-BMINOTBM-NEXT: movl %eax, (%rsi) |
| ; X64-BMINOTBM-NEXT: retq |
| ; |
| ; X64-BMITBM-LABEL: c6_i32: |
| ; X64-BMITBM: # %bb.0: |
| ; X64-BMITBM-NEXT: bextrl $3091, %edi, %eax # imm = 0xC13 |
| ; X64-BMITBM-NEXT: movl %eax, (%rsi) |
| ; X64-BMITBM-NEXT: retq |
| ; 12-bit mask 4095 is still a low-bit run, so BEXTR applies with control |
| ; 0xC13: start=19 (0x13), length=12 (0x0C); result is stored, not returned. |
| %tmp0 = lshr i32 %arg, 19 |
| %tmp1 = and i32 %tmp0, 4095 |
| store i32 %tmp1, i32* %ptr |
| ret void |
| } |
| |
| ; Should be still fine, but the result is shifted left afterwards |
| define void @c7_i32(i32 %arg, i32* %ptr) nounwind { |
| ; X86-LABEL: c7_i32: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: shrl $17, %ecx |
| ; X86-NEXT: andl $4092, %ecx # imm = 0xFFC |
| ; X86-NEXT: movl %ecx, (%eax) |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c7_i32: |
| ; X64: # %bb.0: |
| ; X64-NEXT: shrl $17, %edi |
| ; X64-NEXT: andl $4092, %edi # imm = 0xFFC |
| ; X64-NEXT: movl %edi, (%rsi) |
| ; X64-NEXT: retq |
| ; Store variant of c2_i32: shl 2 folds into the extract (shr 17, |
| ; mask 1023 << 2 = 0xFFC), so no BEXTR is emitted on any config. |
| %tmp0 = lshr i32 %arg, 19 |
| %tmp1 = and i32 %tmp0, 1023 |
| %tmp2 = shl i32 %tmp1, 2 |
| store i32 %tmp2, i32* %ptr |
| ret void |
| } |
| |
| ; i64 |
| |
| ; The most canonical variant |
| define void @c5_i64(i64 %arg, i64* %ptr) nounwind { |
| ; X86-NOBMI-LABEL: c5_i64: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NOBMI-NEXT: shrl $19, %ecx |
| ; X86-NOBMI-NEXT: andl $1023, %ecx # imm = 0x3FF |
| ; X86-NOBMI-NEXT: movl %ecx, (%eax) |
| ; X86-NOBMI-NEXT: movl $0, 4(%eax) |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMINOTBM-LABEL: c5_i64: |
| ; X86-BMINOTBM: # %bb.0: |
| ; X86-BMINOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMINOTBM-NEXT: movl $2579, %ecx # imm = 0xA13 |
| ; X86-BMINOTBM-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMINOTBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMINOTBM-NEXT: movl $0, 4(%eax) |
| ; X86-BMINOTBM-NEXT: retl |
| ; |
| ; X86-BMITBM-LABEL: c5_i64: |
| ; X86-BMITBM: # %bb.0: |
| ; X86-BMITBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMITBM-NEXT: bextrl $2579, {{[0-9]+}}(%esp), %ecx # imm = 0xA13 |
| ; X86-BMITBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMITBM-NEXT: movl $0, 4(%eax) |
| ; X86-BMITBM-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: c5_i64: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: shrq $51, %rdi |
| ; X64-NOBMI-NEXT: andl $1023, %edi # imm = 0x3FF |
| ; X64-NOBMI-NEXT: movq %rdi, (%rsi) |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMINOTBM-LABEL: c5_i64: |
| ; X64-BMINOTBM: # %bb.0: |
| ; X64-BMINOTBM-NEXT: movl $2611, %eax # imm = 0xA33 |
| ; X64-BMINOTBM-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMINOTBM-NEXT: movq %rax, (%rsi) |
| ; X64-BMINOTBM-NEXT: retq |
| ; |
| ; X64-BMITBM-LABEL: c5_i64: |
| ; X64-BMITBM: # %bb.0: |
| ; X64-BMITBM-NEXT: bextrq $2611, %rdi, %rax # imm = 0xA33 |
| ; X64-BMITBM-NEXT: movq %rax, (%rsi) |
| ; X64-BMITBM-NEXT: retq |
| ; Store variant of c0_i64: 32-bit targets extract from the high dword |
| ; (control 0xA13) and store an explicit zero high half; 64-bit targets |
| ; use bextrq with control 0xA33 (start=51, length=10). |
| %tmp0 = lshr i64 %arg, 51 |
| %tmp1 = and i64 %tmp0, 1023 |
| store i64 %tmp1, i64* %ptr |
| ret void |
| } |
| |
| ; Should be still fine, but the mask is shifted |
| define void @c6_i64(i64 %arg, i64* %ptr) nounwind { |
| ; X86-NOBMI-LABEL: c6_i64: |
| ; X86-NOBMI: # %bb.0: |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NOBMI-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NOBMI-NEXT: shrl $19, %ecx |
| ; X86-NOBMI-NEXT: andl $4095, %ecx # imm = 0xFFF |
| ; X86-NOBMI-NEXT: movl %ecx, (%eax) |
| ; X86-NOBMI-NEXT: movl $0, 4(%eax) |
| ; X86-NOBMI-NEXT: retl |
| ; |
| ; X86-BMINOTBM-LABEL: c6_i64: |
| ; X86-BMINOTBM: # %bb.0: |
| ; X86-BMINOTBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMINOTBM-NEXT: movl $3091, %ecx # imm = 0xC13 |
| ; X86-BMINOTBM-NEXT: bextrl %ecx, {{[0-9]+}}(%esp), %ecx |
| ; X86-BMINOTBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMINOTBM-NEXT: movl $0, 4(%eax) |
| ; X86-BMINOTBM-NEXT: retl |
| ; |
| ; X86-BMITBM-LABEL: c6_i64: |
| ; X86-BMITBM: # %bb.0: |
| ; X86-BMITBM-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-BMITBM-NEXT: bextrl $3091, {{[0-9]+}}(%esp), %ecx # imm = 0xC13 |
| ; X86-BMITBM-NEXT: movl %ecx, (%eax) |
| ; X86-BMITBM-NEXT: movl $0, 4(%eax) |
| ; X86-BMITBM-NEXT: retl |
| ; |
| ; X64-NOBMI-LABEL: c6_i64: |
| ; X64-NOBMI: # %bb.0: |
| ; X64-NOBMI-NEXT: shrq $51, %rdi |
| ; X64-NOBMI-NEXT: andl $4095, %edi # imm = 0xFFF |
| ; X64-NOBMI-NEXT: movq %rdi, (%rsi) |
| ; X64-NOBMI-NEXT: retq |
| ; |
| ; X64-BMINOTBM-LABEL: c6_i64: |
| ; X64-BMINOTBM: # %bb.0: |
| ; X64-BMINOTBM-NEXT: movl $3123, %eax # imm = 0xC33 |
| ; X64-BMINOTBM-NEXT: bextrq %rax, %rdi, %rax |
| ; X64-BMINOTBM-NEXT: movq %rax, (%rsi) |
| ; X64-BMINOTBM-NEXT: retq |
| ; |
| ; X64-BMITBM-LABEL: c6_i64: |
| ; X64-BMITBM: # %bb.0: |
| ; X64-BMITBM-NEXT: bextrq $3123, %rdi, %rax # imm = 0xC33 |
| ; X64-BMITBM-NEXT: movq %rax, (%rsi) |
| ; X64-BMITBM-NEXT: retq |
| ; 12-bit mask 4095 keeps BEXTR viable: 32-bit targets use control 0xC13 |
| ; (start=19 in the high dword) plus a zero high-half store; 64-bit targets |
| ; use control 0xC33 (start=51, length=12). |
| %tmp0 = lshr i64 %arg, 51 |
| %tmp1 = and i64 %tmp0, 4095 |
| store i64 %tmp1, i64* %ptr |
| ret void |
| } |
| |
| ; Should be still fine, but the result is shifted left afterwards |
| define void @c7_i64(i64 %arg, i64* %ptr) nounwind { |
| ; X86-LABEL: c7_i64: |
| ; X86: # %bb.0: |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax |
| ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx |
| ; X86-NEXT: shrl $17, %ecx |
| ; X86-NEXT: andl $4092, %ecx # imm = 0xFFC |
| ; X86-NEXT: movl %ecx, (%eax) |
| ; X86-NEXT: movl $0, 4(%eax) |
| ; X86-NEXT: retl |
| ; |
| ; X64-LABEL: c7_i64: |
| ; X64: # %bb.0: |
| ; X64-NEXT: shrq $49, %rdi |
| ; X64-NEXT: andl $4092, %edi # imm = 0xFFC |
| ; X64-NEXT: movq %rdi, (%rsi) |
| ; X64-NEXT: retq |
| ; Store variant of c2_i64: the shl 2 folds into the shift count |
| ; (51-2 = 49, or 17 on the high dword) with the mask scaled to 0xFFC, |
| ; so plain shr+and is emitted everywhere. |
| %tmp0 = lshr i64 %arg, 51 |
| %tmp1 = and i64 %tmp0, 1023 |
| %tmp2 = shl i64 %tmp1, 2 |
| store i64 %tmp2, i64* %ptr |
| ret void |
| } |