; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686 -mattr=cmov | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s --check-prefixes=X64

;
; trunc(abs(sub(zext(a),zext(b)))) -> abdu(a,b)
;
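; X86/X64 have no native ABDU instruction. As a rough sketch of the
; expected lowering (not a guarantee), word-sized cases compute both
; differences and select on the borrow flag:
;   %ab  = sub i32 %a, %b
;   %ba  = sub i32 %b, %a
;   %cmp = icmp ult i32 %a, %b
;   %abd = select i1 %cmp, i32 %ba, i32 %ab   ; -> sub, sub, cmovb
; Types wider than one GPR (i64 on X86, i128 on X64) instead go through
; a sub/sbb subtract plus a borrow-mask conditional negate, as checked
; in the wider tests below.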

define i8 @abd_ext_i8(i8 %a, i8 %b) nounwind {
; X86-LABEL: abd_ext_i8:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i8:
; X64: # %bb.0:
; X64-NEXT: movzbl %sil, %eax
; X64-NEXT: movzbl %dil, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %aext = zext i8 %a to i64
  %bext = zext i8 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 false)
  %trunc = trunc i64 %abs to i8
  ret i8 %trunc
}

define i8 @abd_ext_i8_i16(i8 %a, i16 %b) nounwind {
; X86-LABEL: abd_ext_i8_i16:
; X86: # %bb.0:
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i8_i16:
; X64: # %bb.0:
; X64-NEXT: movzbl %dil, %ecx
; X64-NEXT: subl %esi, %edi
; X64-NEXT: movzwl %si, %eax
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edi, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %aext = zext i8 %a to i64
  %bext = zext i16 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 false)
  %trunc = trunc i64 %abs to i8
  ret i8 %trunc
}

define i8 @abd_ext_i8_undef(i8 %a, i8 %b) nounwind {
; X86-LABEL: abd_ext_i8_undef:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i8_undef:
; X64: # %bb.0:
; X64-NEXT: movzbl %sil, %eax
; X64-NEXT: movzbl %dil, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %aext = zext i8 %a to i64
  %bext = zext i8 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 true)
  %trunc = trunc i64 %abs to i8
  ret i8 %trunc
}

define i16 @abd_ext_i16(i16 %a, i16 %b) nounwind {
; X86-LABEL: abd_ext_i16:
; X86: # %bb.0:
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i16:
; X64: # %bb.0:
; X64-NEXT: movzwl %si, %eax
; X64-NEXT: movzwl %di, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
  %aext = zext i16 %a to i64
  %bext = zext i16 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 false)
  %trunc = trunc i64 %abs to i16
  ret i16 %trunc
}

define i16 @abd_ext_i16_i32(i16 %a, i32 %b) nounwind {
; X86-LABEL: abd_ext_i16_i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i16_i32:
; X64: # %bb.0:
; X64-NEXT: movzwl %di, %ecx
; X64-NEXT: movl %edi, %eax
; X64-NEXT: subl %esi, %eax
; X64-NEXT: subl %ecx, %esi
; X64-NEXT: cmovael %esi, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
  %aext = zext i16 %a to i64
  %bext = zext i32 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 false)
  %trunc = trunc i64 %abs to i16
  ret i16 %trunc
}

define i16 @abd_ext_i16_undef(i16 %a, i16 %b) nounwind {
; X86-LABEL: abd_ext_i16_undef:
; X86: # %bb.0:
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i16_undef:
; X64: # %bb.0:
; X64-NEXT: movzwl %si, %eax
; X64-NEXT: movzwl %di, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
  %aext = zext i16 %a to i64
  %bext = zext i16 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 true)
  %trunc = trunc i64 %abs to i16
  ret i16 %trunc
}

define i32 @abd_ext_i32(i32 %a, i32 %b) nounwind {
; X86-LABEL: abd_ext_i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i32:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: subl %esi, %eax
; X64-NEXT: subl %edi, %esi
; X64-NEXT: cmovael %esi, %eax
; X64-NEXT: retq
  %aext = zext i32 %a to i64
  %bext = zext i32 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 false)
  %trunc = trunc i64 %abs to i32
  ret i32 %trunc
}

define i32 @abd_ext_i32_i16(i32 %a, i16 %b) nounwind {
; X86-LABEL: abd_ext_i32_i16:
; X86: # %bb.0:
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i32_i16:
; X64: # %bb.0:
; X64-NEXT: movzwl %si, %eax
; X64-NEXT: movl %edi, %ecx
; X64-NEXT: subl %eax, %ecx
; X64-NEXT: subl %edi, %eax
; X64-NEXT: cmovbl %ecx, %eax
; X64-NEXT: retq
  %aext = zext i32 %a to i64
  %bext = zext i16 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 false)
  %trunc = trunc i64 %abs to i32
  ret i32 %trunc
}

define i32 @abd_ext_i32_undef(i32 %a, i32 %b) nounwind {
; X86-LABEL: abd_ext_i32_undef:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i32_undef:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: subl %esi, %eax
; X64-NEXT: subl %edi, %esi
; X64-NEXT: cmovael %esi, %eax
; X64-NEXT: retq
  %aext = zext i32 %a to i64
  %bext = zext i32 %b to i64
  %sub = sub i64 %aext, %bext
  %abs = call i64 @llvm.abs.i64(i64 %sub, i1 true)
  %trunc = trunc i64 %abs to i32
  ret i32 %trunc
}

define i64 @abd_ext_i64(i64 %a, i64 %b) nounwind {
; X86-LABEL: abd_ext_i64:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %ecx, %ecx
; X86-NEXT: subl {{[0-9]+}}(%esp), %eax
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl %ecx, %ecx
; X86-NEXT: xorl %ecx, %edx
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: sbbl %ecx, %edx
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: subq %rsi, %rax
; X64-NEXT: subq %rdi, %rsi
; X64-NEXT: cmovaeq %rsi, %rax
; X64-NEXT: retq
  %aext = zext i64 %a to i128
  %bext = zext i64 %b to i128
  %sub = sub i128 %aext, %bext
  %abs = call i128 @llvm.abs.i128(i128 %sub, i1 false)
  %trunc = trunc i128 %abs to i64
  ret i64 %trunc
}
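
; In the X86 sequence above, `sbbl %ecx, %ecx` splats the borrow of the
; 64-bit subtract into a mask m (0 or -1); the xor/sub pair then applies
; the conditional-negate identity |x| = (x ^ m) - m, since for m = -1
; (x ^ -1) - (-1) = ~x + 1 = -x, while for m = 0 x is unchanged.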

define i64 @abd_ext_i64_undef(i64 %a, i64 %b) nounwind {
; X86-LABEL: abd_ext_i64_undef:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %ecx, %ecx
; X86-NEXT: subl {{[0-9]+}}(%esp), %eax
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl %ecx, %ecx
; X86-NEXT: xorl %ecx, %edx
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: sbbl %ecx, %edx
; X86-NEXT: retl
;
; X64-LABEL: abd_ext_i64_undef:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: subq %rsi, %rax
; X64-NEXT: subq %rdi, %rsi
; X64-NEXT: cmovaeq %rsi, %rax
; X64-NEXT: retq
  %aext = zext i64 %a to i128
  %bext = zext i64 %b to i128
  %sub = sub i128 %aext, %bext
  %abs = call i128 @llvm.abs.i128(i128 %sub, i1 true)
  %trunc = trunc i128 %abs to i64
  ret i64 %trunc
}

define i128 @abd_ext_i128(i128 %a, i128 %b) nounwind {
; X86-LABEL: abd_ext_i128:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ebx, %ebx
; X86-NEXT: subl {{[0-9]+}}(%esp), %edi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %esi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl %ebx, %ebx
; X86-NEXT: xorl %ebx, %ecx
; X86-NEXT: xorl %ebx, %edx
; X86-NEXT: xorl %ebx, %esi
; X86-NEXT: xorl %ebx, %edi
; X86-NEXT: subl %ebx, %edi
; X86-NEXT: sbbl %ebx, %esi
; X86-NEXT: sbbl %ebx, %edx
; X86-NEXT: sbbl %ebx, %ecx
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl $4
;
; X64-LABEL: abd_ext_i128:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: xorl %edi, %edi
; X64-NEXT: subq %rdx, %rax
; X64-NEXT: sbbq %rcx, %rsi
; X64-NEXT: sbbq %rdi, %rdi
; X64-NEXT: xorq %rdi, %rsi
; X64-NEXT: xorq %rdi, %rax
; X64-NEXT: subq %rdi, %rax
; X64-NEXT: sbbq %rdi, %rsi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: retq
  %aext = zext i128 %a to i256
  %bext = zext i128 %b to i256
  %sub = sub i256 %aext, %bext
  %abs = call i256 @llvm.abs.i256(i256 %sub, i1 false)
  %trunc = trunc i256 %abs to i128
  ret i128 %trunc
}

define i128 @abd_ext_i128_undef(i128 %a, i128 %b) nounwind {
; X86-LABEL: abd_ext_i128_undef:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ebx, %ebx
; X86-NEXT: subl {{[0-9]+}}(%esp), %edi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %esi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl %ebx, %ebx
; X86-NEXT: xorl %ebx, %ecx
; X86-NEXT: xorl %ebx, %edx
; X86-NEXT: xorl %ebx, %esi
; X86-NEXT: xorl %ebx, %edi
; X86-NEXT: subl %ebx, %edi
; X86-NEXT: sbbl %ebx, %esi
; X86-NEXT: sbbl %ebx, %edx
; X86-NEXT: sbbl %ebx, %ecx
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl $4
;
; X64-LABEL: abd_ext_i128_undef:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: xorl %edi, %edi
; X64-NEXT: subq %rdx, %rax
; X64-NEXT: sbbq %rcx, %rsi
; X64-NEXT: sbbq %rdi, %rdi
; X64-NEXT: xorq %rdi, %rsi
; X64-NEXT: xorq %rdi, %rax
; X64-NEXT: subq %rdi, %rax
; X64-NEXT: sbbq %rdi, %rsi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: retq
  %aext = zext i128 %a to i256
  %bext = zext i128 %b to i256
  %sub = sub i256 %aext, %bext
  %abs = call i256 @llvm.abs.i256(i256 %sub, i1 true)
  %trunc = trunc i256 %abs to i128
  ret i128 %trunc
}

;
; sub(umax(a,b),umin(a,b)) -> abdu(a,b)
;
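; Because umax(a,b) >= umin(a,b), the sub never wraps and always equals
; the unsigned absolute difference:
;   sub (umax a, b), (umin a, b)  ==  a u< b ? b - a : a - b
; so these cases should (as an expectation, not a spec) fold to the same
; sub/sub/cmov and sub/sbb sequences as the zext+abs pattern above.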

define i8 @abd_minmax_i8(i8 %a, i8 %b) nounwind {
; X86-LABEL: abd_minmax_i8:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_minmax_i8:
; X64: # %bb.0:
; X64-NEXT: movzbl %sil, %eax
; X64-NEXT: movzbl %dil, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %min = call i8 @llvm.umin.i8(i8 %a, i8 %b)
  %max = call i8 @llvm.umax.i8(i8 %a, i8 %b)
  %sub = sub i8 %max, %min
  ret i8 %sub
}

define i16 @abd_minmax_i16(i16 %a, i16 %b) nounwind {
; X86-LABEL: abd_minmax_i16:
; X86: # %bb.0:
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_minmax_i16:
; X64: # %bb.0:
; X64-NEXT: movzwl %si, %eax
; X64-NEXT: movzwl %di, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
  %min = call i16 @llvm.umin.i16(i16 %a, i16 %b)
  %max = call i16 @llvm.umax.i16(i16 %a, i16 %b)
  %sub = sub i16 %max, %min
  ret i16 %sub
}

define i32 @abd_minmax_i32(i32 %a, i32 %b) nounwind {
; X86-LABEL: abd_minmax_i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: retl
;
; X64-LABEL: abd_minmax_i32:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: subl %esi, %eax
; X64-NEXT: subl %edi, %esi
; X64-NEXT: cmovael %esi, %eax
; X64-NEXT: retq
  %min = call i32 @llvm.umin.i32(i32 %a, i32 %b)
  %max = call i32 @llvm.umax.i32(i32 %a, i32 %b)
  %sub = sub i32 %max, %min
  ret i32 %sub
}

define i64 @abd_minmax_i64(i64 %a, i64 %b) nounwind {
; X86-LABEL: abd_minmax_i64:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %ecx, %ecx
; X86-NEXT: subl {{[0-9]+}}(%esp), %eax
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl %ecx, %ecx
; X86-NEXT: xorl %ecx, %edx
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: sbbl %ecx, %edx
; X86-NEXT: retl
;
; X64-LABEL: abd_minmax_i64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: subq %rsi, %rax
; X64-NEXT: subq %rdi, %rsi
; X64-NEXT: cmovaeq %rsi, %rax
; X64-NEXT: retq
  %min = call i64 @llvm.umin.i64(i64 %a, i64 %b)
  %max = call i64 @llvm.umax.i64(i64 %a, i64 %b)
  %sub = sub i64 %max, %min
  ret i64 %sub
}

define i128 @abd_minmax_i128(i128 %a, i128 %b) nounwind {
; X86-LABEL: abd_minmax_i128:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ebx, %ebx
; X86-NEXT: subl {{[0-9]+}}(%esp), %edi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %esi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl %ebx, %ebx
; X86-NEXT: xorl %ebx, %ecx
; X86-NEXT: xorl %ebx, %edx
; X86-NEXT: xorl %ebx, %esi
; X86-NEXT: xorl %ebx, %edi
; X86-NEXT: subl %ebx, %edi
; X86-NEXT: sbbl %ebx, %esi
; X86-NEXT: sbbl %ebx, %edx
; X86-NEXT: sbbl %ebx, %ecx
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl $4
;
; X64-LABEL: abd_minmax_i128:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: xorl %edi, %edi
; X64-NEXT: subq %rdx, %rax
; X64-NEXT: sbbq %rcx, %rsi
; X64-NEXT: sbbq %rdi, %rdi
; X64-NEXT: xorq %rdi, %rsi
; X64-NEXT: xorq %rdi, %rax
; X64-NEXT: subq %rdi, %rax
; X64-NEXT: sbbq %rdi, %rsi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: retq
  %min = call i128 @llvm.umin.i128(i128 %a, i128 %b)
  %max = call i128 @llvm.umax.i128(i128 %a, i128 %b)
  %sub = sub i128 %max, %min
  ret i128 %sub
}

;
; select(icmp(a,b),sub(a,b),sub(b,a)) -> abdu(a,b)
;
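; The predicate direction varies across these tests (ugt, uge, ult); any
; unsigned ordering is fine as long as the select returns the
; non-negative difference, and each variant should reduce to the same
; codegen.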

define i8 @abd_cmp_i8(i8 %a, i8 %b) nounwind {
; X86-LABEL: abd_cmp_i8:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_cmp_i8:
; X64: # %bb.0:
; X64-NEXT: movzbl %dil, %eax
; X64-NEXT: movzbl %sil, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %cmp = icmp ugt i8 %a, %b
  %ab = sub i8 %a, %b
  %ba = sub i8 %b, %a
  %sel = select i1 %cmp, i8 %ab, i8 %ba
  ret i8 %sel
}

define i16 @abd_cmp_i16(i16 %a, i16 %b) nounwind {
; X86-LABEL: abd_cmp_i16:
; X86: # %bb.0:
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_cmp_i16:
; X64: # %bb.0:
; X64-NEXT: movzwl %si, %eax
; X64-NEXT: movzwl %di, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
  %cmp = icmp uge i16 %a, %b
  %ab = sub i16 %a, %b
  %ba = sub i16 %b, %a
  %sel = select i1 %cmp, i16 %ab, i16 %ba
  ret i16 %sel
}

define i32 @abd_cmp_i32(i32 %a, i32 %b) nounwind {
; X86-LABEL: abd_cmp_i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: retl
;
; X64-LABEL: abd_cmp_i32:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: subl %esi, %eax
; X64-NEXT: subl %edi, %esi
; X64-NEXT: cmovael %esi, %eax
; X64-NEXT: retq
  %cmp = icmp ult i32 %a, %b
  %ab = sub i32 %a, %b
  %ba = sub i32 %b, %a
  %sel = select i1 %cmp, i32 %ba, i32 %ab
  ret i32 %sel
}

define i64 @abd_cmp_i64(i64 %a, i64 %b) nounwind {
; X86-LABEL: abd_cmp_i64:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %ecx, %ecx
; X86-NEXT: subl {{[0-9]+}}(%esp), %eax
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl %ecx, %ecx
; X86-NEXT: xorl %ecx, %edx
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: sbbl %ecx, %edx
; X86-NEXT: retl
;
; X64-LABEL: abd_cmp_i64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: subq %rsi, %rax
; X64-NEXT: subq %rdi, %rsi
; X64-NEXT: cmovaeq %rsi, %rax
; X64-NEXT: retq
  %cmp = icmp uge i64 %a, %b
  %ab = sub i64 %a, %b
  %ba = sub i64 %b, %a
  %sel = select i1 %cmp, i64 %ab, i64 %ba
  ret i64 %sel
}

define i128 @abd_cmp_i128(i128 %a, i128 %b) nounwind {
; X86-LABEL: abd_cmp_i128:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ebx, %ebx
; X86-NEXT: subl {{[0-9]+}}(%esp), %edi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %esi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl %ebx, %ebx
; X86-NEXT: xorl %ebx, %ecx
; X86-NEXT: xorl %ebx, %edx
; X86-NEXT: xorl %ebx, %esi
; X86-NEXT: xorl %ebx, %edi
; X86-NEXT: subl %ebx, %edi
; X86-NEXT: sbbl %ebx, %esi
; X86-NEXT: sbbl %ebx, %edx
; X86-NEXT: sbbl %ebx, %ecx
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl $4
;
; X64-LABEL: abd_cmp_i128:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: xorl %edi, %edi
; X64-NEXT: subq %rdx, %rax
; X64-NEXT: sbbq %rcx, %rsi
; X64-NEXT: sbbq %rdi, %rdi
; X64-NEXT: xorq %rdi, %rsi
; X64-NEXT: xorq %rdi, %rax
; X64-NEXT: subq %rdi, %rax
; X64-NEXT: sbbq %rdi, %rsi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: retq
  %cmp = icmp uge i128 %a, %b
  %ab = sub i128 %a, %b
  %ba = sub i128 %b, %a
  %sel = select i1 %cmp, i128 %ab, i128 %ba
  ret i128 %sel
}

;
; sub(select(icmp(a,b),a,b),select(icmp(a,b),b,a)) -> abdu(a,b)
;
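; Here the compare feeds two selects that pick the max-like and min-like
; operand, so the subtraction of the selects is again umax - umin:
;   sub (select (icmp ult a, b), b, a), (select (icmp ult a, b), a, b)
;     == sub (umax a, b), (umin a, b)  ==  abdu(a, b)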

define i8 @abd_select_i8(i8 %a, i8 %b) nounwind {
; X86-LABEL: abd_select_i8:
; X86: # %bb.0:
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_select_i8:
; X64: # %bb.0:
; X64-NEXT: movzbl %sil, %eax
; X64-NEXT: movzbl %dil, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: retq
  %cmp = icmp ult i8 %a, %b
  %ab = select i1 %cmp, i8 %a, i8 %b
  %ba = select i1 %cmp, i8 %b, i8 %a
  %sub = sub i8 %ba, %ab
  ret i8 %sub
}

define i16 @abd_select_i16(i16 %a, i16 %b) nounwind {
; X86-LABEL: abd_select_i16:
; X86: # %bb.0:
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: retl
;
; X64-LABEL: abd_select_i16:
; X64: # %bb.0:
; X64-NEXT: movzwl %si, %eax
; X64-NEXT: movzwl %di, %ecx
; X64-NEXT: movl %ecx, %edx
; X64-NEXT: subl %eax, %edx
; X64-NEXT: subl %ecx, %eax
; X64-NEXT: cmovbl %edx, %eax
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: retq
  %cmp = icmp ule i16 %a, %b
  %ab = select i1 %cmp, i16 %a, i16 %b
  %ba = select i1 %cmp, i16 %b, i16 %a
  %sub = sub i16 %ba, %ab
  ret i16 %sub
}

define i32 @abd_select_i32(i32 %a, i32 %b) nounwind {
; X86-LABEL: abd_select_i32:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl %ecx, %edx
; X86-NEXT: subl %eax, %edx
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: cmovbl %edx, %eax
; X86-NEXT: retl
;
; X64-LABEL: abd_select_i32:
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: subl %esi, %eax
; X64-NEXT: subl %edi, %esi
; X64-NEXT: cmovael %esi, %eax
; X64-NEXT: retq
  %cmp = icmp ugt i32 %a, %b
  %ab = select i1 %cmp, i32 %a, i32 %b
  %ba = select i1 %cmp, i32 %b, i32 %a
  %sub = sub i32 %ab, %ba
  ret i32 %sub
}

define i64 @abd_select_i64(i64 %a, i64 %b) nounwind {
; X86-LABEL: abd_select_i64:
; X86: # %bb.0:
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: xorl %ecx, %ecx
; X86-NEXT: subl {{[0-9]+}}(%esp), %eax
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl %ecx, %ecx
; X86-NEXT: xorl %ecx, %edx
; X86-NEXT: xorl %ecx, %eax
; X86-NEXT: subl %ecx, %eax
; X86-NEXT: sbbl %ecx, %edx
; X86-NEXT: retl
;
; X64-LABEL: abd_select_i64:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: subq %rsi, %rax
; X64-NEXT: subq %rdi, %rsi
; X64-NEXT: cmovaeq %rsi, %rax
; X64-NEXT: retq
  %cmp = icmp uge i64 %a, %b
  %ab = select i1 %cmp, i64 %a, i64 %b
  %ba = select i1 %cmp, i64 %b, i64 %a
  %sub = sub i64 %ab, %ba
  ret i64 %sub
}

define i128 @abd_select_i128(i128 %a, i128 %b) nounwind {
; X86-LABEL: abd_select_i128:
; X86: # %bb.0:
; X86-NEXT: pushl %ebx
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: xorl %ebx, %ebx
; X86-NEXT: subl {{[0-9]+}}(%esp), %edi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %esi
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %edx
; X86-NEXT: sbbl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: sbbl %ebx, %ebx
; X86-NEXT: xorl %ebx, %ecx
; X86-NEXT: xorl %ebx, %edx
; X86-NEXT: xorl %ebx, %esi
; X86-NEXT: xorl %ebx, %edi
; X86-NEXT: subl %ebx, %edi
; X86-NEXT: sbbl %ebx, %esi
; X86-NEXT: sbbl %ebx, %edx
; X86-NEXT: sbbl %ebx, %ecx
; X86-NEXT: movl %edi, (%eax)
; X86-NEXT: movl %esi, 4(%eax)
; X86-NEXT: movl %edx, 8(%eax)
; X86-NEXT: movl %ecx, 12(%eax)
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: popl %ebx
; X86-NEXT: retl $4
;
; X64-LABEL: abd_select_i128:
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: xorl %edi, %edi
; X64-NEXT: subq %rdx, %rax
; X64-NEXT: sbbq %rcx, %rsi
; X64-NEXT: sbbq %rdi, %rdi
; X64-NEXT: xorq %rdi, %rsi
; X64-NEXT: xorq %rdi, %rax
; X64-NEXT: subq %rdi, %rax
; X64-NEXT: sbbq %rdi, %rsi
; X64-NEXT: movq %rsi, %rdx
; X64-NEXT: retq
  %cmp = icmp ult i128 %a, %b
  %ab = select i1 %cmp, i128 %a, i128 %b
  %ba = select i1 %cmp, i128 %b, i128 %a
  %sub = sub i128 %ba, %ab
  ret i128 %sub
}

declare i8 @llvm.abs.i8(i8, i1)
declare i16 @llvm.abs.i16(i16, i1)
declare i32 @llvm.abs.i32(i32, i1)
declare i64 @llvm.abs.i64(i64, i1)
declare i128 @llvm.abs.i128(i128, i1)
declare i256 @llvm.abs.i256(i256, i1)

declare i8 @llvm.umax.i8(i8, i8)
declare i16 @llvm.umax.i16(i16, i16)
declare i32 @llvm.umax.i32(i32, i32)
declare i64 @llvm.umax.i64(i64, i64)
declare i128 @llvm.umax.i128(i128, i128)

declare i8 @llvm.umin.i8(i8, i8)
declare i16 @llvm.umin.i16(i16, i16)
declare i32 @llvm.umin.i32(i32, i32)
declare i64 @llvm.umin.i64(i64, i64)
declare i128 @llvm.umin.i128(i128, i128)