; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-linux | FileCheck %s -check-prefix=X86-64
; RUN: llc < %s -mtriple=x86_64-cygwin | FileCheck %s -check-prefix=WIN64
; RUN: llc < %s -mtriple=x86_64-win32 | FileCheck %s -check-prefix=WIN64
; RUN: llc < %s -mtriple=x86_64-mingw32 | FileCheck %s -check-prefix=WIN64
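; mod128 and div128 check that signed i128 srem/sdiv by 3 are lowered through
; the __modti3/__divti3 libcalls, and umod128 checks that urem by 11 (which
; does not divide 2^64-1) still calls __umodti3. udiv128 checks that unsigned
; division by 3 is expanded inline instead. These four functions return only
; the low 64 bits of the i128 result.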
define i64 @mod128(i128 %x) nounwind {
; X86-64-LABEL: mod128:
; X86-64: # %bb.0:
; X86-64-NEXT: pushq %rax
; X86-64-NEXT: movl $3, %edx
; X86-64-NEXT: xorl %ecx, %ecx
; X86-64-NEXT: callq __modti3@PLT
; X86-64-NEXT: popq %rcx
; X86-64-NEXT: retq
;
; WIN64-LABEL: mod128:
; WIN64: # %bb.0:
; WIN64-NEXT: subq $72, %rsp
; WIN64-NEXT: movq %rdx, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq %rcx, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq $3, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rcx
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT: callq __modti3
; WIN64-NEXT: movq %xmm0, %rax
; WIN64-NEXT: addq $72, %rsp
; WIN64-NEXT: retq
%1 = srem i128 %x, 3
%2 = trunc i128 %1 to i64
ret i64 %2
}
define i64 @div128(i128 %x) nounwind {
; X86-64-LABEL: div128:
; X86-64: # %bb.0:
; X86-64-NEXT: pushq %rax
; X86-64-NEXT: movl $3, %edx
; X86-64-NEXT: xorl %ecx, %ecx
; X86-64-NEXT: callq __divti3@PLT
; X86-64-NEXT: popq %rcx
; X86-64-NEXT: retq
;
; WIN64-LABEL: div128:
; WIN64: # %bb.0:
; WIN64-NEXT: subq $72, %rsp
; WIN64-NEXT: movq %rdx, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq %rcx, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq $3, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rcx
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT: callq __divti3
; WIN64-NEXT: movq %xmm0, %rax
; WIN64-NEXT: addq $72, %rsp
; WIN64-NEXT: retq
%1 = sdiv i128 %x, 3
%2 = trunc i128 %1 to i64
ret i64 %2
}
define i64 @umod128(i128 %x) nounwind {
; X86-64-LABEL: umod128:
; X86-64: # %bb.0:
; X86-64-NEXT: pushq %rax
; X86-64-NEXT: movl $11, %edx
; X86-64-NEXT: xorl %ecx, %ecx
; X86-64-NEXT: callq __umodti3@PLT
; X86-64-NEXT: popq %rcx
; X86-64-NEXT: retq
;
; WIN64-LABEL: umod128:
; WIN64: # %bb.0:
; WIN64-NEXT: subq $72, %rsp
; WIN64-NEXT: movq %rdx, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq %rcx, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq $11, {{[0-9]+}}(%rsp)
; WIN64-NEXT: movq $0, {{[0-9]+}}(%rsp)
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rcx
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT: callq __umodti3
; WIN64-NEXT: movq %xmm0, %rax
; WIN64-NEXT: addq $72, %rsp
; WIN64-NEXT: retq
%1 = urem i128 %x, 11
%2 = trunc i128 %1 to i64
ret i64 %2
}
define i64 @udiv128(i128 %x) nounwind {
; X86-64-LABEL: udiv128:
; X86-64: # %bb.0:
; X86-64-NEXT: addq %rdi, %rsi
; X86-64-NEXT: adcq $0, %rsi
; X86-64-NEXT: movabsq $-6148914691236517205, %rcx # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT: movq %rsi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT: subq %rsi, %rax
; X86-64-NEXT: addq %rdi, %rax
; X86-64-NEXT: imulq %rcx, %rax
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv128:
; WIN64: # %bb.0:
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: addq %rcx, %r8
; WIN64-NEXT: adcq $0, %r8
; WIN64-NEXT: movabsq $-6148914691236517205, %r9 # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT: movq %r8, %rax
; WIN64-NEXT: mulq %r9
; WIN64-NEXT: shrq %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT: subq %r8, %rax
; WIN64-NEXT: addq %rcx, %rax
; WIN64-NEXT: imulq %r9, %rax
; WIN64-NEXT: retq
%1 = udiv i128 %x, 3
%2 = trunc i128 %1 to i64
ret i64 %2
}
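; The urem_i128_* and udiv_i128_* tests below cover the inline expansion of
; unsigned i128 remainder/division by constants that divide 2^64-1 (3, 5, 15,
; 17, 255, 257, 65535, 65537). Since 2^64 == 1 (mod d) for such d, writing
; x = hi*2^64 + lo gives x == hi + lo (mod d), so the remainder is computed
; from the 64-bit end-around-carry sum (addq + adcq $0) with an ordinary
; 64-bit multiply-by-magic-constant sequence and no libcall.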
define i128 @urem_i128_3(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_3:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq $0, %rdi
; X86-64-NEXT: movabsq $-6148914691236517205, %rcx # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT: subq %rax, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_3:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: addq %rdx, %rcx
; WIN64-NEXT: adcq $0, %rcx
; WIN64-NEXT: movabsq $-6148914691236517205, %rdx # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT: subq %rax, %rcx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 3
ret i128 %rem
}
define i128 @urem_i128_5(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_5:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq $0, %rdi
; X86-64-NEXT: movabsq $-3689348814741910323, %rcx # imm = 0xCCCCCCCCCCCCCCCD
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq $2, %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT: subq %rax, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_5:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: addq %rdx, %rcx
; WIN64-NEXT: adcq $0, %rcx
; WIN64-NEXT: movabsq $-3689348814741910323, %rdx # imm = 0xCCCCCCCCCCCCCCCD
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq $2, %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT: subq %rax, %rcx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 5
ret i128 %rem
}
define i128 @urem_i128_15(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_15:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq $0, %rdi
; X86-64-NEXT: movabsq $-8608480567731124087, %rcx # imm = 0x8888888888888889
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq $3, %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT: leaq (%rax,%rax,2), %rax
; X86-64-NEXT: subq %rax, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_15:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: addq %rdx, %rcx
; WIN64-NEXT: adcq $0, %rcx
; WIN64-NEXT: movabsq $-8608480567731124087, %rdx # imm = 0x8888888888888889
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq $3, %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT: leaq (%rax,%rax,2), %rax
; WIN64-NEXT: subq %rax, %rcx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 15
ret i128 %rem
}
define i128 @urem_i128_17(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_17:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq $0, %rdi
; X86-64-NEXT: movabsq $-1085102592571150095, %rcx # imm = 0xF0F0F0F0F0F0F0F1
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: andq $-16, %rax
; X86-64-NEXT: shrq $4, %rdx
; X86-64-NEXT: addq %rax, %rdx
; X86-64-NEXT: subq %rdx, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_17:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: addq %rdx, %rcx
; WIN64-NEXT: adcq $0, %rcx
; WIN64-NEXT: movabsq $-1085102592571150095, %rdx # imm = 0xF0F0F0F0F0F0F0F1
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: andq $-16, %rax
; WIN64-NEXT: shrq $4, %rdx
; WIN64-NEXT: addq %rax, %rdx
; WIN64-NEXT: subq %rdx, %rcx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 17
ret i128 %rem
}
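; For 255 and 65535 (2^8-1 and 2^16-1) the final subtraction of q*d is folded
; into a second add/adc of the two halves: -q*d is added together with the
; recomputed hi+lo sum and its carry.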
define i128 @urem_i128_255(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_255:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: addq %rsi, %rax
; X86-64-NEXT: adcq $0, %rax
; X86-64-NEXT: movabsq $-9187201950435737471, %rcx # imm = 0x8080808080808081
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq $7, %rdx
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: shlq $8, %rax
; X86-64-NEXT: subq %rax, %rdx
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq %rdx, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_255:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: addq %rdx, %rax
; WIN64-NEXT: adcq $0, %rax
; WIN64-NEXT: movabsq $-9187201950435737471, %rdx # imm = 0x8080808080808081
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq $7, %rdx
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: shlq $8, %rax
; WIN64-NEXT: subq %rax, %rdx
; WIN64-NEXT: addq %rcx, %r8
; WIN64-NEXT: adcq %rdx, %r8
; WIN64-NEXT: movq %r8, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 255
ret i128 %rem
}
define i128 @urem_i128_257(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_257:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq $0, %rdi
; X86-64-NEXT: movabsq $-71777214294589695, %rcx # imm = 0xFF00FF00FF00FF01
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: andq $-256, %rax
; X86-64-NEXT: shrq $8, %rdx
; X86-64-NEXT: addq %rax, %rdx
; X86-64-NEXT: subq %rdx, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_257:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: addq %rdx, %rcx
; WIN64-NEXT: adcq $0, %rcx
; WIN64-NEXT: movabsq $-71777214294589695, %rdx # imm = 0xFF00FF00FF00FF01
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: andq $-256, %rax
; WIN64-NEXT: shrq $8, %rdx
; WIN64-NEXT: addq %rax, %rdx
; WIN64-NEXT: subq %rdx, %rcx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 257
ret i128 %rem
}
define i128 @urem_i128_65535(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_65535:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: addq %rsi, %rax
; X86-64-NEXT: adcq $0, %rax
; X86-64-NEXT: movabsq $-9223231297218904063, %rcx # imm = 0x8000800080008001
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq $15, %rdx
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: shlq $16, %rax
; X86-64-NEXT: subq %rax, %rdx
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq %rdx, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_65535:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: addq %rdx, %rax
; WIN64-NEXT: adcq $0, %rax
; WIN64-NEXT: movabsq $-9223231297218904063, %rdx # imm = 0x8000800080008001
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq $15, %rdx
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: shlq $16, %rax
; WIN64-NEXT: subq %rax, %rdx
; WIN64-NEXT: addq %rcx, %r8
; WIN64-NEXT: adcq %rdx, %r8
; WIN64-NEXT: movq %r8, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 65535
ret i128 %rem
}
define i128 @urem_i128_65537(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_65537:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: addq %rsi, %rdi
; X86-64-NEXT: adcq $0, %rdi
; X86-64-NEXT: movabsq $-281470681808895, %rcx # imm = 0xFFFF0000FFFF0001
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: andq $-65536, %rax # imm = 0xFFFF0000
; X86-64-NEXT: shrq $16, %rdx
; X86-64-NEXT: addq %rax, %rdx
; X86-64-NEXT: subq %rdx, %rdi
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_65537:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: addq %rdx, %rcx
; WIN64-NEXT: adcq $0, %rcx
; WIN64-NEXT: movabsq $-281470681808895, %rdx # imm = 0xFFFF0000FFFF0001
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: andq $-65536, %rax # imm = 0xFFFF0000
; WIN64-NEXT: shrq $16, %rdx
; WIN64-NEXT: addq %rax, %rdx
; WIN64-NEXT: subq %rdx, %rcx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 65537
ret i128 %rem
}
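; 12 = 4 * 3: the low two bits give x mod 4 directly and x >> 2 is reduced
; mod 3 as above, so x mod 12 is reassembled as (x & 3) + 4 * ((x >> 2) mod 3)
; without a libcall.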
define i128 @urem_i128_12(i128 %x) nounwind {
; X86-64-LABEL: urem_i128_12:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rsi, %rax
; X86-64-NEXT: shldq $62, %rdi, %rax
; X86-64-NEXT: shrq $2, %rsi
; X86-64-NEXT: addq %rax, %rsi
; X86-64-NEXT: adcq $0, %rsi
; X86-64-NEXT: movabsq $-6148914691236517205, %rcx # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT: movq %rsi, %rax
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT: subq %rax, %rsi
; X86-64-NEXT: andl $3, %edi
; X86-64-NEXT: leaq (%rdi,%rsi,4), %rax
; X86-64-NEXT: xorl %edx, %edx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_12:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: shldq $62, %rcx, %rax
; WIN64-NEXT: shrq $2, %r8
; WIN64-NEXT: addq %rax, %r8
; WIN64-NEXT: adcq $0, %r8
; WIN64-NEXT: movabsq $-6148914691236517205, %rdx # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT: movq %r8, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT: subq %rax, %r8
; WIN64-NEXT: andl $3, %ecx
; WIN64-NEXT: leaq (%rcx,%r8,4), %rax
; WIN64-NEXT: xorl %edx, %edx
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 12
ret i128 %rem
}
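; The udiv_i128_* tests check the companion quotient expansion: the remainder
; r is computed as above, then the quotient is recovered as (x - r) * inv,
; where inv is the multiplicative inverse of the divisor modulo 2^128. The
; trailing mulq and two imulq instructions form the 128-bit low-half multiply
; by that two-word inverse constant.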
define i128 @udiv_i128_3(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_3:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rcx
; X86-64-NEXT: addq %rsi, %rcx
; X86-64-NEXT: adcq $0, %rcx
; X86-64-NEXT: movabsq $-6148914691236517205, %r8 # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT: movq %rcx, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: shrq %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT: subq %rax, %rcx
; X86-64-NEXT: subq %rcx, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-6148914691236517206, %rcx # imm = 0xAAAAAAAAAAAAAAAA
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_3:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %r9
; WIN64-NEXT: addq %rdx, %r9
; WIN64-NEXT: adcq $0, %r9
; WIN64-NEXT: movabsq $-6148914691236517205, %r10 # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT: movq %r9, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: shrq %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT: subq %rax, %r9
; WIN64-NEXT: subq %r9, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-6148914691236517206, %r9 # imm = 0xAAAAAAAAAAAAAAAA
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 3
ret i128 %div
}
define i128 @udiv_i128_5(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_5:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rcx
; X86-64-NEXT: addq %rsi, %rcx
; X86-64-NEXT: adcq $0, %rcx
; X86-64-NEXT: movabsq $-3689348814741910323, %r8 # imm = 0xCCCCCCCCCCCCCCCD
; X86-64-NEXT: movq %rcx, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: shrq $2, %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT: subq %rax, %rcx
; X86-64-NEXT: subq %rcx, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-3689348814741910324, %rcx # imm = 0xCCCCCCCCCCCCCCCC
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_5:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %r9
; WIN64-NEXT: addq %rdx, %r9
; WIN64-NEXT: adcq $0, %r9
; WIN64-NEXT: movabsq $-3689348814741910323, %r10 # imm = 0xCCCCCCCCCCCCCCCD
; WIN64-NEXT: movq %r9, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: shrq $2, %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT: subq %rax, %r9
; WIN64-NEXT: subq %r9, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-3689348814741910324, %r9 # imm = 0xCCCCCCCCCCCCCCCC
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 5
ret i128 %div
}
define i128 @udiv_i128_15(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_15:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rcx
; X86-64-NEXT: addq %rsi, %rcx
; X86-64-NEXT: adcq $0, %rcx
; X86-64-NEXT: movabsq $-8608480567731124087, %rdx # imm = 0x8888888888888889
; X86-64-NEXT: movq %rcx, %rax
; X86-64-NEXT: mulq %rdx
; X86-64-NEXT: shrq $3, %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,4), %rax
; X86-64-NEXT: leaq (%rax,%rax,2), %rax
; X86-64-NEXT: subq %rax, %rcx
; X86-64-NEXT: subq %rcx, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-1229782938247303442, %rcx # imm = 0xEEEEEEEEEEEEEEEE
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movabsq $-1229782938247303441, %r8 # imm = 0xEEEEEEEEEEEEEEEF
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_15:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %r9
; WIN64-NEXT: addq %rdx, %r9
; WIN64-NEXT: adcq $0, %r9
; WIN64-NEXT: movabsq $-8608480567731124087, %rdx # imm = 0x8888888888888889
; WIN64-NEXT: movq %r9, %rax
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq $3, %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,4), %rax
; WIN64-NEXT: leaq (%rax,%rax,2), %rax
; WIN64-NEXT: subq %rax, %r9
; WIN64-NEXT: subq %r9, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-1229782938247303442, %r9 # imm = 0xEEEEEEEEEEEEEEEE
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movabsq $-1229782938247303441, %r10 # imm = 0xEEEEEEEEEEEEEEEF
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 15
ret i128 %div
}
define i128 @udiv_i128_17(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_17:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rcx
; X86-64-NEXT: addq %rsi, %rcx
; X86-64-NEXT: adcq $0, %rcx
; X86-64-NEXT: movabsq $-1085102592571150095, %r8 # imm = 0xF0F0F0F0F0F0F0F1
; X86-64-NEXT: movq %rcx, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: andq $-16, %rax
; X86-64-NEXT: shrq $4, %rdx
; X86-64-NEXT: addq %rax, %rdx
; X86-64-NEXT: subq %rdx, %rcx
; X86-64-NEXT: subq %rcx, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-1085102592571150096, %rcx # imm = 0xF0F0F0F0F0F0F0F0
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_17:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %r9
; WIN64-NEXT: addq %rdx, %r9
; WIN64-NEXT: adcq $0, %r9
; WIN64-NEXT: movabsq $-1085102592571150095, %r10 # imm = 0xF0F0F0F0F0F0F0F1
; WIN64-NEXT: movq %r9, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: andq $-16, %rax
; WIN64-NEXT: shrq $4, %rdx
; WIN64-NEXT: addq %rax, %rdx
; WIN64-NEXT: subq %rdx, %r9
; WIN64-NEXT: subq %r9, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-1085102592571150096, %r9 # imm = 0xF0F0F0F0F0F0F0F0
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 17
ret i128 %div
}
define i128 @udiv_i128_255(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_255:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: addq %rsi, %rax
; X86-64-NEXT: adcq $0, %rax
; X86-64-NEXT: movabsq $-9187201950435737471, %rcx # imm = 0x8080808080808081
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq $7, %rdx
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: shlq $8, %rax
; X86-64-NEXT: subq %rax, %rdx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: addq %rsi, %rax
; X86-64-NEXT: adcq %rdx, %rax
; X86-64-NEXT: subq %rax, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-72340172838076674, %rcx # imm = 0xFEFEFEFEFEFEFEFE
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movabsq $-72340172838076673, %r8 # imm = 0xFEFEFEFEFEFEFEFF
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_255:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: addq %rdx, %rax
; WIN64-NEXT: adcq $0, %rax
; WIN64-NEXT: movabsq $-9187201950435737471, %rdx # imm = 0x8080808080808081
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq $7, %rdx
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: shlq $8, %rax
; WIN64-NEXT: subq %rax, %rdx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: addq %r8, %rax
; WIN64-NEXT: adcq %rdx, %rax
; WIN64-NEXT: subq %rax, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-72340172838076674, %r9 # imm = 0xFEFEFEFEFEFEFEFE
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movabsq $-72340172838076673, %r10 # imm = 0xFEFEFEFEFEFEFEFF
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 255
ret i128 %div
}
define i128 @udiv_i128_257(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_257:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rcx
; X86-64-NEXT: addq %rsi, %rcx
; X86-64-NEXT: adcq $0, %rcx
; X86-64-NEXT: movabsq $-71777214294589695, %r8 # imm = 0xFF00FF00FF00FF01
; X86-64-NEXT: movq %rcx, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: andq $-256, %rax
; X86-64-NEXT: shrq $8, %rdx
; X86-64-NEXT: addq %rax, %rdx
; X86-64-NEXT: subq %rdx, %rcx
; X86-64-NEXT: subq %rcx, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-71777214294589696, %rcx # imm = 0xFF00FF00FF00FF00
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_257:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %r9
; WIN64-NEXT: addq %rdx, %r9
; WIN64-NEXT: adcq $0, %r9
; WIN64-NEXT: movabsq $-71777214294589695, %r10 # imm = 0xFF00FF00FF00FF01
; WIN64-NEXT: movq %r9, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: andq $-256, %rax
; WIN64-NEXT: shrq $8, %rdx
; WIN64-NEXT: addq %rax, %rdx
; WIN64-NEXT: subq %rdx, %r9
; WIN64-NEXT: subq %r9, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-71777214294589696, %r9 # imm = 0xFF00FF00FF00FF00
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 257
ret i128 %div
}
define i128 @udiv_i128_65535(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_65535:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: addq %rsi, %rax
; X86-64-NEXT: adcq $0, %rax
; X86-64-NEXT: movabsq $-9223231297218904063, %rcx # imm = 0x8000800080008001
; X86-64-NEXT: mulq %rcx
; X86-64-NEXT: shrq $15, %rdx
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: shlq $16, %rax
; X86-64-NEXT: subq %rax, %rdx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: addq %rsi, %rax
; X86-64-NEXT: adcq %rdx, %rax
; X86-64-NEXT: subq %rax, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-281479271743490, %rcx # imm = 0xFFFEFFFEFFFEFFFE
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movabsq $-281479271743489, %r8 # imm = 0xFFFEFFFEFFFEFFFF
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_65535:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: addq %rdx, %rax
; WIN64-NEXT: adcq $0, %rax
; WIN64-NEXT: movabsq $-9223231297218904063, %rdx # imm = 0x8000800080008001
; WIN64-NEXT: mulq %rdx
; WIN64-NEXT: shrq $15, %rdx
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: shlq $16, %rax
; WIN64-NEXT: subq %rax, %rdx
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: addq %r8, %rax
; WIN64-NEXT: adcq %rdx, %rax
; WIN64-NEXT: subq %rax, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-281479271743490, %r9 # imm = 0xFFFEFFFEFFFEFFFE
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movabsq $-281479271743489, %r10 # imm = 0xFFFEFFFEFFFEFFFF
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 65535
ret i128 %div
}
define i128 @udiv_i128_65537(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_65537:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: movq %rdi, %rcx
; X86-64-NEXT: addq %rsi, %rcx
; X86-64-NEXT: adcq $0, %rcx
; X86-64-NEXT: movabsq $-281470681808895, %r8 # imm = 0xFFFF0000FFFF0001
; X86-64-NEXT: movq %rcx, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: movq %rdx, %rax
; X86-64-NEXT: andq $-65536, %rax # imm = 0xFFFF0000
; X86-64-NEXT: shrq $16, %rdx
; X86-64-NEXT: addq %rax, %rdx
; X86-64-NEXT: subq %rdx, %rcx
; X86-64-NEXT: subq %rcx, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-281470681808896, %rcx # imm = 0xFFFF0000FFFF0000
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_65537:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: movq %rcx, %r9
; WIN64-NEXT: addq %rdx, %r9
; WIN64-NEXT: adcq $0, %r9
; WIN64-NEXT: movabsq $-281470681808895, %r10 # imm = 0xFFFF0000FFFF0001
; WIN64-NEXT: movq %r9, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: movq %rdx, %rax
; WIN64-NEXT: andq $-65536, %rax # imm = 0xFFFF0000
; WIN64-NEXT: shrq $16, %rdx
; WIN64-NEXT: addq %rax, %rdx
; WIN64-NEXT: subq %rdx, %r9
; WIN64-NEXT: subq %r9, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-281470681808896, %r9 # imm = 0xFFFF0000FFFF0000
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 65537
ret i128 %div
}
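; Division by 12 shifts out the two low bits first (a 128-bit right shift via
; shrdq/shrq) and then applies the divide-by-3 expansion to x >> 2.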
define i128 @udiv_i128_12(i128 %x) nounwind {
; X86-64-LABEL: udiv_i128_12:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: shrdq $2, %rsi, %rdi
; X86-64-NEXT: shrq $2, %rsi
; X86-64-NEXT: movq %rdi, %rcx
; X86-64-NEXT: addq %rsi, %rcx
; X86-64-NEXT: adcq $0, %rcx
; X86-64-NEXT: movabsq $-6148914691236517205, %r8 # imm = 0xAAAAAAAAAAAAAAAB
; X86-64-NEXT: movq %rcx, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: shrq %rdx
; X86-64-NEXT: leaq (%rdx,%rdx,2), %rax
; X86-64-NEXT: subq %rax, %rcx
; X86-64-NEXT: subq %rcx, %rdi
; X86-64-NEXT: sbbq $0, %rsi
; X86-64-NEXT: movabsq $-6148914691236517206, %rcx # imm = 0xAAAAAAAAAAAAAAAA
; X86-64-NEXT: imulq %rdi, %rcx
; X86-64-NEXT: movq %rdi, %rax
; X86-64-NEXT: mulq %r8
; X86-64-NEXT: addq %rcx, %rdx
; X86-64-NEXT: imulq %rsi, %r8
; X86-64-NEXT: addq %r8, %rdx
; X86-64-NEXT: retq
;
; WIN64-LABEL: udiv_i128_12:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: movq %rdx, %r8
; WIN64-NEXT: shrdq $2, %rdx, %rcx
; WIN64-NEXT: shrq $2, %r8
; WIN64-NEXT: movq %rcx, %r9
; WIN64-NEXT: addq %r8, %r9
; WIN64-NEXT: adcq $0, %r9
; WIN64-NEXT: movabsq $-6148914691236517205, %r10 # imm = 0xAAAAAAAAAAAAAAAB
; WIN64-NEXT: movq %r9, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: shrq %rdx
; WIN64-NEXT: leaq (%rdx,%rdx,2), %rax
; WIN64-NEXT: subq %rax, %r9
; WIN64-NEXT: subq %r9, %rcx
; WIN64-NEXT: sbbq $0, %r8
; WIN64-NEXT: movabsq $-6148914691236517206, %r9 # imm = 0xAAAAAAAAAAAAAAAA
; WIN64-NEXT: imulq %rcx, %r9
; WIN64-NEXT: movq %rcx, %rax
; WIN64-NEXT: mulq %r10
; WIN64-NEXT: addq %r9, %rdx
; WIN64-NEXT: imulq %r10, %r8
; WIN64-NEXT: addq %r8, %rdx
; WIN64-NEXT: retq
entry:
%div = udiv i128 %x, 12
ret i128 %div
}
; Make sure we don't inline expand for minsize.
define i128 @urem_i128_3_minsize(i128 %x) nounwind minsize {
; X86-64-LABEL: urem_i128_3_minsize:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: pushq %rax
; X86-64-NEXT: pushq $3
; X86-64-NEXT: popq %rdx
; X86-64-NEXT: xorl %ecx, %ecx
; X86-64-NEXT: callq __umodti3@PLT
; X86-64-NEXT: popq %rcx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_3_minsize:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: subq $72, %rsp
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rax
; WIN64-NEXT: movq %rdx, 8(%rax)
; WIN64-NEXT: movq %rcx, (%rax)
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT: movq $3, (%rdx)
; WIN64-NEXT: andq $0, 8(%rdx)
; WIN64-NEXT: movq %rax, %rcx
; WIN64-NEXT: callq __umodti3
; WIN64-NEXT: movq %xmm0, %rax
; WIN64-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; WIN64-NEXT: movq %xmm0, %rdx
; WIN64-NEXT: addq $72, %rsp
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 3
ret i128 %rem
}
; Make sure we don't inline expand for optsize.
define i128 @urem_i128_3_optsize(i128 %x) nounwind optsize {
; X86-64-LABEL: urem_i128_3_optsize:
; X86-64: # %bb.0: # %entry
; X86-64-NEXT: pushq %rax
; X86-64-NEXT: movl $3, %edx
; X86-64-NEXT: xorl %ecx, %ecx
; X86-64-NEXT: callq __umodti3@PLT
; X86-64-NEXT: popq %rcx
; X86-64-NEXT: retq
;
; WIN64-LABEL: urem_i128_3_optsize:
; WIN64: # %bb.0: # %entry
; WIN64-NEXT: subq $72, %rsp
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rax
; WIN64-NEXT: movq %rdx, 8(%rax)
; WIN64-NEXT: movq %rcx, (%rax)
; WIN64-NEXT: leaq {{[0-9]+}}(%rsp), %rdx
; WIN64-NEXT: movq $3, (%rdx)
; WIN64-NEXT: movq $0, 8(%rdx)
; WIN64-NEXT: movq %rax, %rcx
; WIN64-NEXT: callq __umodti3
; WIN64-NEXT: movq %xmm0, %rax
; WIN64-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; WIN64-NEXT: movq %xmm0, %rdx
; WIN64-NEXT: addq $72, %rsp
; WIN64-NEXT: retq
entry:
%rem = urem i128 %x, 3
ret i128 %rem
}