; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=amdgcn-amd-mesa3d < %s | FileCheck %s
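
; Odd divisor on an illegal scalar type: (X urem 5) == 0 on i13 should lower to
; a multiply by the modular inverse of 5 mod 2^13 (0xccd, since 5 * 0xccd == 1
; mod 0x2000), a mask back to 13 bits, and an unsigned compare against 0x667,
; with no expanded division sequence.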
define i1 @test_urem_odd(i13 %X) nounwind {
; CHECK-LABEL: test_urem_odd:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_movk_i32 s4, 0x1fff
; CHECK-NEXT: s_movk_i32 s5, 0x667
; CHECK-NEXT: v_and_b32_e32 v0, s4, v0
; CHECK-NEXT: v_mul_u32_u24_e32 v0, 0xccd, v0
; CHECK-NEXT: v_and_b32_e32 v0, s4, v0
; CHECK-NEXT: v_cmp_gt_u32_e32 vcc, s5, v0
; CHECK-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
  %urem = urem i13 %X, 5
  %cmp = icmp eq i13 %urem, 0
  ret i1 %cmp
}
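
; Even divisor: (X urem 14) == 0 on i27 multiplies by the inverse of the odd
; factor 7 (0x6db6db7 mod 2^27) and rotates right by one bit within the 27-bit
; field (the shl/bfe/or sequence) to fold away the factor of 2 before the
; unsigned compare against 0x924925.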
define i1 @test_urem_even(i27 %X) nounwind {
; CHECK-LABEL: test_urem_even:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_mov_b32 s4, 0x6db6db7
; CHECK-NEXT: s_mov_b32 s5, 0x924925
; CHECK-NEXT: v_mul_lo_u32 v0, v0, s4
; CHECK-NEXT: v_lshlrev_b32_e32 v1, 26, v0
; CHECK-NEXT: v_bfe_u32 v0, v0, 1, 26
; CHECK-NEXT: v_or_b32_e32 v0, v0, v1
; CHECK-NEXT: v_and_b32_e32 v0, 0x7ffffff, v0
; CHECK-NEXT: v_cmp_gt_u32_e32 vcc, s5, v0
; CHECK-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
  %urem = urem i27 %X, 14
  %cmp = icmp eq i27 %urem, 0
  ret i1 %cmp
}
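
; The same fold with icmp ne: (X urem 5) != 0 on i4 multiplies by 13 (the
; inverse of 5 mod 16) and checks for a result greater than 3.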
define i1 @test_urem_odd_setne(i4 %X) nounwind {
; CHECK-LABEL: test_urem_odd_setne:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: v_and_b32_e32 v0, 15, v0
; CHECK-NEXT: v_mul_u32_u24_e32 v0, 13, v0
; CHECK-NEXT: v_and_b32_e32 v0, 15, v0
; CHECK-NEXT: v_cmp_lt_u32_e32 vcc, 3, v0
; CHECK-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
  %urem = urem i4 %X, 5
  %cmp = icmp ne i4 %urem, 0
  ret i1 %cmp
}
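
; Negative odd divisor: -5 wraps to 507 as an i9, whose inverse mod 2^9 is
; 0x133; the remainder is non-zero when the multiply result exceeds 1.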
define i1 @test_urem_negative_odd(i9 %X) nounwind {
; CHECK-LABEL: test_urem_negative_odd:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_movk_i32 s4, 0x1ff
; CHECK-NEXT: v_and_b32_e32 v0, s4, v0
; CHECK-NEXT: v_mul_u32_u24_e32 v0, 0x133, v0
; CHECK-NEXT: v_and_b32_e32 v0, s4, v0
; CHECK-NEXT: v_cmp_lt_u32_e32 vcc, 1, v0
; CHECK-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
  %urem = urem i9 %X, -5
  %cmp = icmp ne i9 %urem, 0
  ret i1 %cmp
}
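
; Vector case mixing even, odd, and negative divisors with non-zero comparison
; values; each i11 lane is still lowered to a multiply (plus add or rotate) by
; a precomputed inverse and an unsigned compare, rather than any division.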
define <3 x i1> @test_urem_vec(<3 x i11> %X) nounwind {
; CHECK-LABEL: test_urem_vec:
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_movk_i32 s4, 0x7ff
; CHECK-NEXT: s_mov_b32 s5, 0x8311eb33
; CHECK-NEXT: s_mov_b32 s6, 0x20140c
; CHECK-NEXT: s_mov_b32 s7, 0xb6db6db7
; CHECK-NEXT: s_mov_b32 s8, 0x49249249
; CHECK-NEXT: s_mov_b32 s9, 0x24924924
; CHECK-NEXT: s_mov_b32 s10, 0xaaaaaaab
; CHECK-NEXT: s_mov_b32 s11, 0x2aaaaaaa
; CHECK-NEXT: v_and_b32_e32 v0, s4, v0
; CHECK-NEXT: v_and_b32_e32 v1, s4, v1
; CHECK-NEXT: v_and_b32_e32 v2, s4, v2
; CHECK-NEXT: v_mul_lo_u32 v2, v2, s5
; CHECK-NEXT: v_mul_lo_u32 v1, v1, s7
; CHECK-NEXT: v_mul_lo_u32 v0, v0, s10
; CHECK-NEXT: v_add_i32_e32 v2, vcc, 0xf9dc299a, v2
; CHECK-NEXT: v_add_i32_e32 v1, vcc, s8, v1
; CHECK-NEXT: v_alignbit_b32 v0, v0, v0, 1
; CHECK-NEXT: v_cmp_lt_u32_e32 vcc, s11, v0
; CHECK-NEXT: v_cndmask_b32_e64 v0, 0, 1, vcc
; CHECK-NEXT: v_cmp_lt_u32_e32 vcc, s9, v1
; CHECK-NEXT: v_cndmask_b32_e64 v1, 0, 1, vcc
; CHECK-NEXT: v_cmp_lt_u32_e32 vcc, s6, v2
; CHECK-NEXT: v_cndmask_b32_e64 v2, 0, 1, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
  %urem = urem <3 x i11> %X, <i11 6, i11 7, i11 -5>
  %cmp = icmp ne <3 x i11> %urem, <i11 0, i11 1, i11 2>
  ret <3 x i1> %cmp
}