jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 1 | ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py |
Fangrui Song | 9e9907f | 2024-01-16 21:54:58 -0800 | [diff] [blame] | 2 | ; RUN: llc -mtriple=amdgcn -mcpu=gfx900 -verify-machineinstrs < %s | FileCheck -enable-var-scope --check-prefix=GCN %s |
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 3 | |
| 4 | ; Test combine to reduce the width of a 64-bit shift to 32-bit if |
| 5 | ; truncated to 16-bit. |
; lshr 16 + trunc to i16 needs only bits [16,32) of the low dword, so the
; 64-bit shift is narrowed to a single 32-bit v_lshrrev_b32 on v0.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 6 | define i16 @trunc_srl_i64_16_to_i16(i64 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 7 | ; GCN-LABEL: trunc_srl_i64_16_to_i16:
| 8 | ; GCN: ; %bb.0:
| 9 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 10 | ; GCN-NEXT: v_lshrrev_b32_e32 v0, 16, v0
| 11 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 12 | %shift = lshr i64 %x, 16
| 13 | %trunc = trunc i64 %shift to i16
| 14 | ret i16 %trunc
| 15 | }
| 16 | |
; lshr 17 + trunc to i16 needs bits [17,33), which crosses the dword boundary
; (bit 32 lives in the high half), so the full 64-bit v_lshrrev_b64 remains.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 17 | define i16 @trunc_srl_i64_17_to_i16(i64 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 18 | ; GCN-LABEL: trunc_srl_i64_17_to_i16:
| 19 | ; GCN: ; %bb.0:
| 20 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 21 | ; GCN-NEXT: v_lshrrev_b64 v[0:1], 17, v[0:1]
| 22 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 23 | %shift = lshr i64 %x, 17
| 24 | %trunc = trunc i64 %shift to i16
| 25 | ret i16 %trunc
| 26 | }
| 27 | |
; Non-power-of-2 types: i55 lshr 15 + trunc to i15 needs bits [15,30), all in
; the low dword, so a 32-bit shift suffices; the add is done as a 16-bit add.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 28 | define i15 @trunc_srl_i55_16_to_i15(i55 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 29 | ; GCN-LABEL: trunc_srl_i55_16_to_i15:
| 30 | ; GCN: ; %bb.0:
| 31 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 32 | ; GCN-NEXT: v_lshrrev_b32_e32 v0, 15, v0
| 33 | ; GCN-NEXT: v_add_u16_e32 v0, 4, v0
| 34 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 35 | %shift = lshr i55 %x, 15
| 36 | %trunc = trunc i55 %shift to i15
| 37 | %add = add i15 %trunc, 4
| 38 | ret i15 %add
| 39 | }
| 40 | |
; ashr 16 + trunc to i16: the sign-extended bits are discarded by the trunc
; (only bits [16,32) survive), so this narrows to a logical 32-bit shift.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 41 | define i16 @trunc_sra_i64_16_to_i16(i64 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 42 | ; GCN-LABEL: trunc_sra_i64_16_to_i16:
| 43 | ; GCN: ; %bb.0:
| 44 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 45 | ; GCN-NEXT: v_lshrrev_b32_e32 v0, 16, v0
| 46 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 47 | %shift = ashr i64 %x, 16
| 48 | %trunc = trunc i64 %shift to i16
| 49 | ret i16 %trunc
| 50 | }
| 51 | |
; ashr 17 + trunc to i16 needs bit 32 from the high half; the ashr is still
; replaced by a logical 64-bit shift since the sign bits are truncated away.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 52 | define i16 @trunc_sra_i64_17_to_i16(i64 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 53 | ; GCN-LABEL: trunc_sra_i64_17_to_i16:
| 54 | ; GCN: ; %bb.0:
| 55 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 56 | ; GCN-NEXT: v_lshrrev_b64 v[0:1], 17, v[0:1]
| 57 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 58 | %shift = ashr i64 %x, 17
| 59 | %trunc = trunc i64 %shift to i16
| 60 | ret i16 %trunc
| 61 | }
| 62 | |
; shl by >= 16 then trunc to i16 leaves no original bits in the result, so
; the whole expression folds to the constant 0.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 63 | define i16 @trunc_shl_i64_16_to_i16(i64 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 64 | ; GCN-LABEL: trunc_shl_i64_16_to_i16:
| 65 | ; GCN: ; %bb.0:
| 66 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 67 | ; GCN-NEXT: v_mov_b32_e32 v0, 0
| 68 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 69 | %shift = shl i64 %x, 16
| 70 | %trunc = trunc i64 %shift to i16
| 71 | ret i16 %trunc
| 72 | }
| 73 | |
; Same as the shl-16 case with an amount past the truncated width: shl 17 +
; trunc to i16 also folds to the constant 0.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 74 | define i16 @trunc_shl_i64_17_to_i16(i64 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 75 | ; GCN-LABEL: trunc_shl_i64_17_to_i16:
| 76 | ; GCN: ; %bb.0:
| 77 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 78 | ; GCN-NEXT: v_mov_b32_e32 v0, 0
| 79 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 80 | %shift = shl i64 %x, 17
| 81 | %trunc = trunc i64 %shift to i16
| 82 | ret i16 %trunc
| 83 | }
| 84 | |
; Vector case: each lane's (lshr 16, trunc i16) just selects bytes 2-3 of the
; low dword of each i64, so both lanes are combined into one v_perm_b32
; byte-select (selector 0x7060302) packing the <2 x i16> result into v0.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 85 | define <2 x i16> @trunc_srl_v2i64_16_to_v2i16(<2 x i64> %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 86 | ; GCN-LABEL: trunc_srl_v2i64_16_to_v2i16:
| 87 | ; GCN: ; %bb.0:
| 88 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
jeff | f4e6149 | 2022-09-21 18:09:30 +0000 | [diff] [blame] | 89 | ; GCN-NEXT: s_mov_b32 s4, 0x7060302
| 90 | ; GCN-NEXT: v_perm_b32 v0, v2, v0, s4
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 91 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 92 | %shift = lshr <2 x i64> %x, <i64 16, i64 16>
| 93 | %trunc = trunc <2 x i64> %shift to <2 x i16>
| 94 | ret <2 x i16> %trunc
| 95 | }
| 96 | |
; Scalar (SALU) version: only the low dword of the kernel argument is loaded
; (s_load_dword), the 64-bit shift is narrowed to s_lshr_b32, and the i16
; result is stored. The store to undef keeps the computation alive.
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 97 | define amdgpu_kernel void @s_trunc_srl_i64_16_to_i16(i64 %x) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 98 | ; GCN-LABEL: s_trunc_srl_i64_16_to_i16:
| 99 | ; GCN: ; %bb.0:
Shilei Tian | 6548b63 | 2024-11-08 20:21:16 -0500 | [diff] [blame] | 100 | ; GCN-NEXT: s_load_dword s0, s[4:5], 0x24
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 101 | ; GCN-NEXT: s_waitcnt lgkmcnt(0)
| 102 | ; GCN-NEXT: s_lshr_b32 s0, s0, 16
| 103 | ; GCN-NEXT: s_or_b32 s0, s0, 4
| 104 | ; GCN-NEXT: v_mov_b32_e32 v0, s0
| 105 | ; GCN-NEXT: global_store_short v[0:1], v0, off
| 106 | ; GCN-NEXT: s_endpgm
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 107 | %shift = lshr i64 %x, 16
| 108 | %trunc = trunc i64 %shift to i16
| 109 | %add = or i16 %trunc, 4
Nikita Popov | bdf2fbb | 2022-12-19 12:39:01 +0100 | [diff] [blame] | 110 | store i16 %add, ptr addrspace(1) undef
Matt Arsenault | b143d9a | 2018-05-09 20:52:43 +0000 | [diff] [blame] | 111 | ret void
| 112 | }
Matt Arsenault | 74fd760 | 2018-05-09 20:52:54 +0000 | [diff] [blame] | 113 | |
; Variable shift amount masked to <= 15: the highest bit the i16 result can
; need is bit 30 (15 + 15), entirely in the low dword, so the shift narrows
; to a 32-bit v_lshrrev_b32.
Matt Arsenault | 74fd760 | 2018-05-09 20:52:54 +0000 | [diff] [blame] | 114 | define i16 @trunc_srl_i64_var_mask15_to_i16(i64 %x, i64 %amt) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 115 | ; GCN-LABEL: trunc_srl_i64_var_mask15_to_i16:
| 116 | ; GCN: ; %bb.0:
| 117 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 118 | ; GCN-NEXT: v_and_b32_e32 v1, 15, v2
| 119 | ; GCN-NEXT: v_lshrrev_b32_e32 v0, v1, v0
| 120 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | 74fd760 | 2018-05-09 20:52:54 +0000 | [diff] [blame] | 121 | %amt.masked = and i64 %amt, 15
| 122 | %shift = lshr i64 %x, %amt.masked
| 123 | %trunc = trunc i64 %shift to i16
| 124 | ret i16 %trunc
| 125 | }
| 126 | |
; Mask of 16: the amount is 0 or 16 (single bit mask), so the highest needed
; bit is 31 (16 + 15) — still within the low dword, so the 32-bit shift is
; used here as well.
Matt Arsenault | 74fd760 | 2018-05-09 20:52:54 +0000 | [diff] [blame] | 127 | define i16 @trunc_srl_i64_var_mask16_to_i16(i64 %x, i64 %amt) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 128 | ; GCN-LABEL: trunc_srl_i64_var_mask16_to_i16:
| 129 | ; GCN: ; %bb.0:
| 130 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
Pierre van Houtryve | 824dd81 | 2022-10-17 07:56:20 +0000 | [diff] [blame] | 131 | ; GCN-NEXT: v_and_b32_e32 v1, 16, v2
| 132 | ; GCN-NEXT: v_lshrrev_b32_e32 v0, v1, v0
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 133 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | 74fd760 | 2018-05-09 20:52:54 +0000 | [diff] [blame] | 134 | %amt.masked = and i64 %amt, 16
| 135 | %shift = lshr i64 %x, %amt.masked
| 136 | %trunc = trunc i64 %shift to i16
| 137 | ret i16 %trunc
| 138 | }
| 139 | |
; Mask of 31: the amount can reach 31, so the result may need bits up to 46
; from the high half — narrowing is not possible and the 64-bit
; v_lshrrev_b64 is kept.
Matt Arsenault | 74fd760 | 2018-05-09 20:52:54 +0000 | [diff] [blame] | 140 | define i16 @trunc_srl_i64_var_mask31_to_i16(i64 %x, i64 %amt) {
jeff | e6c29c0 | 2022-09-26 15:09:24 +0000 | [diff] [blame] | 141 | ; GCN-LABEL: trunc_srl_i64_var_mask31_to_i16:
| 142 | ; GCN: ; %bb.0:
| 143 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
| 144 | ; GCN-NEXT: v_and_b32_e32 v2, 31, v2
| 145 | ; GCN-NEXT: v_lshrrev_b64 v[0:1], v2, v[0:1]
| 146 | ; GCN-NEXT: s_setpc_b64 s[30:31]
Matt Arsenault | 74fd760 | 2018-05-09 20:52:54 +0000 | [diff] [blame] | 147 | %amt.masked = and i64 %amt, 31
| 148 | %shift = lshr i64 %x, %amt.masked
| 149 | %trunc = trunc i64 %shift to i16
| 150 | ret i16 %trunc
| 151 | }
Pierre van Houtryve | 824dd81 | 2022-10-17 07:56:20 +0000 | [diff] [blame] | 152 | |
| 153 | define i32 @trunc_srl_i64_25_to_i26(i64 %x) { |
| 154 | ; GCN-LABEL: trunc_srl_i64_25_to_i26: |
| 155 | ; GCN: ; %bb.0: |
| 156 | ; GCN-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0) |
| 157 | ; GCN-NEXT: v_and_b32_e32 v0, 0xa000000, v0 |
Simon Pilgrim | e9caa37 | 2023-07-17 15:34:52 +0100 | [diff] [blame] | 158 | ; GCN-NEXT: v_lshrrev_b32_e32 v0, 25, v0 |
Pierre van Houtryve | 824dd81 | 2022-10-17 07:56:20 +0000 | [diff] [blame] | 159 | ; GCN-NEXT: v_add_u32_e32 v0, 55, v0 |
| 160 | ; GCN-NEXT: s_setpc_b64 s[30:31] |
| 161 | %value.knownbits2 = and i64 %x, 167772160 ; 0xA000000 |
| 162 | %shift = lshr i64 %value.knownbits2, 25 |
| 163 | %trunc = trunc i64 %shift to i26 |
| 164 | %add = add i26 %trunc, 55 |
| 165 | %ext = zext i26 %add to i32 |
| 166 | ret i32 %ext |
| 167 | } |