; NOTE: Assertions have been autogenerated by utils/update_test_checks.py UTC_ARGS: --prefix-filecheck-ir-name _ --version 5
; RUN: opt -S -mtriple=amdgcn-amd-amdhsa -passes=infer-address-spaces %s | FileCheck %s

; Trivial optimization of generic addressing

define float @load_global_from_flat(ptr %generic_scalar) #0 {
; CHECK-LABEL: define float @load_global_from_flat(
; CHECK-SAME: ptr [[GENERIC_SCALAR:%.*]]) #[[ATTR0:[0-9]+]] {
; CHECK-NEXT: [[_TMP0:%.*]] = addrspacecast ptr [[GENERIC_SCALAR]] to ptr addrspace(1)
; CHECK-NEXT: [[_TMP1:%.*]] = load float, ptr addrspace(1) [[_TMP0]], align 4
; CHECK-NEXT: ret float [[_TMP1]]
;
  %tmp0 = addrspacecast ptr %generic_scalar to ptr addrspace(1)
  %tmp1 = load float, ptr addrspace(1) %tmp0
  ret float %tmp1
}

define float @load_constant_from_flat(ptr %generic_scalar) #0 {
; CHECK-LABEL: define float @load_constant_from_flat(
; CHECK-SAME: ptr [[GENERIC_SCALAR:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[_TMP0:%.*]] = addrspacecast ptr [[GENERIC_SCALAR]] to ptr addrspace(4)
; CHECK-NEXT: [[_TMP1:%.*]] = load float, ptr addrspace(4) [[_TMP0]], align 4
; CHECK-NEXT: ret float [[_TMP1]]
;
  %tmp0 = addrspacecast ptr %generic_scalar to ptr addrspace(4)
  %tmp1 = load float, ptr addrspace(4) %tmp0
  ret float %tmp1
}

define float @load_group_from_flat(ptr %generic_scalar) #0 {
; CHECK-LABEL: define float @load_group_from_flat(
; CHECK-SAME: ptr [[GENERIC_SCALAR:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[_TMP0:%.*]] = addrspacecast ptr [[GENERIC_SCALAR]] to ptr addrspace(3)
; CHECK-NEXT: [[_TMP1:%.*]] = load float, ptr addrspace(3) [[_TMP0]], align 4
; CHECK-NEXT: ret float [[_TMP1]]
;
  %tmp0 = addrspacecast ptr %generic_scalar to ptr addrspace(3)
  %tmp1 = load float, ptr addrspace(3) %tmp0
  ret float %tmp1
}

define float @load_private_from_flat(ptr %generic_scalar) #0 {
; CHECK-LABEL: define float @load_private_from_flat(
; CHECK-SAME: ptr [[GENERIC_SCALAR:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[_TMP0:%.*]] = addrspacecast ptr [[GENERIC_SCALAR]] to ptr addrspace(5)
; CHECK-NEXT: [[_TMP1:%.*]] = load float, ptr addrspace(5) [[_TMP0]], align 4
; CHECK-NEXT: ret float [[_TMP1]]
;
  %tmp0 = addrspacecast ptr %generic_scalar to ptr addrspace(5)
  %tmp1 = load float, ptr addrspace(5) %tmp0
  ret float %tmp1
}

define amdgpu_kernel void @store_global_from_flat(ptr %generic_scalar) #0 {
; CHECK-LABEL: define amdgpu_kernel void @store_global_from_flat(
; CHECK-SAME: ptr [[GENERIC_SCALAR:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[_TMP0:%.*]] = addrspacecast ptr [[GENERIC_SCALAR]] to ptr addrspace(1)
; CHECK-NEXT: store float 0.000000e+00, ptr addrspace(1) [[_TMP0]], align 4
; CHECK-NEXT: ret void
;
  %tmp0 = addrspacecast ptr %generic_scalar to ptr addrspace(1)
  store float 0.0, ptr addrspace(1) %tmp0
  ret void
}

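; In the two kernels below, the generic pointer argument is first rewritten
; through addrspace(1), which is why the checks show an extra cast chain
; before the group and private casts.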
define amdgpu_kernel void @store_group_from_flat(ptr %generic_scalar) #0 {
; CHECK-LABEL: define amdgpu_kernel void @store_group_from_flat(
; CHECK-SAME: ptr [[GENERIC_SCALAR:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP1:%.*]] = addrspacecast ptr [[GENERIC_SCALAR]] to ptr addrspace(1)
; CHECK-NEXT: [[TMP2:%.*]] = addrspacecast ptr addrspace(1) [[TMP1]] to ptr
; CHECK-NEXT: [[_TMP0:%.*]] = addrspacecast ptr [[TMP2]] to ptr addrspace(3)
; CHECK-NEXT: store float 0.000000e+00, ptr addrspace(3) [[_TMP0]], align 4
; CHECK-NEXT: ret void
;
  %tmp0 = addrspacecast ptr %generic_scalar to ptr addrspace(3)
  store float 0.0, ptr addrspace(3) %tmp0
  ret void
}

define amdgpu_kernel void @store_private_from_flat(ptr %generic_scalar) #0 {
; CHECK-LABEL: define amdgpu_kernel void @store_private_from_flat(
; CHECK-SAME: ptr [[GENERIC_SCALAR:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP1:%.*]] = addrspacecast ptr [[GENERIC_SCALAR]] to ptr addrspace(1)
; CHECK-NEXT: [[TMP2:%.*]] = addrspacecast ptr addrspace(1) [[TMP1]] to ptr
; CHECK-NEXT: [[_TMP0:%.*]] = addrspacecast ptr [[TMP2]] to ptr addrspace(5)
; CHECK-NEXT: store float 0.000000e+00, ptr addrspace(5) [[_TMP0]], align 4
; CHECK-NEXT: ret void
;
  %tmp0 = addrspacecast ptr %generic_scalar to ptr addrspace(5)
  store float 0.0, ptr addrspace(5) %tmp0
  ret void
}

; Optimized to global load/store.
define amdgpu_kernel void @load_store_global(ptr addrspace(1) nocapture %input, ptr addrspace(1) nocapture %output) #0 {
; CHECK-LABEL: define amdgpu_kernel void @load_store_global(
; CHECK-SAME: ptr addrspace(1) captures(none) [[INPUT:%.*]], ptr addrspace(1) captures(none) [[OUTPUT:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[VAL:%.*]] = load i32, ptr addrspace(1) [[INPUT]], align 4
; CHECK-NEXT: store i32 [[VAL]], ptr addrspace(1) [[OUTPUT]], align 4
; CHECK-NEXT: ret void
;
  %tmp0 = addrspacecast ptr addrspace(1) %input to ptr
  %tmp1 = addrspacecast ptr addrspace(1) %output to ptr
  %val = load i32, ptr %tmp0, align 4
  store i32 %val, ptr %tmp1, align 4
  ret void
}

; Optimized to group load/store.
define amdgpu_kernel void @load_store_group(ptr addrspace(3) nocapture %input, ptr addrspace(3) nocapture %output) #0 {
; CHECK-LABEL: define amdgpu_kernel void @load_store_group(
; CHECK-SAME: ptr addrspace(3) captures(none) [[INPUT:%.*]], ptr addrspace(3) captures(none) [[OUTPUT:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[VAL:%.*]] = load i32, ptr addrspace(3) [[INPUT]], align 4
; CHECK-NEXT: store i32 [[VAL]], ptr addrspace(3) [[OUTPUT]], align 4
; CHECK-NEXT: ret void
;
  %tmp0 = addrspacecast ptr addrspace(3) %input to ptr
  %tmp1 = addrspacecast ptr addrspace(3) %output to ptr
  %val = load i32, ptr %tmp0, align 4
  store i32 %val, ptr %tmp1, align 4
  ret void
}

; Optimized to private load/store.
define amdgpu_kernel void @load_store_private(ptr addrspace(5) nocapture %input, ptr addrspace(5) nocapture %output) #0 {
; CHECK-LABEL: define amdgpu_kernel void @load_store_private(
; CHECK-SAME: ptr addrspace(5) captures(none) [[INPUT:%.*]], ptr addrspace(5) captures(none) [[OUTPUT:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[VAL:%.*]] = load i32, ptr addrspace(5) [[INPUT]], align 4
; CHECK-NEXT: store i32 [[VAL]], ptr addrspace(5) [[OUTPUT]], align 4
; CHECK-NEXT: ret void
;
  %tmp0 = addrspacecast ptr addrspace(5) %input to ptr
  %tmp1 = addrspacecast ptr addrspace(5) %output to ptr
  %val = load i32, ptr %tmp0, align 4
  store i32 %val, ptr %tmp1, align 4
  ret void
}

; Flat load/store in a kernel: the generic pointer arguments are assumed to
; point to global memory, so this is also optimized to global load/store.
define amdgpu_kernel void @load_store_flat(ptr nocapture %input, ptr nocapture %output) #0 {
; CHECK-LABEL: define amdgpu_kernel void @load_store_flat(
; CHECK-SAME: ptr captures(none) [[INPUT:%.*]], ptr captures(none) [[OUTPUT:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[TMP1:%.*]] = addrspacecast ptr [[INPUT]] to ptr addrspace(1)
; CHECK-NEXT: [[TMP2:%.*]] = addrspacecast ptr [[OUTPUT]] to ptr addrspace(1)
; CHECK-NEXT: [[VAL:%.*]] = load i32, ptr addrspace(1) [[TMP1]], align 4
; CHECK-NEXT: store i32 [[VAL]], ptr addrspace(1) [[TMP2]], align 4
; CHECK-NEXT: ret void
;
  %val = load i32, ptr %input, align 4
  store i32 %val, ptr %output, align 4
  ret void
}

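; The cast result is only stored as a value, not used as a pointer operand,
; so it is left as a flat pointer.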
define amdgpu_kernel void @store_addrspacecast_ptr_value(ptr addrspace(1) nocapture %input, ptr addrspace(1) nocapture %output) #0 {
; CHECK-LABEL: define amdgpu_kernel void @store_addrspacecast_ptr_value(
; CHECK-SAME: ptr addrspace(1) captures(none) [[INPUT:%.*]], ptr addrspace(1) captures(none) [[OUTPUT:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[CAST:%.*]] = addrspacecast ptr addrspace(1) [[INPUT]] to ptr
; CHECK-NEXT: store ptr [[CAST]], ptr addrspace(1) [[OUTPUT]], align 4
; CHECK-NEXT: ret void
;
  %cast = addrspacecast ptr addrspace(1) %input to ptr
  store ptr %cast, ptr addrspace(1) %output, align 4
  ret void
}

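; Atomic pointer operands are rewritten the same way as load/store pointer
; operands.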
define i32 @atomicrmw_add_global_to_flat(ptr addrspace(1) %global.ptr, i32 %y) #0 {
; CHECK-LABEL: define i32 @atomicrmw_add_global_to_flat(
; CHECK-SAME: ptr addrspace(1) [[GLOBAL_PTR:%.*]], i32 [[Y:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[RET:%.*]] = atomicrmw add ptr addrspace(1) [[GLOBAL_PTR]], i32 [[Y]] seq_cst, align 4
; CHECK-NEXT: ret i32 [[RET]]
;
  %cast = addrspacecast ptr addrspace(1) %global.ptr to ptr
  %ret = atomicrmw add ptr %cast, i32 %y seq_cst
  ret i32 %ret
}

define i32 @atomicrmw_add_group_to_flat(ptr addrspace(3) %group.ptr, i32 %y) #0 {
; CHECK-LABEL: define i32 @atomicrmw_add_group_to_flat(
; CHECK-SAME: ptr addrspace(3) [[GROUP_PTR:%.*]], i32 [[Y:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[RET:%.*]] = atomicrmw add ptr addrspace(3) [[GROUP_PTR]], i32 [[Y]] seq_cst, align 4
; CHECK-NEXT: ret i32 [[RET]]
;
  %cast = addrspacecast ptr addrspace(3) %group.ptr to ptr
  %ret = atomicrmw add ptr %cast, i32 %y seq_cst
  ret i32 %ret
}

define { i32, i1 } @cmpxchg_global_to_flat(ptr addrspace(1) %global.ptr, i32 %cmp, i32 %val) #0 {
; CHECK-LABEL: define { i32, i1 } @cmpxchg_global_to_flat(
; CHECK-SAME: ptr addrspace(1) [[GLOBAL_PTR:%.*]], i32 [[CMP:%.*]], i32 [[VAL:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[RET:%.*]] = cmpxchg ptr addrspace(1) [[GLOBAL_PTR]], i32 [[CMP]], i32 [[VAL]] seq_cst monotonic, align 4
; CHECK-NEXT: ret { i32, i1 } [[RET]]
;
  %cast = addrspacecast ptr addrspace(1) %global.ptr to ptr
  %ret = cmpxchg ptr %cast, i32 %cmp, i32 %val seq_cst monotonic
  ret { i32, i1 } %ret
}

define { i32, i1 } @cmpxchg_group_to_flat(ptr addrspace(3) %group.ptr, i32 %cmp, i32 %val) #0 {
; CHECK-LABEL: define { i32, i1 } @cmpxchg_group_to_flat(
; CHECK-SAME: ptr addrspace(3) [[GROUP_PTR:%.*]], i32 [[CMP:%.*]], i32 [[VAL:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[RET:%.*]] = cmpxchg ptr addrspace(3) [[GROUP_PTR]], i32 [[CMP]], i32 [[VAL]] seq_cst monotonic, align 4
; CHECK-NEXT: ret { i32, i1 } [[RET]]
;
  %cast = addrspacecast ptr addrspace(3) %group.ptr to ptr
  %ret = cmpxchg ptr %cast, i32 %cmp, i32 %val seq_cst monotonic
  ret { i32, i1 } %ret
}

; The cast is the compare operand, not the pointer operand, so it is not
; rewritten.
define { ptr, i1 } @cmpxchg_group_to_flat_wrong_operand(ptr addrspace(3) %cas.ptr, ptr addrspace(3) %cmp.ptr, ptr %val) #0 {
; CHECK-LABEL: define { ptr, i1 } @cmpxchg_group_to_flat_wrong_operand(
; CHECK-SAME: ptr addrspace(3) [[CAS_PTR:%.*]], ptr addrspace(3) [[CMP_PTR:%.*]], ptr [[VAL:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[CAST_CMP:%.*]] = addrspacecast ptr addrspace(3) [[CMP_PTR]] to ptr
; CHECK-NEXT: [[RET:%.*]] = cmpxchg ptr addrspace(3) [[CAS_PTR]], ptr [[CAST_CMP]], ptr [[VAL]] seq_cst monotonic, align 8
; CHECK-NEXT: ret { ptr, i1 } [[RET]]
;
  %cast.cmp = addrspacecast ptr addrspace(3) %cmp.ptr to ptr
  %ret = cmpxchg ptr addrspace(3) %cas.ptr, ptr %cast.cmp, ptr %val seq_cst monotonic
  ret { ptr, i1 } %ret
}

; Null pointer in local addr space
define void @local_nullptr(ptr addrspace(1) nocapture %results, ptr addrspace(3) %a) {
; CHECK-LABEL: define void @local_nullptr(
; CHECK-SAME: ptr addrspace(1) captures(none) [[RESULTS:%.*]], ptr addrspace(3) [[A:%.*]]) {
; CHECK-NEXT: [[ENTRY:.*:]]
; CHECK-NEXT: [[TOBOOL:%.*]] = icmp ne ptr addrspace(3) [[A]], addrspacecast (ptr addrspace(5) null to ptr addrspace(3))
; CHECK-NEXT: [[CONV:%.*]] = zext i1 [[TOBOOL]] to i32
; CHECK-NEXT: store i32 [[CONV]], ptr addrspace(1) [[RESULTS]], align 4
; CHECK-NEXT: ret void
;
entry:
  %tobool = icmp ne ptr addrspace(3) %a, addrspacecast (ptr addrspace(5) null to ptr addrspace(3))
  %conv = zext i1 %tobool to i32
  store i32 %conv, ptr addrspace(1) %results, align 4
  ret void
}

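; The !amdgpu.* metadata on the atomicrmw is preserved by the rewrite.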
define i32 @atomicrmw_add_global_to_flat_preserve_amdgpu_md(ptr addrspace(1) %global.ptr, i32 %y) #0 {
; CHECK-LABEL: define i32 @atomicrmw_add_global_to_flat_preserve_amdgpu_md(
; CHECK-SAME: ptr addrspace(1) [[GLOBAL_PTR:%.*]], i32 [[Y:%.*]]) #[[ATTR0]] {
; CHECK-NEXT: [[RET:%.*]] = atomicrmw add ptr addrspace(1) [[GLOBAL_PTR]], i32 [[Y]] seq_cst, align 4, !amdgpu.no.fine.grained.memory [[META0:![0-9]+]], !amdgpu.no.remote.memory [[META0]]
; CHECK-NEXT: ret i32 [[RET]]
;
  %cast = addrspacecast ptr addrspace(1) %global.ptr to ptr
  %ret = atomicrmw add ptr %cast, i32 %y seq_cst, align 4, !amdgpu.no.fine.grained.memory !0, !amdgpu.no.remote.memory !0
  ret i32 %ret
}

; Make sure there's no assert
define ptr @try_infer_getelementptr_constant_null() {
; CHECK-LABEL: define ptr @try_infer_getelementptr_constant_null() {
; CHECK-NEXT: [[CE:%.*]] = getelementptr i8, ptr getelementptr inbounds (i8, ptr null, i64 8), i64 0
; CHECK-NEXT: ret ptr [[CE]]
;
  %ce = getelementptr i8, ptr getelementptr inbounds (i8, ptr null, i64 8), i64 0
  ret ptr %ce
}

attributes #0 = { nounwind }

!0 = !{}
;.
; CHECK: [[META0]] = !{}
;.
266;.