; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -passes='default<O3>' -S < %s | FileCheck %s --check-prefixes=ALL,O3
; RUN: opt -passes='default<O2>' -S < %s | FileCheck %s --check-prefixes=ALL,O2
; RUN: opt -passes='default<O1>' -S < %s | FileCheck %s --check-prefixes=ALL,O1

; All these tests should optimize to a single comparison
; of the original argument with null. There should be no loops.

%struct.node = type { %struct.node*, i32 }

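; For reference, a hedged sketch (not part of the original test) of C++ source
; that could plausibly produce IR shaped like variant 1 below; the struct
; layout matches %struct.node, but the field and parameter names are
; assumptions used only for illustration:
;
;   struct node { node *next; int value; };
;
;   static int count_nodes_variant1(node *p) {
;     int size = 0;
;     while (p) {
;       p = p->next;
;       ++size;
;     }
;     return size;
;   }
;
;   bool is_not_empty_variant1(node *p) {
;     return count_nodes_variant1(p) > 0;
;   }
;
; Variant 1: the count is kept in a signed i32 (add nsw) and the caller checks
; 'count > 0' with a signed compare; no assumption is involved.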
define dso_local zeroext i1 @is_not_empty_variant1(%struct.node* %p) {
; ALL-LABEL: @is_not_empty_variant1(
; ALL-NEXT:  entry:
; ALL-NEXT:    [[TOBOOL_NOT3_I:%.*]] = icmp ne %struct.node* [[P:%.*]], null
; ALL-NEXT:    ret i1 [[TOBOOL_NOT3_I]]
;
entry:
  %p.addr = alloca %struct.node*, align 8
  store %struct.node* %p, %struct.node** %p.addr, align 8
  %0 = load %struct.node*, %struct.node** %p.addr, align 8
  %call = call i32 @count_nodes_variant1(%struct.node* %0)
  %cmp = icmp sgt i32 %call, 0
  ret i1 %cmp
}

define internal i32 @count_nodes_variant1(%struct.node* %p) {
entry:
  %p.addr = alloca %struct.node*, align 8
  %size = alloca i32, align 4
  store %struct.node* %p, %struct.node** %p.addr, align 8
  %0 = bitcast i32* %size to i8*
  store i32 0, i32* %size, align 4
  br label %while.cond

while.cond:
  %1 = load %struct.node*, %struct.node** %p.addr, align 8
  %tobool = icmp ne %struct.node* %1, null
  br i1 %tobool, label %while.body, label %while.end

while.body:
  %2 = load %struct.node*, %struct.node** %p.addr, align 8
  %next = getelementptr inbounds %struct.node, %struct.node* %2, i32 0, i32 0
  %3 = load %struct.node*, %struct.node** %next, align 8
  store %struct.node* %3, %struct.node** %p.addr, align 8
  %4 = load i32, i32* %size, align 4
  %inc = add nsw i32 %4, 1
  store i32 %inc, i32* %size, align 4
  br label %while.cond, !llvm.loop !0

while.end:
  %5 = load i32, i32* %size, align 4
  %6 = bitcast i32* %size to i8*
  ret i32 %5
}

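; Variant 2: the count is kept in an i64 and the caller checks 'count > 0'
; with an unsigned compare; after each increment the loop calls the
; @_ZL6assumeb wrapper to assert assume(size != 0).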
define dso_local zeroext i1 @is_not_empty_variant2(%struct.node* %p) {
; ALL-LABEL: @is_not_empty_variant2(
; ALL-NEXT:  entry:
; ALL-NEXT:    [[TOBOOL_NOT4_I:%.*]] = icmp ne %struct.node* [[P:%.*]], null
; ALL-NEXT:    ret i1 [[TOBOOL_NOT4_I]]
;
entry:
  %p.addr = alloca %struct.node*, align 8
  store %struct.node* %p, %struct.node** %p.addr, align 8
  %0 = load %struct.node*, %struct.node** %p.addr, align 8
  %call = call i64 @count_nodes_variant2(%struct.node* %0)
  %cmp = icmp ugt i64 %call, 0
  ret i1 %cmp
}

define internal i64 @count_nodes_variant2(%struct.node* %p) {
entry:
  %p.addr = alloca %struct.node*, align 8
  %size = alloca i64, align 8
  store %struct.node* %p, %struct.node** %p.addr, align 8
  %0 = bitcast i64* %size to i8*
  store i64 0, i64* %size, align 8
  br label %while.cond

while.cond:
  %1 = load %struct.node*, %struct.node** %p.addr, align 8
  %tobool = icmp ne %struct.node* %1, null
  br i1 %tobool, label %while.body, label %while.end

while.body:
  %2 = load %struct.node*, %struct.node** %p.addr, align 8
  %next = getelementptr inbounds %struct.node, %struct.node* %2, i32 0, i32 0
  %3 = load %struct.node*, %struct.node** %next, align 8
  store %struct.node* %3, %struct.node** %p.addr, align 8
  %4 = load i64, i64* %size, align 8
  %inc = add i64 %4, 1
  store i64 %inc, i64* %size, align 8
  %5 = load i64, i64* %size, align 8
  %cmp = icmp ne i64 %5, 0
  call void @_ZL6assumeb(i1 zeroext %cmp)
  br label %while.cond, !llvm.loop !2

while.end:
  %6 = load i64, i64* %size, align 8
  %7 = bitcast i64* %size to i8*
  ret i64 %6
}

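; Variant 3: like variant 2, but the assumption is assume(size != -1) and it
; is made before the increment rather than after it. Per the CHECK lines
; below, O2 and O3 still fold this to a single null check, while O1 currently
; keeps the loop.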
define dso_local zeroext i1 @is_not_empty_variant3(%struct.node* %p) {
; O3-LABEL: @is_not_empty_variant3(
; O3-NEXT:  entry:
; O3-NEXT:    [[TOBOOL_NOT4_I:%.*]] = icmp ne %struct.node* [[P:%.*]], null
; O3-NEXT:    ret i1 [[TOBOOL_NOT4_I]]
;
; O2-LABEL: @is_not_empty_variant3(
; O2-NEXT:  entry:
; O2-NEXT:    [[TOBOOL_NOT4_I:%.*]] = icmp ne %struct.node* [[P:%.*]], null
; O2-NEXT:    ret i1 [[TOBOOL_NOT4_I]]
;
; O1-LABEL: @is_not_empty_variant3(
; O1-NEXT:  entry:
; O1-NEXT:    [[TOBOOL_NOT4_I:%.*]] = icmp eq %struct.node* [[P:%.*]], null
; O1-NEXT:    br i1 [[TOBOOL_NOT4_I]], label [[COUNT_NODES_VARIANT3_EXIT:%.*]], label [[WHILE_BODY_I:%.*]]
; O1:       while.body.i:
; O1-NEXT:    [[SIZE_06_I:%.*]] = phi i64 [ [[INC_I:%.*]], [[WHILE_BODY_I]] ], [ 0, [[ENTRY:%.*]] ]
; O1-NEXT:    [[P_ADDR_05_I:%.*]] = phi %struct.node* [ [[TMP0:%.*]], [[WHILE_BODY_I]] ], [ [[P]], [[ENTRY]] ]
; O1-NEXT:    [[CMP_I:%.*]] = icmp ne i64 [[SIZE_06_I]], -1
; O1-NEXT:    call void @llvm.assume(i1 [[CMP_I]]) #[[ATTR3:[0-9]+]]
; O1-NEXT:    [[NEXT_I:%.*]] = getelementptr inbounds [[STRUCT_NODE:%.*]], %struct.node* [[P_ADDR_05_I]], i64 0, i32 0
; O1-NEXT:    [[TMP0]] = load %struct.node*, %struct.node** [[NEXT_I]], align 8
; O1-NEXT:    [[INC_I]] = add i64 [[SIZE_06_I]], 1
; O1-NEXT:    [[TOBOOL_NOT_I:%.*]] = icmp eq %struct.node* [[TMP0]], null
; O1-NEXT:    br i1 [[TOBOOL_NOT_I]], label [[COUNT_NODES_VARIANT3_EXIT_LOOPEXIT:%.*]], label [[WHILE_BODY_I]], !llvm.loop [[LOOP0:![0-9]+]]
; O1:       count_nodes_variant3.exit.loopexit:
; O1-NEXT:    [[PHI_CMP:%.*]] = icmp ne i64 [[INC_I]], 0
; O1-NEXT:    br label [[COUNT_NODES_VARIANT3_EXIT]]
; O1:       count_nodes_variant3.exit:
; O1-NEXT:    [[SIZE_0_LCSSA_I:%.*]] = phi i1 [ false, [[ENTRY]] ], [ [[PHI_CMP]], [[COUNT_NODES_VARIANT3_EXIT_LOOPEXIT]] ]
; O1-NEXT:    ret i1 [[SIZE_0_LCSSA_I]]
;
entry:
  %p.addr = alloca %struct.node*, align 8
  store %struct.node* %p, %struct.node** %p.addr, align 8
  %0 = load %struct.node*, %struct.node** %p.addr, align 8
  %call = call i64 @count_nodes_variant3(%struct.node* %0)
  %cmp = icmp ugt i64 %call, 0
  ret i1 %cmp
}

define internal i64 @count_nodes_variant3(%struct.node* %p) {
entry:
  %p.addr = alloca %struct.node*, align 8
  %size = alloca i64, align 8
  store %struct.node* %p, %struct.node** %p.addr, align 8
  %0 = bitcast i64* %size to i8*
  store i64 0, i64* %size, align 8
  br label %while.cond

while.cond:
  %1 = load %struct.node*, %struct.node** %p.addr, align 8
  %tobool = icmp ne %struct.node* %1, null
  br i1 %tobool, label %while.body, label %while.end

while.body:
  %2 = load i64, i64* %size, align 8
  %cmp = icmp ne i64 %2, -1
  call void @_ZL6assumeb(i1 zeroext %cmp)
  %3 = load %struct.node*, %struct.node** %p.addr, align 8
  %next = getelementptr inbounds %struct.node, %struct.node* %3, i32 0, i32 0
  %4 = load %struct.node*, %struct.node** %next, align 8
  store %struct.node* %4, %struct.node** %p.addr, align 8
  %5 = load i64, i64* %size, align 8
  %inc = add i64 %5, 1
  store i64 %inc, i64* %size, align 8
  br label %while.cond, !llvm.loop !3

while.end:
  %6 = load i64, i64* %size, align 8
  %7 = bitcast i64* %size to i8*
  ret i64 %6
}

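; Helper used by variants 2 and 3: an internal wrapper (the mangled name
; @_ZL6assumeb demangles to a file-static 'assume(bool)') that forwards its
; argument to @llvm.assume. A hedged sketch of C++ source that could produce
; it, assuming Clang's __builtin_assume:
;
;   static void assume(bool expression) { __builtin_assume(expression); }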
define internal void @_ZL6assumeb(i1 zeroext %expression) {
entry:
  %expression.addr = alloca i8, align 1
  %frombool = zext i1 %expression to i8
  store i8 %frombool, i8* %expression.addr, align 1
  %0 = load i8, i8* %expression.addr, align 1
  %tobool = trunc i8 %0 to i1
  call void @llvm.assume(i1 %tobool)
  ret void
}

declare void @llvm.assume(i1 noundef)

!0 = distinct !{!0, !1}
!1 = !{!"llvm.loop.mustprogress"}
!2 = distinct !{!2, !1}
!3 = distinct !{!3, !1}