; Test 64-bit additions of constants to memory.
;
; RUN: llc < %s -mtriple=s390x-linux-gnu | FileCheck %s

; Check additions of 1.
define void @f1(i64 *%ptr) {
; CHECK-LABEL: f1:
; CHECK: agsi 0(%r2), 1
; CHECK: br %r14
  %val = load i64 *%ptr
  %add = add i64 %val, 1
  store i64 %add, i64 *%ptr
  ret void
}

; Check the high end of the constant range.
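; The AGSI immediate is a signed 8-bit field, so 127 is the largest constant
; that can be added directly to memory.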
define void @f2(i64 *%ptr) {
; CHECK-LABEL: f2:
; CHECK: agsi 0(%r2), 127
; CHECK: br %r14
  %val = load i64 *%ptr
  %add = add i64 %val, 127
  store i64 %add, i64 *%ptr
  ret void
}

; Check the next constant up, which must use an addition and a store.
; Both LG/AGHI and LGHI/AG would be OK.
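; 128 is one more than the maximum signed 8-bit immediate, so the constant no
; longer fits in AGSI; the addition is done in a register and stored back
; (hence the STG check).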
define void @f3(i64 *%ptr) {
; CHECK-LABEL: f3:
; CHECK-NOT: agsi
; CHECK: stg %r0, 0(%r2)
; CHECK: br %r14
  %val = load i64 *%ptr
  %add = add i64 %val, 128
  store i64 %add, i64 *%ptr
  ret void
}

; Check the low end of the constant range.
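; -128 is the smallest value of a signed 8-bit immediate, so it is the most
; negative constant that AGSI can add.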
define void @f4(i64 *%ptr) {
; CHECK-LABEL: f4:
; CHECK: agsi 0(%r2), -128
; CHECK: br %r14
  %val = load i64 *%ptr
  %add = add i64 %val, -128
  store i64 %add, i64 *%ptr
  ret void
}

; Check the next value down, with the same comment as f3.
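; -129 is one below the minimum signed 8-bit immediate, so AGSI cannot be used
; here either.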
define void @f5(i64 *%ptr) {
; CHECK-LABEL: f5:
; CHECK-NOT: agsi
; CHECK: stg %r0, 0(%r2)
; CHECK: br %r14
  %val = load i64 *%ptr
  %add = add i64 %val, -129
  store i64 %add, i64 *%ptr
  ret void
}

; Check the high end of the aligned AGSI range.
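; AGSI takes a 20-bit signed displacement; 65535 * 8 = 524280 is the largest
; doubleword-aligned offset that fits.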
define void @f6(i64 *%base) {
; CHECK-LABEL: f6:
; CHECK: agsi 524280(%r2), 1
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 65535
  %val = load i64 *%ptr
  %add = add i64 %val, 1
  store i64 %add, i64 *%ptr
  ret void
}

; Check the next doubleword up, which must use separate address logic.
; Other sequences besides this one would be OK.
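; 65536 * 8 = 524288 is just past the maximum displacement of 524287, so the
; base register has to be adjusted first (here with AGFI).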
define void @f7(i64 *%base) {
; CHECK-LABEL: f7:
; CHECK: agfi %r2, 524288
; CHECK: agsi 0(%r2), 1
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 65536
  %val = load i64 *%ptr
  %add = add i64 %val, 1
  store i64 %add, i64 *%ptr
  ret void
}

; Check the low end of the AGSI range.
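; -65536 * 8 = -524288 is the minimum 20-bit signed displacement.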
define void @f8(i64 *%base) {
; CHECK-LABEL: f8:
; CHECK: agsi -524288(%r2), 1
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 -65536
  %val = load i64 *%ptr
  %add = add i64 %val, 1
  store i64 %add, i64 *%ptr
  ret void
}

; Check the next doubleword down, which must use separate address logic.
; Other sequences besides this one would be OK.
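; -65537 * 8 = -524296 is below the minimum displacement, so the base register
; has to be adjusted first.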
define void @f9(i64 *%base) {
; CHECK-LABEL: f9:
; CHECK: agfi %r2, -524296
; CHECK: agsi 0(%r2), 1
; CHECK: br %r14
  %ptr = getelementptr i64 *%base, i64 -65537
  %val = load i64 *%ptr
  %add = add i64 %val, 1
  store i64 %add, i64 *%ptr
  ret void
}

; Check that AGSI does not allow indices.
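; AGSI has only a base register and displacement, no index register, so the
; index must first be folded into the base (here with AGR).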
define void @f10(i64 %base, i64 %index) {
; CHECK-LABEL: f10:
; CHECK: agr %r2, %r3
; CHECK: agsi 8(%r2), 1
; CHECK: br %r14
  %add1 = add i64 %base, %index
  %add2 = add i64 %add1, 8
  %ptr = inttoptr i64 %add2 to i64 *
  %val = load i64 *%ptr
  %add = add i64 %val, 1
  store i64 %add, i64 *%ptr
  ret void
}

; Check that adding 127 to a spilled value can use AGSI.
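; More values are live across the branch than there are available GPRs, so at
; least one is spilled; the add should then be folded into an AGSI on the
; spill slot in the stack frame (addressed off %r15).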
define void @f11(i64 *%ptr, i32 %sel) {
; CHECK-LABEL: f11:
; CHECK: agsi {{[0-9]+}}(%r15), 127
; CHECK: br %r14
entry:
  %val0 = load volatile i64 *%ptr
  %val1 = load volatile i64 *%ptr
  %val2 = load volatile i64 *%ptr
  %val3 = load volatile i64 *%ptr
  %val4 = load volatile i64 *%ptr
  %val5 = load volatile i64 *%ptr
  %val6 = load volatile i64 *%ptr
  %val7 = load volatile i64 *%ptr
  %val8 = load volatile i64 *%ptr
  %val9 = load volatile i64 *%ptr
  %val10 = load volatile i64 *%ptr
  %val11 = load volatile i64 *%ptr
  %val12 = load volatile i64 *%ptr
  %val13 = load volatile i64 *%ptr
  %val14 = load volatile i64 *%ptr
  %val15 = load volatile i64 *%ptr

  %test = icmp ne i32 %sel, 0
  br i1 %test, label %add, label %store

add:
  %add0 = add i64 %val0, 127
  %add1 = add i64 %val1, 127
  %add2 = add i64 %val2, 127
  %add3 = add i64 %val3, 127
  %add4 = add i64 %val4, 127
  %add5 = add i64 %val5, 127
  %add6 = add i64 %val6, 127
  %add7 = add i64 %val7, 127
  %add8 = add i64 %val8, 127
  %add9 = add i64 %val9, 127
  %add10 = add i64 %val10, 127
  %add11 = add i64 %val11, 127
  %add12 = add i64 %val12, 127
  %add13 = add i64 %val13, 127
  %add14 = add i64 %val14, 127
  %add15 = add i64 %val15, 127
  br label %store

store:
  %new0 = phi i64 [ %val0, %entry ], [ %add0, %add ]
  %new1 = phi i64 [ %val1, %entry ], [ %add1, %add ]
  %new2 = phi i64 [ %val2, %entry ], [ %add2, %add ]
  %new3 = phi i64 [ %val3, %entry ], [ %add3, %add ]
  %new4 = phi i64 [ %val4, %entry ], [ %add4, %add ]
  %new5 = phi i64 [ %val5, %entry ], [ %add5, %add ]
  %new6 = phi i64 [ %val6, %entry ], [ %add6, %add ]
  %new7 = phi i64 [ %val7, %entry ], [ %add7, %add ]
  %new8 = phi i64 [ %val8, %entry ], [ %add8, %add ]
  %new9 = phi i64 [ %val9, %entry ], [ %add9, %add ]
  %new10 = phi i64 [ %val10, %entry ], [ %add10, %add ]
  %new11 = phi i64 [ %val11, %entry ], [ %add11, %add ]
  %new12 = phi i64 [ %val12, %entry ], [ %add12, %add ]
  %new13 = phi i64 [ %val13, %entry ], [ %add13, %add ]
  %new14 = phi i64 [ %val14, %entry ], [ %add14, %add ]
  %new15 = phi i64 [ %val15, %entry ], [ %add15, %add ]

  store volatile i64 %new0, i64 *%ptr
  store volatile i64 %new1, i64 *%ptr
  store volatile i64 %new2, i64 *%ptr
  store volatile i64 %new3, i64 *%ptr
  store volatile i64 %new4, i64 *%ptr
  store volatile i64 %new5, i64 *%ptr
  store volatile i64 %new6, i64 *%ptr
  store volatile i64 %new7, i64 *%ptr
  store volatile i64 %new8, i64 *%ptr
  store volatile i64 %new9, i64 *%ptr
  store volatile i64 %new10, i64 *%ptr
  store volatile i64 %new11, i64 *%ptr
  store volatile i64 %new12, i64 *%ptr
  store volatile i64 %new13, i64 *%ptr
  store volatile i64 %new14, i64 *%ptr
  store volatile i64 %new15, i64 *%ptr

  ret void
}

; Check that adding -128 to a spilled value can use AGSI.
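; As in f11, the spilled value should be updated in place with AGSI.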
define void @f12(i64 *%ptr, i32 %sel) {
; CHECK-LABEL: f12:
; CHECK: agsi {{[0-9]+}}(%r15), -128
; CHECK: br %r14
entry:
  %val0 = load volatile i64 *%ptr
  %val1 = load volatile i64 *%ptr
  %val2 = load volatile i64 *%ptr
  %val3 = load volatile i64 *%ptr
  %val4 = load volatile i64 *%ptr
  %val5 = load volatile i64 *%ptr
  %val6 = load volatile i64 *%ptr
  %val7 = load volatile i64 *%ptr
  %val8 = load volatile i64 *%ptr
  %val9 = load volatile i64 *%ptr
  %val10 = load volatile i64 *%ptr
  %val11 = load volatile i64 *%ptr
  %val12 = load volatile i64 *%ptr
  %val13 = load volatile i64 *%ptr
  %val14 = load volatile i64 *%ptr
  %val15 = load volatile i64 *%ptr

  %test = icmp ne i32 %sel, 0
  br i1 %test, label %add, label %store

add:
  %add0 = add i64 %val0, -128
  %add1 = add i64 %val1, -128
  %add2 = add i64 %val2, -128
  %add3 = add i64 %val3, -128
  %add4 = add i64 %val4, -128
  %add5 = add i64 %val5, -128
  %add6 = add i64 %val6, -128
  %add7 = add i64 %val7, -128
  %add8 = add i64 %val8, -128
  %add9 = add i64 %val9, -128
  %add10 = add i64 %val10, -128
  %add11 = add i64 %val11, -128
  %add12 = add i64 %val12, -128
  %add13 = add i64 %val13, -128
  %add14 = add i64 %val14, -128
  %add15 = add i64 %val15, -128
  br label %store

store:
  %new0 = phi i64 [ %val0, %entry ], [ %add0, %add ]
  %new1 = phi i64 [ %val1, %entry ], [ %add1, %add ]
  %new2 = phi i64 [ %val2, %entry ], [ %add2, %add ]
  %new3 = phi i64 [ %val3, %entry ], [ %add3, %add ]
  %new4 = phi i64 [ %val4, %entry ], [ %add4, %add ]
  %new5 = phi i64 [ %val5, %entry ], [ %add5, %add ]
  %new6 = phi i64 [ %val6, %entry ], [ %add6, %add ]
  %new7 = phi i64 [ %val7, %entry ], [ %add7, %add ]
  %new8 = phi i64 [ %val8, %entry ], [ %add8, %add ]
  %new9 = phi i64 [ %val9, %entry ], [ %add9, %add ]
  %new10 = phi i64 [ %val10, %entry ], [ %add10, %add ]
  %new11 = phi i64 [ %val11, %entry ], [ %add11, %add ]
  %new12 = phi i64 [ %val12, %entry ], [ %add12, %add ]
  %new13 = phi i64 [ %val13, %entry ], [ %add13, %add ]
  %new14 = phi i64 [ %val14, %entry ], [ %add14, %add ]
  %new15 = phi i64 [ %val15, %entry ], [ %add15, %add ]

  store volatile i64 %new0, i64 *%ptr
  store volatile i64 %new1, i64 *%ptr
  store volatile i64 %new2, i64 *%ptr
  store volatile i64 %new3, i64 *%ptr
  store volatile i64 %new4, i64 *%ptr
  store volatile i64 %new5, i64 *%ptr
  store volatile i64 %new6, i64 *%ptr
  store volatile i64 %new7, i64 *%ptr
  store volatile i64 %new8, i64 *%ptr
  store volatile i64 %new9, i64 *%ptr
  store volatile i64 %new10, i64 *%ptr
  store volatile i64 %new11, i64 *%ptr
  store volatile i64 %new12, i64 *%ptr
  store volatile i64 %new13, i64 *%ptr
  store volatile i64 %new14, i64 *%ptr
  store volatile i64 %new15, i64 *%ptr

  ret void
}