; RUN: llc < %s -march=arm -mattr=+neon | FileCheck %s

; Vector shift left by a register amount (64-bit d-registers): a 'shl' whose
; shift operand is a loaded vector is selected as VSHL.U<size> in the
; register-shift form.

define <8 x i8> @vshls8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK: vshls8:
;CHECK: vshl.u8
	%tmp1 = load <8 x i8>* %A
	%tmp2 = load <8 x i8>* %B
	%tmp3 = shl <8 x i8> %tmp1, %tmp2
	ret <8 x i8> %tmp3
}

define <4 x i16> @vshls16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vshls16:
;CHECK: vshl.u16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = load <4 x i16>* %B
	%tmp3 = shl <4 x i16> %tmp1, %tmp2
	ret <4 x i16> %tmp3
}

define <2 x i32> @vshls32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK: vshls32:
;CHECK: vshl.u32
	%tmp1 = load <2 x i32>* %A
	%tmp2 = load <2 x i32>* %B
	%tmp3 = shl <2 x i32> %tmp1, %tmp2
	ret <2 x i32> %tmp3
}

define <1 x i64> @vshls64(<1 x i64>* %A, <1 x i64>* %B) nounwind {
;CHECK: vshls64:
;CHECK: vshl.u64
	%tmp1 = load <1 x i64>* %A
	%tmp2 = load <1 x i64>* %B
	%tmp3 = shl <1 x i64> %tmp1, %tmp2
	ret <1 x i64> %tmp3
}

; Vector shift left by an immediate (64-bit d-registers): a 'shl' by a
; constant splat of (element width - 1) is selected as VSHL.I<size> with an
; immediate shift count.

define <8 x i8> @vshli8(<8 x i8>* %A) nounwind {
;CHECK: vshli8:
;CHECK: vshl.i8
	%tmp1 = load <8 x i8>* %A
	%tmp2 = shl <8 x i8> %tmp1, < i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7 >
	ret <8 x i8> %tmp2
}

define <4 x i16> @vshli16(<4 x i16>* %A) nounwind {
;CHECK: vshli16:
;CHECK: vshl.i16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = shl <4 x i16> %tmp1, < i16 15, i16 15, i16 15, i16 15 >
	ret <4 x i16> %tmp2
}

define <2 x i32> @vshli32(<2 x i32>* %A) nounwind {
;CHECK: vshli32:
;CHECK: vshl.i32
	%tmp1 = load <2 x i32>* %A
	%tmp2 = shl <2 x i32> %tmp1, < i32 31, i32 31 >
	ret <2 x i32> %tmp2
}

define <1 x i64> @vshli64(<1 x i64>* %A) nounwind {
;CHECK: vshli64:
;CHECK: vshl.i64
	%tmp1 = load <1 x i64>* %A
	%tmp2 = shl <1 x i64> %tmp1, < i64 63 >
	ret <1 x i64> %tmp2
}

; Vector shift left by a register amount (128-bit q-registers): same
; VSHL.U<size> register-shift selection as the d-register tests above.

define <16 x i8> @vshlQs8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK: vshlQs8:
;CHECK: vshl.u8
	%tmp1 = load <16 x i8>* %A
	%tmp2 = load <16 x i8>* %B
	%tmp3 = shl <16 x i8> %tmp1, %tmp2
	ret <16 x i8> %tmp3
}

define <8 x i16> @vshlQs16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK: vshlQs16:
;CHECK: vshl.u16
	%tmp1 = load <8 x i16>* %A
	%tmp2 = load <8 x i16>* %B
	%tmp3 = shl <8 x i16> %tmp1, %tmp2
	ret <8 x i16> %tmp3
}

define <4 x i32> @vshlQs32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK: vshlQs32:
;CHECK: vshl.u32
	%tmp1 = load <4 x i32>* %A
	%tmp2 = load <4 x i32>* %B
	%tmp3 = shl <4 x i32> %tmp1, %tmp2
	ret <4 x i32> %tmp3
}

define <2 x i64> @vshlQs64(<2 x i64>* %A, <2 x i64>* %B) nounwind {
;CHECK: vshlQs64:
;CHECK: vshl.u64
	%tmp1 = load <2 x i64>* %A
	%tmp2 = load <2 x i64>* %B
	%tmp3 = shl <2 x i64> %tmp1, %tmp2
	ret <2 x i64> %tmp3
}

; Vector shift left by an immediate (128-bit q-registers): constant-splat
; 'shl' selected as VSHL.I<size>.

define <16 x i8> @vshlQi8(<16 x i8>* %A) nounwind {
;CHECK: vshlQi8:
;CHECK: vshl.i8
	%tmp1 = load <16 x i8>* %A
	%tmp2 = shl <16 x i8> %tmp1, < i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7 >
	ret <16 x i8> %tmp2
}

define <8 x i16> @vshlQi16(<8 x i16>* %A) nounwind {
;CHECK: vshlQi16:
;CHECK: vshl.i16
	%tmp1 = load <8 x i16>* %A
	%tmp2 = shl <8 x i16> %tmp1, < i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15, i16 15 >
	ret <8 x i16> %tmp2
}

define <4 x i32> @vshlQi32(<4 x i32>* %A) nounwind {
;CHECK: vshlQi32:
;CHECK: vshl.i32
	%tmp1 = load <4 x i32>* %A
	%tmp2 = shl <4 x i32> %tmp1, < i32 31, i32 31, i32 31, i32 31 >
	ret <4 x i32> %tmp2
}

define <2 x i64> @vshlQi64(<2 x i64>* %A) nounwind {
;CHECK: vshlQi64:
;CHECK: vshl.i64
	%tmp1 = load <2 x i64>* %A
	%tmp2 = shl <2 x i64> %tmp1, < i64 63, i64 63 >
	ret <2 x i64> %tmp2
}

; Vector logical shift right by a register amount (d-registers): NEON has no
; right-shift-by-register, so 'lshr' is lowered to a negate of the shift
; amount (VNEG, or VSUB from zero for i64) followed by VSHL.U<size>.

define <8 x i8> @vlshru8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK: vlshru8:
;CHECK: vneg.s8
;CHECK: vshl.u8
	%tmp1 = load <8 x i8>* %A
	%tmp2 = load <8 x i8>* %B
	%tmp3 = lshr <8 x i8> %tmp1, %tmp2
	ret <8 x i8> %tmp3
}

define <4 x i16> @vlshru16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vlshru16:
;CHECK: vneg.s16
;CHECK: vshl.u16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = load <4 x i16>* %B
	%tmp3 = lshr <4 x i16> %tmp1, %tmp2
	ret <4 x i16> %tmp3
}

define <2 x i32> @vlshru32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK: vlshru32:
;CHECK: vneg.s32
;CHECK: vshl.u32
	%tmp1 = load <2 x i32>* %A
	%tmp2 = load <2 x i32>* %B
	%tmp3 = lshr <2 x i32> %tmp1, %tmp2
	ret <2 x i32> %tmp3
}

define <1 x i64> @vlshru64(<1 x i64>* %A, <1 x i64>* %B) nounwind {
;CHECK: vlshru64:
;CHECK: vsub.i64
;CHECK: vshl.u64
	%tmp1 = load <1 x i64>* %A
	%tmp2 = load <1 x i64>* %B
	%tmp3 = lshr <1 x i64> %tmp1, %tmp2
	ret <1 x i64> %tmp3
}

; Vector logical shift right by an immediate (d-registers): a constant-splat
; 'lshr' by the full element width maps to VSHR.U<size> with the maximum
; immediate shift count (NOTE(review): shifting by the element width is the
; NEON #<width> encoding this test pins down).

define <8 x i8> @vlshri8(<8 x i8>* %A) nounwind {
;CHECK: vlshri8:
;CHECK: vshr.u8
	%tmp1 = load <8 x i8>* %A
	%tmp2 = lshr <8 x i8> %tmp1, < i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8 >
	ret <8 x i8> %tmp2
}

define <4 x i16> @vlshri16(<4 x i16>* %A) nounwind {
;CHECK: vlshri16:
;CHECK: vshr.u16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = lshr <4 x i16> %tmp1, < i16 16, i16 16, i16 16, i16 16 >
	ret <4 x i16> %tmp2
}

define <2 x i32> @vlshri32(<2 x i32>* %A) nounwind {
;CHECK: vlshri32:
;CHECK: vshr.u32
	%tmp1 = load <2 x i32>* %A
	%tmp2 = lshr <2 x i32> %tmp1, < i32 32, i32 32 >
	ret <2 x i32> %tmp2
}

define <1 x i64> @vlshri64(<1 x i64>* %A) nounwind {
;CHECK: vlshri64:
;CHECK: vshr.u64
	%tmp1 = load <1 x i64>* %A
	%tmp2 = lshr <1 x i64> %tmp1, < i64 64 >
	ret <1 x i64> %tmp2
}

; Vector logical shift right by a register amount (q-registers): lowered as
; negate-then-VSHL.U<size>, same as the d-register cases.

define <16 x i8> @vlshrQu8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK: vlshrQu8:
;CHECK: vneg.s8
;CHECK: vshl.u8
	%tmp1 = load <16 x i8>* %A
	%tmp2 = load <16 x i8>* %B
	%tmp3 = lshr <16 x i8> %tmp1, %tmp2
	ret <16 x i8> %tmp3
}

define <8 x i16> @vlshrQu16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK: vlshrQu16:
;CHECK: vneg.s16
;CHECK: vshl.u16
	%tmp1 = load <8 x i16>* %A
	%tmp2 = load <8 x i16>* %B
	%tmp3 = lshr <8 x i16> %tmp1, %tmp2
	ret <8 x i16> %tmp3
}

define <4 x i32> @vlshrQu32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK: vlshrQu32:
;CHECK: vneg.s32
;CHECK: vshl.u32
	%tmp1 = load <4 x i32>* %A
	%tmp2 = load <4 x i32>* %B
	%tmp3 = lshr <4 x i32> %tmp1, %tmp2
	ret <4 x i32> %tmp3
}

define <2 x i64> @vlshrQu64(<2 x i64>* %A, <2 x i64>* %B) nounwind {
;CHECK: vlshrQu64:
;CHECK: vsub.i64
;CHECK: vshl.u64
	%tmp1 = load <2 x i64>* %A
	%tmp2 = load <2 x i64>* %B
	%tmp3 = lshr <2 x i64> %tmp1, %tmp2
	ret <2 x i64> %tmp3
}

; Vector logical shift right by an immediate (q-registers): constant-splat
; 'lshr' by the element width selected as VSHR.U<size>.

define <16 x i8> @vlshrQi8(<16 x i8>* %A) nounwind {
;CHECK: vlshrQi8:
;CHECK: vshr.u8
	%tmp1 = load <16 x i8>* %A
	%tmp2 = lshr <16 x i8> %tmp1, < i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8 >
	ret <16 x i8> %tmp2
}

define <8 x i16> @vlshrQi16(<8 x i16>* %A) nounwind {
;CHECK: vlshrQi16:
;CHECK: vshr.u16
	%tmp1 = load <8 x i16>* %A
	%tmp2 = lshr <8 x i16> %tmp1, < i16 16, i16 16, i16 16, i16 16, i16 16, i16 16, i16 16, i16 16 >
	ret <8 x i16> %tmp2
}

define <4 x i32> @vlshrQi32(<4 x i32>* %A) nounwind {
;CHECK: vlshrQi32:
;CHECK: vshr.u32
	%tmp1 = load <4 x i32>* %A
	%tmp2 = lshr <4 x i32> %tmp1, < i32 32, i32 32, i32 32, i32 32 >
	ret <4 x i32> %tmp2
}

define <2 x i64> @vlshrQi64(<2 x i64>* %A) nounwind {
;CHECK: vlshrQi64:
;CHECK: vshr.u64
	%tmp1 = load <2 x i64>* %A
	%tmp2 = lshr <2 x i64> %tmp1, < i64 64, i64 64 >
	ret <2 x i64> %tmp2
}

; Vector arithmetic shift right by a register amount (d-registers): 'ashr'
; is lowered to a negate of the shift amount (VNEG, or VSUB for i64)
; followed by the signed register shift VSHL.S<size>.

define <8 x i8> @vashrs8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK: vashrs8:
;CHECK: vneg.s8
;CHECK: vshl.s8
	%tmp1 = load <8 x i8>* %A
	%tmp2 = load <8 x i8>* %B
	%tmp3 = ashr <8 x i8> %tmp1, %tmp2
	ret <8 x i8> %tmp3
}

define <4 x i16> @vashrs16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vashrs16:
;CHECK: vneg.s16
;CHECK: vshl.s16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = load <4 x i16>* %B
	%tmp3 = ashr <4 x i16> %tmp1, %tmp2
	ret <4 x i16> %tmp3
}

define <2 x i32> @vashrs32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK: vashrs32:
;CHECK: vneg.s32
;CHECK: vshl.s32
	%tmp1 = load <2 x i32>* %A
	%tmp2 = load <2 x i32>* %B
	%tmp3 = ashr <2 x i32> %tmp1, %tmp2
	ret <2 x i32> %tmp3
}

define <1 x i64> @vashrs64(<1 x i64>* %A, <1 x i64>* %B) nounwind {
;CHECK: vashrs64:
;CHECK: vsub.i64
;CHECK: vshl.s64
	%tmp1 = load <1 x i64>* %A
	%tmp2 = load <1 x i64>* %B
	%tmp3 = ashr <1 x i64> %tmp1, %tmp2
	ret <1 x i64> %tmp3
}

; Vector arithmetic shift right by an immediate (d-registers): constant-splat
; 'ashr' by the element width selected as VSHR.S<size>.

define <8 x i8> @vashri8(<8 x i8>* %A) nounwind {
;CHECK: vashri8:
;CHECK: vshr.s8
	%tmp1 = load <8 x i8>* %A
	%tmp2 = ashr <8 x i8> %tmp1, < i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8 >
	ret <8 x i8> %tmp2
}

define <4 x i16> @vashri16(<4 x i16>* %A) nounwind {
;CHECK: vashri16:
;CHECK: vshr.s16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = ashr <4 x i16> %tmp1, < i16 16, i16 16, i16 16, i16 16 >
	ret <4 x i16> %tmp2
}

define <2 x i32> @vashri32(<2 x i32>* %A) nounwind {
;CHECK: vashri32:
;CHECK: vshr.s32
	%tmp1 = load <2 x i32>* %A
	%tmp2 = ashr <2 x i32> %tmp1, < i32 32, i32 32 >
	ret <2 x i32> %tmp2
}

define <1 x i64> @vashri64(<1 x i64>* %A) nounwind {
;CHECK: vashri64:
;CHECK: vshr.s64
	%tmp1 = load <1 x i64>* %A
	%tmp2 = ashr <1 x i64> %tmp1, < i64 64 >
	ret <1 x i64> %tmp2
}

; Vector arithmetic shift right by a register amount (q-registers): lowered
; as negate-then-VSHL.S<size>, same as the d-register cases.

define <16 x i8> @vashrQs8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK: vashrQs8:
;CHECK: vneg.s8
;CHECK: vshl.s8
	%tmp1 = load <16 x i8>* %A
	%tmp2 = load <16 x i8>* %B
	%tmp3 = ashr <16 x i8> %tmp1, %tmp2
	ret <16 x i8> %tmp3
}

define <8 x i16> @vashrQs16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK: vashrQs16:
;CHECK: vneg.s16
;CHECK: vshl.s16
	%tmp1 = load <8 x i16>* %A
	%tmp2 = load <8 x i16>* %B
	%tmp3 = ashr <8 x i16> %tmp1, %tmp2
	ret <8 x i16> %tmp3
}

define <4 x i32> @vashrQs32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK: vashrQs32:
;CHECK: vneg.s32
;CHECK: vshl.s32
	%tmp1 = load <4 x i32>* %A
	%tmp2 = load <4 x i32>* %B
	%tmp3 = ashr <4 x i32> %tmp1, %tmp2
	ret <4 x i32> %tmp3
}

define <2 x i64> @vashrQs64(<2 x i64>* %A, <2 x i64>* %B) nounwind {
;CHECK: vashrQs64:
;CHECK: vsub.i64
;CHECK: vshl.s64
	%tmp1 = load <2 x i64>* %A
	%tmp2 = load <2 x i64>* %B
	%tmp3 = ashr <2 x i64> %tmp1, %tmp2
	ret <2 x i64> %tmp3
}

; Vector arithmetic shift right by an immediate (q-registers): constant-splat
; 'ashr' by the element width selected as VSHR.S<size>.

define <16 x i8> @vashrQi8(<16 x i8>* %A) nounwind {
;CHECK: vashrQi8:
;CHECK: vshr.s8
	%tmp1 = load <16 x i8>* %A
	%tmp2 = ashr <16 x i8> %tmp1, < i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8, i8 8 >
	ret <16 x i8> %tmp2
}

define <8 x i16> @vashrQi16(<8 x i16>* %A) nounwind {
;CHECK: vashrQi16:
;CHECK: vshr.s16
	%tmp1 = load <8 x i16>* %A
	%tmp2 = ashr <8 x i16> %tmp1, < i16 16, i16 16, i16 16, i16 16, i16 16, i16 16, i16 16, i16 16 >
	ret <8 x i16> %tmp2
}

define <4 x i32> @vashrQi32(<4 x i32>* %A) nounwind {
;CHECK: vashrQi32:
;CHECK: vshr.s32
	%tmp1 = load <4 x i32>* %A
	%tmp2 = ashr <4 x i32> %tmp1, < i32 32, i32 32, i32 32, i32 32 >
	ret <4 x i32> %tmp2
}

define <2 x i64> @vashrQi64(<2 x i64>* %A) nounwind {
;CHECK: vashrQi64:
;CHECK: vshr.s64
	%tmp1 = load <2 x i64>* %A
	%tmp2 = ashr <2 x i64> %tmp1, < i64 64, i64 64 >
	ret <2 x i64> %tmp2
}