; RUN: llc < %s -march=arm -mattr=+neon | FileCheck %s

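; Basic vector adds on 64-bit (D-register) operands: a plain IR add (or fadd
; for floats) on each legal 64-bit vector type should select the matching
; vadd.{i8,i16,i32,i64,f32} instruction.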
define <8 x i8> @vaddi8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK-LABEL: vaddi8:
;CHECK: vadd.i8
  %tmp1 = load <8 x i8>* %A
  %tmp2 = load <8 x i8>* %B
  %tmp3 = add <8 x i8> %tmp1, %tmp2
  ret <8 x i8> %tmp3
}

define <4 x i16> @vaddi16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK-LABEL: vaddi16:
;CHECK: vadd.i16
  %tmp1 = load <4 x i16>* %A
  %tmp2 = load <4 x i16>* %B
  %tmp3 = add <4 x i16> %tmp1, %tmp2
  ret <4 x i16> %tmp3
}

define <2 x i32> @vaddi32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK-LABEL: vaddi32:
;CHECK: vadd.i32
  %tmp1 = load <2 x i32>* %A
  %tmp2 = load <2 x i32>* %B
  %tmp3 = add <2 x i32> %tmp1, %tmp2
  ret <2 x i32> %tmp3
}

define <1 x i64> @vaddi64(<1 x i64>* %A, <1 x i64>* %B) nounwind {
;CHECK-LABEL: vaddi64:
;CHECK: vadd.i64
  %tmp1 = load <1 x i64>* %A
  %tmp2 = load <1 x i64>* %B
  %tmp3 = add <1 x i64> %tmp1, %tmp2
  ret <1 x i64> %tmp3
}

define <2 x float> @vaddf32(<2 x float>* %A, <2 x float>* %B) nounwind {
;CHECK-LABEL: vaddf32:
;CHECK: vadd.f32
  %tmp1 = load <2 x float>* %A
  %tmp2 = load <2 x float>* %B
  %tmp3 = fadd <2 x float> %tmp1, %tmp2
  ret <2 x float> %tmp3
}

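; The same adds on 128-bit (Q-register) operands.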
define <16 x i8> @vaddQi8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK-LABEL: vaddQi8:
;CHECK: vadd.i8
  %tmp1 = load <16 x i8>* %A
  %tmp2 = load <16 x i8>* %B
  %tmp3 = add <16 x i8> %tmp1, %tmp2
  ret <16 x i8> %tmp3
}

define <8 x i16> @vaddQi16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK-LABEL: vaddQi16:
;CHECK: vadd.i16
  %tmp1 = load <8 x i16>* %A
  %tmp2 = load <8 x i16>* %B
  %tmp3 = add <8 x i16> %tmp1, %tmp2
  ret <8 x i16> %tmp3
}

define <4 x i32> @vaddQi32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK-LABEL: vaddQi32:
;CHECK: vadd.i32
  %tmp1 = load <4 x i32>* %A
  %tmp2 = load <4 x i32>* %B
  %tmp3 = add <4 x i32> %tmp1, %tmp2
  ret <4 x i32> %tmp3
}

define <2 x i64> @vaddQi64(<2 x i64>* %A, <2 x i64>* %B) nounwind {
;CHECK-LABEL: vaddQi64:
;CHECK: vadd.i64
  %tmp1 = load <2 x i64>* %A
  %tmp2 = load <2 x i64>* %B
  %tmp3 = add <2 x i64> %tmp1, %tmp2
  ret <2 x i64> %tmp3
}

define <4 x float> @vaddQf32(<4 x float>* %A, <4 x float>* %B) nounwind {
;CHECK-LABEL: vaddQf32:
;CHECK: vadd.f32
  %tmp1 = load <4 x float>* %A
  %tmp2 = load <4 x float>* %B
  %tmp3 = fadd <4 x float> %tmp1, %tmp2
  ret <4 x float> %tmp3
}

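; vaddhn: add and narrow, returning the high half of each double-width sum,
; exercised through the NEON intrinsic.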
define <8 x i8> @vaddhni16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK-LABEL: vaddhni16:
;CHECK: vaddhn.i16
  %tmp1 = load <8 x i16>* %A
  %tmp2 = load <8 x i16>* %B
  %tmp3 = call <8 x i8> @llvm.arm.neon.vaddhn.v8i8(<8 x i16> %tmp1, <8 x i16> %tmp2)
  ret <8 x i8> %tmp3
}

define <4 x i16> @vaddhni32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK-LABEL: vaddhni32:
;CHECK: vaddhn.i32
  %tmp1 = load <4 x i32>* %A
  %tmp2 = load <4 x i32>* %B
  %tmp3 = call <4 x i16> @llvm.arm.neon.vaddhn.v4i16(<4 x i32> %tmp1, <4 x i32> %tmp2)
  ret <4 x i16> %tmp3
}

define <2 x i32> @vaddhni64(<2 x i64>* %A, <2 x i64>* %B) nounwind {
;CHECK-LABEL: vaddhni64:
;CHECK: vaddhn.i64
  %tmp1 = load <2 x i64>* %A
  %tmp2 = load <2 x i64>* %B
  %tmp3 = call <2 x i32> @llvm.arm.neon.vaddhn.v2i32(<2 x i64> %tmp1, <2 x i64> %tmp2)
  ret <2 x i32> %tmp3
}

declare <8 x i8>  @llvm.arm.neon.vaddhn.v8i8(<8 x i16>, <8 x i16>) nounwind readnone
declare <4 x i16> @llvm.arm.neon.vaddhn.v4i16(<4 x i32>, <4 x i32>) nounwind readnone
declare <2 x i32> @llvm.arm.neon.vaddhn.v2i32(<2 x i64>, <2 x i64>) nounwind readnone

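; vraddhn: rounding variant of vaddhn (half of the discarded low part is added
; to the sum before narrowing).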
define <8 x i8> @vraddhni16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK-LABEL: vraddhni16:
;CHECK: vraddhn.i16
  %tmp1 = load <8 x i16>* %A
  %tmp2 = load <8 x i16>* %B
  %tmp3 = call <8 x i8> @llvm.arm.neon.vraddhn.v8i8(<8 x i16> %tmp1, <8 x i16> %tmp2)
  ret <8 x i8> %tmp3
}

define <4 x i16> @vraddhni32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK-LABEL: vraddhni32:
;CHECK: vraddhn.i32
  %tmp1 = load <4 x i32>* %A
  %tmp2 = load <4 x i32>* %B
  %tmp3 = call <4 x i16> @llvm.arm.neon.vraddhn.v4i16(<4 x i32> %tmp1, <4 x i32> %tmp2)
  ret <4 x i16> %tmp3
}

define <2 x i32> @vraddhni64(<2 x i64>* %A, <2 x i64>* %B) nounwind {
;CHECK-LABEL: vraddhni64:
;CHECK: vraddhn.i64
  %tmp1 = load <2 x i64>* %A
  %tmp2 = load <2 x i64>* %B
  %tmp3 = call <2 x i32> @llvm.arm.neon.vraddhn.v2i32(<2 x i64> %tmp1, <2 x i64> %tmp2)
  ret <2 x i32> %tmp3
}

declare <8 x i8>  @llvm.arm.neon.vraddhn.v8i8(<8 x i16>, <8 x i16>) nounwind readnone
declare <4 x i16> @llvm.arm.neon.vraddhn.v4i16(<4 x i32>, <4 x i32>) nounwind readnone
declare <2 x i32> @llvm.arm.neon.vraddhn.v2i32(<2 x i64>, <2 x i64>) nounwind readnone

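; The vaddhn operation written in plain IR (add, logical shift right by half
; the element width, truncate) should also select vaddhn, without the
; intrinsic.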
define <8 x i8> @vaddhni16_natural(<8 x i16> %A, <8 x i16> %B) nounwind {
; CHECK-LABEL: vaddhni16_natural:
; CHECK: vaddhn.i16
  %sum = add <8 x i16> %A, %B
  %shift = lshr <8 x i16> %sum, <i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8, i16 8>
  %trunc = trunc <8 x i16> %shift to <8 x i8>
  ret <8 x i8> %trunc
}

define <4 x i16> @vaddhni32_natural(<4 x i32> %A, <4 x i32> %B) nounwind {
; CHECK-LABEL: vaddhni32_natural:
; CHECK: vaddhn.i32
  %sum = add <4 x i32> %A, %B
  %shift = lshr <4 x i32> %sum, <i32 16, i32 16, i32 16, i32 16>
  %trunc = trunc <4 x i32> %shift to <4 x i16>
  ret <4 x i16> %trunc
}

define <2 x i32> @vaddhni64_natural(<2 x i64> %A, <2 x i64> %B) nounwind {
; CHECK-LABEL: vaddhni64_natural:
; CHECK: vaddhn.i64
  %sum = add <2 x i64> %A, %B
  %shift = lshr <2 x i64> %sum, <i64 32, i64 32>
  %trunc = trunc <2 x i64> %shift to <2 x i32>
  ret <2 x i32> %trunc
}

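; vaddl: long add; both narrow operands are sign-extended (.s) or
; zero-extended (.u) to double width before the add.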
define <8 x i16> @vaddls8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK-LABEL: vaddls8:
;CHECK: vaddl.s8
  %tmp1 = load <8 x i8>* %A
  %tmp2 = load <8 x i8>* %B
  %tmp3 = sext <8 x i8> %tmp1 to <8 x i16>
  %tmp4 = sext <8 x i8> %tmp2 to <8 x i16>
  %tmp5 = add <8 x i16> %tmp3, %tmp4
  ret <8 x i16> %tmp5
}

define <4 x i32> @vaddls16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK-LABEL: vaddls16:
;CHECK: vaddl.s16
  %tmp1 = load <4 x i16>* %A
  %tmp2 = load <4 x i16>* %B
  %tmp3 = sext <4 x i16> %tmp1 to <4 x i32>
  %tmp4 = sext <4 x i16> %tmp2 to <4 x i32>
  %tmp5 = add <4 x i32> %tmp3, %tmp4
  ret <4 x i32> %tmp5
}

define <2 x i64> @vaddls32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK-LABEL: vaddls32:
;CHECK: vaddl.s32
  %tmp1 = load <2 x i32>* %A
  %tmp2 = load <2 x i32>* %B
  %tmp3 = sext <2 x i32> %tmp1 to <2 x i64>
  %tmp4 = sext <2 x i32> %tmp2 to <2 x i64>
  %tmp5 = add <2 x i64> %tmp3, %tmp4
  ret <2 x i64> %tmp5
}

define <8 x i16> @vaddlu8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK-LABEL: vaddlu8:
;CHECK: vaddl.u8
  %tmp1 = load <8 x i8>* %A
  %tmp2 = load <8 x i8>* %B
  %tmp3 = zext <8 x i8> %tmp1 to <8 x i16>
  %tmp4 = zext <8 x i8> %tmp2 to <8 x i16>
  %tmp5 = add <8 x i16> %tmp3, %tmp4
  ret <8 x i16> %tmp5
}

define <4 x i32> @vaddlu16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK-LABEL: vaddlu16:
;CHECK: vaddl.u16
  %tmp1 = load <4 x i16>* %A
  %tmp2 = load <4 x i16>* %B
  %tmp3 = zext <4 x i16> %tmp1 to <4 x i32>
  %tmp4 = zext <4 x i16> %tmp2 to <4 x i32>
  %tmp5 = add <4 x i32> %tmp3, %tmp4
  ret <4 x i32> %tmp5
}

define <2 x i64> @vaddlu32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK-LABEL: vaddlu32:
;CHECK: vaddl.u32
  %tmp1 = load <2 x i32>* %A
  %tmp2 = load <2 x i32>* %B
  %tmp3 = zext <2 x i32> %tmp1 to <2 x i64>
  %tmp4 = zext <2 x i32> %tmp2 to <2 x i64>
  %tmp5 = add <2 x i64> %tmp3, %tmp4
  ret <2 x i64> %tmp5
}

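; vaddw: wide add; only the second, narrow operand is extended before being
; added to the full-width first operand.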
define <8 x i16> @vaddws8(<8 x i16>* %A, <8 x i8>* %B) nounwind {
;CHECK-LABEL: vaddws8:
;CHECK: vaddw.s8
  %tmp1 = load <8 x i16>* %A
  %tmp2 = load <8 x i8>* %B
  %tmp3 = sext <8 x i8> %tmp2 to <8 x i16>
  %tmp4 = add <8 x i16> %tmp1, %tmp3
  ret <8 x i16> %tmp4
}

define <4 x i32> @vaddws16(<4 x i32>* %A, <4 x i16>* %B) nounwind {
;CHECK-LABEL: vaddws16:
;CHECK: vaddw.s16
  %tmp1 = load <4 x i32>* %A
  %tmp2 = load <4 x i16>* %B
  %tmp3 = sext <4 x i16> %tmp2 to <4 x i32>
  %tmp4 = add <4 x i32> %tmp1, %tmp3
  ret <4 x i32> %tmp4
}

define <2 x i64> @vaddws32(<2 x i64>* %A, <2 x i32>* %B) nounwind {
;CHECK-LABEL: vaddws32:
;CHECK: vaddw.s32
  %tmp1 = load <2 x i64>* %A
  %tmp2 = load <2 x i32>* %B
  %tmp3 = sext <2 x i32> %tmp2 to <2 x i64>
  %tmp4 = add <2 x i64> %tmp1, %tmp3
  ret <2 x i64> %tmp4
}

define <8 x i16> @vaddwu8(<8 x i16>* %A, <8 x i8>* %B) nounwind {
;CHECK-LABEL: vaddwu8:
;CHECK: vaddw.u8
  %tmp1 = load <8 x i16>* %A
  %tmp2 = load <8 x i8>* %B
  %tmp3 = zext <8 x i8> %tmp2 to <8 x i16>
  %tmp4 = add <8 x i16> %tmp1, %tmp3
  ret <8 x i16> %tmp4
}

define <4 x i32> @vaddwu16(<4 x i32>* %A, <4 x i16>* %B) nounwind {
;CHECK-LABEL: vaddwu16:
;CHECK: vaddw.u16
  %tmp1 = load <4 x i32>* %A
  %tmp2 = load <4 x i16>* %B
  %tmp3 = zext <4 x i16> %tmp2 to <4 x i32>
  %tmp4 = add <4 x i32> %tmp1, %tmp3
  ret <4 x i32> %tmp4
}

define <2 x i64> @vaddwu32(<2 x i64>* %A, <2 x i32>* %B) nounwind {
;CHECK-LABEL: vaddwu32:
;CHECK: vaddw.u32
  %tmp1 = load <2 x i64>* %A
  %tmp2 = load <2 x i32>* %B
  %tmp3 = zext <2 x i32> %tmp2 to <2 x i64>
  %tmp4 = add <2 x i64> %tmp1, %tmp3
  ret <2 x i64> %tmp4
}