; RUN: llc < %s -march=cellspu > %t1.s
; RUN: grep and %t1.s | count 234
; RUN: grep andc %t1.s | count 85
; RUN: grep andi %t1.s | count 37
; RUN: grep andhi %t1.s | count 30
; RUN: grep andbi %t1.s | count 4

; CellSPU legalization is over-sensitive to Legalize's traversal order.
; XFAIL: *

target datalayout = "E-p:32:32:128-f64:64:128-f32:32:128-i64:32:128-i32:32:128-i16:16:128-i8:8:128-i1:8:128-a0:0:128-v128:128:128-s0:128:128"
target triple = "spu"
; AND instruction generation:
; Both operand orders are tested for each type so the pattern match is
; verified to be commutative.
define <4 x i32> @and_v4i32_1(<4 x i32> %arg1, <4 x i32> %arg2) {
        %A = and <4 x i32> %arg1, %arg2
        ret <4 x i32> %A
}

define <4 x i32> @and_v4i32_2(<4 x i32> %arg1, <4 x i32> %arg2) {
        %A = and <4 x i32> %arg2, %arg1
        ret <4 x i32> %A
}

define <8 x i16> @and_v8i16_1(<8 x i16> %arg1, <8 x i16> %arg2) {
        %A = and <8 x i16> %arg1, %arg2
        ret <8 x i16> %A
}

define <8 x i16> @and_v8i16_2(<8 x i16> %arg1, <8 x i16> %arg2) {
        %A = and <8 x i16> %arg2, %arg1
        ret <8 x i16> %A
}

define <16 x i8> @and_v16i8_1(<16 x i8> %arg1, <16 x i8> %arg2) {
        %A = and <16 x i8> %arg2, %arg1
        ret <16 x i8> %A
}

define <16 x i8> @and_v16i8_2(<16 x i8> %arg1, <16 x i8> %arg2) {
        %A = and <16 x i8> %arg1, %arg2
        ret <16 x i8> %A
}

define i32 @and_i32_1(i32 %arg1, i32 %arg2) {
        %A = and i32 %arg2, %arg1
        ret i32 %A
}

define i32 @and_i32_2(i32 %arg1, i32 %arg2) {
        %A = and i32 %arg1, %arg2
        ret i32 %A
}

define i16 @and_i16_1(i16 %arg1, i16 %arg2) {
        %A = and i16 %arg2, %arg1
        ret i16 %A
}

define i16 @and_i16_2(i16 %arg1, i16 %arg2) {
        %A = and i16 %arg1, %arg2
        ret i16 %A
}

define i8 @and_i8_1(i8 %arg1, i8 %arg2) {
        %A = and i8 %arg2, %arg1
        ret i8 %A
}

define i8 @and_i8_2(i8 %arg1, i8 %arg2) {
        %A = and i8 %arg1, %arg2
        ret i8 %A
}

; ANDC instruction generation:
; Each function is (x & ~y) expressed as xor-with-all-ones followed by and,
; with the operands of both instructions permuted to cover commutation.
define <4 x i32> @andc_v4i32_1(<4 x i32> %arg1, <4 x i32> %arg2) {
        %A = xor <4 x i32> %arg2, < i32 -1, i32 -1, i32 -1, i32 -1 >
        %B = and <4 x i32> %arg1, %A
        ret <4 x i32> %B
}

define <4 x i32> @andc_v4i32_2(<4 x i32> %arg1, <4 x i32> %arg2) {
        %A = xor <4 x i32> %arg1, < i32 -1, i32 -1, i32 -1, i32 -1 >
        %B = and <4 x i32> %arg2, %A
        ret <4 x i32> %B
}

define <4 x i32> @andc_v4i32_3(<4 x i32> %arg1, <4 x i32> %arg2) {
        %A = xor <4 x i32> %arg1, < i32 -1, i32 -1, i32 -1, i32 -1 >
        %B = and <4 x i32> %A, %arg2
        ret <4 x i32> %B
}

define <8 x i16> @andc_v8i16_1(<8 x i16> %arg1, <8 x i16> %arg2) {
        %A = xor <8 x i16> %arg2, < i16 -1, i16 -1, i16 -1, i16 -1,
                                    i16 -1, i16 -1, i16 -1, i16 -1 >
        %B = and <8 x i16> %arg1, %A
        ret <8 x i16> %B
}

define <8 x i16> @andc_v8i16_2(<8 x i16> %arg1, <8 x i16> %arg2) {
        %A = xor <8 x i16> %arg1, < i16 -1, i16 -1, i16 -1, i16 -1,
                                    i16 -1, i16 -1, i16 -1, i16 -1 >
        %B = and <8 x i16> %arg2, %A
        ret <8 x i16> %B
}

define <16 x i8> @andc_v16i8_1(<16 x i8> %arg1, <16 x i8> %arg2) {
        %A = xor <16 x i8> %arg1, < i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1,
                                    i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1,
                                    i8 -1, i8 -1, i8 -1, i8 -1 >
        %B = and <16 x i8> %arg2, %A
        ret <16 x i8> %B
}

define <16 x i8> @andc_v16i8_2(<16 x i8> %arg1, <16 x i8> %arg2) {
        %A = xor <16 x i8> %arg2, < i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1,
                                    i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1,
                                    i8 -1, i8 -1, i8 -1, i8 -1 >
        %B = and <16 x i8> %arg1, %A
        ret <16 x i8> %B
}

define <16 x i8> @andc_v16i8_3(<16 x i8> %arg1, <16 x i8> %arg2) {
        %A = xor <16 x i8> %arg2, < i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1,
                                    i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1,
                                    i8 -1, i8 -1, i8 -1, i8 -1 >
        %B = and <16 x i8> %A, %arg1
        ret <16 x i8> %B
}

define i32 @andc_i32_1(i32 %arg1, i32 %arg2) {
        %A = xor i32 %arg2, -1
        %B = and i32 %A, %arg1
        ret i32 %B
}

define i32 @andc_i32_2(i32 %arg1, i32 %arg2) {
        %A = xor i32 %arg1, -1
        %B = and i32 %A, %arg2
        ret i32 %B
}

define i32 @andc_i32_3(i32 %arg1, i32 %arg2) {
        %A = xor i32 %arg2, -1
        %B = and i32 %arg1, %A
        ret i32 %B
}

define i16 @andc_i16_1(i16 %arg1, i16 %arg2) {
        %A = xor i16 %arg2, -1
        %B = and i16 %A, %arg1
        ret i16 %B
}

define i16 @andc_i16_2(i16 %arg1, i16 %arg2) {
        %A = xor i16 %arg1, -1
        %B = and i16 %A, %arg2
        ret i16 %B
}

define i16 @andc_i16_3(i16 %arg1, i16 %arg2) {
        %A = xor i16 %arg2, -1
        %B = and i16 %arg1, %A
        ret i16 %B
}

define i8 @andc_i8_1(i8 %arg1, i8 %arg2) {
        %A = xor i8 %arg2, -1
        %B = and i8 %A, %arg1
        ret i8 %B
}

define i8 @andc_i8_2(i8 %arg1, i8 %arg2) {
        %A = xor i8 %arg1, -1
        %B = and i8 %A, %arg2
        ret i8 %B
}

define i8 @andc_i8_3(i8 %arg1, i8 %arg2) {
        %A = xor i8 %arg2, -1
        %B = and i8 %arg1, %A
        ret i8 %B
}

; ANDI instruction generation (i32 data type):
; Immediates 511/-512 are the edge values of a 10-bit signed immediate field;
; 510 and -1 exercise interior and all-ones cases.
define <4 x i32> @andi_v4i32_1(<4 x i32> %in) {
        %tmp2 = and <4 x i32> %in, < i32 511, i32 511, i32 511, i32 511 >
        ret <4 x i32> %tmp2
}

define <4 x i32> @andi_v4i32_2(<4 x i32> %in) {
        %tmp2 = and <4 x i32> %in, < i32 510, i32 510, i32 510, i32 510 >
        ret <4 x i32> %tmp2
}

define <4 x i32> @andi_v4i32_3(<4 x i32> %in) {
        %tmp2 = and <4 x i32> %in, < i32 -1, i32 -1, i32 -1, i32 -1 >
        ret <4 x i32> %tmp2
}

define <4 x i32> @andi_v4i32_4(<4 x i32> %in) {
        %tmp2 = and <4 x i32> %in, < i32 -512, i32 -512, i32 -512, i32 -512 >
        ret <4 x i32> %tmp2
}

define zeroext i32 @andi_u32(i32 zeroext %in) {
        %tmp37 = and i32 %in, 37
        ret i32 %tmp37
}

define signext i32 @andi_i32(i32 signext %in) {
        %tmp38 = and i32 %in, 37
        ret i32 %tmp38
}

define i32 @andi_i32_1(i32 %in) {
        %tmp37 = and i32 %in, 37
        ret i32 %tmp37
}

; ANDHI instruction generation (i16 data type):
define <8 x i16> @andhi_v8i16_1(<8 x i16> %in) {
        %tmp2 = and <8 x i16> %in, < i16 511, i16 511, i16 511, i16 511,
                                     i16 511, i16 511, i16 511, i16 511 >
        ret <8 x i16> %tmp2
}

define <8 x i16> @andhi_v8i16_2(<8 x i16> %in) {
        %tmp2 = and <8 x i16> %in, < i16 510, i16 510, i16 510, i16 510,
                                     i16 510, i16 510, i16 510, i16 510 >
        ret <8 x i16> %tmp2
}

define <8 x i16> @andhi_v8i16_3(<8 x i16> %in) {
        %tmp2 = and <8 x i16> %in, < i16 -1, i16 -1, i16 -1, i16 -1, i16 -1,
                                     i16 -1, i16 -1, i16 -1 >
        ret <8 x i16> %tmp2
}

define <8 x i16> @andhi_v8i16_4(<8 x i16> %in) {
        %tmp2 = and <8 x i16> %in, < i16 -512, i16 -512, i16 -512, i16 -512,
                                     i16 -512, i16 -512, i16 -512, i16 -512 >
        ret <8 x i16> %tmp2
}

define zeroext i16 @andhi_u16(i16 zeroext %in) {
        %tmp37 = and i16 %in, 37                ; <i16> [#uses=1]
        ret i16 %tmp37
}

define signext i16 @andhi_i16(i16 signext %in) {
        %tmp38 = and i16 %in, 37                ; <i16> [#uses=1]
        ret i16 %tmp38
}

; i8 data type (s/b ANDBI if 8-bit registers were supported):
define <16 x i8> @and_v16i8(<16 x i8> %in) {
        ; ANDBI generated for vector types
        %tmp2 = and <16 x i8> %in, < i8 42, i8 42, i8 42, i8 42, i8 42, i8 42,
                                     i8 42, i8 42, i8 42, i8 42, i8 42, i8 42,
                                     i8 42, i8 42, i8 42, i8 42 >
        ret <16 x i8> %tmp2
}

define zeroext i8 @and_u8(i8 zeroext %in) {
        ; ANDBI generated:
        %tmp37 = and i8 %in, 37
        ret i8 %tmp37
}

define signext i8 @and_sext8(i8 signext %in) {
        ; ANDBI generated
        %tmp38 = and i8 %in, 37
        ret i8 %tmp38
}

define i8 @and_i8(i8 %in) {
        ; ANDBI generated
        %tmp38 = and i8 %in, 205
        ret i8 %tmp38
}