; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -instsimplify -S | FileCheck %s

; add nsw (xor X, signbit), signbit --> X

define <2 x i32> @add_nsw_signbit(<2 x i32> %x) {
; CHECK-LABEL: @add_nsw_signbit(
; CHECK-NEXT:    ret <2 x i32> %x
;
  %y = xor <2 x i32> %x, <i32 -2147483648, i32 -2147483648>
  %z = add nsw <2 x i32> %y, <i32 -2147483648, i32 -2147483648>
  ret <2 x i32> %z
}

; add nuw (xor X, signbit), signbit --> X

define <2 x i5> @add_nuw_signbit(<2 x i5> %x) {
; CHECK-LABEL: @add_nuw_signbit(
; CHECK-NEXT:    ret <2 x i5> %x
;
  %y = xor <2 x i5> %x, <i5 -16, i5 -16>
  %z = add nuw <2 x i5> %y, <i5 -16, i5 -16>
  ret <2 x i5> %z
}

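; %x & (0 - %x) keeps only the lowest set bit, so the zext is 0 or a power of two;
; repeating the pattern on the zext is a no-op and the second 'and' folds away.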
define i64 @pow2(i32 %x) {
; CHECK-LABEL: @pow2(
; CHECK-NEXT:    [[NEGX:%.*]] = sub i32 0, %x
; CHECK-NEXT:    [[X2:%.*]] = and i32 %x, [[NEGX]]
; CHECK-NEXT:    [[E:%.*]] = zext i32 [[X2]] to i64
; CHECK-NEXT:    ret i64 [[E]]
;
  %negx = sub i32 0, %x
  %x2 = and i32 %x, %negx
  %e = zext i32 %x2 to i64
  %nege = sub i64 0, %e
  %e2 = and i64 %e, %nege
  ret i64 %e2
}

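; Likewise, 2 << %x is 0 or a single set bit, so isolating its lowest set bit changes nothing.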
define i64 @pow2b(i32 %x) {
; CHECK-LABEL: @pow2b(
; CHECK-NEXT:    [[SH:%.*]] = shl i32 2, %x
; CHECK-NEXT:    [[E:%.*]] = zext i32 [[SH]] to i64
; CHECK-NEXT:    ret i64 [[E]]
;
  %sh = shl i32 2, %x
  %e = zext i32 %sh to i64
  %nege = sub i64 0, %e
  %e2 = and i64 %e, %nege
  ret i64 %e2
}

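; In the and_of_icmps tests, the ranges implied by the two compares never
; intersect, so the 'and' of the compares is always false.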
define i1 @and_of_icmps0(i32 %b) {
; CHECK-LABEL: @and_of_icmps0(
; CHECK-NEXT:    ret i1 false
;
  %1 = add i32 %b, 2
  %2 = icmp ult i32 %1, 4
  %cmp3 = icmp sgt i32 %b, 2
  %cmp = and i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @and_of_icmps0_vec(<2 x i32> %b) {
; CHECK-LABEL: @and_of_icmps0_vec(
; CHECK-NEXT:    ret <2 x i1> zeroinitializer
;
  %1 = add <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp ult <2 x i32> %1, <i32 4, i32 4>
  %cmp3 = icmp sgt <2 x i32> %b, <i32 2, i32 2>
  %cmp = and <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @and_of_icmps1(i32 %b) {
; CHECK-LABEL: @and_of_icmps1(
; CHECK-NEXT:    ret i1 false
;
  %1 = add nsw i32 %b, 2
  %2 = icmp slt i32 %1, 4
  %cmp3 = icmp sgt i32 %b, 2
  %cmp = and i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @and_of_icmps1_vec(<2 x i32> %b) {
; CHECK-LABEL: @and_of_icmps1_vec(
; CHECK-NEXT:    ret <2 x i1> zeroinitializer
;
  %1 = add nsw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp slt <2 x i32> %1, <i32 4, i32 4>
  %cmp3 = icmp sgt <2 x i32> %b, <i32 2, i32 2>
  %cmp = and <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @and_of_icmps2(i32 %b) {
; CHECK-LABEL: @and_of_icmps2(
; CHECK-NEXT:    ret i1 false
;
  %1 = add i32 %b, 2
  %2 = icmp ule i32 %1, 3
  %cmp3 = icmp sgt i32 %b, 2
  %cmp = and i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @and_of_icmps2_vec(<2 x i32> %b) {
; CHECK-LABEL: @and_of_icmps2_vec(
; CHECK-NEXT:    ret <2 x i1> zeroinitializer
;
  %1 = add <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp ule <2 x i32> %1, <i32 3, i32 3>
  %cmp3 = icmp sgt <2 x i32> %b, <i32 2, i32 2>
  %cmp = and <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @and_of_icmps3(i32 %b) {
; CHECK-LABEL: @and_of_icmps3(
; CHECK-NEXT:    ret i1 false
;
  %1 = add nsw i32 %b, 2
  %2 = icmp sle i32 %1, 3
  %cmp3 = icmp sgt i32 %b, 2
  %cmp = and i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @and_of_icmps3_vec(<2 x i32> %b) {
; CHECK-LABEL: @and_of_icmps3_vec(
; CHECK-NEXT:    ret <2 x i1> zeroinitializer
;
  %1 = add nsw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp sle <2 x i32> %1, <i32 3, i32 3>
  %cmp3 = icmp sgt <2 x i32> %b, <i32 2, i32 2>
  %cmp = and <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @and_of_icmps4(i32 %b) {
; CHECK-LABEL: @and_of_icmps4(
; CHECK-NEXT:    ret i1 false
;
  %1 = add nuw i32 %b, 2
  %2 = icmp ult i32 %1, 4
  %cmp3 = icmp ugt i32 %b, 2
  %cmp = and i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @and_of_icmps4_vec(<2 x i32> %b) {
; CHECK-LABEL: @and_of_icmps4_vec(
; CHECK-NEXT:    ret <2 x i1> zeroinitializer
;
  %1 = add nuw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp ult <2 x i32> %1, <i32 4, i32 4>
  %cmp3 = icmp ugt <2 x i32> %b, <i32 2, i32 2>
  %cmp = and <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @and_of_icmps5(i32 %b) {
; CHECK-LABEL: @and_of_icmps5(
; CHECK-NEXT:    ret i1 false
;
  %1 = add nuw i32 %b, 2
  %2 = icmp ule i32 %1, 3
  %cmp3 = icmp ugt i32 %b, 2
  %cmp = and i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @and_of_icmps5_vec(<2 x i32> %b) {
; CHECK-LABEL: @and_of_icmps5_vec(
; CHECK-NEXT:    ret <2 x i1> zeroinitializer
;
  %1 = add nuw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp ule <2 x i32> %1, <i32 3, i32 3>
  %cmp3 = icmp ugt <2 x i32> %b, <i32 2, i32 2>
  %cmp = and <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

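; The or_of_icmps tests use the negated compares, so the two ranges cover every
; value and the 'or' of the compares is always true.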
define i1 @or_of_icmps0(i32 %b) {
; CHECK-LABEL: @or_of_icmps0(
; CHECK-NEXT:    ret i1 true
;
  %1 = add i32 %b, 2
  %2 = icmp uge i32 %1, 4
  %cmp3 = icmp sle i32 %b, 2
  %cmp = or i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @or_of_icmps0_vec(<2 x i32> %b) {
; CHECK-LABEL: @or_of_icmps0_vec(
; CHECK-NEXT:    ret <2 x i1> <i1 true, i1 true>
;
  %1 = add <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp uge <2 x i32> %1, <i32 4, i32 4>
  %cmp3 = icmp sle <2 x i32> %b, <i32 2, i32 2>
  %cmp = or <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @or_of_icmps1(i32 %b) {
; CHECK-LABEL: @or_of_icmps1(
; CHECK-NEXT:    ret i1 true
;
  %1 = add nsw i32 %b, 2
  %2 = icmp sge i32 %1, 4
  %cmp3 = icmp sle i32 %b, 2
  %cmp = or i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @or_of_icmps1_vec(<2 x i32> %b) {
; CHECK-LABEL: @or_of_icmps1_vec(
; CHECK-NEXT:    ret <2 x i1> <i1 true, i1 true>
;
  %1 = add nsw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp sge <2 x i32> %1, <i32 4, i32 4>
  %cmp3 = icmp sle <2 x i32> %b, <i32 2, i32 2>
  %cmp = or <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @or_of_icmps2(i32 %b) {
; CHECK-LABEL: @or_of_icmps2(
; CHECK-NEXT:    ret i1 true
;
  %1 = add i32 %b, 2
  %2 = icmp ugt i32 %1, 3
  %cmp3 = icmp sle i32 %b, 2
  %cmp = or i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @or_of_icmps2_vec(<2 x i32> %b) {
; CHECK-LABEL: @or_of_icmps2_vec(
; CHECK-NEXT:    ret <2 x i1> <i1 true, i1 true>
;
  %1 = add <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp ugt <2 x i32> %1, <i32 3, i32 3>
  %cmp3 = icmp sle <2 x i32> %b, <i32 2, i32 2>
  %cmp = or <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @or_of_icmps3(i32 %b) {
; CHECK-LABEL: @or_of_icmps3(
; CHECK-NEXT:    ret i1 true
;
  %1 = add nsw i32 %b, 2
  %2 = icmp sgt i32 %1, 3
  %cmp3 = icmp sle i32 %b, 2
  %cmp = or i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @or_of_icmps3_vec(<2 x i32> %b) {
; CHECK-LABEL: @or_of_icmps3_vec(
; CHECK-NEXT:    ret <2 x i1> <i1 true, i1 true>
;
  %1 = add nsw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp sgt <2 x i32> %1, <i32 3, i32 3>
  %cmp3 = icmp sle <2 x i32> %b, <i32 2, i32 2>
  %cmp = or <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @or_of_icmps4(i32 %b) {
; CHECK-LABEL: @or_of_icmps4(
; CHECK-NEXT:    ret i1 true
;
  %1 = add nuw i32 %b, 2
  %2 = icmp uge i32 %1, 4
  %cmp3 = icmp ule i32 %b, 2
  %cmp = or i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @or_of_icmps4_vec(<2 x i32> %b) {
; CHECK-LABEL: @or_of_icmps4_vec(
; CHECK-NEXT:    ret <2 x i1> <i1 true, i1 true>
;
  %1 = add nuw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp uge <2 x i32> %1, <i32 4, i32 4>
  %cmp3 = icmp ule <2 x i32> %b, <i32 2, i32 2>
  %cmp = or <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

define i1 @or_of_icmps5(i32 %b) {
; CHECK-LABEL: @or_of_icmps5(
; CHECK-NEXT:    ret i1 true
;
  %1 = add nuw i32 %b, 2
  %2 = icmp ugt i32 %1, 3
  %cmp3 = icmp ule i32 %b, 2
  %cmp = or i1 %2, %cmp3
  ret i1 %cmp
}

define <2 x i1> @or_of_icmps5_vec(<2 x i32> %b) {
; CHECK-LABEL: @or_of_icmps5_vec(
; CHECK-NEXT:    ret <2 x i1> <i1 true, i1 true>
;
  %1 = add nuw <2 x i32> %b, <i32 2, i32 2>
  %2 = icmp ugt <2 x i32> %1, <i32 3, i32 3>
  %cmp3 = icmp ule <2 x i32> %b, <i32 2, i32 2>
  %cmp = or <2 x i1> %2, %cmp3
  ret <2 x i1> %cmp
}

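; 0 - %x wraps for any non-zero %x, so with nuw the only possible result is 0.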
define i32 @neg_nuw(i32 %x) {
; CHECK-LABEL: @neg_nuw(
; CHECK-NEXT:    ret i32 0
;
  %neg = sub nuw i32 0, %x
  ret i32 %neg
}

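; When one compare implies or contradicts the other, the logic op collapses to
; the stronger compare or to a constant.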
define i1 @and_icmp1(i32 %x, i32 %y) {
; CHECK-LABEL: @and_icmp1(
; CHECK-NEXT:    [[TMP1:%.*]] = icmp ult i32 %x, %y
; CHECK-NEXT:    ret i1 [[TMP1]]
;
  %1 = icmp ult i32 %x, %y
  %2 = icmp ne i32 %y, 0
  %3 = and i1 %1, %2
  ret i1 %3
}

define i1 @and_icmp2(i32 %x, i32 %y) {
; CHECK-LABEL: @and_icmp2(
; CHECK-NEXT:    ret i1 false
;
  %1 = icmp ult i32 %x, %y
  %2 = icmp eq i32 %y, 0
  %3 = and i1 %1, %2
  ret i1 %3
}

define i1 @or_icmp1(i32 %x, i32 %y) {
; CHECK-LABEL: @or_icmp1(
; CHECK-NEXT:    [[TMP1:%.*]] = icmp ne i32 %y, 0
; CHECK-NEXT:    ret i1 [[TMP1]]
;
  %1 = icmp ult i32 %x, %y
  %2 = icmp ne i32 %y, 0
  %3 = or i1 %1, %2
  ret i1 %3
}

define i1 @or_icmp2(i32 %x, i32 %y) {
; CHECK-LABEL: @or_icmp2(
; CHECK-NEXT:    ret i1 true
;
  %1 = icmp uge i32 %x, %y
  %2 = icmp ne i32 %y, 0
  %3 = or i1 %1, %2
  ret i1 %3
}

define i1 @or_icmp3(i32 %x, i32 %y) {
; CHECK-LABEL: @or_icmp3(
; CHECK-NEXT:    [[TMP1:%.*]] = icmp uge i32 %x, %y
; CHECK-NEXT:    ret i1 [[TMP1]]
;
  %1 = icmp uge i32 %x, %y
  %2 = icmp eq i32 %y, 0
  %3 = or i1 %1, %2
  ret i1 %3
}

; PR27869 - Look through casts to eliminate cmps and bitwise logic.

define i32 @and_of_zexted_icmps(i32 %i) {
; CHECK-LABEL: @and_of_zexted_icmps(
; CHECK-NEXT:    ret i32 0
;
  %cmp0 = icmp eq i32 %i, 0
  %conv0 = zext i1 %cmp0 to i32
  %cmp1 = icmp ugt i32 %i, 4
  %conv1 = zext i1 %cmp1 to i32
  %and = and i32 %conv0, %conv1
  ret i32 %and
}

; Make sure vectors work too.

define <4 x i32> @and_of_zexted_icmps_vec(<4 x i32> %i) {
; CHECK-LABEL: @and_of_zexted_icmps_vec(
; CHECK-NEXT:    ret <4 x i32> zeroinitializer
;
  %cmp0 = icmp eq <4 x i32> %i, zeroinitializer
  %conv0 = zext <4 x i1> %cmp0 to <4 x i32>
  %cmp1 = icmp slt <4 x i32> %i, zeroinitializer
  %conv1 = zext <4 x i1> %cmp1 to <4 x i32>
  %and = and <4 x i32> %conv0, %conv1
  ret <4 x i32> %and
}

; Try a different cast and weird types.

define i5 @and_of_sexted_icmps(i3 %i) {
; CHECK-LABEL: @and_of_sexted_icmps(
; CHECK-NEXT:    ret i5 0
;
  %cmp0 = icmp eq i3 %i, 0
  %conv0 = sext i1 %cmp0 to i5
  %cmp1 = icmp ugt i3 %i, 1
  %conv1 = sext i1 %cmp1 to i5
  %and = and i5 %conv0, %conv1
  ret i5 %and
}

; Try a different cast and weird vector types.

define i3 @and_of_bitcast_icmps_vec(<3 x i65> %i) {
; CHECK-LABEL: @and_of_bitcast_icmps_vec(
; CHECK-NEXT:    ret i3 0
;
  %cmp0 = icmp sgt <3 x i65> %i, zeroinitializer
  %conv0 = bitcast <3 x i1> %cmp0 to i3
  %cmp1 = icmp slt <3 x i65> %i, zeroinitializer
  %conv1 = bitcast <3 x i1> %cmp1 to i3
  %and = and i3 %conv0, %conv1
  ret i3 %and
}

; We can't do this if the casts are different.

define i16 @and_of_different_cast_icmps(i8 %i) {
; CHECK-LABEL: @and_of_different_cast_icmps(
; CHECK-NEXT:    [[CMP0:%.*]] = icmp eq i8 %i, 0
; CHECK-NEXT:    [[CONV0:%.*]] = zext i1 [[CMP0]] to i16
; CHECK-NEXT:    [[CMP1:%.*]] = icmp eq i8 %i, 1
; CHECK-NEXT:    [[CONV1:%.*]] = sext i1 [[CMP1]] to i16
; CHECK-NEXT:    [[AND:%.*]] = and i16 [[CONV0]], [[CONV1]]
; CHECK-NEXT:    ret i16 [[AND]]
;
  %cmp0 = icmp eq i8 %i, 0
  %conv0 = zext i1 %cmp0 to i16
  %cmp1 = icmp eq i8 %i, 1
  %conv1 = sext i1 %cmp1 to i16
  %and = and i16 %conv0, %conv1
  ret i16 %and
}

define <2 x i3> @and_of_different_cast_icmps_vec(<2 x i8> %i, <2 x i16> %j) {
; CHECK-LABEL: @and_of_different_cast_icmps_vec(
; CHECK-NEXT:    [[CMP0:%.*]] = icmp eq <2 x i8> %i, zeroinitializer
; CHECK-NEXT:    [[CONV0:%.*]] = zext <2 x i1> [[CMP0]] to <2 x i3>
; CHECK-NEXT:    [[CMP1:%.*]] = icmp ugt <2 x i16> %j, <i16 1, i16 1>
; CHECK-NEXT:    [[CONV1:%.*]] = zext <2 x i1> [[CMP1]] to <2 x i3>
; CHECK-NEXT:    [[AND:%.*]] = and <2 x i3> [[CONV0]], [[CONV1]]
; CHECK-NEXT:    ret <2 x i3> [[AND]]
;
  %cmp0 = icmp eq <2 x i8> %i, zeroinitializer
  %conv0 = zext <2 x i1> %cmp0 to <2 x i3>
  %cmp1 = icmp ugt <2 x i16> %j, <i16 1, i16 1>
  %conv1 = zext <2 x i1> %cmp1 to <2 x i3>
  %and = and <2 x i3> %conv0, %conv1
  ret <2 x i3> %and
}

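; One of the compares is always true (%i == 0 also satisfies %i <= 4 unsigned),
; so the 'or' of the zexts is 1.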
define i32 @or_of_zexted_icmps(i32 %i) {
; CHECK-LABEL: @or_of_zexted_icmps(
; CHECK-NEXT:    ret i32 1
;
  %cmp0 = icmp ne i32 %i, 0
  %conv0 = zext i1 %cmp0 to i32
  %cmp1 = icmp uge i32 4, %i
  %conv1 = zext i1 %cmp1 to i32
  %or = or i32 %conv0, %conv1
  ret i32 %or
}

; Try a different cast and weird vector types.

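; 'sge 0' and 'slt 0' are complementary, so the 'or' is all-ones; the result is
; expressed as a bitcast of the all-true vector.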
define i3 @or_of_bitcast_icmps_vec(<3 x i65> %i) {
; CHECK-LABEL: @or_of_bitcast_icmps_vec(
; CHECK-NEXT:    ret i3 bitcast (<3 x i1> <i1 true, i1 true, i1 true> to i3)
;
  %cmp0 = icmp sge <3 x i65> %i, zeroinitializer
  %conv0 = bitcast <3 x i1> %cmp0 to i3
  %cmp1 = icmp slt <3 x i65> %i, zeroinitializer
  %conv1 = bitcast <3 x i1> %cmp1 to i3
  %or = or i3 %conv0, %conv1
  ret i3 %or
}

; We can't simplify if the casts are different.

define i16 @or_of_different_cast_icmps(i8 %i) {
; CHECK-LABEL: @or_of_different_cast_icmps(
; CHECK-NEXT:    [[CMP0:%.*]] = icmp ne i8 %i, 0
; CHECK-NEXT:    [[CONV0:%.*]] = zext i1 [[CMP0]] to i16
; CHECK-NEXT:    [[CMP1:%.*]] = icmp ne i8 %i, 1
; CHECK-NEXT:    [[CONV1:%.*]] = sext i1 [[CMP1]] to i16
; CHECK-NEXT:    [[OR:%.*]] = or i16 [[CONV0]], [[CONV1]]
; CHECK-NEXT:    ret i16 [[OR]]
;
  %cmp0 = icmp ne i8 %i, 0
  %conv0 = zext i1 %cmp0 to i16
  %cmp1 = icmp ne i8 %i, 1
  %conv1 = sext i1 %cmp1 to i16
  %or = or i16 %conv0, %conv1
  ret i16 %or
}

; (A & ~B) | (A ^ B) -> A ^ B

define i32 @test43(i32 %a, i32 %b) {
; CHECK-LABEL: @test43(
; CHECK-NEXT:    [[OR:%.*]] = xor i32 %a, %b
; CHECK-NEXT:    ret i32 [[OR]]
;
  %neg = xor i32 %b, -1
  %and = and i32 %a, %neg
  %xor = xor i32 %a, %b
  %or = or i32 %and, %xor
  ret i32 %or
}

define i32 @test43_commuted_and(i32 %a, i32 %b) {
; CHECK-LABEL: @test43_commuted_and(
; CHECK-NEXT:    [[OR:%.*]] = xor i32 %a, %b
; CHECK-NEXT:    ret i32 [[OR]]
;
  %neg = xor i32 %b, -1
  %and = and i32 %neg, %a
  %xor = xor i32 %a, %b
  %or = or i32 %and, %xor
  ret i32 %or
}

; Commute operands of the 'or'.
; (A ^ B) | (A & ~B) -> A ^ B

define i32 @test44(i32 %a, i32 %b) {
; CHECK-LABEL: @test44(
; CHECK-NEXT:    [[OR:%.*]] = xor i32 %a, %b
; CHECK-NEXT:    ret i32 [[OR]]
;
  %xor = xor i32 %a, %b
  %neg = xor i32 %b, -1
  %and = and i32 %a, %neg
  %or = or i32 %xor, %and
  ret i32 %or
}

define i32 @test44_commuted_and(i32 %a, i32 %b) {
; CHECK-LABEL: @test44_commuted_and(
; CHECK-NEXT:    [[OR:%.*]] = xor i32 %a, %b
; CHECK-NEXT:    ret i32 [[OR]]
;
  %xor = xor i32 %a, %b
  %neg = xor i32 %b, -1
  %and = and i32 %neg, %a
  %or = or i32 %xor, %and
  ret i32 %or
}

; (~A & ~B) | (~A ^ B) -> ~A ^ B

define i32 @test45(i32 %a, i32 %b) {
; CHECK-LABEL: @test45(
; CHECK-NEXT:    [[NEGB:%.*]] = xor i32 [[B:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A:%.*]], [[NEGB]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %negb = xor i32 %b, -1
  %and = and i32 %nega, %negb
  %xor = xor i32 %a, %negb
  %or = or i32 %and, %xor
  ret i32 %or
}

define i32 @test45_commuted_and(i32 %a, i32 %b) {
; CHECK-LABEL: @test45_commuted_and(
; CHECK-NEXT:    [[NEGB:%.*]] = xor i32 [[B:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A:%.*]], [[NEGB]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %negb = xor i32 %b, -1
  %and = and i32 %negb, %nega
  %xor = xor i32 %a, %negb
  %or = or i32 %and, %xor
  ret i32 %or
}

; Commute operands of the 'or'.
; (~A ^ B) | (~A & ~B) -> ~A ^ B

define i32 @test46(i32 %a, i32 %b) {
; CHECK-LABEL: @test46(
; CHECK-NEXT:    [[NEGB:%.*]] = xor i32 [[B:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A:%.*]], [[NEGB]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %negb = xor i32 %b, -1
  %and = and i32 %nega, %negb
  %xor = xor i32 %a, %negb
  %or = or i32 %xor, %and
  ret i32 %or
}

; (~A ^ B) | (~B & ~A) -> ~A ^ B

define i32 @test46_commuted_and(i32 %a, i32 %b) {
; CHECK-LABEL: @test46_commuted_and(
; CHECK-NEXT:    [[NEGB:%.*]] = xor i32 [[B:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A:%.*]], [[NEGB]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %negb = xor i32 %b, -1
  %and = and i32 %negb, %nega
  %xor = xor i32 %a, %negb
  %or = or i32 %xor, %and
  ret i32 %or
}

; (~A ^ B) | (A & B) -> ~A ^ B

define i32 @test47(i32 %a, i32 %b) {
; CHECK-LABEL: @test47(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[NEGA]], [[B:%.*]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %a, %b
  %xor = xor i32 %nega, %b
  %or = or i32 %xor, %and
  ret i32 %or
}

define i32 @test48(i32 %a, i32 %b) {
; CHECK-LABEL: @test48(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[B:%.*]], [[NEGA]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %a, %b
  %xor = xor i32 %b, %nega
  %or = or i32 %xor, %and
  ret i32 %or
}

define i32 @test49(i32 %a, i32 %b) {
; CHECK-LABEL: @test49(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[B:%.*]], [[NEGA]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %b, %a
  %xor = xor i32 %b, %nega
  %or = or i32 %xor, %and
  ret i32 %or
}

define i32 @test50(i32 %a, i32 %b) {
; CHECK-LABEL: @test50(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[NEGA]], [[B:%.*]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %b, %a
  %xor = xor i32 %nega, %b
  %or = or i32 %xor, %and
  ret i32 %or
}

define i32 @test51(i32 %a, i32 %b) {
; CHECK-LABEL: @test51(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[NEGA]], [[B:%.*]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %a, %b
  %xor = xor i32 %nega, %b
  %or = or i32 %and, %xor
  ret i32 %or
}

define i32 @test52(i32 %a, i32 %b) {
; CHECK-LABEL: @test52(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[B:%.*]], [[NEGA]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %a, %b
  %xor = xor i32 %b, %nega
  %or = or i32 %and, %xor
  ret i32 %or
}

define i32 @test53(i32 %a, i32 %b) {
; CHECK-LABEL: @test53(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[B:%.*]], [[NEGA]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %b, %a
  %xor = xor i32 %b, %nega
  %or = or i32 %and, %xor
  ret i32 %or
}

define i32 @test54(i32 %a, i32 %b) {
; CHECK-LABEL: @test54(
; CHECK-NEXT:    [[NEGA:%.*]] = xor i32 [[A:%.*]], -1
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[NEGA]], [[B:%.*]]
; CHECK-NEXT:    ret i32 [[XOR]]
;
  %nega = xor i32 %a, -1
  %and = and i32 %b, %a
  %xor = xor i32 %nega, %b
  %or = or i32 %and, %xor
  ret i32 %or
}

; (A & B) | ~(A ^ B) -> ~(A ^ B)

define i32 @test55(i32 %a, i32 %b) {
; CHECK-LABEL: @test55(
; CHECK-NEXT:    [[AND:%.*]] = and i32 [[A:%.*]], [[B:%.*]]
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A]], [[B]]
; CHECK-NEXT:    [[XNOR:%.*]] = xor i32 [[XOR]], -1
; CHECK-NEXT:    [[OR:%.*]] = or i32 [[AND]], [[XNOR]]
; CHECK-NEXT:    ret i32 [[OR]]
;
  %and = and i32 %a, %b
  %xor = xor i32 %a, %b
  %xnor = xor i32 %xor, -1
  %or = or i32 %and, %xnor
  ret i32 %or
}

; ~(A ^ B) | (A & B) -> ~(A ^ B)

define i32 @test56(i32 %a, i32 %b) {
; CHECK-LABEL: @test56(
; CHECK-NEXT:    [[AND:%.*]] = and i32 [[A:%.*]], [[B:%.*]]
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A]], [[B]]
; CHECK-NEXT:    [[XNOR:%.*]] = xor i32 [[XOR]], -1
; CHECK-NEXT:    [[OR:%.*]] = or i32 [[XNOR]], [[AND]]
; CHECK-NEXT:    ret i32 [[OR]]
;
  %and = and i32 %a, %b
  %xor = xor i32 %a, %b
  %xnor = xor i32 %xor, -1
  %or = or i32 %xnor, %and
  ret i32 %or
}

; (B & A) | ~(A ^ B) -> ~(A ^ B)

define i32 @test57(i32 %a, i32 %b) {
; CHECK-LABEL: @test57(
; CHECK-NEXT:    [[AND:%.*]] = and i32 [[B:%.*]], [[A:%.*]]
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A]], [[B]]
; CHECK-NEXT:    [[XNOR:%.*]] = xor i32 [[XOR]], -1
; CHECK-NEXT:    [[OR:%.*]] = or i32 [[AND]], [[XNOR]]
; CHECK-NEXT:    ret i32 [[OR]]
;
  %and = and i32 %b, %a
  %xor = xor i32 %a, %b
  %xnor = xor i32 %xor, -1
  %or = or i32 %and, %xnor
  ret i32 %or
}

; ~(A ^ B) | (B & A) -> ~(A ^ B)

define i32 @test58(i32 %a, i32 %b) {
; CHECK-LABEL: @test58(
; CHECK-NEXT:    [[AND:%.*]] = and i32 [[B:%.*]], [[A:%.*]]
; CHECK-NEXT:    [[XOR:%.*]] = xor i32 [[A]], [[B]]
; CHECK-NEXT:    [[XNOR:%.*]] = xor i32 [[XOR]], -1
; CHECK-NEXT:    [[OR:%.*]] = or i32 [[XNOR]], [[AND]]
; CHECK-NEXT:    ret i32 [[OR]]
;
  %and = and i32 %b, %a
  %xor = xor i32 %a, %b
  %xnor = xor i32 %xor, -1
  %or = or i32 %xnor, %and
  ret i32 %or
}

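; The 'and' of a shift result with a mask is redundant when the mask keeps every
; bit the shift can leave set; it must stay when the mask clears any of them.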
define i8 @lshr_perfect_mask(i8 %x) {
; CHECK-LABEL: @lshr_perfect_mask(
; CHECK-NEXT:    [[SH:%.*]] = lshr i8 %x, 5
; CHECK-NEXT:    ret i8 [[SH]]
;
  %sh = lshr i8 %x, 5
  %mask = and i8 %sh, 7 ; 0x07
  ret i8 %mask
}

define <2 x i8> @lshr_oversized_mask_splat(<2 x i8> %x) {
; CHECK-LABEL: @lshr_oversized_mask_splat(
; CHECK-NEXT:    [[SH:%.*]] = lshr <2 x i8> %x, <i8 5, i8 5>
; CHECK-NEXT:    ret <2 x i8> [[SH]]
;
  %sh = lshr <2 x i8> %x, <i8 5, i8 5>
  %mask = and <2 x i8> %sh, <i8 135, i8 135> ; 0x87
  ret <2 x i8> %mask
}

define i8 @lshr_undersized_mask(i8 %x) {
; CHECK-LABEL: @lshr_undersized_mask(
; CHECK-NEXT:    [[SH:%.*]] = lshr i8 %x, 5
; CHECK-NEXT:    [[MASK:%.*]] = and i8 [[SH]], -2
; CHECK-NEXT:    ret i8 [[MASK]]
;
  %sh = lshr i8 %x, 5
  %mask = and i8 %sh, -2 ; 0xFE
  ret i8 %mask
}

define <2 x i8> @shl_perfect_mask_splat(<2 x i8> %x) {
; CHECK-LABEL: @shl_perfect_mask_splat(
; CHECK-NEXT:    [[SH:%.*]] = shl <2 x i8> %x, <i8 6, i8 6>
; CHECK-NEXT:    ret <2 x i8> [[SH]]
;
  %sh = shl <2 x i8> %x, <i8 6, i8 6>
  %mask = and <2 x i8> %sh, <i8 192, i8 192> ; 0xC0
  ret <2 x i8> %mask
}

define i8 @shl_oversized_mask(i8 %x) {
; CHECK-LABEL: @shl_oversized_mask(
; CHECK-NEXT:    [[SH:%.*]] = shl i8 %x, 6
; CHECK-NEXT:    ret i8 [[SH]]
;
  %sh = shl i8 %x, 6
  %mask = and i8 %sh, 195 ; 0xC3
  ret i8 %mask
}

define <2 x i8> @shl_undersized_mask_splat(<2 x i8> %x) {
; CHECK-LABEL: @shl_undersized_mask_splat(
; CHECK-NEXT:    [[SH:%.*]] = shl <2 x i8> [[X:%.*]], <i8 6, i8 6>
; CHECK-NEXT:    [[MASK:%.*]] = and <2 x i8> [[SH]], <i8 -120, i8 -120>
; CHECK-NEXT:    ret <2 x i8> [[MASK]]
;
  %sh = shl <2 x i8> %x, <i8 6, i8 6>
  %mask = and <2 x i8> %sh, <i8 136, i8 136> ; 0x88
  ret <2 x i8> %mask
}

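; A | ~A --> -1, with the 'not' written with the constant as the first xor operand.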
define i32 @reversed_not(i32 %a) {
; CHECK-LABEL: @reversed_not(
; CHECK-NEXT:    ret i32 -1
;
  %nega = xor i32 -1, %a
  %or = or i32 %a, %nega
  ret i32 %or
}