; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse2 | FileCheck %s --check-prefixes=ANY,SSE,SSE2
; RUN: llc < %s -mtriple=x86_64-- -mattr=+sse4.1 | FileCheck %s --check-prefixes=ANY,SSE,SSE41

; There are at least 3 potential patterns corresponding to an unsigned saturated add: min, cmp with sum, cmp with not.
; Test each of those patterns with i8/i16/i32/i64.
; Test each of those with a constant operand and a variable operand.
; Test each of those with a 128-bit vector type.
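; As a rough, illustrative sketch only (the functions below are the authoritative IR),
; an unsigned saturating "x + 42" can be expressed in each of those three ways:
;   min:         s = umin(x, ~42); r = s + 42          (~42 == -43)
;   cmp sum:     a = x + 42; r = (x u> a)   ? -1 : a
;   cmp notval:  a = x + 42; r = (x u> ~42) ? -1 : a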

define i8 @unsigned_sat_constant_i8_using_min(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: cmpb $-43, %al
; ANY-NEXT: jb .LBB0_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movb $-43, %al
; ANY-NEXT: .LBB0_2:
; ANY-NEXT: addb $42, %al
; ANY-NEXT: # kill: def $al killed $al killed $eax
; ANY-NEXT: retq
  %c = icmp ult i8 %x, -43
  %s = select i1 %c, i8 %x, i8 -43
  %r = add i8 %s, 42
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_sum(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addb $42, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: jb .LBB1_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB1_2:
; ANY-NEXT: retq
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_constant_i8_using_cmp_notval(i8 %x) {
; ANY-LABEL: unsigned_sat_constant_i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addb $42, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: jb .LBB2_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB2_2:
; ANY-NEXT: retq
  %a = add i8 %x, 42
  %c = icmp ugt i8 %x, -43
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_constant_i16_using_min(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_min:
; ANY: # %bb.0:
; ANY-NEXT: movzwl %di, %eax
; ANY-NEXT: cmpl $65493, %eax # imm = 0xFFD5
; ANY-NEXT: movl $65493, %eax # imm = 0xFFD5
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: addl $42, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %c = icmp ult i16 %x, -43
  %s = select i1 %c, i16 %x, i16 -43
  %r = add i16 %s, 42
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_sum(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addw $42, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_constant_i16_using_cmp_notval(i16 %x) {
; ANY-LABEL: unsigned_sat_constant_i16_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addw $42, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %a = add i16 %x, 42
  %c = icmp ugt i16 %x, -43
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_constant_i32_using_min(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_min:
; ANY: # %bb.0:
; ANY-NEXT: cmpl $-43, %edi
; ANY-NEXT: movl $-43, %eax
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: addl $42, %eax
; ANY-NEXT: retq
  %c = icmp ult i32 %x, -43
  %s = select i1 %c, i32 %x, i32 -43
  %r = add i32 %s, 42
  ret i32 %r
}

define i32 @unsigned_sat_constant_i32_using_cmp_sum(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addl $42, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: retq
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_constant_i32_using_cmp_notval(i32 %x) {
; ANY-LABEL: unsigned_sat_constant_i32_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addl $42, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: retq
  %a = add i32 %x, 42
  %c = icmp ugt i32 %x, -43
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_constant_i64_using_min(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_min:
; ANY: # %bb.0:
; ANY-NEXT: cmpq $-43, %rdi
; ANY-NEXT: movq $-43, %rax
; ANY-NEXT: cmovbq %rdi, %rax
; ANY-NEXT: addq $42, %rax
; ANY-NEXT: retq
  %c = icmp ult i64 %x, -43
  %s = select i1 %c, i64 %x, i64 -43
  %r = add i64 %s, 42
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_sum(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addq $42, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovaeq %rdi, %rax
; ANY-NEXT: retq
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_constant_i64_using_cmp_notval(i64 %x) {
; ANY-LABEL: unsigned_sat_constant_i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: addq $42, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovaeq %rdi, %rax
; ANY-NEXT: retq
  %a = add i64 %x, 42
  %c = icmp ugt i64 %x, -43
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i8 @unsigned_sat_variable_i8_using_min(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: movl %esi, %ecx
; ANY-NEXT: notb %cl
; ANY-NEXT: cmpb %cl, %al
; ANY-NEXT: jb .LBB12_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %ecx, %eax
; ANY-NEXT: .LBB12_2:
; ANY-NEXT: addb %sil, %al
; ANY-NEXT: # kill: def $al killed $al killed $eax
; ANY-NEXT: retq
  %noty = xor i8 %y, -1
  %c = icmp ult i8 %x, %noty
  %s = select i1 %c, i8 %x, i8 %noty
  %r = add i8 %s, %y
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_sum(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addb %sil, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: jb .LBB13_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB13_2:
; ANY-NEXT: retq
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %a
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i8 @unsigned_sat_variable_i8_using_cmp_notval(i8 %x, i8 %y) {
; ANY-LABEL: unsigned_sat_variable_i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: movl %esi, %eax
; ANY-NEXT: notb %al
; ANY-NEXT: cmpb %al, %dil
; ANY-NEXT: movb $-1, %al
; ANY-NEXT: ja .LBB14_2
; ANY-NEXT: # %bb.1:
; ANY-NEXT: addb %sil, %dil
; ANY-NEXT: movl %edi, %eax
; ANY-NEXT: .LBB14_2:
; ANY-NEXT: retq
  %noty = xor i8 %y, -1
  %a = add i8 %x, %y
  %c = icmp ugt i8 %x, %noty
  %r = select i1 %c, i8 -1, i8 %a
  ret i8 %r
}

define i16 @unsigned_sat_variable_i16_using_min(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_min:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: movl %esi, %eax
; ANY-NEXT: notl %eax
; ANY-NEXT: cmpw %ax, %di
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: leal (%rax,%rsi), %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %noty = xor i16 %y, -1
  %c = icmp ult i16 %x, %noty
  %s = select i1 %c, i16 %x, i16 %noty
  %r = add i16 %s, %y
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_sum(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addw %si, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %a
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i16 @unsigned_sat_variable_i16_using_cmp_notval(i16 %x, i16 %y) {
; ANY-LABEL: unsigned_sat_variable_i16_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: # kill: def $edi killed $edi def $rdi
; ANY-NEXT: leal (%rdi,%rsi), %ecx
; ANY-NEXT: notl %esi
; ANY-NEXT: cmpw %si, %di
; ANY-NEXT: movl $65535, %eax # imm = 0xFFFF
; ANY-NEXT: cmovbel %ecx, %eax
; ANY-NEXT: # kill: def $ax killed $ax killed $eax
; ANY-NEXT: retq
  %noty = xor i16 %y, -1
  %a = add i16 %x, %y
  %c = icmp ugt i16 %x, %noty
  %r = select i1 %c, i16 -1, i16 %a
  ret i16 %r
}

define i32 @unsigned_sat_variable_i32_using_min(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_min:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: movl %esi, %eax
; ANY-NEXT: notl %eax
; ANY-NEXT: cmpl %eax, %edi
; ANY-NEXT: cmovbl %edi, %eax
; ANY-NEXT: leal (%rax,%rsi), %eax
; ANY-NEXT: retq
  %noty = xor i32 %y, -1
  %c = icmp ult i32 %x, %noty
  %s = select i1 %c, i32 %x, i32 %noty
  %r = add i32 %s, %y
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_sum(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addl %esi, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovael %edi, %eax
; ANY-NEXT: retq
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %a
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i32 @unsigned_sat_variable_i32_using_cmp_notval(i32 %x, i32 %y) {
; ANY-LABEL: unsigned_sat_variable_i32_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: # kill: def $esi killed $esi def $rsi
; ANY-NEXT: # kill: def $edi killed $edi def $rdi
; ANY-NEXT: leal (%rdi,%rsi), %ecx
; ANY-NEXT: notl %esi
; ANY-NEXT: cmpl %esi, %edi
; ANY-NEXT: movl $-1, %eax
; ANY-NEXT: cmovbel %ecx, %eax
; ANY-NEXT: retq
  %noty = xor i32 %y, -1
  %a = add i32 %x, %y
  %c = icmp ugt i32 %x, %noty
  %r = select i1 %c, i32 -1, i32 %a
  ret i32 %r
}

define i64 @unsigned_sat_variable_i64_using_min(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_min:
; ANY: # %bb.0:
; ANY-NEXT: movq %rsi, %rax
; ANY-NEXT: notq %rax
; ANY-NEXT: cmpq %rax, %rdi
; ANY-NEXT: cmovbq %rdi, %rax
; ANY-NEXT: leaq (%rax,%rsi), %rax
; ANY-NEXT: retq
  %noty = xor i64 %y, -1
  %c = icmp ult i64 %x, %noty
  %s = select i1 %c, i64 %x, i64 %noty
  %r = add i64 %s, %y
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_sum(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: addq %rsi, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovaeq %rdi, %rax
; ANY-NEXT: retq
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %a
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define i64 @unsigned_sat_variable_i64_using_cmp_notval(i64 %x, i64 %y) {
; ANY-LABEL: unsigned_sat_variable_i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: leaq (%rdi,%rsi), %rcx
; ANY-NEXT: notq %rsi
; ANY-NEXT: cmpq %rsi, %rdi
; ANY-NEXT: movq $-1, %rax
; ANY-NEXT: cmovbeq %rcx, %rax
; ANY-NEXT: retq
  %noty = xor i64 %y, -1
  %a = add i64 %x, %y
  %c = icmp ugt i64 %x, %noty
  %r = select i1 %c, i64 -1, i64 %a
  ret i64 %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_min(<16 x i8> %x) {
; ANY-LABEL: unsigned_sat_constant_v16i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: pminub {{.*}}(%rip), %xmm0
; ANY-NEXT: paddb {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %c = icmp ult <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = add <16 x i8> %s, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_sum(<16 x i8> %x) {
; ANY-LABEL: unsigned_sat_constant_v16i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusb {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_constant_v16i8_using_cmp_notval(<16 x i8> %x) {
; ANY-LABEL: unsigned_sat_constant_v16i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: paddusb {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <16 x i8> %x, <i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42, i8 42>
  %c = icmp ugt <16 x i8> %x, <i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43, i8 -43>
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_min(<8 x i16> %x) {
; SSE2-LABEL: unsigned_sat_constant_v8i16_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT: pxor %xmm1, %xmm0
; SSE2-NEXT: pminsw {{.*}}(%rip), %xmm0
; SSE2-NEXT: pxor %xmm1, %xmm0
; SSE2-NEXT: paddw {{.*}}(%rip), %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v8i16_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pminuw {{.*}}(%rip), %xmm0
; SSE41-NEXT: paddw {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
  %c = icmp ult <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = add <8 x i16> %s, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_sum(<8 x i16> %x) {
; ANY-LABEL: unsigned_sat_constant_v8i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusw {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_constant_v8i16_using_cmp_notval(<8 x i16> %x) {
; ANY-LABEL: unsigned_sat_constant_v8i16_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: paddusw {{.*}}(%rip), %xmm0
; ANY-NEXT: retq
  %a = add <8 x i16> %x, <i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42, i16 42>
  %c = icmp ugt <8 x i16> %x, <i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43, i16 -43>
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_min(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483605,2147483605,2147483605,2147483605]
; SSE2-NEXT: pcmpgtd %xmm1, %xmm2
; SSE2-NEXT: pand %xmm2, %xmm0
; SSE2-NEXT: pandn {{.*}}(%rip), %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddd {{.*}}(%rip), %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v4i32_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pminud {{.*}}(%rip), %xmm0
; SSE41-NEXT: paddd {{.*}}(%rip), %xmm0
; SSE41-NEXT: retq
  %c = icmp ult <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = add <4 x i32> %s, <i32 42, i32 42, i32 42, i32 42>
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_sum(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE2-NEXT: paddd %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm2, %xmm0
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v4i32_using_cmp_sum:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [42,42,42,42]
; SSE41-NEXT: paddd %xmm0, %xmm2
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: pminud %xmm2, %xmm1
; SSE41-NEXT: pcmpeqd %xmm0, %xmm1
; SSE41-NEXT: pcmpeqd %xmm0, %xmm0
; SSE41-NEXT: pxor %xmm0, %xmm1
; SSE41-NEXT: por %xmm2, %xmm1
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: retq
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_notval(<4 x i32> %x) {
; SSE2-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE2-NEXT: paddd %xmm0, %xmm1
; SSE2-NEXT: pxor {{.*}}(%rip), %xmm0
; SSE2-NEXT: pcmpgtd {{.*}}(%rip), %xmm0
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v4i32_using_cmp_notval:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [42,42,42,42]
; SSE41-NEXT: paddd %xmm0, %xmm1
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [4294967253,4294967253,4294967253,4294967253]
; SSE41-NEXT: pminud %xmm0, %xmm2
; SSE41-NEXT: pcmpeqd %xmm2, %xmm0
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: por %xmm1, %xmm0
; SSE41-NEXT: retq
  %a = add <4 x i32> %x, <i32 42, i32 42, i32 42, i32 42>
  %c = icmp ugt <4 x i32> %x, <i32 -43, i32 -43, i32 -43, i32 -43>
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_min(<2 x i64> %x) {
; SSE2-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; SSE2-NEXT: movdqa %xmm2, %xmm3
; SSE2-NEXT: pcmpgtd %xmm1, %xmm3
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm2, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
; SSE2-NEXT: pand %xmm4, %xmm1
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm3[1,1,3,3]
; SSE2-NEXT: por %xmm1, %xmm2
; SSE2-NEXT: pand %xmm2, %xmm0
; SSE2-NEXT: pandn {{.*}}(%rip), %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddq {{.*}}(%rip), %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: movapd {{.*#+}} xmm2 = [18446744073709551573,18446744073709551573]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: pxor %xmm1, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [9223372034707292117,9223372034707292117]
; SSE41-NEXT: movdqa %xmm3, %xmm4
; SSE41-NEXT: pcmpgtd %xmm0, %xmm4
; SSE41-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm3, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm5, %xmm3
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,3,3]
; SSE41-NEXT: por %xmm3, %xmm0
; SSE41-NEXT: blendvpd %xmm0, %xmm1, %xmm2
; SSE41-NEXT: paddq {{.*}}(%rip), %xmm2
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: retq
  %c = icmp ult <2 x i64> %x, <i64 -43, i64 -43>
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> <i64 -43, i64 -43>
  %r = add <2 x i64> %s, <i64 42, i64 42>
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_sum(<2 x i64> %x) {
; ANY-LABEL: unsigned_sat_constant_v2i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: movdqa {{.*#+}} xmm1 = [42,42]
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: pcmpgtd %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm0, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; ANY-NEXT: pand %xmm4, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; ANY-NEXT: por %xmm1, %xmm0
; ANY-NEXT: por %xmm2, %xmm0
; ANY-NEXT: retq
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_notval(<2 x i64> %x) {
; ANY-LABEL: unsigned_sat_constant_v2i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: movdqa {{.*#+}} xmm1 = [42,42]
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: pxor {{.*}}(%rip), %xmm0
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: pcmpgtd %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm2, %xmm0
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm0[1,1,3,3]
; ANY-NEXT: pand %xmm4, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; ANY-NEXT: por %xmm1, %xmm0
; ANY-NEXT: por %xmm2, %xmm0
; ANY-NEXT: retq
  %a = add <2 x i64> %x, <i64 42, i64 42>
  %c = icmp ugt <2 x i64> %x, <i64 -43, i64 -43>
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_min(<16 x i8> %x, <16 x i8> %y) {
; ANY-LABEL: unsigned_sat_variable_v16i8_using_min:
; ANY: # %bb.0:
; ANY-NEXT: pcmpeqd %xmm2, %xmm2
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: pminub %xmm2, %xmm0
; ANY-NEXT: paddb %xmm1, %xmm0
; ANY-NEXT: retq
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %c = icmp ult <16 x i8> %x, %noty
  %s = select <16 x i1> %c, <16 x i8> %x, <16 x i8> %noty
  %r = add <16 x i8> %s, %y
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_sum(<16 x i8> %x, <16 x i8> %y) {
; ANY-LABEL: unsigned_sat_variable_v16i8_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusb %xmm1, %xmm0
; ANY-NEXT: retq
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %a
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <16 x i8> @unsigned_sat_variable_v16i8_using_cmp_notval(<16 x i8> %x, <16 x i8> %y) {
; ANY-LABEL: unsigned_sat_variable_v16i8_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: pcmpeqd %xmm2, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: paddb %xmm1, %xmm3
; ANY-NEXT: pxor %xmm2, %xmm1
; ANY-NEXT: pminub %xmm0, %xmm1
; ANY-NEXT: pcmpeqb %xmm1, %xmm0
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: por %xmm3, %xmm0
; ANY-NEXT: retq
  %noty = xor <16 x i8> %y, <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>
  %a = add <16 x i8> %x, %y
  %c = icmp ugt <16 x i8> %x, %noty
  %r = select <16 x i1> %c, <16 x i8> <i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1, i8 -1>, <16 x i8> %a
  ret <16 x i8> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_min(<8 x i16> %x, <8 x i16> %y) {
; SSE2-LABEL: unsigned_sat_variable_v8i16_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa {{.*#+}} xmm3 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT: pxor %xmm3, %xmm0
; SSE2-NEXT: pxor %xmm3, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pminsw %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm3, %xmm0
; SSE2-NEXT: paddw %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v8i16_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: pxor %xmm1, %xmm2
; SSE41-NEXT: pminuw %xmm2, %xmm0
; SSE41-NEXT: paddw %xmm1, %xmm0
; SSE41-NEXT: retq
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %c = icmp ult <8 x i16> %x, %noty
  %s = select <8 x i1> %c, <8 x i16> %x, <8 x i16> %noty
  %r = add <8 x i16> %s, %y
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_sum(<8 x i16> %x, <8 x i16> %y) {
; ANY-LABEL: unsigned_sat_variable_v8i16_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddusw %xmm1, %xmm0
; ANY-NEXT: retq
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %a
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <8 x i16> @unsigned_sat_variable_v8i16_using_cmp_notval(<8 x i16> %x, <8 x i16> %y) {
; SSE2-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: paddw %xmm1, %xmm3
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [32768,32768,32768,32768,32768,32768,32768,32768]
; SSE2-NEXT: pxor %xmm4, %xmm0
; SSE2-NEXT: pxor %xmm4, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtw %xmm2, %xmm0
; SSE2-NEXT: por %xmm3, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v8i16_using_cmp_notval:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: movdqa %xmm0, %xmm3
; SSE41-NEXT: paddw %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pminuw %xmm0, %xmm1
; SSE41-NEXT: pcmpeqw %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: por %xmm3, %xmm0
; SSE41-NEXT: retq
  %noty = xor <8 x i16> %y, <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>
  %a = add <8 x i16> %x, %y
  %c = icmp ugt <8 x i16> %x, %noty
  %r = select <8 x i1> %c, <8 x i16> <i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1, i16 -1>, <8 x i16> %a
  ret <8 x i16> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_min(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm4
; SSE2-NEXT: pxor %xmm3, %xmm4
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm3
; SSE2-NEXT: pcmpgtd %xmm4, %xmm3
; SSE2-NEXT: pand %xmm3, %xmm0
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pandn %xmm3, %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddd %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v4i32_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: pxor %xmm1, %xmm2
; SSE41-NEXT: pminud %xmm2, %xmm0
; SSE41-NEXT: paddd %xmm1, %xmm0
; SSE41-NEXT: retq
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %c = icmp ult <4 x i32> %x, %noty
  %s = select <4 x i1> %c, <4 x i32> %x, <4 x i32> %noty
  %r = add <4 x i32> %s, %y
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_sum(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; SSE2: # %bb.0:
; SSE2-NEXT: paddd %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm2, %xmm0
; SSE2-NEXT: por %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v4i32_using_cmp_sum:
; SSE41: # %bb.0:
; SSE41-NEXT: paddd %xmm0, %xmm1
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pminud %xmm1, %xmm2
; SSE41-NEXT: pcmpeqd %xmm0, %xmm2
; SSE41-NEXT: pcmpeqd %xmm0, %xmm0
; SSE41-NEXT: pxor %xmm0, %xmm2
; SSE41-NEXT: por %xmm1, %xmm2
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: retq
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %a
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_notval(<4 x i32> %x, <4 x i32> %y) {
; SSE2-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; SSE2: # %bb.0:
; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: paddd %xmm1, %xmm3
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pxor %xmm4, %xmm0
; SSE2-NEXT: pxor %xmm4, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: pcmpgtd %xmm2, %xmm0
; SSE2-NEXT: por %xmm3, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v4i32_using_cmp_notval:
; SSE41: # %bb.0:
; SSE41-NEXT: pcmpeqd %xmm2, %xmm2
; SSE41-NEXT: movdqa %xmm0, %xmm3
; SSE41-NEXT: paddd %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pminud %xmm0, %xmm1
; SSE41-NEXT: pcmpeqd %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: por %xmm3, %xmm0
; SSE41-NEXT: retq
  %noty = xor <4 x i32> %y, <i32 -1, i32 -1, i32 -1, i32 -1>
  %a = add <4 x i32> %x, %y
  %c = icmp ugt <4 x i32> %x, %noty
  %r = select <4 x i1> %c, <4 x i32> <i32 -1, i32 -1, i32 -1, i32 -1>, <4 x i32> %a
  ret <4 x i32> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_min(<2 x i64> %x, <2 x i64> %y) {
; SSE2-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: pcmpeqd %xmm3, %xmm3
; SSE2-NEXT: movdqa %xmm0, %xmm4
; SSE2-NEXT: pxor %xmm2, %xmm4
; SSE2-NEXT: pxor %xmm3, %xmm2
; SSE2-NEXT: pxor %xmm1, %xmm2
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pcmpgtd %xmm4, %xmm5
; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
; SSE2-NEXT: pcmpeqd %xmm4, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; SSE2-NEXT: pand %xmm6, %xmm2
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
; SSE2-NEXT: por %xmm2, %xmm4
; SSE2-NEXT: pand %xmm4, %xmm0
; SSE2-NEXT: pxor %xmm3, %xmm4
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pandn %xmm4, %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
; SSE2-NEXT: paddq %xmm1, %xmm0
; SSE2-NEXT: retq
;
; SSE41-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpeqd %xmm3, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm3
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm4
; SSE41-NEXT: pxor %xmm0, %xmm4
; SSE41-NEXT: pxor %xmm3, %xmm0
; SSE41-NEXT: movdqa %xmm0, %xmm5
; SSE41-NEXT: pcmpgtd %xmm4, %xmm5
; SSE41-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
; SSE41-NEXT: pcmpeqd %xmm4, %xmm0
; SSE41-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,3,3]
; SSE41-NEXT: pand %xmm6, %xmm4
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,3,3]
; SSE41-NEXT: por %xmm4, %xmm0
; SSE41-NEXT: blendvpd %xmm0, %xmm2, %xmm3
; SSE41-NEXT: paddq %xmm1, %xmm3
; SSE41-NEXT: movdqa %xmm3, %xmm0
; SSE41-NEXT: retq
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %c = icmp ult <2 x i64> %x, %noty
  %s = select <2 x i1> %c, <2 x i64> %x, <2 x i64> %noty
  %r = add <2 x i64> %s, %y
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_sum(<2 x i64> %x, <2 x i64> %y) {
; ANY-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
; ANY-NEXT: pcmpgtd %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm0, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; ANY-NEXT: pand %xmm4, %xmm2
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
; ANY-NEXT: por %xmm1, %xmm0
; ANY-NEXT: por %xmm2, %xmm0
; ANY-NEXT: retq
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %a
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}

define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_notval(<2 x i64> %x, <2 x i64> %y) {
; ANY-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; ANY: # %bb.0:
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: pcmpeqd %xmm3, %xmm3
; ANY-NEXT: movdqa %xmm0, %xmm4
; ANY-NEXT: paddq %xmm1, %xmm4
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm2, %xmm3
; ANY-NEXT: pxor %xmm1, %xmm3
; ANY-NEXT: movdqa %xmm0, %xmm1
; ANY-NEXT: pcmpgtd %xmm3, %xmm1
; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,2,2]
; ANY-NEXT: pcmpeqd %xmm0, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
; ANY-NEXT: pand %xmm2, %xmm3
; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
; ANY-NEXT: por %xmm4, %xmm0
; ANY-NEXT: por %xmm3, %xmm0
; ANY-NEXT: retq
  %noty = xor <2 x i64> %y, <i64 -1, i64 -1>
  %a = add <2 x i64> %x, %y
  %c = icmp ugt <2 x i64> %x, %noty
  %r = select <2 x i1> %c, <2 x i64> <i64 -1, i64 -1>, <2 x i64> %a
  ret <2 x i64> %r
}