/*===---- avx512fintrin.h - AVX512F intrinsics -----------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
#ifndef __IMMINTRIN_H
#error "Never use <avx512fintrin.h> directly; include <immintrin.h> instead."
#endif

#ifndef __AVX512FINTRIN_H
#define __AVX512FINTRIN_H

typedef double __v8df __attribute__((__vector_size__(64)));
typedef float __v16sf __attribute__((__vector_size__(64)));
typedef long long __v8di __attribute__((__vector_size__(64)));
typedef int __v16si __attribute__((__vector_size__(64)));

typedef float __m512 __attribute__((__vector_size__(64)));
typedef double __m512d __attribute__((__vector_size__(64)));
typedef long long __m512i __attribute__((__vector_size__(64)));

typedef unsigned char __mmask8;
typedef unsigned short __mmask16;

/* Rounding mode macros. */
#define _MM_FROUND_TO_NEAREST_INT 0x00
#define _MM_FROUND_TO_NEG_INF 0x01
#define _MM_FROUND_TO_POS_INF 0x02
#define _MM_FROUND_TO_ZERO 0x03
#define _MM_FROUND_CUR_DIRECTION 0x04

/* Create vectors with repeated elements */

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_setzero_si512(void)
{
  return (__m512i)(__v8di){ 0, 0, 0, 0, 0, 0, 0, 0 };
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_set1_epi32(__mmask16 __M, int __A)
{
  return (__m512i) __builtin_ia32_pbroadcastd512_gpr_mask (__A,
              (__v16si) _mm512_setzero_si512 (), __M);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_set1_epi64(__mmask8 __M, long long __A)
{
#ifdef __x86_64__
  return (__m512i) __builtin_ia32_pbroadcastq512_gpr_mask (__A,
              (__v8di) _mm512_setzero_si512 (), __M);
#else
  return (__m512i) __builtin_ia32_pbroadcastq512_mem_mask (__A,
              (__v8di) _mm512_setzero_si512 (), __M);
#endif
}

static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_setzero_ps(void)
{
  return (__m512){ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                   0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 };
}
static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_setzero_pd(void)
{
  return (__m512d){ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 };
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_set1_ps(float __w)
{
  return (__m512){ __w, __w, __w, __w, __w, __w, __w, __w,
                   __w, __w, __w, __w, __w, __w, __w, __w };
}

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_set1_pd(double __w)
{
  return (__m512d){ __w, __w, __w, __w, __w, __w, __w, __w };
}

static __inline __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_set1_epi32(int __s)
{
  return (__m512i)(__v16si){ __s, __s, __s, __s, __s, __s, __s, __s,
                             __s, __s, __s, __s, __s, __s, __s, __s };
}

static __inline __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_set1_epi64(long long __d)
{
  return (__m512i)(__v8di){ __d, __d, __d, __d, __d, __d, __d, __d };
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_broadcastss_ps(__m128 __X)
{
  float __f = __X[0];
  return (__v16sf){ __f, __f, __f, __f,
                    __f, __f, __f, __f,
                    __f, __f, __f, __f,
                    __f, __f, __f, __f };
}

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_broadcastsd_pd(__m128d __X)
{
  double __d = __X[0];
  return (__v8df){ __d, __d, __d, __d,
                   __d, __d, __d, __d };
}
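
/* Illustrative usage sketch (variable names here are hypothetical): the set1
 * and broadcast helpers replicate one scalar across every lane of a 512-bit
 * vector, and the maskz_ variants zero the lanes whose mask bit is clear.
 *
 *   __m512i ones = _mm512_set1_epi32(1);                // 16 lanes, each 1
 *   __m512i low8 = _mm512_maskz_set1_epi32(0x00FF, 7);  // lanes 0-7 = 7, lanes 8-15 = 0
 *   __m512  xs   = _mm512_set1_ps(3.0f);                // 16 lanes, each 3.0f
 */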

/* Cast between vector types */

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_castpd256_pd512(__m256d __a)
{
  return __builtin_shufflevector(__a, __a, 0, 1, 2, 3, -1, -1, -1, -1);
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_castps256_ps512(__m256 __a)
{
  return __builtin_shufflevector(__a, __a, 0, 1, 2, 3, 4, 5, 6, 7,
                                 -1, -1, -1, -1, -1, -1, -1, -1);
}

static __inline __m128d __attribute__((__always_inline__, __nodebug__))
_mm512_castpd512_pd128(__m512d __a)
{
  return __builtin_shufflevector(__a, __a, 0, 1);
}

static __inline __m128 __attribute__((__always_inline__, __nodebug__))
_mm512_castps512_ps128(__m512 __a)
{
  return __builtin_shufflevector(__a, __a, 0, 1, 2, 3);
}

/* Bitwise operators */
static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_and_epi32(__m512i __a, __m512i __b)
{
  return __a & __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_mask_and_epi32(__m512i __src, __mmask16 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pandd512_mask((__v16si) __a, (__v16si) __b,
              (__v16si) __src, (__mmask16) __k);
}
static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_maskz_and_epi32(__mmask16 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pandd512_mask((__v16si) __a, (__v16si) __b,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) __k);
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_and_epi64(__m512i __a, __m512i __b)
{
  return __a & __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_mask_and_epi64(__m512i __src, __mmask8 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pandq512_mask ((__v8di) __a, (__v8di) __b,
              (__v8di) __src, (__mmask8) __k);
}
static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_maskz_and_epi64(__mmask8 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pandq512_mask ((__v8di) __a, (__v8di) __b,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) __k);
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_or_epi32(__m512i __a, __m512i __b)
{
  return __a | __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_mask_or_epi32(__m512i __src, __mmask16 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pord512_mask((__v16si) __a, (__v16si) __b,
              (__v16si) __src, (__mmask16) __k);
}
static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_maskz_or_epi32(__mmask16 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pord512_mask((__v16si) __a, (__v16si) __b,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) __k);
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_or_epi64(__m512i __a, __m512i __b)
{
  return __a | __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_mask_or_epi64(__m512i __src, __mmask8 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_porq512_mask ((__v8di) __a, (__v8di) __b,
              (__v8di) __src, (__mmask8) __k);
}
static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_maskz_or_epi64(__mmask8 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_porq512_mask ((__v8di) __a, (__v8di) __b,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) __k);
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_xor_epi32(__m512i __a, __m512i __b)
{
  return __a ^ __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_mask_xor_epi32(__m512i __src, __mmask16 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pxord512_mask((__v16si) __a, (__v16si) __b,
              (__v16si) __src, (__mmask16) __k);
}
static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_maskz_xor_epi32(__mmask16 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pxord512_mask((__v16si) __a, (__v16si) __b,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) __k);
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_xor_epi64(__m512i __a, __m512i __b)
{
  return __a ^ __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_mask_xor_epi64(__m512i __src, __mmask8 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pxorq512_mask ((__v8di) __a, (__v8di) __b,
              (__v8di) __src, (__mmask8) __k);
}
static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_maskz_xor_epi64(__mmask8 __k, __m512i __a, __m512i __b)
{
  return (__m512i) __builtin_ia32_pxorq512_mask ((__v8di) __a, (__v8di) __b,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) __k);
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_and_si512(__m512i __a, __m512i __b)
{
  return __a & __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_or_si512(__m512i __a, __m512i __b)
{
  return __a | __b;
}

static __inline__ __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_xor_si512(__m512i __a, __m512i __b)
{
  return __a ^ __b;
}
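
/* Illustrative usage sketch (src, a, b are hypothetical caller values): the
 * mask_ forms keep the corresponding lane of __src where the mask bit is
 * clear, while the maskz_ forms zero that lane instead.
 *
 *   __mmask16 k  = 0x000F;                               // select lanes 0-3
 *   __m512i   r1 = _mm512_mask_and_epi32(src, k, a, b);  // lanes 0-3 = a&b, rest = src
 *   __m512i   r2 = _mm512_maskz_and_epi32(k, a, b);      // lanes 0-3 = a&b, rest = 0
 */
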
/* Arithmetic */

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_add_pd(__m512d __a, __m512d __b)
{
  return __a + __b;
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_add_ps(__m512 __a, __m512 __b)
{
  return __a + __b;
}

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_mul_pd(__m512d __a, __m512d __b)
{
  return __a * __b;
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_mul_ps(__m512 __a, __m512 __b)
{
  return __a * __b;
}

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_sub_pd(__m512d __a, __m512d __b)
{
  return __a - __b;
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_sub_ps(__m512 __a, __m512 __b)
{
  return __a - __b;
}

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_max_pd(__m512d __A, __m512d __B)
{
  return (__m512d) __builtin_ia32_maxpd512_mask ((__v8df) __A, (__v8df) __B,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_max_ps(__m512 __A, __m512 __B)
{
  return (__m512) __builtin_ia32_maxps512_mask ((__v16sf) __A, (__v16sf) __B,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_max_epi32(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pmaxsd512_mask ((__v16si) __A, (__v16si) __B,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_max_epu32(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pmaxud512_mask ((__v16si) __A, (__v16si) __B,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_max_epi64(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pmaxsq512_mask ((__v8di) __A, (__v8di) __B,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_max_epu64(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pmaxuq512_mask ((__v8di) __A, (__v8di) __B,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) -1);
}

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_min_pd(__m512d __A, __m512d __B)
{
  return (__m512d) __builtin_ia32_minpd512_mask ((__v8df) __A, (__v8df) __B,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_min_ps(__m512 __A, __m512 __B)
{
  return (__m512) __builtin_ia32_minps512_mask ((__v16sf) __A, (__v16sf) __B,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_min_epi32(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pminsd512_mask ((__v16si) __A, (__v16si) __B,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_min_epu32(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pminud512_mask ((__v16si) __A, (__v16si) __B,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_min_epi64(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pminsq512_mask ((__v8di) __A, (__v8di) __B,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_min_epu64(__m512i __A, __m512i __B)
{
  return (__m512i) __builtin_ia32_pminuq512_mask ((__v8di) __A, (__v8di) __B,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_mul_epi32(__m512i __X, __m512i __Y)
{
  return (__m512i) __builtin_ia32_pmuldq512_mask ((__v16si) __X, (__v16si) __Y,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_mul_epu32(__m512i __X, __m512i __Y)
{
  return (__m512i) __builtin_ia32_pmuludq512_mask ((__v16si) __X, (__v16si) __Y,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) -1);
}
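
/* Illustrative usage sketch: as with their SSE/AVX counterparts,
 * _mm512_mul_epi32 and _mm512_mul_epu32 multiply only the even-numbered
 * 32-bit lanes and widen the eight products to 64 bits.
 *
 *   __m512i a = _mm512_set1_epi32(3), b = _mm512_set1_epi32(-2);
 *   __m512i p = _mm512_mul_epi32(a, b);   // eight 64-bit lanes, each -6
 */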

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_sqrt_pd(__m512d a)
{
  return (__m512d)__builtin_ia32_sqrtpd512_mask((__v8df)a,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_sqrt_ps(__m512 a)
{
  return (__m512)__builtin_ia32_sqrtps512_mask((__v16sf)a,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_rsqrt14_pd(__m512d __A)
{
  return (__m512d) __builtin_ia32_rsqrt14pd512_mask ((__v8df) __A,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_rsqrt14_ps(__m512 __A)
{
  return (__m512) __builtin_ia32_rsqrt14ps512_mask ((__v16sf) __A,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) -1);
}

static __inline__ __m128 __attribute__((__always_inline__, __nodebug__))
_mm_rsqrt14_ss(__m128 __A, __m128 __B)
{
  return (__m128) __builtin_ia32_rsqrt14ss_mask ((__v4sf) __A, (__v4sf) __B,
              (__v4sf) _mm_setzero_ps (), (__mmask8) -1);
}

static __inline__ __m128d __attribute__((__always_inline__, __nodebug__))
_mm_rsqrt14_sd(__m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_rsqrt14sd_mask ((__v2df) __A, (__v2df) __B,
              (__v2df) _mm_setzero_pd (), (__mmask8) -1);
}

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_rcp14_pd(__m512d __A)
{
  return (__m512d) __builtin_ia32_rcp14pd512_mask ((__v8df) __A,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_rcp14_ps(__m512 __A)
{
  return (__m512) __builtin_ia32_rcp14ps512_mask ((__v16sf) __A,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) -1);
}
static __inline__ __m128 __attribute__((__always_inline__, __nodebug__))
_mm_rcp14_ss(__m128 __A, __m128 __B)
{
  return (__m128) __builtin_ia32_rcp14ss_mask ((__v4sf) __A, (__v4sf) __B,
              (__v4sf) _mm_setzero_ps (), (__mmask8) -1);
}

static __inline__ __m128d __attribute__((__always_inline__, __nodebug__))
_mm_rcp14_sd(__m128d __A, __m128d __B)
{
  return (__m128d) __builtin_ia32_rcp14sd_mask ((__v2df) __A, (__v2df) __B,
              (__v2df) _mm_setzero_pd (), (__mmask8) -1);
}

static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_floor_ps(__m512 __A)
{
  return (__m512) __builtin_ia32_rndscaleps_mask ((__v16sf) __A,
              _MM_FROUND_FLOOR, (__v16sf) __A, -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_floor_pd(__m512d __A)
{
  return (__m512d) __builtin_ia32_rndscalepd_mask ((__v8df) __A,
              _MM_FROUND_FLOOR, (__v8df) __A, -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_ceil_ps(__m512 __A)
{
  return (__m512) __builtin_ia32_rndscaleps_mask ((__v16sf) __A,
              _MM_FROUND_CEIL, (__v16sf) __A, -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_ceil_pd(__m512d __A)
{
  return (__m512d) __builtin_ia32_rndscalepd_mask ((__v8df) __A,
              _MM_FROUND_CEIL, (__v8df) __A, -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_abs_epi64(__m512i __A)
{
  return (__m512i) __builtin_ia32_pabsq512_mask ((__v8di) __A,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) -1);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_abs_epi32(__m512i __A)
{
  return (__m512i) __builtin_ia32_pabsd512_mask ((__v16si) __A,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) -1);
}

#define _mm512_roundscale_ps(A, B) __extension__ ({ \
  (__m512)__builtin_ia32_rndscaleps_mask((__v16sf)(A), (B), (__v16sf)(A), \
                                         -1, _MM_FROUND_CUR_DIRECTION); })

#define _mm512_roundscale_pd(A, B) __extension__ ({ \
  (__m512d)__builtin_ia32_rndscalepd_mask((__v8df)(A), (B), (__v8df)(A), \
                                          -1, _MM_FROUND_CUR_DIRECTION); })
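
/* Illustrative usage sketch (x is a hypothetical __m512): floor and ceil round
 * every lane to an integral value, while roundscale takes an immediate whose
 * low bits select the rounding mode and whose high bits select a scale
 * (0 means plain round-to-nearest with no scaling).
 *
 *   __m512 f = _mm512_floor_ps(x);
 *   __m512 r = _mm512_roundscale_ps(x, 0);
 */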

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_fmadd_pd(__m512d __A, __m512d __B, __m512d __C)
{
  return (__m512d)
    __builtin_ia32_vfmaddpd512_mask(__A, __B, __C, (__mmask8) -1,
                                    _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_fmsub_pd(__m512d __A, __m512d __B, __m512d __C)
{
  return (__m512d)
    __builtin_ia32_vfmsubpd512_mask(__A, __B, __C, (__mmask8) -1,
                                    _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_fnmadd_pd(__m512d __A, __m512d __B, __m512d __C)
{
  return (__m512d)
    __builtin_ia32_vfnmaddpd512_mask(__A, __B, __C, (__mmask8) -1,
                                     _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_fmadd_ps(__m512 __A, __m512 __B, __m512 __C)
{
  return (__m512)
    __builtin_ia32_vfmaddps512_mask(__A, __B, __C, (__mmask16) -1,
                                    _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_fmsub_ps(__m512 __A, __m512 __B, __m512 __C)
{
  return (__m512)
    __builtin_ia32_vfmsubps512_mask(__A, __B, __C, (__mmask16) -1,
                                    _MM_FROUND_CUR_DIRECTION);
}

static __inline__ __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_fnmadd_ps(__m512 __A, __m512 __B, __m512 __C)
{
  return (__m512)
    __builtin_ia32_vfnmaddps512_mask(__A, __B, __C, (__mmask16) -1,
                                     _MM_FROUND_CUR_DIRECTION);
}
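
/* Illustrative usage sketch: each lane is computed with a single fused
 * rounding step: fmadd = a*b + c, fmsub = a*b - c, fnmadd = -(a*b) + c.
 *
 *   __m512 acc = _mm512_setzero_ps();
 *   acc = _mm512_fmadd_ps(x, y, acc);   // acc += x * y, one rounding per lane
 */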

/* Vector permutations */

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_permutex2var_epi32(__m512i __A, __m512i __I, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpermt2vard512_mask ((__v16si) __I /* idx */,
              (__v16si) __A, (__v16si) __B, (__mmask16) -1);
}
static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_permutex2var_epi64(__m512i __A, __m512i __I, __m512i __B)
{
  return (__m512i) __builtin_ia32_vpermt2varq512_mask ((__v8di) __I /* idx */,
              (__v8di) __A, (__v8di) __B, (__mmask8) -1);
}

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_permutex2var_pd(__m512d __A, __m512i __I, __m512d __B)
{
  return (__m512d) __builtin_ia32_vpermt2varpd512_mask ((__v8di) __I /* idx */,
              (__v8df) __A, (__v8df) __B, (__mmask8) -1);
}
static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_permutex2var_ps(__m512 __A, __m512i __I, __m512 __B)
{
  return (__m512) __builtin_ia32_vpermt2varps512_mask ((__v16si) __I /* idx */,
              (__v16sf) __A, (__v16sf) __B, (__mmask16) -1);
}
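
/* Illustrative usage sketch: each index lane selects an element from the
 * concatenation of the two sources; for the 32-bit form, index bits [3:0]
 * pick the lane and bit 4 picks between __A (0) and __B (1).
 *
 *   __m512i idx = _mm512_set1_epi32(16);                 // element 0 of the second source
 *   __m512i r   = _mm512_permutex2var_epi32(a, idx, b);  // every lane = b[0]
 */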

#define _mm512_alignr_epi64(A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_alignq512_mask((__v8di)(__m512i)(A), \
                                         (__v8di)(__m512i)(B), \
                                         (I), (__v8di)_mm512_setzero_si512(), \
                                         (__mmask8)-1); })

#define _mm512_alignr_epi32(A, B, I) __extension__ ({ \
  (__m512i)__builtin_ia32_alignd512_mask((__v16si)(__m512i)(A), \
                                         (__v16si)(__m512i)(B), \
                                         (I), (__v16si)_mm512_setzero_si512(), \
                                         (__mmask16)-1); })
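
/* Illustrative usage sketch: alignr concatenates A above B, shifts the
 * combined vector right by I whole lanes, and returns the low 16 (epi32)
 * or 8 (epi64) lanes.
 *
 *   __m512i r = _mm512_alignr_epi32(a, b, 4);  // lanes 4-15 of b, then lanes 0-3 of a
 */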

/* Vector Extract */

#define _mm512_extractf64x4_pd(A, I) __extension__ ({ \
  __m512d __A = (A); \
  (__m256d) \
  __builtin_ia32_extractf64x4_mask((__v8df)__A, (I), \
                                   (__v4df)_mm256_setzero_pd(), \
                                   (__mmask8) -1); })

#define _mm512_extractf32x4_ps(A, I) __extension__ ({ \
  __m512 __A = (A); \
  (__m128) \
  __builtin_ia32_extractf32x4_mask((__v16sf)__A, (I), \
                                   (__v4sf)_mm_setzero_ps(), \
                                   (__mmask8) -1); })

/* Vector Blend */

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_blend_pd(__mmask8 __U, __m512d __A, __m512d __W)
{
  return (__m512d) __builtin_ia32_blendmpd_512_mask ((__v8df) __A, (__v8df) __W,
              (__mmask8) __U);
}

static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_blend_ps(__mmask16 __U, __m512 __A, __m512 __W)
{
  return (__m512) __builtin_ia32_blendmps_512_mask ((__v16sf) __A, (__v16sf) __W,
              (__mmask16) __U);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_blend_epi64(__mmask8 __U, __m512i __A, __m512i __W)
{
  return (__m512i) __builtin_ia32_blendmq_512_mask ((__v8di) __A, (__v8di) __W,
              (__mmask8) __U);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_blend_epi32(__mmask16 __U, __m512i __A, __m512i __W)
{
  return (__m512i) __builtin_ia32_blendmd_512_mask ((__v16si) __A, (__v16si) __W,
              (__mmask16) __U);
}

/* Compare */

#define _mm512_cmp_round_ps_mask(A, B, P, R) __extension__ ({ \
  (__mmask16)__builtin_ia32_cmpps512_mask((__v16sf)(__m512)(A), \
                                          (__v16sf)(__m512)(B), \
                                          (P), (__mmask16)-1, (R)); })

#define _mm512_mask_cmp_round_ps_mask(U, A, B, P, R) __extension__ ({ \
  (__mmask16)__builtin_ia32_cmpps512_mask((__v16sf)(__m512)(A), \
                                          (__v16sf)(__m512)(B), \
                                          (P), (__mmask16)(U), (R)); })

#define _mm512_cmp_ps_mask(A, B, P) \
  _mm512_cmp_round_ps_mask((A), (B), (P), _MM_FROUND_CUR_DIRECTION)

#define _mm512_mask_cmp_ps_mask(U, A, B, P) \
  _mm512_mask_cmp_round_ps_mask((U), (A), (B), (P), _MM_FROUND_CUR_DIRECTION)

#define _mm512_cmp_round_pd_mask(A, B, P, R) __extension__ ({ \
  (__mmask8)__builtin_ia32_cmppd512_mask((__v8df)(__m512d)(A), \
                                         (__v8df)(__m512d)(B), \
                                         (P), (__mmask8)-1, (R)); })

#define _mm512_mask_cmp_round_pd_mask(U, A, B, P, R) __extension__ ({ \
  (__mmask8)__builtin_ia32_cmppd512_mask((__v8df)(__m512d)(A), \
                                         (__v8df)(__m512d)(B), \
                                         (P), (__mmask8)(U), (R)); })

#define _mm512_cmp_pd_mask(A, B, P) \
  _mm512_cmp_round_pd_mask((A), (B), (P), _MM_FROUND_CUR_DIRECTION)

#define _mm512_mask_cmp_pd_mask(U, A, B, P) \
  _mm512_mask_cmp_round_pd_mask((U), (A), (B), (P), _MM_FROUND_CUR_DIRECTION)
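
/* Illustrative usage sketch: a 512-bit comparison yields a __mmask16/__mmask8
 * rather than a vector, which can then drive the blend helpers above.
 * _CMP_LT_OS is one of the AVX comparison predicates pulled in through
 * <immintrin.h>.
 *
 *   __mmask16 lt = _mm512_cmp_ps_mask(x, y, _CMP_LT_OS);  // bit i set if x[i] < y[i]
 *   __m512    m  = _mm512_mask_blend_ps(lt, y, x);        // per-lane minimum of x and y
 */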

/* Conversion */

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_cvttps_epu32(__m512 __A)
{
  return (__m512i) __builtin_ia32_cvttps2udq512_mask ((__v16sf) __A,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) -1,
              _MM_FROUND_CUR_DIRECTION);
}

#define _mm512_cvt_roundepi32_ps(A, R) __extension__ ({ \
  (__m512)__builtin_ia32_cvtdq2ps512_mask((__v16si)(A), \
                                          (__v16sf)_mm512_setzero_ps(), \
                                          (__mmask16)-1, (R)); })

#define _mm512_cvt_roundepu32_ps(A, R) __extension__ ({ \
  (__m512)__builtin_ia32_cvtudq2ps512_mask((__v16si)(A), \
                                           (__v16sf)_mm512_setzero_ps(), \
                                           (__mmask16)-1, (R)); })

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_cvtepi32_pd(__m256i __A)
{
  return (__m512d) __builtin_ia32_cvtdq2pd512_mask ((__v8si) __A,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1);
}

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_cvtepu32_pd(__m256i __A)
{
  return (__m512d) __builtin_ia32_cvtudq2pd512_mask ((__v8si) __A,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1);
}

#define _mm512_cvt_roundpd_ps(A, R) __extension__ ({ \
  (__m256)__builtin_ia32_cvtpd2ps512_mask((__v8df)(A), \
                                          (__v8sf)_mm256_setzero_ps(), \
                                          (__mmask8)-1, (R)); })

#define _mm512_cvtps_ph(A, I) __extension__ ({ \
  (__m256i)__builtin_ia32_vcvtps2ph512_mask((__v16sf)(A), (I), \
                                            (__v16hi)_mm256_setzero_si256(), \
                                            -1); })

static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_cvtph_ps(__m256i __A)
{
  return (__m512) __builtin_ia32_vcvtph2ps512_mask ((__v16hi) __A,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) -1,
              _MM_FROUND_CUR_DIRECTION);
}

static __inline __m512i __attribute__((__always_inline__, __nodebug__))
_mm512_cvttps_epi32(__m512 a)
{
  return (__m512i)
    __builtin_ia32_cvttps2dq512_mask((__v16sf) a,
                                     (__v16si) _mm512_setzero_si512 (),
                                     (__mmask16) -1, _MM_FROUND_CUR_DIRECTION);
}

static __inline __m256i __attribute__((__always_inline__, __nodebug__))
_mm512_cvttpd_epi32(__m512d a)
{
  return (__m256i)__builtin_ia32_cvttpd2dq512_mask((__v8df) a,
              (__v8si)_mm256_setzero_si256(), (__mmask8) -1,
              _MM_FROUND_CUR_DIRECTION);
}

#define _mm512_cvtt_roundpd_epi32(A, R) __extension__ ({ \
  (__m256i)__builtin_ia32_cvttpd2dq512_mask((__v8df)(A), \
                                            (__v8si)_mm256_setzero_si256(), \
                                            (__mmask8)-1, (R)); })

#define _mm512_cvtt_roundps_epi32(A, R) __extension__ ({ \
  (__m512i)__builtin_ia32_cvttps2dq512_mask((__v16sf)(A), \
                                            (__v16si)_mm512_setzero_si512(), \
                                            (__mmask16)-1, (R)); })

#define _mm512_cvt_roundps_epi32(A, R) __extension__ ({ \
  (__m512i)__builtin_ia32_cvtps2dq512_mask((__v16sf)(A), \
                                           (__v16si)_mm512_setzero_si512(), \
                                           (__mmask16)-1, (R)); })

#define _mm512_cvt_roundpd_epi32(A, R) __extension__ ({ \
  (__m256i)__builtin_ia32_cvtpd2dq512_mask((__v8df)(A), \
                                           (__v8si)_mm256_setzero_si256(), \
                                           (__mmask8)-1, (R)); })

#define _mm512_cvt_roundps_epu32(A, R) __extension__ ({ \
  (__m512i)__builtin_ia32_cvtps2udq512_mask((__v16sf)(A), \
                                            (__v16si)_mm512_setzero_si512(), \
                                            (__mmask16)-1, (R)); })

#define _mm512_cvt_roundpd_epu32(A, R) __extension__ ({ \
  (__m256i)__builtin_ia32_cvtpd2udq512_mask((__v8df)(A), \
                                            (__v8si)_mm256_setzero_si256(), \
                                            (__mmask8) -1, (R)); })
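
/* Illustrative usage sketch: the cvt_round* macros take one of the
 * _MM_FROUND_* values declared at the top of this file (the fixed modes
 * override MXCSR for that one operation), while the cvtt* forms always
 * truncate toward zero.
 *
 *   __m512i rounded   = _mm512_cvt_roundps_epi32(x, _MM_FROUND_CUR_DIRECTION);
 *   __m512i truncated = _mm512_cvtt_roundps_epi32(x, _MM_FROUND_CUR_DIRECTION);
 */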

/* Unpack and Interleave */
static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_unpackhi_pd(__m512d __a, __m512d __b)
{
  return __builtin_shufflevector(__a, __b, 1, 9, 1+2, 9+2, 1+4, 9+4, 1+6, 9+6);
}

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_unpacklo_pd(__m512d __a, __m512d __b)
{
  return __builtin_shufflevector(__a, __b, 0, 8, 0+2, 8+2, 0+4, 8+4, 0+6, 8+6);
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_unpackhi_ps(__m512 __a, __m512 __b)
{
  return __builtin_shufflevector(__a, __b,
                                 2, 18, 3, 19,
                                 2+4, 18+4, 3+4, 19+4,
                                 2+8, 18+8, 3+8, 19+8,
                                 2+12, 18+12, 3+12, 19+12);
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_unpacklo_ps(__m512 __a, __m512 __b)
{
  return __builtin_shufflevector(__a, __b,
                                 0, 16, 1, 17,
                                 0+4, 16+4, 1+4, 17+4,
                                 0+8, 16+8, 1+8, 17+8,
                                 0+12, 16+12, 1+12, 17+12);
}

/* Bit Test */

static __inline __mmask16 __attribute__ ((__always_inline__, __nodebug__))
_mm512_test_epi32_mask(__m512i __A, __m512i __B)
{
  return (__mmask16) __builtin_ia32_ptestmd512 ((__v16si) __A, (__v16si) __B,
              (__mmask16) -1);
}

static __inline __mmask8 __attribute__ ((__always_inline__, __nodebug__))
_mm512_test_epi64_mask(__m512i __A, __m512i __B)
{
  return (__mmask8) __builtin_ia32_ptestmq512 ((__v8di) __A, (__v8di) __B,
              (__mmask8) -1);
}

/* SIMD load ops */

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_loadu_epi32(__mmask16 __U, void const *__P)
{
  return (__m512i) __builtin_ia32_loaddqusi512_mask ((const __v16si *)__P,
              (__v16si) _mm512_setzero_si512 (), (__mmask16) __U);
}

static __inline __m512i __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_loadu_epi64(__mmask8 __U, void const *__P)
{
  return (__m512i) __builtin_ia32_loaddqudi512_mask ((const __v8di *)__P,
              (__v8di) _mm512_setzero_si512 (), (__mmask8) __U);
}

static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_loadu_ps(__mmask16 __U, void const *__P)
{
  return (__m512) __builtin_ia32_loadups512_mask ((const __v16sf *)__P,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) __U);
}

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_loadu_pd(__mmask8 __U, void const *__P)
{
  return (__m512d) __builtin_ia32_loadupd512_mask ((const __v8df *)__P,
              (__v8df) _mm512_setzero_pd (), (__mmask8) __U);
}

static __inline __m512 __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_load_ps(__mmask16 __U, void const *__P)
{
  return (__m512) __builtin_ia32_loadaps512_mask ((const __v16sf *)__P,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) __U);
}

static __inline __m512d __attribute__ ((__always_inline__, __nodebug__))
_mm512_maskz_load_pd(__mmask8 __U, void const *__P)
{
  return (__m512d) __builtin_ia32_loadapd512_mask ((const __v8df *)__P,
              (__v8df) _mm512_setzero_pd (), (__mmask8) __U);
}

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_loadu_pd(double const *__p)
{
  struct __loadu_pd {
    __m512d __v;
  } __attribute__((__packed__, __may_alias__));
  return ((struct __loadu_pd*)__p)->__v;
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_loadu_ps(float const *__p)
{
  struct __loadu_ps {
    __m512 __v;
  } __attribute__((__packed__, __may_alias__));
  return ((struct __loadu_ps*)__p)->__v;
}

static __inline __m512 __attribute__((__always_inline__, __nodebug__))
_mm512_load_ps(float const *__p)
{
  return (__m512) __builtin_ia32_loadaps512_mask ((const __v16sf *)__p,
              (__v16sf) _mm512_setzero_ps (), (__mmask16) -1);
}

static __inline __m512d __attribute__((__always_inline__, __nodebug__))
_mm512_load_pd(double const *__p)
{
  return (__m512d) __builtin_ia32_loadapd512_mask ((const __v8df *)__p,
              (__v8df) _mm512_setzero_pd (), (__mmask8) -1);
}
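
/* Illustrative usage sketch: the load* forms require 64-byte aligned
 * pointers, the loadu* forms accept any alignment, and the maskz_ variants
 * zero the lanes that are not loaded.
 *
 *   float buf[16] __attribute__((aligned(64)));
 *   __m512 v  = _mm512_load_ps(buf);                     // aligned load
 *   __m512 v2 = _mm512_maskz_loadu_ps(0x00FF, buf + 1);  // unaligned, lanes 0-7 only
 */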

/* SIMD store ops */

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_storeu_epi64(void *__P, __mmask8 __U, __m512i __A)
{
  __builtin_ia32_storedqudi512_mask ((__v8di *)__P, (__v8di) __A,
                                     (__mmask8) __U);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_storeu_epi32(void *__P, __mmask16 __U, __m512i __A)
{
  __builtin_ia32_storedqusi512_mask ((__v16si *)__P, (__v16si) __A,
                                     (__mmask16) __U);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_storeu_pd(void *__P, __mmask8 __U, __m512d __A)
{
  __builtin_ia32_storeupd512_mask ((__v8df *)__P, (__v8df) __A, (__mmask8) __U);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_storeu_pd(void *__P, __m512d __A)
{
  __builtin_ia32_storeupd512_mask((__v8df *)__P, (__v8df)__A, (__mmask8)-1);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_storeu_ps(void *__P, __mmask16 __U, __m512 __A)
{
  __builtin_ia32_storeups512_mask ((__v16sf *)__P, (__v16sf) __A,
                                   (__mmask16) __U);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_storeu_ps(void *__P, __m512 __A)
{
  __builtin_ia32_storeups512_mask((__v16sf *)__P, (__v16sf)__A, (__mmask16)-1);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_store_pd(void *__P, __mmask8 __U, __m512d __A)
{
  __builtin_ia32_storeapd512_mask ((__v8df *)__P, (__v8df) __A, (__mmask8) __U);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_store_pd(void *__P, __m512d __A)
{
  *(__m512d*)__P = __A;
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_mask_store_ps(void *__P, __mmask16 __U, __m512 __A)
{
  __builtin_ia32_storeaps512_mask ((__v16sf *)__P, (__v16sf) __A,
                                   (__mmask16) __U);
}

static __inline void __attribute__ ((__always_inline__, __nodebug__))
_mm512_store_ps(void *__P, __m512 __A)
{
  *(__m512*)__P = __A;
}

/* Mask ops */

static __inline __mmask16 __attribute__ ((__always_inline__, __nodebug__))
_mm512_knot(__mmask16 __M)
{
  return __builtin_ia32_knothi(__M);
}

/* Integer compare */

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpeq_epi32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_pcmpeqd512_mask((__v16si)__a, (__v16si)__b,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpeq_epi32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_pcmpeqd512_mask((__v16si)__a, (__v16si)__b,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpeq_epu32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 0,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpeq_epu32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 0,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpeq_epi64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_pcmpeqq512_mask((__v8di)__a, (__v8di)__b,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpeq_epi64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_pcmpeqq512_mask((__v8di)__a, (__v8di)__b,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpeq_epu64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 0,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpeq_epu64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 0,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpge_epi32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 5,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpge_epi32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 5,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpge_epu32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 5,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpge_epu32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 5,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpge_epi64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 5,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpge_epi64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 5,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpge_epu64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 5,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpge_epu64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 5,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpgt_epi32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_pcmpgtd512_mask((__v16si)__a, (__v16si)__b,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpgt_epi32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_pcmpgtd512_mask((__v16si)__a, (__v16si)__b,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpgt_epu32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 6,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpgt_epu32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 6,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpgt_epi64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_pcmpgtq512_mask((__v8di)__a, (__v8di)__b,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpgt_epi64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_pcmpgtq512_mask((__v8di)__a, (__v8di)__b,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpgt_epu64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 6,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpgt_epu64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 6,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmple_epi32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 2,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmple_epi32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 2,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmple_epu32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 2,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmple_epu32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 2,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmple_epi64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 2,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmple_epi64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 2,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmple_epu64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 2,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmple_epu64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 2,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmplt_epi32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 1,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmplt_epi32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 1,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmplt_epu32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 1,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmplt_epu32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 1,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmplt_epi64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 1,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmplt_epi64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 1,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmplt_epu64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 1,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmplt_epu64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 1,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpneq_epi32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 4,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpneq_epi32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, 4,
              __u);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpneq_epu32_mask(__m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 4,
              (__mmask16)-1);
}

static __inline__ __mmask16 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpneq_epu32_mask(__mmask16 __u, __m512i __a, __m512i __b) {
  return (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, 4,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpneq_epi64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 4,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpneq_epi64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, 4,
              __u);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_cmpneq_epu64_mask(__m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 4,
              (__mmask8)-1);
}

static __inline__ __mmask8 __attribute__((__always_inline__, __nodebug__))
_mm512_mask_cmpneq_epu64_mask(__mmask8 __u, __m512i __a, __m512i __b) {
  return (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, 4,
              __u);
}

#define _mm512_cmp_epi32_mask(a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, (p), \
                                         (__mmask16)-1); })

#define _mm512_cmp_epu32_mask(a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, (p), \
                                          (__mmask16)-1); })

#define _mm512_cmp_epi64_mask(a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, (p), \
                                        (__mmask8)-1); })

#define _mm512_cmp_epu64_mask(a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, (p), \
                                         (__mmask8)-1); })

#define _mm512_mask_cmp_epi32_mask(m, a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask16)__builtin_ia32_cmpd512_mask((__v16si)__a, (__v16si)__b, (p), \
                                         (__mmask16)(m)); })

#define _mm512_mask_cmp_epu32_mask(m, a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask16)__builtin_ia32_ucmpd512_mask((__v16si)__a, (__v16si)__b, (p), \
                                          (__mmask16)(m)); })

#define _mm512_mask_cmp_epi64_mask(m, a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask8)__builtin_ia32_cmpq512_mask((__v8di)__a, (__v8di)__b, (p), \
                                        (__mmask8)(m)); })

#define _mm512_mask_cmp_epu64_mask(m, a, b, p) __extension__ ({ \
  __m512i __a = (a); \
  __m512i __b = (b); \
  (__mmask8)__builtin_ia32_ucmpq512_mask((__v8di)__a, (__v8di)__b, (p), \
                                         (__mmask8)(m)); })
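
/* Illustrative usage sketch: the named cmpeq/cmplt/... helpers above wrap
 * these generic macros; the predicate values used throughout this file are
 * 0 = eq, 1 = lt, 2 = le, 4 = neq, 5 = not-lt (ge), 6 = not-le (gt).
 *
 *   __mmask16 lt = _mm512_cmp_epi32_mask(a, b, 1);  // same as _mm512_cmplt_epi32_mask(a, b)
 *   __mmask16 ge = _mm512_knot(lt);                 // complement mask
 */
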
#endif // __AVX512FINTRIN_H