/* ===-------- Intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <Intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#include <x86intrin.h>

#ifdef __cplusplus
extern "C" {
#endif

/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
__m64 _m_from_int(int _l);
void _m_prefetch(void *);
float _m_to_float(__m64);
int _m_to_int(__m64 _M);

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
void __cpuidex(int[4], int, int);
void __debugbreak(void);
__int64 __emul(int, int);
unsigned __int64 __emulu(unsigned int, unsigned int);
void __cdecl __fastfail(unsigned int);
unsigned int __getcallerseflags(void);
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __int2c(void);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
void __llwpcb(void *);
unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
void __lwpval32(unsigned int, unsigned int, unsigned int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
void __movsb(unsigned char *, unsigned char const *, size_t);
void __movsd(unsigned long *, unsigned long const *, size_t);
void __movsw(unsigned short *, unsigned short const *, size_t);
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
static __inline__
unsigned int __popcnt(unsigned int);
static __inline__
unsigned short __popcnt16(unsigned short);
unsigned __int64 __rdtsc(void);
unsigned __int64 __rdtscp(unsigned int *);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
unsigned int __readeflags(void);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned long __readfsdword(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
void *__slwpcb(void);
void __stosb(unsigned char *, unsigned char, size_t);
void __stosd(unsigned long *, unsigned long, size_t);
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
void __ud2(void);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writeeflags(unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
unsigned int _andn_u32(unsigned int, unsigned int);
unsigned int _bextr_u32(unsigned int, unsigned int, unsigned int);
unsigned int _bextri_u32(unsigned int, unsigned int);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
unsigned int _blcfill_u32(unsigned int);
unsigned int _blci_u32(unsigned int);
unsigned int _blcic_u32(unsigned int);
unsigned int _blcmsk_u32(unsigned int);
unsigned int _blcs_u32(unsigned int);
unsigned int _blsfill_u32(unsigned int);
unsigned int _blsi_u32(unsigned int);
unsigned int _blsic_u32(unsigned int);
unsigned int _blsmsk_u32(unsigned int);
unsigned int _blsr_u32(unsigned int);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned long __cdecl _byteswap_ulong(unsigned long);
unsigned short __cdecl _byteswap_ushort(unsigned short);
unsigned _bzhi_u32(unsigned int, unsigned int);
void __cdecl _disable(void);
void __cdecl _enable(void);
void __cdecl _fxrstor(void const *);
void __cdecl _fxsave(void *);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
static __inline__
long _InterlockedAnd(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedAnd16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedAnd8(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
static __inline__
long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
                                         long _Exchange, long _Comparand);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
static __inline__
short _InterlockedCompareExchange16(short volatile *_Destination,
                                    short _Exchange, short _Comparand);
static __inline__
__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
                                      __int64 _Exchange, __int64 _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
static __inline__
char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
                                  char _Comparand);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
static __inline__
long __cdecl _InterlockedDecrement(long volatile *_Addend);
static __inline__
short _InterlockedDecrement16(short volatile *_Addend);
static __inline__
long __cdecl _InterlockedExchange(long volatile *_Target, long _Value);
static __inline__
short _InterlockedExchange16(short volatile *_Target, short _Value);
static __inline__
char _InterlockedExchange8(char volatile *_Target, char _Value);
static __inline__
long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
static __inline__
char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
static __inline__
long __cdecl _InterlockedIncrement(long volatile *_Addend);
static __inline__
short _InterlockedIncrement16(short volatile *_Addend);
static __inline__
long _InterlockedOr(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedOr16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedOr8(char volatile *_Value, char _Mask);
static __inline__
long _InterlockedXor(long volatile *_Value, long _Mask);
static __inline__
short _InterlockedXor16(short volatile *_Value, short _Mask);
static __inline__
char _InterlockedXor8(char volatile *_Value, char _Mask);
void __cdecl _invpcid(unsigned int, void *);
static __inline__
unsigned long __cdecl _lrotl(unsigned long, int);
static __inline__
unsigned long __cdecl _lrotr(unsigned long, int);
static __inline__
unsigned int _lzcnt_u32(unsigned int);
static __inline__
void _ReadBarrier(void);
static __inline__
void _ReadWriteBarrier(void);
static __inline__
void *_ReturnAddress(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int __cdecl _rdrand16_step(unsigned short *);
int __cdecl _rdrand32_step(unsigned int *);
static __inline__
unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
static __inline__
unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
static __inline__
unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
static __inline__
unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
static __inline__
unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
int _sarx_i32(int, unsigned int);

/* FIXME: Need definition for jmp_buf.
   int __cdecl _setjmp(jmp_buf); */

unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
unsigned int _t1mskc_u32(unsigned int);
unsigned int _tzcnt_u32(unsigned int);
unsigned int _tzmsk_u32(unsigned int);
static __inline__
void _WriteBarrier(void);
void _xabort(const unsigned int imm);
unsigned __int32 _xbegin(void);
void _xend(void);
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xrstor(void const *, unsigned __int64);
void __cdecl _xsave(void *, unsigned __int64);
void __cdecl _xsaveopt(void *, unsigned __int64);
void __cdecl _xsetbv(unsigned int, unsigned __int64);
unsigned char _xtest(void);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
void __lwpval64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 __lzcnt64(unsigned __int64);
void __movsq(unsigned long long *, unsigned long long const *, size_t);
__int64 __mulh(__int64, __int64);
unsigned __int64 __popcnt64(unsigned __int64);
unsigned char __readgsbyte(unsigned long);
unsigned long __readgsdword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
unsigned __int64 _andn_u64(unsigned __int64, unsigned __int64);
unsigned __int64 _bextr_u64(unsigned __int64, unsigned int, unsigned int);
unsigned __int64 _bextri_u64(unsigned __int64, unsigned int);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
unsigned __int64 _blcfill_u64(unsigned __int64);
unsigned __int64 _blci_u64(unsigned __int64);
unsigned __int64 _blcic_u64(unsigned __int64);
unsigned __int64 _blcmsk_u64(unsigned __int64);
unsigned __int64 _blcs_u64(unsigned __int64);
unsigned __int64 _blsfill_u64(unsigned __int64);
unsigned __int64 _blsi_u64(unsigned __int64);
unsigned __int64 _blsic_u64(unsigned __int64);
unsigned __int64 _blsmsk_u64(unsigned __int64);
unsigned __int64 _blsr_u64(unsigned __int64);
unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
unsigned __int64 _bzhi_u64(unsigned __int64, unsigned int);
void __cdecl _fxrstor64(void const *);
void __cdecl _fxsave64(void *);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _lzcnt_u64(unsigned __int64);
__int64 _mul128(__int64 _Multiplier, __int64 _Multiplicand,
                __int64 *_HighProduct);
unsigned int __cdecl _readfsbase_u32(void);
unsigned __int64 __cdecl _readfsbase_u64(void);
unsigned int __cdecl _readgsbase_u32(void);
unsigned __int64 __cdecl _readgsbase_u64(void);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
int __cdecl _setjmpex(jmp_buf);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
unsigned __int64 _tzcnt_u64(unsigned __int64);
unsigned __int64 _tzmsk_u64(unsigned __int64);
unsigned __int64 _umul128(unsigned __int64 _Multiplier,
                          unsigned __int64 _Multiplicand,
                          unsigned __int64 *_HighProduct);
void __cdecl _writefsbase_u32(unsigned int);
void __cdecl _writefsbase_u64(unsigned __int64);
void __cdecl _writegsbase_u32(unsigned int);
void __cdecl _writegsbase_u64(unsigned __int64);
void __cdecl _xrstor64(void const *, unsigned __int64);
void __cdecl _xsave64(void *, unsigned __int64);
void __cdecl _xsaveopt64(void *, unsigned __int64);

#endif /* __x86_64__ */

/*----------------------------------------------------------------------------*\
|* Bit Twiddling
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_rotl8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_rotr8(unsigned char _Value, unsigned char _Shift) {
  _Shift &= 0x7;
  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
_rotl16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
_rotr16(unsigned short _Value, unsigned char _Shift) {
  _Shift &= 0xf;
  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_rotl(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_rotr(unsigned int _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
_lrotl(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
_lrotr(unsigned long _Value, int _Shift) {
  _Shift &= 0x1f;
  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
}
static
__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_rotl64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
}
static
__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_rotr64(unsigned __int64 _Value, int _Shift) {
  _Shift &= 0x3f;
  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
}
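/* Worked example (added for clarity, not part of the original header): the
 * rotates feed the bits shifted out back in on the other side, e.g.
 *   _rotl8(0x81, 1) == 0x03 and _rotr16(0x0001, 1) == 0x8000,
 * and a shift amount that masks to 0 returns the value unchanged. */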
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzl(_Mask);
  return 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 31 - __builtin_clzl(_Mask);
  return 1;
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
_lzcnt_u32(unsigned int a) {
  if (!a)
    return 32;
  return __builtin_clzl(a);
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__popcnt16(unsigned short value) {
  return __builtin_popcount((int)value);
}
static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
__popcnt(unsigned int value) {
  return __builtin_popcount(value);
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittest(long const *a, long b) {
  return (*a >> b) & 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandcomplement(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a ^ (1 << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandreset(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a & ~(1 << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandset(long *a, long b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a | (1 << b);
  return x;
}
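/* Illustrative usage sketch (an assumption about typical callers, not part of
 * the original header):
 *   unsigned long _FirstSet;
 *   if (_BitScanForward(&_FirstSet, 0x50ul))
 *     ;  // _FirstSet == 4, the index of the lowest set bit
 * The scan functions return 0 for a zero mask and, in this implementation,
 * leave *_Index unmodified in that case. */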
#ifdef __x86_64__
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = __builtin_ctzll(_Mask);
  return 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
  if (!_Mask)
    return 0;
  *_Index = 63 - __builtin_clzll(_Mask);
  return 1;
}
static
__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
_lzcnt_u64(unsigned __int64 a) {
  if (!a)
    return 64;
  return __builtin_clzll(a);
}
static __inline__
unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__popcnt64(unsigned __int64 value) {
  return __builtin_popcountll(value);
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittest64(__int64 const *a, __int64 b) {
  return (*a >> b) & 1;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandcomplement64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a ^ (1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandreset64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a & ~(1ll << b);
  return x;
}
static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
_bittestandset64(__int64 *a, __int64 b) {
  unsigned char x = (*a >> b) & 1;
  *a = *a | (1ll << b);
  return x;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd8(char volatile *_Addend, char _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd16(short volatile *_Addend, short _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd(long volatile *_Addend, long _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_add_fetch(_Addend, _Value, 0) - _Value;
}
#endif
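/* Illustrative usage sketch (an assumption, not part of the original header):
 * the ExchangeAdd family returns the addend's value from before the addition,
 * so it can hand out unique, increasing tickets:
 *   static long volatile _NextTicket;
 *   long _MyTicket = _InterlockedExchangeAdd(&_NextTicket, 1);
 */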
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Sub
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub8(char volatile *_Subend, char _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub16(short volatile *_Subend, short _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub(long volatile *_Subend, long _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
  return __atomic_sub_fetch(_Subend, _Value, 0) + _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedIncrement16(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedIncrement(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedIncrement64(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedDecrement16(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedDecrement(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedDecrement64(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd8(char volatile *_Value, char _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd16(short volatile *_Value, short _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd(long volatile *_Value, long _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_and_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedOr8(char volatile *_Value, char _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedOr16(short volatile *_Value, short _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedOr(long volatile *_Value, long _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_or_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedXor8(char volatile *_Value, char _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedXor16(short volatile *_Value, short _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedXor(long volatile *_Value, long _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_xor_fetch(_Value, _Mask, 0);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange8(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange16(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, 0);
  return _Value;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
static __inline__ char __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange8(char volatile *_Destination,
                             char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
static __inline__ short __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange16(short volatile *_Destination,
                              short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
static __inline__ long __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange(long volatile *_Destination,
                            long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
#ifdef __x86_64__
static __inline__ __int64 __attribute__((__always_inline__, __nodebug__))
_InterlockedCompareExchange64(__int64 volatile *_Destination,
                              __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0, 0, 0);
  return _Comparand;
}
#endif
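/* Illustrative usage sketch (an assumption, not part of the original header):
 * _InterlockedCompareExchange returns the previous value of *_Destination, so
 * a lock-free read-modify-write retries until no other thread intervened:
 *   static long volatile _Shared;
 *   long _Old, _New;
 *   do {
 *     _Old = _Shared;
 *     _New = _Old * 2;
 *   } while (_InterlockedCompareExchange(&_Shared, _New, _Old) != _Old);
 */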
/*----------------------------------------------------------------------------*\
|* Barriers
\*----------------------------------------------------------------------------*/
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__attribute__((deprecated("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void) {
  __asm__ volatile ("" : : : "memory");
}
/*----------------------------------------------------------------------------*\
|* readfs
|* (Pointers in address space #257 are relative to the FS segment register.)
\*----------------------------------------------------------------------------*/
#ifdef __i386__
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)              \
    ((volatile __type __attribute__((__address_space__(__addr_space_nbr)))*) \
    (__offset))

static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
__readfsbyte(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned char, __offset);
}
static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
__readfsdword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned long, __offset);
}
static __inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
__readfsqword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned __int64, __offset);
}
static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
__readfsword(unsigned long __offset) {
  return *__ptr_to_addr_space(257, unsigned short, __offset);
}
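/* Illustrative usage sketch (an assumption, not part of the original header):
 * on 32-bit Windows FS points at the current thread's TEB, whose linear self
 * pointer sits at offset 0x18, so it can be fetched as:
 *   unsigned long _TebSelf = __readfsdword(0x18);
 */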
#undef __ptr_to_addr_space
#endif
/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
static __inline__ void * __attribute__((__always_inline__, __nodebug__))
_AddressOfReturnAddress(void) {
  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
}
static __inline__ void * __attribute__((__always_inline__, __nodebug__))
_ReturnAddress(void) {
  return __builtin_return_address(0);
}
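/* Note (added for clarity, not part of the original header): with a
 * conventional frame layout, _AddressOfReturnAddress() yields the stack slot
 * holding the value _ReturnAddress() returns, so
 * *(void **)_AddressOfReturnAddress() is expected to equal _ReturnAddress(). */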
static __inline__ void __attribute__((__always_inline__, __nodebug__))
__cpuid(int __info[4], int __level) {
#if __i386__
  __asm__ ("cpuid"
           : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
           : "0"(__level));
#else
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
               : "0"(__level));
#endif
}
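/* Illustrative usage sketch (an assumption, not part of the original header):
 * leaf 0 reports the highest supported leaf in EAX and the vendor string in
 * EBX, EDX, ECX:
 *   int _Info[4];
 *   __cpuid(_Info, 0);  // _Info[1], _Info[3], _Info[2] spell e.g. "GenuineIntel"
 */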
static __inline__ unsigned __int64 __cdecl __attribute__((__always_inline__, __nodebug__))
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
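/* Illustrative usage sketch (an assumption, not part of the original header):
 * XCR0 bits 1 and 2 cover XMM and YMM state, so the usual AVX OS-support
 * check is:
 *   int _AvxStateEnabled = (_xgetbv(0) & 0x6) == 0x6;
 * The caller should first confirm OSXSAVE via __cpuid, since XGETBV faults
 * when the OS has not enabled XSAVE. */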

#ifdef __cplusplus
}
#endif

#endif /* __INTRIN_H */
#endif /* _MSC_VER */