blob: 5a73ecec1caeba0dbc222720bdf5ce01404e29df [file] [log] [blame]
/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
23
24/* Only include this if we're compiling for the windows platform. */
25#ifndef _MSC_VER
Hans Wennborgf8b91f82016-06-14 20:14:24 +000026#include_next <intrin.h>
Eric Christopherd1428bf2013-08-31 00:22:48 +000027#else
28
29#ifndef __INTRIN_H
30#define __INTRIN_H
31
32/* First include the standard intrinsics. */
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +000033#if defined(__i386__) || defined(__x86_64__)
Eric Christopherd1428bf2013-08-31 00:22:48 +000034#include <x86intrin.h>
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +000035#endif
Eric Christopherd1428bf2013-08-31 00:22:48 +000036
Saleem Abdulrasoolafdef202016-08-06 17:58:24 +000037#if defined(__arm__)
38#include <armintr.h>
39#endif
40
Hans Wennborg1fd6dd32014-01-28 23:01:59 +000041/* For the definition of jmp_buf. */
Nico Webera62cffa2014-07-08 18:34:46 +000042#if __STDC_HOSTED__
Hans Wennborg1fd6dd32014-01-28 23:01:59 +000043#include <setjmp.h>
Nico Webera62cffa2014-07-08 18:34:46 +000044#endif
Hans Wennborg1fd6dd32014-01-28 23:01:59 +000045
Eric Christopher5a9bec12015-06-15 23:20:35 +000046/* Define the default attributes for the functions in this file. */
Michael Kupersteine45af542015-06-30 13:36:19 +000047#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))
Eric Christopher5a9bec12015-06-15 23:20:35 +000048
Eric Christopher0db88a72013-09-18 22:24:01 +000049#ifdef __cplusplus
50extern "C" {
51#endif
52
Saleem Abdulrasoolc4ebb122014-07-08 05:46:04 +000053#if defined(__MMX__)
Eric Christopherd1428bf2013-08-31 00:22:48 +000054/* And the random ones that aren't in those files. */
55__m64 _m_from_float(float);
Eric Christopherd1428bf2013-08-31 00:22:48 +000056float _m_to_float(__m64);
Saleem Abdulrasoolc4ebb122014-07-08 05:46:04 +000057#endif
Eric Christopherd1428bf2013-08-31 00:22:48 +000058
59/* Other assorted instruction intrinsics. */
60void __addfsbyte(unsigned long, unsigned char);
61void __addfsdword(unsigned long, unsigned long);
62void __addfsword(unsigned long, unsigned short);
63void __code_seg(const char *);
Hans Wennborg854f7d32014-01-16 23:39:35 +000064static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +000065void __cpuid(int[4], int);
Hans Wennborg12fb89e2014-01-31 19:44:55 +000066static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +000067void __cpuidex(int[4], int, int);
Eric Christopher0db88a72013-09-18 22:24:01 +000068void __debugbreak(void);
Albert Gutowski7216f172016-10-10 18:09:27 +000069static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +000070__int64 __emul(int, int);
Albert Gutowski7216f172016-10-10 18:09:27 +000071static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +000072unsigned __int64 __emulu(unsigned int, unsigned int);
Eric Christopher0db88a72013-09-18 22:24:01 +000073void __cdecl __fastfail(unsigned int);
Eric Christopherd1428bf2013-08-31 00:22:48 +000074unsigned int __getcallerseflags(void);
Hans Wennborg740a4d62014-01-28 22:55:01 +000075static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +000076void __halt(void);
77unsigned char __inbyte(unsigned short);
78void __inbytestring(unsigned short, unsigned char *, unsigned long);
79void __incfsbyte(unsigned long);
80void __incfsdword(unsigned long);
81void __incfsword(unsigned long);
82unsigned long __indword(unsigned short);
83void __indwordstring(unsigned short, unsigned long *, unsigned long);
84void __int2c(void);
85void __invlpg(void *);
86unsigned short __inword(unsigned short);
87void __inwordstring(unsigned short, unsigned short *, unsigned long);
88void __lidt(void *);
89unsigned __int64 __ll_lshift(unsigned __int64, int);
90__int64 __ll_rshift(__int64, int);
91void __llwpcb(void *);
92unsigned char __lwpins32(unsigned int, unsigned int, unsigned int);
93void __lwpval32(unsigned int, unsigned int, unsigned int);
94unsigned int __lzcnt(unsigned int);
95unsigned short __lzcnt16(unsigned short);
Hans Wennborgd9be72e2014-03-12 22:00:32 +000096static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +000097void __movsb(unsigned char *, unsigned char const *, size_t);
Hans Wennborgd9be72e2014-03-12 22:00:32 +000098static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +000099void __movsd(unsigned long *, unsigned long const *, size_t);
Hans Wennborgd9be72e2014-03-12 22:00:32 +0000100static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000101void __movsw(unsigned short *, unsigned short const *, size_t);
Reid Kleckner5de2bcd2016-09-07 16:55:12 +0000102static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000103void __nop(void);
104void __nvreg_restore_fence(void);
105void __nvreg_save_fence(void);
106void __outbyte(unsigned short, unsigned char);
107void __outbytestring(unsigned short, unsigned char *, unsigned long);
108void __outdword(unsigned short, unsigned long);
109void __outdwordstring(unsigned short, unsigned long *, unsigned long);
110void __outword(unsigned short, unsigned short);
111void __outwordstring(unsigned short, unsigned short *, unsigned long);
Warren Huntd6ffae92013-09-27 23:57:26 +0000112static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000113unsigned int __popcnt(unsigned int);
Warren Huntd6ffae92013-09-27 23:57:26 +0000114static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000115unsigned short __popcnt16(unsigned short);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000116unsigned long __readcr0(void);
117unsigned long __readcr2(void);
Reid Kleckner592dc612014-04-08 00:28:22 +0000118static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000119unsigned long __readcr3(void);
Eric Christopher439137e2014-01-24 12:13:47 +0000120unsigned long __readcr4(void);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000121unsigned long __readcr8(void);
122unsigned int __readdr(unsigned int);
Hans Wennborg74ca0c42014-01-24 00:52:39 +0000123#ifdef __i386__
124static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000125unsigned char __readfsbyte(unsigned long);
Hans Wennborg74ca0c42014-01-24 00:52:39 +0000126static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000127unsigned long __readfsdword(unsigned long);
Hans Wennborg74ca0c42014-01-24 00:52:39 +0000128static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000129unsigned __int64 __readfsqword(unsigned long);
Hans Wennborg74ca0c42014-01-24 00:52:39 +0000130static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000131unsigned short __readfsword(unsigned long);
Hans Wennborg74ca0c42014-01-24 00:52:39 +0000132#endif
Reid Kleckner592dc612014-04-08 00:28:22 +0000133static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000134unsigned __int64 __readmsr(unsigned long);
135unsigned __int64 __readpmc(unsigned long);
136unsigned long __segmentlimit(unsigned long);
137void __sidt(void *);
138void *__slwpcb(void);
Hans Wennborgd9be72e2014-03-12 22:00:32 +0000139static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000140void __stosb(unsigned char *, unsigned char, size_t);
Hans Wennborgd9be72e2014-03-12 22:00:32 +0000141static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000142void __stosd(unsigned long *, unsigned long, size_t);
Hans Wennborgd9be72e2014-03-12 22:00:32 +0000143static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000144void __stosw(unsigned short *, unsigned short, size_t);
145void __svm_clgi(void);
146void __svm_invlpga(void *, int);
147void __svm_skinit(int);
148void __svm_stgi(void);
149void __svm_vmload(size_t);
150void __svm_vmrun(size_t);
151void __svm_vmsave(size_t);
152void __ud2(void);
153unsigned __int64 __ull_rshift(unsigned __int64, int);
154void __vmx_off(void);
155void __vmx_vmptrst(unsigned __int64 *);
156void __wbinvd(void);
157void __writecr0(unsigned int);
Reid Kleckner592dc612014-04-08 00:28:22 +0000158static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000159void __writecr3(unsigned int);
160void __writecr4(unsigned int);
161void __writecr8(unsigned int);
162void __writedr(unsigned int, unsigned int);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000163void __writefsbyte(unsigned long, unsigned char);
164void __writefsdword(unsigned long, unsigned long);
165void __writefsqword(unsigned long, unsigned __int64);
166void __writefsword(unsigned long, unsigned short);
167void __writemsr(unsigned long, unsigned __int64);
Warren Huntd6ffae92013-09-27 23:57:26 +0000168static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000169void *_AddressOfReturnAddress(void);
Warren Huntd6ffae92013-09-27 23:57:26 +0000170static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000171unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000172static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000173unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000174static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000175unsigned char _bittest(long const *, long);
Warren Huntd6ffae92013-09-27 23:57:26 +0000176static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000177unsigned char _bittestandcomplement(long *, long);
Warren Huntd6ffae92013-09-27 23:57:26 +0000178static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000179unsigned char _bittestandreset(long *, long);
Warren Huntd6ffae92013-09-27 23:57:26 +0000180static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000181unsigned char _bittestandset(long *, long);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000182unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
183unsigned long __cdecl _byteswap_ulong(unsigned long);
184unsigned short __cdecl _byteswap_ushort(unsigned short);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000185void __cdecl _disable(void);
186void __cdecl _enable(void);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000187long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
Warren Huntd6ffae92013-09-27 23:57:26 +0000188static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000189long _InterlockedAnd(long volatile *_Value, long _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000190static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000191short _InterlockedAnd16(short volatile *_Value, short _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000192static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000193char _InterlockedAnd8(char volatile *_Value, char _Mask);
194unsigned char _interlockedbittestandreset(long volatile *, long);
Hans Wennborga3169332014-03-13 17:05:09 +0000195static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000196unsigned char _interlockedbittestandset(long volatile *, long);
Warren Huntd6ffae92013-09-27 23:57:26 +0000197static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000198long __cdecl _InterlockedCompareExchange(long volatile *_Destination,
199 long _Exchange, long _Comparand);
200long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
201long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
Warren Huntd6ffae92013-09-27 23:57:26 +0000202static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000203short _InterlockedCompareExchange16(short volatile *_Destination,
204 short _Exchange, short _Comparand);
Warren Huntd6ffae92013-09-27 23:57:26 +0000205static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000206__int64 _InterlockedCompareExchange64(__int64 volatile *_Destination,
207 __int64 _Exchange, __int64 _Comparand);
208__int64 _InterlockedcompareExchange64_HLEAcquire(__int64 volatile *, __int64,
209 __int64);
210__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
211 __int64);
Warren Huntd6ffae92013-09-27 23:57:26 +0000212static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000213char _InterlockedCompareExchange8(char volatile *_Destination, char _Exchange,
214 char _Comparand);
215void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
216 void *);
217void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
218 void *);
Warren Huntd6ffae92013-09-27 23:57:26 +0000219static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000220long __cdecl _InterlockedDecrement(long volatile *_Addend);
Warren Huntd6ffae92013-09-27 23:57:26 +0000221static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000222short _InterlockedDecrement16(short volatile *_Addend);
Saleem Abdulrasool114efe02014-06-18 20:51:10 +0000223long _InterlockedExchange(long volatile *_Target, long _Value);
Warren Huntd6ffae92013-09-27 23:57:26 +0000224static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000225short _InterlockedExchange16(short volatile *_Target, short _Value);
Warren Huntd6ffae92013-09-27 23:57:26 +0000226static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000227char _InterlockedExchange8(char volatile *_Target, char _Value);
Warren Huntd6ffae92013-09-27 23:57:26 +0000228static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000229long __cdecl _InterlockedExchangeAdd(long volatile *_Addend, long _Value);
230long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
231long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
Reid Kleckner924eb2a2014-01-27 18:48:02 +0000232static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000233short _InterlockedExchangeAdd16(short volatile *_Addend, short _Value);
234__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
235__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
Warren Huntd6ffae92013-09-27 23:57:26 +0000236static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000237char _InterlockedExchangeAdd8(char volatile *_Addend, char _Value);
Warren Huntd6ffae92013-09-27 23:57:26 +0000238static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000239long __cdecl _InterlockedIncrement(long volatile *_Addend);
Warren Huntd6ffae92013-09-27 23:57:26 +0000240static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000241short _InterlockedIncrement16(short volatile *_Addend);
Warren Huntd6ffae92013-09-27 23:57:26 +0000242static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000243long _InterlockedOr(long volatile *_Value, long _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000244static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000245short _InterlockedOr16(short volatile *_Value, short _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000246static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000247char _InterlockedOr8(char volatile *_Value, char _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000248static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000249long _InterlockedXor(long volatile *_Value, long _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000250static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000251short _InterlockedXor16(short volatile *_Value, short _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000252static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000253char _InterlockedXor8(char volatile *_Value, char _Mask);
254void __cdecl _invpcid(unsigned int, void *);
Warren Huntd6ffae92013-09-27 23:57:26 +0000255static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000256unsigned long __cdecl _lrotl(unsigned long, int);
Warren Huntd6ffae92013-09-27 23:57:26 +0000257static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000258unsigned long __cdecl _lrotr(unsigned long, int);
Albert Gutowskifcea61c2016-10-10 19:40:51 +0000259static __inline__ void
260__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
261_ReadBarrier(void);
262static __inline__ void
263__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
264_ReadWriteBarrier(void);
Warren Huntd6ffae92013-09-27 23:57:26 +0000265static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000266void *_ReturnAddress(void);
267unsigned int _rorx_u32(unsigned int, const unsigned int);
Warren Huntd6ffae92013-09-27 23:57:26 +0000268static __inline__
Eric Christopherfb4b4332013-08-31 00:27:38 +0000269unsigned int __cdecl _rotl(unsigned int _Value, int _Shift);
Warren Huntd6ffae92013-09-27 23:57:26 +0000270static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000271unsigned short _rotl16(unsigned short _Value, unsigned char _Shift);
Warren Huntd6ffae92013-09-27 23:57:26 +0000272static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000273unsigned __int64 __cdecl _rotl64(unsigned __int64 _Value, int _Shift);
Warren Huntd6ffae92013-09-27 23:57:26 +0000274static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000275unsigned char _rotl8(unsigned char _Value, unsigned char _Shift);
Warren Huntd6ffae92013-09-27 23:57:26 +0000276static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000277unsigned int __cdecl _rotr(unsigned int _Value, int _Shift);
Warren Huntd6ffae92013-09-27 23:57:26 +0000278static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000279unsigned short _rotr16(unsigned short _Value, unsigned char _Shift);
Warren Huntd6ffae92013-09-27 23:57:26 +0000280static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000281unsigned __int64 __cdecl _rotr64(unsigned __int64 _Value, int _Shift);
Warren Huntd6ffae92013-09-27 23:57:26 +0000282static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000283unsigned char _rotr8(unsigned char _Value, unsigned char _Shift);
284int _sarx_i32(int, unsigned int);
Nico Webera62cffa2014-07-08 18:34:46 +0000285#if __STDC_HOSTED__
Hans Wennborg1fd6dd32014-01-28 23:01:59 +0000286int __cdecl _setjmp(jmp_buf);
Nico Webera62cffa2014-07-08 18:34:46 +0000287#endif
Eric Christopherd1428bf2013-08-31 00:22:48 +0000288unsigned int _shlx_u32(unsigned int, unsigned int);
289unsigned int _shrx_u32(unsigned int, unsigned int);
290void _Store_HLERelease(long volatile *, long);
291void _Store64_HLERelease(__int64 volatile *, __int64);
292void _StorePointer_HLERelease(void *volatile *, void *);
Albert Gutowskifcea61c2016-10-10 19:40:51 +0000293static __inline__ void
294__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
295_WriteBarrier(void);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000296unsigned __int32 xbegin(void);
297void _xend(void);
Reid Kleckner66e77172016-08-16 16:04:14 +0000298static __inline__
299#define _XCR_XFEATURE_ENABLED_MASK 0
300unsigned __int64 __cdecl _xgetbv(unsigned int);
301void __cdecl _xsetbv(unsigned int, unsigned __int64);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000302
303/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
Warren Hunt3f987942013-09-30 21:08:05 +0000304#ifdef __x86_64__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000305void __addgsbyte(unsigned long, unsigned char);
306void __addgsdword(unsigned long, unsigned long);
307void __addgsqword(unsigned long, unsigned __int64);
308void __addgsword(unsigned long, unsigned short);
Reid Klecknerf08d6582014-01-27 19:16:35 +0000309static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000310void __faststorefence(void);
311void __incgsbyte(unsigned long);
312void __incgsdword(unsigned long);
313void __incgsqword(unsigned long);
314void __incgsword(unsigned long);
Eric Christopher439137e2014-01-24 12:13:47 +0000315unsigned char __lwpins64(unsigned __int64, unsigned int, unsigned int);
316void __lwpval64(unsigned __int64, unsigned int, unsigned int);
317unsigned __int64 __lzcnt64(unsigned __int64);
Hans Wennborgd9be72e2014-03-12 22:00:32 +0000318static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000319void __movsq(unsigned long long *, unsigned long long const *, size_t);
Albert Gutowskif3a0bce2016-10-04 22:29:49 +0000320static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000321unsigned __int64 __popcnt64(unsigned __int64);
Hans Wennborga4421e02014-03-12 21:09:05 +0000322static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000323unsigned char __readgsbyte(unsigned long);
Hans Wennborga4421e02014-03-12 21:09:05 +0000324static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000325unsigned long __readgsdword(unsigned long);
Hans Wennborga4421e02014-03-12 21:09:05 +0000326static __inline__
327unsigned __int64 __readgsqword(unsigned long);
Eric Christopher439137e2014-01-24 12:13:47 +0000328unsigned short __readgsword(unsigned long);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000329unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
330 unsigned __int64 _HighPart,
331 unsigned char _Shift);
332unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
333 unsigned __int64 _HighPart,
334 unsigned char _Shift);
Hans Wennborgd9be72e2014-03-12 22:00:32 +0000335static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000336void __stosq(unsigned __int64 *, unsigned __int64, size_t);
Eric Christopher439137e2014-01-24 12:13:47 +0000337unsigned char __vmx_on(unsigned __int64 *);
338unsigned char __vmx_vmclear(unsigned __int64 *);
339unsigned char __vmx_vmlaunch(void);
340unsigned char __vmx_vmptrld(unsigned __int64 *);
341unsigned char __vmx_vmread(size_t, size_t *);
342unsigned char __vmx_vmresume(void);
343unsigned char __vmx_vmwrite(size_t, size_t);
344void __writegsbyte(unsigned long, unsigned char);
345void __writegsdword(unsigned long, unsigned long);
346void __writegsqword(unsigned long, unsigned __int64);
347void __writegsword(unsigned long, unsigned short);
Warren Huntd6ffae92013-09-27 23:57:26 +0000348static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000349unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000350static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000351unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
Warren Huntd6ffae92013-09-27 23:57:26 +0000352static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000353unsigned char _bittest64(__int64 const *, __int64);
Warren Huntd6ffae92013-09-27 23:57:26 +0000354static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000355unsigned char _bittestandcomplement64(__int64 *, __int64);
Warren Huntd6ffae92013-09-27 23:57:26 +0000356static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000357unsigned char _bittestandreset64(__int64 *, __int64);
Warren Huntd6ffae92013-09-27 23:57:26 +0000358static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000359unsigned char _bittestandset64(__int64 *, __int64);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000360unsigned __int64 __cdecl _byteswap_uint64(unsigned __int64);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000361long _InterlockedAnd_np(long volatile *_Value, long _Mask);
362short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
363__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
364char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
365unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
Hans Wennborga3169332014-03-13 17:05:09 +0000366static __inline__
Eric Christopherd1428bf2013-08-31 00:22:48 +0000367unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
368long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
369 long _Comparand);
370unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
371 __int64 _ExchangeHigh,
372 __int64 _ExchangeLow,
373 __int64 *_CompareandResult);
374unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
375 __int64 _ExchangeHigh,
376 __int64 _ExchangeLow,
377 __int64 *_ComparandResult);
378short _InterlockedCompareExchange16_np(short volatile *_Destination,
379 short _Exchange, short _Comparand);
Eric Christopher439137e2014-01-24 12:13:47 +0000380__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
381 __int64);
382__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
383 __int64);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000384__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
385 __int64 _Exchange, __int64 _Comparand);
Eric Christopher58b40432014-01-25 01:38:30 +0000386void *_InterlockedCompareExchangePointer(void *volatile *_Destination,
387 void *_Exchange, void *_Comparand);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000388void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
389 void *_Exchange, void *_Comparand);
Reid Kleckner924eb2a2014-01-27 18:48:02 +0000390static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000391__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
Reid Kleckner924eb2a2014-01-27 18:48:02 +0000392static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000393__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
Reid Kleckner924eb2a2014-01-27 18:48:02 +0000394static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000395__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
396void *_InterlockedExchangePointer(void *volatile *_Target, void *_Value);
Reid Kleckner924eb2a2014-01-27 18:48:02 +0000397static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000398__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000399long _InterlockedOr_np(long volatile *_Value, long _Mask);
400short _InterlockedOr16_np(short volatile *_Value, short _Mask);
Reid Kleckner924eb2a2014-01-27 18:48:02 +0000401static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000402__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
Warren Hunt41a993f2013-09-28 00:15:41 +0000403__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000404char _InterlockedOr8_np(char volatile *_Value, char _Mask);
405long _InterlockedXor_np(long volatile *_Value, long _Mask);
406short _InterlockedXor16_np(short volatile *_Value, short _Mask);
Reid Kleckner924eb2a2014-01-27 18:48:02 +0000407static __inline__
Eric Christopher439137e2014-01-24 12:13:47 +0000408__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000409__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
410char _InterlockedXor8_np(char volatile *_Value, char _Mask);
Eric Christopherd1428bf2013-08-31 00:22:48 +0000411unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
Eric Christopher439137e2014-01-24 12:13:47 +0000412__int64 _sarx_i64(__int64, unsigned int);
Nico Webera62cffa2014-07-08 18:34:46 +0000413#if __STDC_HOSTED__
414int __cdecl _setjmpex(jmp_buf);
415#endif
Eric Christopher439137e2014-01-24 12:13:47 +0000416unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
David Majnemer54507632014-12-02 23:30:26 +0000417unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
Albert Gutowski7216f172016-10-10 18:09:27 +0000418static __inline__
419__int64 __mulh(__int64, __int64);
Albert Gutowskif3a0bce2016-10-04 22:29:49 +0000420static __inline__
421unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
Albert Gutowski7216f172016-10-10 18:09:27 +0000422static __inline__
423__int64 _mul128(__int64, __int64, __int64*);
424static __inline__
425unsigned __int64 _umul128(unsigned __int64,
426 unsigned __int64,
427 unsigned __int64*);
Eric Christopher0db88a72013-09-18 22:24:01 +0000428
Warren Hunt3f987942013-09-30 21:08:05 +0000429#endif /* __x86_64__ */
Reid Klecknerf0e23222013-09-19 00:19:53 +0000430
/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
Michael Kupersteine45af542015-06-30 13:36:19 +0000434static __inline__ unsigned char __DEFAULT_FN_ATTRS
Warren Huntd6ffae92013-09-27 23:57:26 +0000435_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
436 if (!_Mask)
437 return 0;
438 *_Index = __builtin_ctzl(_Mask);
439 return 1;
440}
Michael Kupersteine45af542015-06-30 13:36:19 +0000441static __inline__ unsigned char __DEFAULT_FN_ATTRS
Warren Huntd6ffae92013-09-27 23:57:26 +0000442_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
443 if (!_Mask)
444 return 0;
445 *_Index = 31 - __builtin_clzl(_Mask);
446 return 1;
447}
Michael Kupersteine45af542015-06-30 13:36:19 +0000448static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000449_bittest(long const *_BitBase, long _BitPos) {
450 return (*_BitBase >> _BitPos) & 1;
Warren Huntd6ffae92013-09-27 23:57:26 +0000451}
Michael Kupersteine45af542015-06-30 13:36:19 +0000452static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000453_bittestandcomplement(long *_BitBase, long _BitPos) {
454 unsigned char _Res = (*_BitBase >> _BitPos) & 1;
455 *_BitBase = *_BitBase ^ (1 << _BitPos);
456 return _Res;
Warren Huntd6ffae92013-09-27 23:57:26 +0000457}
Michael Kupersteine45af542015-06-30 13:36:19 +0000458static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000459_bittestandreset(long *_BitBase, long _BitPos) {
460 unsigned char _Res = (*_BitBase >> _BitPos) & 1;
461 *_BitBase = *_BitBase & ~(1 << _BitPos);
462 return _Res;
Warren Huntd6ffae92013-09-27 23:57:26 +0000463}
Michael Kupersteine45af542015-06-30 13:36:19 +0000464static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000465_bittestandset(long *_BitBase, long _BitPos) {
466 unsigned char _Res = (*_BitBase >> _BitPos) & 1;
467 *_BitBase = *_BitBase | (1 << _BitPos);
468 return _Res;
Warren Huntd6ffae92013-09-27 23:57:26 +0000469}
Michael Kupersteine45af542015-06-30 13:36:19 +0000470static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000471_interlockedbittestandset(long volatile *_BitBase, long _BitPos) {
David Majnemere0b863f2015-07-13 23:39:37 +0000472 long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_SEQ_CST);
473 return (_PrevVal >> _BitPos) & 1;
Hans Wennborga3169332014-03-13 17:05:09 +0000474}
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only acquire (_acq), no-fence (_nf) and release (_rel) variants of
 * _interlockedbittestandset; each atomically sets bit _BitPos and returns
 * its previous value.  1UL avoids the undefined shift "1l << 31" on targets
 * where long is 32 bits (C11 6.5.7p4). */
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1UL << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1UL << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1UL << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
Warren Hunt3f987942013-09-30 21:08:05 +0000492#ifdef __x86_64__
Michael Kupersteine45af542015-06-30 13:36:19 +0000493static __inline__ unsigned char __DEFAULT_FN_ATTRS
Warren Huntd6ffae92013-09-27 23:57:26 +0000494_BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask) {
495 if (!_Mask)
496 return 0;
497 *_Index = __builtin_ctzll(_Mask);
498 return 1;
499}
Michael Kupersteine45af542015-06-30 13:36:19 +0000500static __inline__ unsigned char __DEFAULT_FN_ATTRS
Warren Huntd6ffae92013-09-27 23:57:26 +0000501_BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask) {
502 if (!_Mask)
503 return 0;
504 *_Index = 63 - __builtin_clzll(_Mask);
505 return 1;
506}
Michael Kupersteine45af542015-06-30 13:36:19 +0000507static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000508_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
509 return (*_BitBase >> _BitPos) & 1;
Warren Huntd6ffae92013-09-27 23:57:26 +0000510}
Michael Kupersteine45af542015-06-30 13:36:19 +0000511static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000512_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
513 unsigned char _Res = (*_BitBase >> _BitPos) & 1;
514 *_BitBase = *_BitBase ^ (1ll << _BitPos);
515 return _Res;
Warren Huntd6ffae92013-09-27 23:57:26 +0000516}
Michael Kupersteine45af542015-06-30 13:36:19 +0000517static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000518_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
519 unsigned char _Res = (*_BitBase >> _BitPos) & 1;
520 *_BitBase = *_BitBase & ~(1ll << _BitPos);
521 return _Res;
Warren Huntd6ffae92013-09-27 23:57:26 +0000522}
Michael Kupersteine45af542015-06-30 13:36:19 +0000523static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000524_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
525 unsigned char _Res = (*_BitBase >> _BitPos) & 1;
526 *_BitBase = *_BitBase | (1ll << _BitPos);
527 return _Res;
Warren Huntd6ffae92013-09-27 23:57:26 +0000528}
Michael Kupersteine45af542015-06-30 13:36:19 +0000529static __inline__ unsigned char __DEFAULT_FN_ATTRS
David Majnemer56e46672015-07-13 23:38:56 +0000530_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
David Majnemere0b863f2015-07-13 23:39:37 +0000531 long long _PrevVal =
532 __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
533 return (_PrevVal >> _BitPos) & 1;
Hans Wennborga3169332014-03-13 17:05:09 +0000534}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000535#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedExchangeAdd: atomically add
   _Value to *_Addend and return the addend's previous value.  The _acq,
   _nf ("no fence"), and _rel suffixes map to acquire, relaxed, and
   release ordering respectively.  (The 8-bit _rel variant previously
   used relaxed ordering by mistake; it must be release.) */
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
577#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000578static __inline__ __int64 __DEFAULT_FN_ATTRS
579_InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000580 return __atomic_fetch_add(_Addend, _Value, __ATOMIC_SEQ_CST);
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000581}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000582#endif
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants: 64-bit atomic add returning the previous
   value, with acquire / relaxed / release ordering. */
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  __int64 _Old = __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  __int64 _Old = __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  __int64 _Old = __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
  return _Old;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Sub
\*----------------------------------------------------------------------------*/
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000600#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000601static __inline__ __int64 __DEFAULT_FN_ATTRS
602_InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000603 return __atomic_fetch_sub(_Subend, _Value, __ATOMIC_SEQ_CST);
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000604}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000605#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedIncrement: atomically add 1
   and return the *incremented* value (add_fetch, not fetch_add). */
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  short _New = __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
  return _New;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  short _New = __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
  return _New;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  short _New = __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
  return _New;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  long _New = __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
  return _New;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  long _New = __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
  return _New;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  long _New = __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
  return _New;
}
#endif
635#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000636static __inline__ __int64 __DEFAULT_FN_ATTRS
637_InterlockedIncrement64(__int64 volatile *_Value) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000638 return __atomic_add_fetch(_Value, 1, __ATOMIC_SEQ_CST);
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000639}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000640#endif
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants: 64-bit atomic increment returning the new
   value, with acquire / relaxed / release ordering. */
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  __int64 _New = __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
  return _New;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  __int64 _New = __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
  return _New;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  __int64 _New = __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
  return _New;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedDecrement: atomically
   subtract 1 and return the *decremented* value. */
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  short _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
  return _New;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  short _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
  return _New;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  short _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
  return _New;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  long _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
  return _New;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  long _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
  return _New;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  long _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
  return _New;
}
#endif
684#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000685static __inline__ __int64 __DEFAULT_FN_ATTRS
686_InterlockedDecrement64(__int64 volatile *_Value) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000687 return __atomic_sub_fetch(_Value, 1, __ATOMIC_SEQ_CST);
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000688}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000689#endif
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants: 64-bit atomic decrement returning the new
   value, with acquire / relaxed / release ordering. */
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  __int64 _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
  return _New;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  __int64 _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
  return _New;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  __int64 _New = __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
  return _New;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedAnd: atomically AND _Mask
   into *_Value and return the value held before the operation. */
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
#endif
745#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000746static __inline__ __int64 __DEFAULT_FN_ATTRS
747_InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000748 return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST);
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000749}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000750#endif
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants: 64-bit atomic AND returning the previous
   value, with acquire / relaxed / release ordering. */
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedOr: atomically OR _Mask
   into *_Value and return the value held before the operation. */
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
#endif
806#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000807static __inline__ __int64 __DEFAULT_FN_ATTRS
808_InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000809 return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST);
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000810}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000811#endif
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants: 64-bit atomic OR returning the previous
   value, with acquire / relaxed / release ordering. */
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedXor: atomically XOR _Mask
   into *_Value and return the value held before the operation. */
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  char _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  short _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  long _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
#endif
867#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000868static __inline__ __int64 __DEFAULT_FN_ATTRS
869_InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000870 return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST);
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000871}
Saleem Abdulrasooleae64f82016-09-26 22:12:43 +0000872#endif
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants: 64-bit atomic XOR returning the previous
   value, with acquire / relaxed / release ordering. */
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
  return _Old;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  __int64 _Old = __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
  return _Old;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedExchange: atomically store
   _Value into *_Target and return the value previously held there.
   __atomic_exchange_n returns the old value directly. */
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELEASE);
}
#endif
937#if defined(__x86_64__) || defined(__arm__) || defined(__aarch64__)
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000938static __inline__ __int64 __DEFAULT_FN_ATTRS
939_InterlockedExchange64(__int64 volatile *_Target, __int64 _Value) {
Martin Storsjo963f75e2016-09-28 09:34:51 +0000940 __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_SEQ_CST);
941 return _Value;
Albert Gutowski727ab8a2016-09-14 21:19:43 +0000942}
Warren Huntd6ffae92013-09-27 23:57:26 +0000943#endif
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants: 64-bit atomic exchange returning the
   previous value, with acquire / relaxed / release ordering. */
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  return __atomic_exchange_n(_Target, _Value, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
/* ARM-only ordering variants of _InterlockedCompareExchange: atomically
   replace *_Destination with _Exchange iff it currently equals
   _Comparand, and always return the value *_Destination held before the
   operation.  Success orders follow the suffix (_acq = acquire, _nf =
   relaxed, _rel = release).  A C11 compare-exchange failure order may
   not be __ATOMIC_RELEASE, so the _rel variants fail with
   __ATOMIC_RELAXED.  (Also fixes _InterlockedCompareExchange_rel, which
   was mistakenly declared to return short instead of long.) */
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELAXED, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_RELEASE, __ATOMIC_RELAXED);
  return _Comparand;
}
#endif
/*----------------------------------------------------------------------------*\
|* readfs, readgs
|* (Pointers in address space #256 and #257 are relative to the GS and FS
|* segment registers, respectively.)
\*----------------------------------------------------------------------------*/
/* Builds a volatile pointer to __offset within the given clang address
   space (257 = FS segment, 256 = GS segment on x86). */
#define __ptr_to_addr_space(__addr_space_nbr, __type, __offset)                \
  ((volatile __type __attribute__((__address_space__(__addr_space_nbr))) *)(   \
      __offset))
1058
#ifdef __i386__
/* FS-relative loads (32-bit only): clang models the FS segment as
   address space 257, so a dereference through such a pointer reads
   FS:[__offset]. */
static __inline__ unsigned char __DEFAULT_FN_ATTRS
__readfsbyte(unsigned long __offset) {
  return *(volatile unsigned char
               __attribute__((__address_space__(257))) *)__offset;
}
static __inline__ unsigned short __DEFAULT_FN_ATTRS
__readfsword(unsigned long __offset) {
  return *(volatile unsigned short
               __attribute__((__address_space__(257))) *)__offset;
}
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readfsqword(unsigned long __offset) {
  return *(volatile unsigned __int64
               __attribute__((__address_space__(257))) *)__offset;
}
#endif
Hans Wennborga4421e02014-03-12 21:09:05 +00001073#ifdef __x86_64__
Michael Kupersteine45af542015-06-30 13:36:19 +00001074static __inline__ unsigned char __DEFAULT_FN_ATTRS
Hans Wennborga4421e02014-03-12 21:09:05 +00001075__readgsbyte(unsigned long __offset) {
1076 return *__ptr_to_addr_space(256, unsigned char, __offset);
1077}
David Majnemerb2c57202016-05-27 02:06:14 +00001078static __inline__ unsigned short __DEFAULT_FN_ATTRS
1079__readgsword(unsigned long __offset) {
1080 return *__ptr_to_addr_space(256, unsigned short, __offset);
1081}
Michael Kupersteine45af542015-06-30 13:36:19 +00001082static __inline__ unsigned long __DEFAULT_FN_ATTRS
Hans Wennborga4421e02014-03-12 21:09:05 +00001083__readgsdword(unsigned long __offset) {
1084 return *__ptr_to_addr_space(256, unsigned long, __offset);
1085}
Michael Kupersteine45af542015-06-30 13:36:19 +00001086static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
Hans Wennborga4421e02014-03-12 21:09:05 +00001087__readgsqword(unsigned long __offset) {
1088 return *__ptr_to_addr_space(256, unsigned __int64, __offset);
1089}
Hans Wennborga4421e02014-03-12 21:09:05 +00001090#endif
1091#undef __ptr_to_addr_space
/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +00001095#if defined(__i386__) || defined(__x86_64__)
Michael Kupersteine45af542015-06-30 13:36:19 +00001096static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001097__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
1098 __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n)
1099 : "%edi", "%esi", "%ecx");
1100}
Michael Kupersteine45af542015-06-30 13:36:19 +00001101static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001102__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
1103 __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n)
1104 : "%edi", "%esi", "%ecx");
1105}
Michael Kupersteine45af542015-06-30 13:36:19 +00001106static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001107__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
Nico Weber1f22a342015-09-22 00:46:21 +00001108 __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n)
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001109 : "%edi", "%esi", "%ecx");
1110}
Michael Kupersteine45af542015-06-30 13:36:19 +00001111static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001112__stosb(unsigned char *__dst, unsigned char __x, size_t __n) {
1113 __asm__("rep stosb" : : "D"(__dst), "a"(__x), "c"(__n)
1114 : "%edi", "%ecx");
1115}
Michael Kupersteine45af542015-06-30 13:36:19 +00001116static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001117__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
1118 __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n)
1119 : "%edi", "%ecx");
1120}
Michael Kupersteine45af542015-06-30 13:36:19 +00001121static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001122__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
Nico Weber1f22a342015-09-22 00:46:21 +00001123 __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n)
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001124 : "%edi", "%ecx");
1125}
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +00001126#endif
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001127#ifdef __x86_64__
Michael Kupersteine45af542015-06-30 13:36:19 +00001128static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001129__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
1130 __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n)
1131 : "%edi", "%esi", "%ecx");
1132}
Michael Kupersteine45af542015-06-30 13:36:19 +00001133static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborgd9be72e2014-03-12 22:00:32 +00001134__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
1135 __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n)
1136 : "%edi", "%ecx");
1137}
1138#endif
1139
1140/*----------------------------------------------------------------------------*\
Warren Huntd6ffae92013-09-27 23:57:26 +00001141|* Misc
1142\*----------------------------------------------------------------------------*/
/* MSVC _AddressOfReturnAddress: returns the address of the stack slot
 * holding this function's return address -- one pointer above the frame
 * base.  NOTE(review): assumes __builtin_frame_address(0) points at the
 * saved frame pointer with the return address immediately above it, i.e.
 * standard x86/x64 frames with frame pointers enabled -- confirm this
 * header is only built in such modes. */
static __inline__ void * __DEFAULT_FN_ATTRS
_AddressOfReturnAddress(void) {
  return (void*)((char*)__builtin_frame_address(0) + sizeof(void*));
}
/* MSVC _ReturnAddress: returns this function's return address, i.e. the
 * instruction address in the caller that execution resumes at. */
static __inline__ void * __DEFAULT_FN_ATTRS
_ReturnAddress(void) {
  return __builtin_return_address(0);
}
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +00001151#if defined(__i386__) || defined(__x86_64__)
Michael Kupersteine45af542015-06-30 13:36:19 +00001152static __inline__ void __DEFAULT_FN_ATTRS
Hans Wennborg854f7d32014-01-16 23:39:35 +00001153__cpuid(int __info[4], int __level) {
Hans Wennborg854f7d32014-01-16 23:39:35 +00001154 __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
Hans Wennborg12fb89e2014-01-31 19:44:55 +00001155 : "a"(__level));
1156}
/* MSVC __cpuidex: executes CPUID with EAX = __level and ECX = __ecx (the
 * sub-leaf selector used by leaves such as 4, 7 and 0xB) and stores the
 * resulting EAX, EBX, ECX, EDX into __info[0..3]. */
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b" (__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
/* MSVC _xgetbv: reads extended control register __xcr_no via XGETBV
 * (selector in ECX, result in EDX:EAX) and reassembles the halves into a
 * 64-bit value.  XCR0 (__xcr_no == 0) reports the OS-enabled processor
 * state components (x87/SSE/AVX...). */
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
/* MSVC __halt: executes HLT, stopping the processor until the next
 * interrupt.  Privileged -- faults outside ring 0.  volatile keeps the
 * output-less asm from being optimized away. */
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
/* MSVC __nop: emits a single NOP instruction.  volatile guarantees the
 * instruction is actually emitted rather than deleted as dead code. */
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile ("nop");
}
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +00001176#endif
Warren Huntd6ffae92013-09-27 23:57:26 +00001177
Reid Kleckner592dc612014-04-08 00:28:22 +00001178/*----------------------------------------------------------------------------*\
1179|* Privileged intrinsics
1180\*----------------------------------------------------------------------------*/
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +00001181#if defined(__i386__) || defined(__x86_64__)
/* MSVC __readmsr: reads the model-specific register selected by
 * __register using RDMSR.  Privileged -- faults unless executed at CPL 0. */
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
1195
/* MSVC __readcr3: returns control register CR3 (page-table base plus flag
 * bits).  Privileged instruction.  The "memory" clobber keeps the read
 * ordered relative to surrounding memory accesses.
 * NOTE(review): unsigned long is 32-bit under the Microsoft ABI even on
 * x64, so a CR3 value above 4GB would be truncated here -- confirm
 * against MSVC's x64 signature. */
static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}
1202
/* MSVC __writecr3: loads __cr3_val into CR3, switching the active page
 * tables.  Privileged instruction; the "memory" clobber orders it against
 * surrounding memory accesses.
 * NOTE(review): the parameter is unsigned int (32-bit) while __readcr3
 * returns unsigned long -- on x64 the upper 32 bits of CR3 cannot be set
 * through this wrapper; verify the intended width. */
static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
Saleem Abdulrasool702eefe2014-06-25 16:48:40 +00001207#endif
Reid Kleckner592dc612014-04-08 00:28:22 +00001208
Eric Christophercc872532013-09-18 22:40:18 +00001209#ifdef __cplusplus
Eric Christopher0db88a72013-09-18 22:24:01 +00001210}
Eric Christopherd1428bf2013-08-31 00:22:48 +00001211#endif
Reid Klecknerf0e23222013-09-19 00:19:53 +00001212
Michael Kupersteine45af542015-06-30 13:36:19 +00001213#undef __DEFAULT_FN_ATTRS
Eric Christopher5a9bec12015-06-15 23:20:35 +00001214
Reid Klecknerf0e23222013-09-19 00:19:53 +00001215#endif /* __INTRIN_H */
1216#endif /* _MSC_VER */