/* ===-------- intrin.h ---------------------------------------------------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */

/* Only include this if we're compiling for the windows platform. */
#ifndef _MSC_VER
#include_next <intrin.h>
#else

#ifndef __INTRIN_H
#define __INTRIN_H

/* First include the standard intrinsics. */
#if defined(__i386__) || defined(__x86_64__)
#include <x86intrin.h>
#endif

#if defined(__arm__)
#include <armintr.h>
#endif

#if defined(_M_ARM64)
#include <arm64intr.h>
#endif

/* For the definition of jmp_buf. */
#if __STDC_HOSTED__
#include <setjmp.h>
#endif

/* Define the default attributes for the functions in this file. */
#define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__))

#ifdef __cplusplus
extern "C" {
#endif

#if defined(__MMX__)
/* And the random ones that aren't in those files. */
__m64 _m_from_float(float);
float _m_to_float(__m64);
#endif

/* Other assorted instruction intrinsics. */
void __addfsbyte(unsigned long, unsigned char);
void __addfsdword(unsigned long, unsigned long);
void __addfsword(unsigned long, unsigned short);
void __code_seg(const char *);
static __inline__
void __cpuid(int[4], int);
static __inline__
void __cpuidex(int[4], int, int);
static __inline__
__int64 __emul(int, int);
static __inline__
unsigned __int64 __emulu(unsigned int, unsigned int);
unsigned int __getcallerseflags(void);
static __inline__
void __halt(void);
unsigned char __inbyte(unsigned short);
void __inbytestring(unsigned short, unsigned char *, unsigned long);
void __incfsbyte(unsigned long);
void __incfsdword(unsigned long);
void __incfsword(unsigned long);
unsigned long __indword(unsigned short);
void __indwordstring(unsigned short, unsigned long *, unsigned long);
void __invlpg(void *);
unsigned short __inword(unsigned short);
void __inwordstring(unsigned short, unsigned short *, unsigned long);
void __lidt(void *);
unsigned __int64 __ll_lshift(unsigned __int64, int);
__int64 __ll_rshift(__int64, int);
unsigned int __lzcnt(unsigned int);
unsigned short __lzcnt16(unsigned short);
static __inline__
void __movsb(unsigned char *, unsigned char const *, size_t);
static __inline__
void __movsd(unsigned long *, unsigned long const *, size_t);
static __inline__
void __movsw(unsigned short *, unsigned short const *, size_t);
static __inline__
void __nop(void);
void __nvreg_restore_fence(void);
void __nvreg_save_fence(void);
void __outbyte(unsigned short, unsigned char);
void __outbytestring(unsigned short, unsigned char *, unsigned long);
void __outdword(unsigned short, unsigned long);
void __outdwordstring(unsigned short, unsigned long *, unsigned long);
void __outword(unsigned short, unsigned short);
void __outwordstring(unsigned short, unsigned short *, unsigned long);
unsigned long __readcr0(void);
unsigned long __readcr2(void);
static __inline__
unsigned long __readcr3(void);
unsigned long __readcr4(void);
unsigned long __readcr8(void);
unsigned int __readdr(unsigned int);
#ifdef __i386__
static __inline__
unsigned char __readfsbyte(unsigned long);
static __inline__
unsigned __int64 __readfsqword(unsigned long);
static __inline__
unsigned short __readfsword(unsigned long);
#endif
static __inline__
unsigned __int64 __readmsr(unsigned long);
unsigned __int64 __readpmc(unsigned long);
unsigned long __segmentlimit(unsigned long);
void __sidt(void *);
static __inline__
void __stosb(unsigned char *, unsigned char, size_t);
static __inline__
void __stosd(unsigned long *, unsigned long, size_t);
static __inline__
void __stosw(unsigned short *, unsigned short, size_t);
void __svm_clgi(void);
void __svm_invlpga(void *, int);
void __svm_skinit(int);
void __svm_stgi(void);
void __svm_vmload(size_t);
void __svm_vmrun(size_t);
void __svm_vmsave(size_t);
unsigned __int64 __ull_rshift(unsigned __int64, int);
void __vmx_off(void);
void __vmx_vmptrst(unsigned __int64 *);
void __wbinvd(void);
void __writecr0(unsigned int);
static __inline__
void __writecr3(unsigned int);
void __writecr4(unsigned int);
void __writecr8(unsigned int);
void __writedr(unsigned int, unsigned int);
void __writefsbyte(unsigned long, unsigned char);
void __writefsdword(unsigned long, unsigned long);
void __writefsqword(unsigned long, unsigned __int64);
void __writefsword(unsigned long, unsigned short);
void __writemsr(unsigned long, unsigned __int64);
static __inline__
void *_AddressOfReturnAddress(void);
static __inline__
unsigned char _BitScanForward(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _BitScanReverse(unsigned long *_Index, unsigned long _Mask);
static __inline__
unsigned char _bittest(long const *, long);
static __inline__
unsigned char _bittestandcomplement(long *, long);
static __inline__
unsigned char _bittestandreset(long *, long);
static __inline__
unsigned char _bittestandset(long *, long);
void __cdecl _disable(void);
void __cdecl _enable(void);
long _InterlockedAddLargeStatistic(__int64 volatile *_Addend, long _Value);
unsigned char _interlockedbittestandreset(long volatile *, long);
unsigned char _interlockedbittestandset(long volatile *, long);
long _InterlockedCompareExchange_HLEAcquire(long volatile *, long, long);
long _InterlockedCompareExchange_HLERelease(long volatile *, long, long);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
void *_InterlockedCompareExchangePointer_HLEAcquire(void *volatile *, void *,
                                                    void *);
void *_InterlockedCompareExchangePointer_HLERelease(void *volatile *, void *,
                                                    void *);
long _InterlockedExchangeAdd_HLEAcquire(long volatile *, long);
long _InterlockedExchangeAdd_HLERelease(long volatile *, long);
__int64 _InterlockedExchangeAdd64_HLEAcquire(__int64 volatile *, __int64);
__int64 _InterlockedExchangeAdd64_HLERelease(__int64 volatile *, __int64);
void __cdecl _invpcid(unsigned int, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadBarrier(void);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_ReadWriteBarrier(void);
unsigned int _rorx_u32(unsigned int, const unsigned int);
int _sarx_i32(int, unsigned int);
#if __STDC_HOSTED__
int __cdecl _setjmp(jmp_buf);
#endif
unsigned int _shlx_u32(unsigned int, unsigned int);
unsigned int _shrx_u32(unsigned int, unsigned int);
void _Store_HLERelease(long volatile *, long);
void _Store64_HLERelease(__int64 volatile *, __int64);
void _StorePointer_HLERelease(void *volatile *, void *);
static __inline__ void
__attribute__((__deprecated__("use other intrinsics or C++11 atomics instead")))
_WriteBarrier(void);
unsigned __int32 _xbegin(void);
void _xend(void);
#define _XCR_XFEATURE_ENABLED_MASK 0
static __inline__
unsigned __int64 __cdecl _xgetbv(unsigned int);
void __cdecl _xsetbv(unsigned int, unsigned __int64);

/* These additional intrinsics are turned on in x64/amd64/x86_64 mode. */
#ifdef __x86_64__
void __addgsbyte(unsigned long, unsigned char);
void __addgsdword(unsigned long, unsigned long);
void __addgsqword(unsigned long, unsigned __int64);
void __addgsword(unsigned long, unsigned short);
static __inline__
void __faststorefence(void);
void __incgsbyte(unsigned long);
void __incgsdword(unsigned long);
void __incgsqword(unsigned long);
void __incgsword(unsigned long);
unsigned __int64 __lzcnt64(unsigned __int64);
static __inline__
void __movsq(unsigned long long *, unsigned long long const *, size_t);
static __inline__
unsigned char __readgsbyte(unsigned long);
static __inline__
unsigned long __readgsdword(unsigned long);
static __inline__
unsigned __int64 __readgsqword(unsigned long);
unsigned short __readgsword(unsigned long);
unsigned __int64 __shiftleft128(unsigned __int64 _LowPart,
                                unsigned __int64 _HighPart,
                                unsigned char _Shift);
unsigned __int64 __shiftright128(unsigned __int64 _LowPart,
                                 unsigned __int64 _HighPart,
                                 unsigned char _Shift);
static __inline__
void __stosq(unsigned __int64 *, unsigned __int64, size_t);
unsigned char __vmx_on(unsigned __int64 *);
unsigned char __vmx_vmclear(unsigned __int64 *);
unsigned char __vmx_vmlaunch(void);
unsigned char __vmx_vmptrld(unsigned __int64 *);
unsigned char __vmx_vmread(size_t, size_t *);
unsigned char __vmx_vmresume(void);
unsigned char __vmx_vmwrite(size_t, size_t);
void __writegsbyte(unsigned long, unsigned char);
void __writegsdword(unsigned long, unsigned long);
void __writegsqword(unsigned long, unsigned __int64);
void __writegsword(unsigned long, unsigned short);
static __inline__
unsigned char _BitScanForward64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _BitScanReverse64(unsigned long *_Index, unsigned __int64 _Mask);
static __inline__
unsigned char _bittest64(__int64 const *, __int64);
static __inline__
unsigned char _bittestandcomplement64(__int64 *, __int64);
static __inline__
unsigned char _bittestandreset64(__int64 *, __int64);
static __inline__
unsigned char _bittestandset64(__int64 *, __int64);
long _InterlockedAnd_np(long volatile *_Value, long _Mask);
short _InterlockedAnd16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedAnd64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedAnd8_np(char volatile *_Value, char _Mask);
unsigned char _interlockedbittestandreset64(__int64 volatile *, __int64);
static __inline__
unsigned char _interlockedbittestandset64(__int64 volatile *, __int64);
long _InterlockedCompareExchange_np(long volatile *_Destination, long _Exchange,
                                    long _Comparand);
unsigned char _InterlockedCompareExchange128(__int64 volatile *_Destination,
                                             __int64 _ExchangeHigh,
                                             __int64 _ExchangeLow,
                                             __int64 *_ComparandResult);
unsigned char _InterlockedCompareExchange128_np(__int64 volatile *_Destination,
                                                __int64 _ExchangeHigh,
                                                __int64 _ExchangeLow,
                                                __int64 *_ComparandResult);
short _InterlockedCompareExchange16_np(short volatile *_Destination,
                                       short _Exchange, short _Comparand);
__int64 _InterlockedCompareExchange64_HLEAcquire(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_HLERelease(__int64 volatile *, __int64,
                                                 __int64);
__int64 _InterlockedCompareExchange64_np(__int64 volatile *_Destination,
                                         __int64 _Exchange, __int64 _Comparand);
void *_InterlockedCompareExchangePointer_np(void *volatile *_Destination,
                                            void *_Exchange, void *_Comparand);
long _InterlockedOr_np(long volatile *_Value, long _Mask);
short _InterlockedOr16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedOr64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedOr8_np(char volatile *_Value, char _Mask);
long _InterlockedXor_np(long volatile *_Value, long _Mask);
short _InterlockedXor16_np(short volatile *_Value, short _Mask);
__int64 _InterlockedXor64_np(__int64 volatile *_Value, __int64 _Mask);
char _InterlockedXor8_np(char volatile *_Value, char _Mask);
unsigned __int64 _rorx_u64(unsigned __int64, const unsigned int);
__int64 _sarx_i64(__int64, unsigned int);
unsigned __int64 _shlx_u64(unsigned __int64, unsigned int);
unsigned __int64 _shrx_u64(unsigned __int64, unsigned int);
static __inline__
__int64 __mulh(__int64, __int64);
static __inline__
unsigned __int64 __umulh(unsigned __int64, unsigned __int64);
static __inline__
__int64 _mul128(__int64, __int64, __int64*);
static __inline__
unsigned __int64 _umul128(unsigned __int64,
                          unsigned __int64,
                          unsigned __int64*);

#endif /* __x86_64__ */

#if defined(__x86_64__) || defined(__arm__)

static __inline__
__int64 _InterlockedDecrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedExchange64(__int64 volatile *_Target, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeAdd64(__int64 volatile *_Addend, __int64 _Value);
static __inline__
__int64 _InterlockedExchangeSub64(__int64 volatile *_Subend, __int64 _Value);
static __inline__
__int64 _InterlockedIncrement64(__int64 volatile *_Addend);
static __inline__
__int64 _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask);
static __inline__
__int64 _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask);

#endif

/*----------------------------------------------------------------------------*\
|* Bit Counting and Testing
\*----------------------------------------------------------------------------*/
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest(long const *_BitBase, long _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1 << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset(long *_BitBase, long _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1 << _BitPos);
  return _Res;
}
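/* Illustrative sketch (not part of the original header): the bit-test
 * intrinsics above address a single bit within a 32-bit long.  A hypothetical
 * caller working on a small bitmap might use them like this:
 *
 *   long __bitmap[4] = {0};
 *   _bittestandset(&__bitmap[37 / 32], 37 % 32);     // set bit 37, old bit 0
 *   if (_bittest(&__bitmap[37 / 32], 37 % 32))       // bit 37 is now 1
 *     _bittestandreset(&__bitmap[37 / 32], 37 % 32); // clear it again
 */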
#if defined(__arm__) || defined(__aarch64__)
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_acq(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_ACQUIRE);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_nf(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELAXED);
  return (_PrevVal >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset_rel(long volatile *_BitBase, long _BitPos) {
  long _PrevVal = __atomic_fetch_or(_BitBase, 1l << _BitPos, __ATOMIC_RELEASE);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
#ifdef __x86_64__
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittest64(__int64 const *_BitBase, __int64 _BitPos) {
  return (*_BitBase >> _BitPos) & 1;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandcomplement64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase ^ (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandreset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase & ~(1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_bittestandset64(__int64 *_BitBase, __int64 _BitPos) {
  unsigned char _Res = (*_BitBase >> _BitPos) & 1;
  *_BitBase = *_BitBase | (1ll << _BitPos);
  return _Res;
}
static __inline__ unsigned char __DEFAULT_FN_ATTRS
_interlockedbittestandset64(__int64 volatile *_BitBase, __int64 _BitPos) {
  long long _PrevVal =
      __atomic_fetch_or(_BitBase, 1ll << _BitPos, __ATOMIC_SEQ_CST);
  return (_PrevVal >> _BitPos) & 1;
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange Add
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_acq(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_nf(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd8_rel(char volatile *_Addend, char _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_acq(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_nf(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd16_rel(short volatile *_Addend, short _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_acq(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_nf(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd_rel(long volatile *_Addend, long _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_acq(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_nf(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchangeAdd64_rel(__int64 volatile *_Addend, __int64 _Value) {
  return __atomic_fetch_add(_Addend, _Value, __ATOMIC_RELEASE);
}
#endif
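/* Illustrative sketch (not part of the original header): the ExchangeAdd
 * variants return the value held by *_Addend before the addition.  A
 * hypothetical event counter that needs no ordering guarantees could use the
 * relaxed ("_nf", no-fence) form:
 *
 *   static volatile long __hits;                        // hypothetical counter
 *   long __prev = _InterlockedExchangeAdd_nf(&__hits, 1);
 *   // __prev holds the pre-increment value; __hits is now __prev + 1.
 */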
/*----------------------------------------------------------------------------*\
|* Interlocked Increment
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_acq(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_nf(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedIncrement16_rel(short volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_acq(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_nf(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedIncrement_rel(long volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_acq(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_nf(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedIncrement64_rel(__int64 volatile *_Value) {
  return __atomic_add_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Decrement
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_acq(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_nf(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedDecrement16_rel(short volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_acq(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_nf(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedDecrement_rel(long volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_acq(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_nf(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedDecrement64_rel(__int64 volatile *_Value) {
  return __atomic_sub_fetch(_Value, 1, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked And
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedAnd8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedAnd16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedAnd_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedAnd64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_and(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Or
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedOr8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedOr16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedOr_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedOr64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_or(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Xor
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_acq(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_nf(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedXor8_rel(char volatile *_Value, char _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_acq(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_nf(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedXor16_rel(short volatile *_Value, short _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_acq(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_nf(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedXor_rel(long volatile *_Value, long _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_acq(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_ACQUIRE);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_nf(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELAXED);
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedXor64_rel(__int64 volatile *_Value, __int64 _Mask) {
  return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_RELEASE);
}
#endif
/*----------------------------------------------------------------------------*\
|* Interlocked Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_acq(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_nf(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedExchange8_rel(char volatile *_Target, char _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_acq(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_nf(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedExchange16_rel(short volatile *_Target, short _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_acq(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_nf(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedExchange_rel(long volatile *_Target, long _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_acq(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_ACQUIRE);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_nf(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELAXED);
  return _Value;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedExchange64_rel(__int64 volatile *_Target, __int64 _Value) {
  __atomic_exchange(_Target, &_Value, &_Value, __ATOMIC_RELEASE);
  return _Value;
}
#endif
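/* Illustrative sketch (not part of the original header): the acquire/release
 * exchange variants map naturally onto a tiny test-and-set spinlock.  The
 * lock word and loop below are hypothetical:
 *
 *   static volatile long __lock;                        // 0 = free, 1 = held
 *   while (_InterlockedExchange_acq(&__lock, 1) != 0)
 *     ;                                                 // spin until acquired
 *   // ... critical section ...
 *   _InterlockedExchange_rel(&__lock, 0);               // release the lock
 */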
/*----------------------------------------------------------------------------*\
|* Interlocked Compare Exchange
\*----------------------------------------------------------------------------*/
#if defined(__arm__) || defined(__aarch64__)
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_acq(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_nf(char volatile *_Destination,
                                char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ char __DEFAULT_FN_ATTRS
_InterlockedCompareExchange8_rel(char volatile *_Destination,
                                 char _Exchange, char _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_acq(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_nf(short volatile *_Destination,
                                 short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ short __DEFAULT_FN_ATTRS
_InterlockedCompareExchange16_rel(short volatile *_Destination,
                                  short _Exchange, short _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_acq(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_nf(long volatile *_Destination,
                               long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ long __DEFAULT_FN_ATTRS
_InterlockedCompareExchange_rel(long volatile *_Destination,
                                long _Exchange, long _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_acq(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_nf(__int64 volatile *_Destination,
                                 __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
static __inline__ __int64 __DEFAULT_FN_ATTRS
_InterlockedCompareExchange64_rel(__int64 volatile *_Destination,
                                  __int64 _Exchange, __int64 _Comparand) {
  __atomic_compare_exchange(_Destination, &_Comparand, &_Exchange, 0,
                            __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
  return _Comparand;
}
#endif
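/* Illustrative sketch (not part of the original header): the compare-exchange
 * intrinsics return the value observed at *_Destination, so the usual retry
 * loop compares that observation against the comparand it passed in.  A
 * hypothetical lock-free "record the maximum" update could look like:
 *
 *   static volatile long __max_seen;
 *   long __sample = 42;                          // hypothetical new sample
 *   long __old = __max_seen;
 *   while (__old < __sample) {
 *     long __found =
 *         _InterlockedCompareExchange_acq(&__max_seen, __sample, __old);
 *     if (__found == __old)
 *       break;                                   // our value was installed
 *     __old = __found;                           // lost the race; retry
 *   }
 */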

/*----------------------------------------------------------------------------*\
|* movs, stos
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__movsb(unsigned char *__dst, unsigned char const *__src, size_t __n) {
  __asm__("rep movsb" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsd(unsigned long *__dst, unsigned long const *__src, size_t __n) {
  __asm__("rep movsl" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__movsw(unsigned short *__dst, unsigned short const *__src, size_t __n) {
  __asm__("rep movsw" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosd(unsigned long *__dst, unsigned long __x, size_t __n) {
  __asm__("rep stosl" : : "D"(__dst), "a"(__x), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosw(unsigned short *__dst, unsigned short __x, size_t __n) {
  __asm__("rep stosw" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif
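/* Illustrative sketch (not part of the original header): the rep-string
 * intrinsics take an element count, not a byte count.  Copying and then
 * clearing a hypothetical buffer of 16 dwords would be:
 *
 *   unsigned long __src[16] = {0}, __dst[16];
 *   __movsd(__dst, __src, 16);    // rep movsl: copy 16 dword elements
 *   __stosd(__dst, 0UL, 16);      // rep stosl: fill 16 dwords with zero
 */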
#ifdef __x86_64__
static __inline__ void __DEFAULT_FN_ATTRS
__movsq(unsigned long long *__dst, unsigned long long const *__src, size_t __n) {
  __asm__("rep movsq" : : "D"(__dst), "S"(__src), "c"(__n));
}
static __inline__ void __DEFAULT_FN_ATTRS
__stosq(unsigned __int64 *__dst, unsigned __int64 __x, size_t __n) {
  __asm__("rep stosq" : : "D"(__dst), "a"(__x), "c"(__n));
}
#endif

/*----------------------------------------------------------------------------*\
|* Misc
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ void __DEFAULT_FN_ATTRS
__cpuid(int __info[4], int __level) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level));
}
static __inline__ void __DEFAULT_FN_ATTRS
__cpuidex(int __info[4], int __level, int __ecx) {
  __asm__ ("cpuid" : "=a"(__info[0]), "=b"(__info[1]), "=c"(__info[2]), "=d"(__info[3])
                   : "a"(__level), "c"(__ecx));
}
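/* Illustrative sketch (not part of the original header): CPUID leaf 0 returns
 * the highest standard leaf in EAX and the 12-byte vendor string in EBX, EDX,
 * ECX (in that order).  A hypothetical vendor-string reader:
 *
 *   int __regs[4];
 *   char __vendor[13];
 *   __cpuid(__regs, 0);
 *   __builtin_memcpy(__vendor + 0, &__regs[1], 4);   // EBX
 *   __builtin_memcpy(__vendor + 4, &__regs[3], 4);   // EDX
 *   __builtin_memcpy(__vendor + 8, &__regs[2], 4);   // ECX
 *   __vendor[12] = '\0';
 */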
static __inline__ unsigned __int64 __cdecl __DEFAULT_FN_ATTRS
_xgetbv(unsigned int __xcr_no) {
  unsigned int __eax, __edx;
  __asm__ ("xgetbv" : "=a" (__eax), "=d" (__edx) : "c" (__xcr_no));
  return ((unsigned __int64)__edx << 32) | __eax;
}
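/* Illustrative sketch (not part of the original header): XCR0, selected by
 * _XCR_XFEATURE_ENABLED_MASK, reports which register states the OS saves and
 * restores; bit 1 covers SSE (XMM) state and bit 2 covers AVX (YMM) state.  A
 * hypothetical check that the OS has enabled AVX state (assuming CPUID has
 * already reported OSXSAVE and AVX support):
 *
 *   unsigned __int64 __xcr0 = _xgetbv(_XCR_XFEATURE_ENABLED_MASK);
 *   int __avx_usable = (__xcr0 & 0x6) == 0x6;        // XMM and YMM enabled
 */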
static __inline__ void __DEFAULT_FN_ATTRS
__halt(void) {
  __asm__ volatile ("hlt");
}
static __inline__ void __DEFAULT_FN_ATTRS
__nop(void) {
  __asm__ volatile ("nop");
}
#endif

/*----------------------------------------------------------------------------*\
|* Privileged intrinsics
\*----------------------------------------------------------------------------*/
#if defined(__i386__) || defined(__x86_64__)
static __inline__ unsigned __int64 __DEFAULT_FN_ATTRS
__readmsr(unsigned long __register) {
  // Loads the contents of a 64-bit model specific register (MSR) specified in
  // the ECX register into registers EDX:EAX. The EDX register is loaded with
  // the high-order 32 bits of the MSR and the EAX register is loaded with the
  // low-order 32 bits. If less than 64 bits are implemented in the MSR being
  // read, the values returned to EDX:EAX in unimplemented bit locations are
  // undefined.
  unsigned long __edx;
  unsigned long __eax;
  __asm__ ("rdmsr" : "=d"(__edx), "=a"(__eax) : "c"(__register));
  return (((unsigned __int64)__edx) << 32) | (unsigned __int64)__eax;
}
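/* Illustrative sketch (not part of the original header): RDMSR is a
 * privileged instruction, so __readmsr is only usable from ring 0 (for
 * example, driver code).  A hypothetical read of IA32_TIME_STAMP_COUNTER
 * (MSR index 0x10) from kernel mode:
 *
 *   unsigned __int64 __tsc = __readmsr(0x10);
 */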

static __inline__ unsigned long __DEFAULT_FN_ATTRS
__readcr3(void) {
  unsigned long __cr3_val;
  __asm__ __volatile__ ("mov %%cr3, %0" : "=q"(__cr3_val) : : "memory");
  return __cr3_val;
}

static __inline__ void __DEFAULT_FN_ATTRS
__writecr3(unsigned int __cr3_val) {
  __asm__ ("mov %0, %%cr3" : : "q"(__cr3_val) : "memory");
}
#endif

#ifdef __cplusplus
}
#endif

#undef __DEFAULT_FN_ATTRS

#endif /* __INTRIN_H */
#endif /* _MSC_VER */