//===-- tsan_interface_atomic.h ---------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Public interface header for TSan atomics.
//===----------------------------------------------------------------------===//
#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H

#ifdef __cplusplus
extern "C" {
#endif

typedef char __tsan_atomic8;
typedef short __tsan_atomic16; // NOLINT
typedef int __tsan_atomic32;
typedef long __tsan_atomic64; // NOLINT
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
__extension__ typedef __int128 __tsan_atomic128;
# define __TSAN_HAS_INT128 1
#else
# define __TSAN_HAS_INT128 0
#endif

// Part of ABI, do not change.
// http://llvm.org/viewvc/llvm-project/libcxx/trunk/include/atomic?view=markup
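// The enumerators below correspond, in the same order, to the C/C++11
// memory_order values (relaxed, consume, acquire, release, acq_rel, seq_cst).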
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst
} __tsan_memory_order;

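// Atomically loads the value of *a with the given memory order
// (the analogue of C11 atomic_load_explicit).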
__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8 *a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16 *a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32 *a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64 *a,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128 *a,
    __tsan_memory_order mo);
#endif

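// Atomically stores v into *a with the given memory order
// (the analogue of C11 atomic_store_explicit).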
void __tsan_atomic8_store(volatile __tsan_atomic8 *a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16 *a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32 *a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64 *a, __tsan_atomic64 v,
    __tsan_memory_order mo);
#if __TSAN_HAS_INT128
void __tsan_atomic128_store(volatile __tsan_atomic128 *a, __tsan_atomic128 v,
    __tsan_memory_order mo);
#endif

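// Atomically replaces *a with v and returns the previous value of *a.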
__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

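// Atomically adds v to *a and returns the value *a held previously.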
__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

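// Atomically subtracts v from *a and returns the value *a held previously.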
__tsan_atomic8 __tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_sub(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_sub(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_sub(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_sub(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

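// Atomically applies bitwise AND of v to *a and returns the previous value.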
__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

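// Atomically applies bitwise OR of v to *a and returns the previous value.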
__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

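// Atomically applies bitwise XOR of v to *a and returns the previous value.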
__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

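// Atomically replaces *a with ~(*a & v) (bitwise NAND, as in the GCC
// __atomic_fetch_nand builtin) and returns the previous value.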
__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16 *a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32 *a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64 *a,
    __tsan_atomic64 v, __tsan_memory_order mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128 *a,
    __tsan_atomic128 v, __tsan_memory_order mo);
#endif

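// Weak compare-and-exchange: if *a equals *c, stores v into *a and returns
// non-zero; otherwise writes the current value of *a back into *c and returns
// zero. Like C11 atomic_compare_exchange_weak_explicit, it may fail spuriously.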
int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

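// Strong compare-and-exchange: same contract as the weak variants above, but
// does not fail spuriously (the analogue of
// C11 atomic_compare_exchange_strong_explicit).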
int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8 *a,
    __tsan_atomic8 *c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16 *a,
    __tsan_atomic16 *c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32 *a,
    __tsan_atomic32 *c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64 *a,
    __tsan_atomic64 *c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128 *a,
    __tsan_atomic128 *c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
#endif

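// Value-returning compare-and-exchange: if *a equals c, stores v into *a;
// in either case returns the value *a held before the operation (in the
// spirit of __sync_val_compare_and_swap).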
__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#if __TSAN_HAS_INT128
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
#endif

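// Memory fences, the analogues of C11 atomic_thread_fence and
// atomic_signal_fence.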
void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);

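// Illustrative usage sketch (not part of the interface): a relaxed increment
// of a 32-bit counter could be expressed through these entry points as shown
// below; the variable names are arbitrary, and in practice these functions
// are normally invoked by compiler-inserted ThreadSanitizer instrumentation
// rather than called directly.
//
//   __tsan_atomic32 counter = 0;
//   __tsan_atomic32 prev = __tsan_atomic32_fetch_add(
//       &counter, 1, __tsan_memory_order_relaxed);  // prev holds the old value
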
#ifdef __cplusplus
} // extern "C"
#endif

#endif // TSAN_INTERFACE_ATOMIC_H