blob: 7a9cb61e893626fcc930e2ef10a8a18136ea7754 [file] [log] [blame]
Jason Evans92d32842011-03-18 18:15:37 -07001/******************************************************************************/
2#ifdef JEMALLOC_H_TYPES
3
4#endif /* JEMALLOC_H_TYPES */
5/******************************************************************************/
6#ifdef JEMALLOC_H_STRUCTS
7
8#endif /* JEMALLOC_H_STRUCTS */
9/******************************************************************************/
10#ifdef JEMALLOC_H_EXTERNS
11
/*
 * Atomic reads are implemented as atomic add of zero: the value of *p is
 * returned unchanged, with the same memory-ordering guarantees as the
 * underlying add operation.
 */
#define atomic_read_uint64(p)	atomic_add_uint64(p, 0)
#define atomic_read_uint32(p)	atomic_add_uint32(p, 0)
#define atomic_read_z(p)	atomic_add_z(p, 0)
#define atomic_read_u(p)	atomic_add_u(p, 0)
Jason Evans92d32842011-03-18 18:15:37 -070016
17#endif /* JEMALLOC_H_EXTERNS */
18/******************************************************************************/
19#ifdef JEMALLOC_H_INLINES
20
#ifndef JEMALLOC_ENABLE_INLINE
/*
 * Atomic add/sub for 64-bit, 32-bit, size_t, and unsigned operands.  Each
 * function atomically updates *p and returns the resulting (new) value.
 */
uint64_t	atomic_add_uint64(uint64_t *p, uint64_t x);
uint64_t	atomic_sub_uint64(uint64_t *p, uint64_t x);
uint32_t	atomic_add_uint32(uint32_t *p, uint32_t x);
uint32_t	atomic_sub_uint32(uint32_t *p, uint32_t x);
size_t	atomic_add_z(size_t *p, size_t x);
size_t	atomic_sub_z(size_t *p, size_t x);
unsigned	atomic_add_u(unsigned *p, unsigned x);
unsigned	atomic_sub_u(unsigned *p, unsigned x);
#endif
31
32#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ATOMIC_C_))
Jason Evans3e292472011-03-24 16:48:11 -070033/******************************************************************************/
Jason Evans92d32842011-03-18 18:15:37 -070034/* 64-bit operations. */
35#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
36JEMALLOC_INLINE uint64_t
37atomic_add_uint64(uint64_t *p, uint64_t x)
38{
39
40 return (__sync_add_and_fetch(p, x));
41}
42
43JEMALLOC_INLINE uint64_t
44atomic_sub_uint64(uint64_t *p, uint64_t x)
45{
46
47 return (__sync_sub_and_fetch(p, x));
48}
Jason Evans763baa62011-03-18 19:10:31 -070049#elif (defined(JEMALLOC_OSATOMIC))
50JEMALLOC_INLINE uint64_t
51atomic_add_uint64(uint64_t *p, uint64_t x)
52{
53
54 return (OSAtomicAdd64((int64_t)x, (int64_t *)p));
55}
56
57JEMALLOC_INLINE uint64_t
58atomic_sub_uint64(uint64_t *p, uint64_t x)
59{
60
61 return (OSAtomicAdd64(-((int64_t)x), (int64_t *)p));
62}
Jason Evansb1726102012-02-28 16:50:47 -080063#elif (defined(__amd64__) || defined(__x86_64__))
Jason Evans3e292472011-03-24 16:48:11 -070064JEMALLOC_INLINE uint64_t
65atomic_add_uint64(uint64_t *p, uint64_t x)
66{
67
68 asm volatile (
69 "lock; xaddq %0, %1;"
70 : "+r" (x), "=m" (*p) /* Outputs. */
71 : "m" (*p) /* Inputs. */
72 );
73
74 return (x);
75}
76
77JEMALLOC_INLINE uint64_t
78atomic_sub_uint64(uint64_t *p, uint64_t x)
79{
80
81 x = (uint64_t)(-(int64_t)x);
82 asm volatile (
83 "lock; xaddq %0, %1;"
84 : "+r" (x), "=m" (*p) /* Outputs. */
85 : "m" (*p) /* Inputs. */
86 );
87
88 return (x);
89}
Jason Evans92d32842011-03-18 18:15:37 -070090#else
Jason Evans47e57f92011-03-22 09:00:56 -070091# if (LG_SIZEOF_PTR == 3)
92# error "Missing implementation for 64-bit atomic operations"
93# endif
Jason Evans92d32842011-03-18 18:15:37 -070094#endif
95
Jason Evans3e292472011-03-24 16:48:11 -070096/******************************************************************************/
Jason Evans92d32842011-03-18 18:15:37 -070097/* 32-bit operations. */
98#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
99JEMALLOC_INLINE uint32_t
100atomic_add_uint32(uint32_t *p, uint32_t x)
101{
102
103 return (__sync_add_and_fetch(p, x));
104}
105
106JEMALLOC_INLINE uint32_t
107atomic_sub_uint32(uint32_t *p, uint32_t x)
108{
109
110 return (__sync_sub_and_fetch(p, x));
111}
Jason Evans763baa62011-03-18 19:10:31 -0700112#elif (defined(JEMALLOC_OSATOMIC))
113JEMALLOC_INLINE uint32_t
114atomic_add_uint32(uint32_t *p, uint32_t x)
115{
116
117 return (OSAtomicAdd32((int32_t)x, (int32_t *)p));
118}
119
120JEMALLOC_INLINE uint32_t
121atomic_sub_uint32(uint32_t *p, uint32_t x)
122{
123
124 return (OSAtomicAdd32(-((int32_t)x), (int32_t *)p));
125}
Jason Evansb1726102012-02-28 16:50:47 -0800126#elif (defined(__i386__) || defined(__amd64__) || defined(__x86_64__))
Jason Evans3e292472011-03-24 16:48:11 -0700127JEMALLOC_INLINE uint32_t
128atomic_add_uint32(uint32_t *p, uint32_t x)
129{
130
131 asm volatile (
132 "lock; xaddl %0, %1;"
133 : "+r" (x), "=m" (*p) /* Outputs. */
134 : "m" (*p) /* Inputs. */
135 );
136
137 return (x);
138}
139
140JEMALLOC_INLINE uint32_t
141atomic_sub_uint32(uint32_t *p, uint32_t x)
142{
143
144 x = (uint32_t)(-(int32_t)x);
145 asm volatile (
146 "lock; xaddl %0, %1;"
147 : "+r" (x), "=m" (*p) /* Outputs. */
148 : "m" (*p) /* Inputs. */
149 );
150
151 return (x);
152}
Jason Evans3492daf2012-03-05 12:16:57 -0800153#elif (defined __SH4__ || defined __mips__) && (__GNUC__ > 4 || \
154 (__GNUC__ == 4 && (__GNUC_MINOR__ > 1 || (__GNUC_MINOR__ == 1 && \
155 __GNUC_PATCHLEVEL__ > 1))))
156JEMALLOC_INLINE uint32_t
157atomic_add_uint32(uint32_t *p, uint32_t x)
158{
159
160 return (__sync_add_and_fetch(p, x));
161}
162
163JEMALLOC_INLINE uint32_t
164atomic_sub_uint32(uint32_t *p, uint32_t x)
165{
166
167 return (__sync_sub_and_fetch(p, x));
168}
Jason Evans92d32842011-03-18 18:15:37 -0700169#else
170# error "Missing implementation for 32-bit atomic operations"
171#endif
Jason Evans06304a92012-03-23 16:09:56 -0700172
173/******************************************************************************/
174/* size_t operations. */
175JEMALLOC_INLINE size_t
176atomic_add_z(size_t *p, size_t x)
177{
178
179#if (LG_SIZEOF_PTR == 3)
180 return ((size_t)atomic_add_uint64((uint64_t *)p, (uint64_t)x));
181#elif (LG_SIZEOF_PTR == 2)
182 return ((size_t)atomic_add_uint32((uint32_t *)p, (uint32_t)x));
183#endif
184}
185
186JEMALLOC_INLINE size_t
187atomic_sub_z(size_t *p, size_t x)
188{
189
190#if (LG_SIZEOF_PTR == 3)
191 return ((size_t)atomic_add_uint64((uint64_t *)p,
192 (uint64_t)-((int64_t)x)));
193#elif (LG_SIZEOF_PTR == 2)
194 return ((size_t)atomic_add_uint32((uint32_t *)p,
195 (uint32_t)-((int32_t)x)));
196#endif
197}
Jason Evans6da54182012-03-23 18:05:51 -0700198
199/******************************************************************************/
200/* unsigned operations. */
201JEMALLOC_INLINE unsigned
202atomic_add_u(unsigned *p, unsigned x)
203{
204
205#if (LG_SIZEOF_INT == 3)
206 return ((unsigned)atomic_add_uint64((uint64_t *)p, (uint64_t)x));
207#elif (LG_SIZEOF_INT == 2)
208 return ((unsigned)atomic_add_uint32((uint32_t *)p, (uint32_t)x));
209#endif
210}
211
212JEMALLOC_INLINE unsigned
213atomic_sub_u(unsigned *p, unsigned x)
214{
215
216#if (LG_SIZEOF_INT == 3)
217 return ((unsigned)atomic_add_uint64((uint64_t *)p,
218 (uint64_t)-((int64_t)x)));
219#elif (LG_SIZEOF_INT == 2)
220 return ((unsigned)atomic_add_uint32((uint32_t *)p,
221 (uint32_t)-((int32_t)x)));
222#endif
223}
224/******************************************************************************/
Jason Evans92d32842011-03-18 18:15:37 -0700225#endif
226
227#endif /* JEMALLOC_H_INLINES */
228/******************************************************************************/