| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 1 | /******************************************************************************/ |
| 2 | #ifdef JEMALLOC_H_TYPES |
| 3 | |
| 4 | #endif /* JEMALLOC_H_TYPES */ |
| 5 | /******************************************************************************/ |
| 6 | #ifdef JEMALLOC_H_STRUCTS |
| 7 | |
| 8 | #endif /* JEMALLOC_H_STRUCTS */ |
| 9 | /******************************************************************************/ |
| 10 | #ifdef JEMALLOC_H_EXTERNS |
| 11 | |
/*
 * Atomic reads are implemented as an atomic add of 0: the add/sub primitives
 * return the value of *p, so adding zero yields the current value with the
 * same atomicity guarantees as the read-modify-write operations.
 */
#define atomic_read_uint64(p)	atomic_add_uint64(p, 0)
#define atomic_read_uint32(p)	atomic_add_uint32(p, 0)
#define atomic_read_z(p)	atomic_add_z(p, 0)
#define atomic_read_u(p)	atomic_add_u(p, 0)
| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 16 | |
| 17 | #endif /* JEMALLOC_H_EXTERNS */ |
| 18 | /******************************************************************************/ |
| 19 | #ifdef JEMALLOC_H_INLINES |
| 20 | |
#ifndef JEMALLOC_ENABLE_INLINE
/*
 * Prototypes used when inlining is disabled (the definitions are then
 * compiled once, guarded by JEMALLOC_ATOMIC_C_ below).
 */
uint64_t	atomic_add_uint64(uint64_t *p, uint64_t x);
uint64_t	atomic_sub_uint64(uint64_t *p, uint64_t x);
uint32_t	atomic_add_uint32(uint32_t *p, uint32_t x);
uint32_t	atomic_sub_uint32(uint32_t *p, uint32_t x);
size_t	atomic_add_z(size_t *p, size_t x);
size_t	atomic_sub_z(size_t *p, size_t x);
unsigned	atomic_add_u(unsigned *p, unsigned x);
unsigned	atomic_sub_u(unsigned *p, unsigned x);
#endif
| 31 | |
| 32 | #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ATOMIC_C_)) |
| Jason Evans | 3e29247 | 2011-03-24 16:48:11 -0700 | [diff] [blame] | 33 | /******************************************************************************/ |
| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 34 | /* 64-bit operations. */ |
| 35 | #ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8 |
| 36 | JEMALLOC_INLINE uint64_t |
| 37 | atomic_add_uint64(uint64_t *p, uint64_t x) |
| 38 | { |
| 39 | |
| 40 | return (__sync_add_and_fetch(p, x)); |
| 41 | } |
| 42 | |
| 43 | JEMALLOC_INLINE uint64_t |
| 44 | atomic_sub_uint64(uint64_t *p, uint64_t x) |
| 45 | { |
| 46 | |
| 47 | return (__sync_sub_and_fetch(p, x)); |
| 48 | } |
| Jason Evans | 763baa6 | 2011-03-18 19:10:31 -0700 | [diff] [blame] | 49 | #elif (defined(JEMALLOC_OSATOMIC)) |
| 50 | JEMALLOC_INLINE uint64_t |
| 51 | atomic_add_uint64(uint64_t *p, uint64_t x) |
| 52 | { |
| 53 | |
| 54 | return (OSAtomicAdd64((int64_t)x, (int64_t *)p)); |
| 55 | } |
| 56 | |
| 57 | JEMALLOC_INLINE uint64_t |
| 58 | atomic_sub_uint64(uint64_t *p, uint64_t x) |
| 59 | { |
| 60 | |
| 61 | return (OSAtomicAdd64(-((int64_t)x), (int64_t *)p)); |
| 62 | } |
| Jason Evans | b172610 | 2012-02-28 16:50:47 -0800 | [diff] [blame] | 63 | #elif (defined(__amd64__) || defined(__x86_64__)) |
| Jason Evans | 3e29247 | 2011-03-24 16:48:11 -0700 | [diff] [blame] | 64 | JEMALLOC_INLINE uint64_t |
| 65 | atomic_add_uint64(uint64_t *p, uint64_t x) |
| 66 | { |
| 67 | |
| 68 | asm volatile ( |
| 69 | "lock; xaddq %0, %1;" |
| 70 | : "+r" (x), "=m" (*p) /* Outputs. */ |
| 71 | : "m" (*p) /* Inputs. */ |
| 72 | ); |
| 73 | |
| 74 | return (x); |
| 75 | } |
| 76 | |
| 77 | JEMALLOC_INLINE uint64_t |
| 78 | atomic_sub_uint64(uint64_t *p, uint64_t x) |
| 79 | { |
| 80 | |
| 81 | x = (uint64_t)(-(int64_t)x); |
| 82 | asm volatile ( |
| 83 | "lock; xaddq %0, %1;" |
| 84 | : "+r" (x), "=m" (*p) /* Outputs. */ |
| 85 | : "m" (*p) /* Inputs. */ |
| 86 | ); |
| 87 | |
| 88 | return (x); |
| 89 | } |
| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 90 | #else |
| Jason Evans | 47e57f9 | 2011-03-22 09:00:56 -0700 | [diff] [blame] | 91 | # if (LG_SIZEOF_PTR == 3) |
| 92 | # error "Missing implementation for 64-bit atomic operations" |
| 93 | # endif |
| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 94 | #endif |
| 95 | |
| Jason Evans | 3e29247 | 2011-03-24 16:48:11 -0700 | [diff] [blame] | 96 | /******************************************************************************/ |
| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 97 | /* 32-bit operations. */ |
| 98 | #ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4 |
| 99 | JEMALLOC_INLINE uint32_t |
| 100 | atomic_add_uint32(uint32_t *p, uint32_t x) |
| 101 | { |
| 102 | |
| 103 | return (__sync_add_and_fetch(p, x)); |
| 104 | } |
| 105 | |
| 106 | JEMALLOC_INLINE uint32_t |
| 107 | atomic_sub_uint32(uint32_t *p, uint32_t x) |
| 108 | { |
| 109 | |
| 110 | return (__sync_sub_and_fetch(p, x)); |
| 111 | } |
| Jason Evans | 763baa6 | 2011-03-18 19:10:31 -0700 | [diff] [blame] | 112 | #elif (defined(JEMALLOC_OSATOMIC)) |
| 113 | JEMALLOC_INLINE uint32_t |
| 114 | atomic_add_uint32(uint32_t *p, uint32_t x) |
| 115 | { |
| 116 | |
| 117 | return (OSAtomicAdd32((int32_t)x, (int32_t *)p)); |
| 118 | } |
| 119 | |
| 120 | JEMALLOC_INLINE uint32_t |
| 121 | atomic_sub_uint32(uint32_t *p, uint32_t x) |
| 122 | { |
| 123 | |
| 124 | return (OSAtomicAdd32(-((int32_t)x), (int32_t *)p)); |
| 125 | } |
| Jason Evans | b172610 | 2012-02-28 16:50:47 -0800 | [diff] [blame] | 126 | #elif (defined(__i386__) || defined(__amd64__) || defined(__x86_64__)) |
| Jason Evans | 3e29247 | 2011-03-24 16:48:11 -0700 | [diff] [blame] | 127 | JEMALLOC_INLINE uint32_t |
| 128 | atomic_add_uint32(uint32_t *p, uint32_t x) |
| 129 | { |
| 130 | |
| 131 | asm volatile ( |
| 132 | "lock; xaddl %0, %1;" |
| 133 | : "+r" (x), "=m" (*p) /* Outputs. */ |
| 134 | : "m" (*p) /* Inputs. */ |
| 135 | ); |
| 136 | |
| 137 | return (x); |
| 138 | } |
| 139 | |
| 140 | JEMALLOC_INLINE uint32_t |
| 141 | atomic_sub_uint32(uint32_t *p, uint32_t x) |
| 142 | { |
| 143 | |
| 144 | x = (uint32_t)(-(int32_t)x); |
| 145 | asm volatile ( |
| 146 | "lock; xaddl %0, %1;" |
| 147 | : "+r" (x), "=m" (*p) /* Outputs. */ |
| 148 | : "m" (*p) /* Inputs. */ |
| 149 | ); |
| 150 | |
| 151 | return (x); |
| 152 | } |
| Jason Evans | 3492daf | 2012-03-05 12:16:57 -0800 | [diff] [blame] | 153 | #elif (defined __SH4__ || defined __mips__) && (__GNUC__ > 4 || \ |
| 154 | (__GNUC__ == 4 && (__GNUC_MINOR__ > 1 || (__GNUC_MINOR__ == 1 && \ |
| 155 | __GNUC_PATCHLEVEL__ > 1)))) |
| 156 | JEMALLOC_INLINE uint32_t |
| 157 | atomic_add_uint32(uint32_t *p, uint32_t x) |
| 158 | { |
| 159 | |
| 160 | return (__sync_add_and_fetch(p, x)); |
| 161 | } |
| 162 | |
| 163 | JEMALLOC_INLINE uint32_t |
| 164 | atomic_sub_uint32(uint32_t *p, uint32_t x) |
| 165 | { |
| 166 | |
| 167 | return (__sync_sub_and_fetch(p, x)); |
| 168 | } |
| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 169 | #else |
| 170 | # error "Missing implementation for 32-bit atomic operations" |
| 171 | #endif |
| Jason Evans | 06304a9 | 2012-03-23 16:09:56 -0700 | [diff] [blame] | 172 | |
| 173 | /******************************************************************************/ |
| 174 | /* size_t operations. */ |
| 175 | JEMALLOC_INLINE size_t |
| 176 | atomic_add_z(size_t *p, size_t x) |
| 177 | { |
| 178 | |
| 179 | #if (LG_SIZEOF_PTR == 3) |
| 180 | return ((size_t)atomic_add_uint64((uint64_t *)p, (uint64_t)x)); |
| 181 | #elif (LG_SIZEOF_PTR == 2) |
| 182 | return ((size_t)atomic_add_uint32((uint32_t *)p, (uint32_t)x)); |
| 183 | #endif |
| 184 | } |
| 185 | |
| 186 | JEMALLOC_INLINE size_t |
| 187 | atomic_sub_z(size_t *p, size_t x) |
| 188 | { |
| 189 | |
| 190 | #if (LG_SIZEOF_PTR == 3) |
| 191 | return ((size_t)atomic_add_uint64((uint64_t *)p, |
| 192 | (uint64_t)-((int64_t)x))); |
| 193 | #elif (LG_SIZEOF_PTR == 2) |
| 194 | return ((size_t)atomic_add_uint32((uint32_t *)p, |
| 195 | (uint32_t)-((int32_t)x))); |
| 196 | #endif |
| 197 | } |
| Jason Evans | 6da5418 | 2012-03-23 18:05:51 -0700 | [diff] [blame^] | 198 | |
| 199 | /******************************************************************************/ |
| 200 | /* unsigned operations. */ |
| 201 | JEMALLOC_INLINE unsigned |
| 202 | atomic_add_u(unsigned *p, unsigned x) |
| 203 | { |
| 204 | |
| 205 | #if (LG_SIZEOF_INT == 3) |
| 206 | return ((unsigned)atomic_add_uint64((uint64_t *)p, (uint64_t)x)); |
| 207 | #elif (LG_SIZEOF_INT == 2) |
| 208 | return ((unsigned)atomic_add_uint32((uint32_t *)p, (uint32_t)x)); |
| 209 | #endif |
| 210 | } |
| 211 | |
| 212 | JEMALLOC_INLINE unsigned |
| 213 | atomic_sub_u(unsigned *p, unsigned x) |
| 214 | { |
| 215 | |
| 216 | #if (LG_SIZEOF_INT == 3) |
| 217 | return ((unsigned)atomic_add_uint64((uint64_t *)p, |
| 218 | (uint64_t)-((int64_t)x))); |
| 219 | #elif (LG_SIZEOF_INT == 2) |
| 220 | return ((unsigned)atomic_add_uint32((uint32_t *)p, |
| 221 | (uint32_t)-((int32_t)x))); |
| 222 | #endif |
| 223 | } |
| 224 | /******************************************************************************/ |
| Jason Evans | 92d3284 | 2011-03-18 18:15:37 -0700 | [diff] [blame] | 225 | #endif |
| 226 | |
| 227 | #endif /* JEMALLOC_H_INLINES */ |
| 228 | /******************************************************************************/ |