/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

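/*
 * Reads are expressed in terms of the corresponding atomic add, so each port
 * below only has to supply add/subtract primitives.
 */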
#define atomic_read_uint64(p) atomic_add_uint64(p, 0)
#define atomic_read_uint32(p) atomic_add_uint32(p, 0)
#define atomic_read_z(p) atomic_add_z(p, 0)

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
uint64_t atomic_add_uint64(uint64_t *p, uint64_t x);
uint64_t atomic_sub_uint64(uint64_t *p, uint64_t x);
uint32_t atomic_add_uint32(uint32_t *p, uint32_t x);
uint32_t atomic_sub_uint32(uint32_t *p, uint32_t x);
size_t atomic_add_z(size_t *p, size_t x);
size_t atomic_sub_z(size_t *p, size_t x);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_ATOMIC_C_))
/******************************************************************************/
/* 64-bit operations. */
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
JEMALLOC_INLINE uint64_t
atomic_add_uint64(uint64_t *p, uint64_t x)
{

	return (__sync_add_and_fetch(p, x));
}

JEMALLOC_INLINE uint64_t
atomic_sub_uint64(uint64_t *p, uint64_t x)
{

	return (__sync_sub_and_fetch(p, x));
}
#elif (defined(JEMALLOC_OSATOMIC))
JEMALLOC_INLINE uint64_t
atomic_add_uint64(uint64_t *p, uint64_t x)
{

	return (OSAtomicAdd64((int64_t)x, (int64_t *)p));
}

JEMALLOC_INLINE uint64_t
atomic_sub_uint64(uint64_t *p, uint64_t x)
{

	return (OSAtomicAdd64(-((int64_t)x), (int64_t *)p));
}
#elif (defined(__amd64__) || defined(__x86_64__))
JEMALLOC_INLINE uint64_t
atomic_add_uint64(uint64_t *p, uint64_t x)
{
	uint64_t t = x;

	/* xaddq leaves the previous value of *p in x. */
	asm volatile (
	    "lock; xaddq %0, %1;"
	    : "+r" (x), "=m" (*p) /* Outputs. */
	    : "m" (*p) /* Inputs. */
	    );

	/* Return the new value, matching __sync_add_and_fetch() semantics. */
	return (t + x);
}

JEMALLOC_INLINE uint64_t
atomic_sub_uint64(uint64_t *p, uint64_t x)
{
	uint64_t t;

	x = (uint64_t)(-(int64_t)x);
	t = x;
	asm volatile (
	    "lock; xaddq %0, %1;"
	    : "+r" (x), "=m" (*p) /* Outputs. */
	    : "m" (*p) /* Inputs. */
	    );

	return (t + x);
}
#else
# if (LG_SIZEOF_PTR == 3)
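/*
 * atomic_*_z() relies on the 64-bit operations when pointers are 64 bits
 * wide, so only fail in that case.
 */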
# error "Missing implementation for 64-bit atomic operations"
# endif
#endif

/******************************************************************************/
/* 32-bit operations. */
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_4
JEMALLOC_INLINE uint32_t
atomic_add_uint32(uint32_t *p, uint32_t x)
{

	return (__sync_add_and_fetch(p, x));
}

JEMALLOC_INLINE uint32_t
atomic_sub_uint32(uint32_t *p, uint32_t x)
{

	return (__sync_sub_and_fetch(p, x));
}
#elif (defined(JEMALLOC_OSATOMIC))
JEMALLOC_INLINE uint32_t
atomic_add_uint32(uint32_t *p, uint32_t x)
{

	return (OSAtomicAdd32((int32_t)x, (int32_t *)p));
}

JEMALLOC_INLINE uint32_t
atomic_sub_uint32(uint32_t *p, uint32_t x)
{

	return (OSAtomicAdd32(-((int32_t)x), (int32_t *)p));
}
#elif (defined(__i386__) || defined(__amd64__) || defined(__x86_64__))
JEMALLOC_INLINE uint32_t
atomic_add_uint32(uint32_t *p, uint32_t x)
{
	uint32_t t = x;

	/* xaddl leaves the previous value of *p in x. */
	asm volatile (
	    "lock; xaddl %0, %1;"
	    : "+r" (x), "=m" (*p) /* Outputs. */
	    : "m" (*p) /* Inputs. */
	    );

	/* Return the new value, matching __sync_add_and_fetch() semantics. */
	return (t + x);
}

JEMALLOC_INLINE uint32_t
atomic_sub_uint32(uint32_t *p, uint32_t x)
{
	uint32_t t;

	x = (uint32_t)(-(int32_t)x);
	t = x;
	asm volatile (
	    "lock; xaddl %0, %1;"
	    : "+r" (x), "=m" (*p) /* Outputs. */
	    : "m" (*p) /* Inputs. */
	    );

	return (t + x);
}
#elif (defined __SH4__ || defined __mips__) && (__GNUC__ > 4 || \
    (__GNUC__ == 4 && (__GNUC_MINOR__ > 1 || (__GNUC_MINOR__ == 1 && \
    __GNUC_PATCHLEVEL__ > 1))))
JEMALLOC_INLINE uint32_t
atomic_add_uint32(uint32_t *p, uint32_t x)
{

	return (__sync_add_and_fetch(p, x));
}

JEMALLOC_INLINE uint32_t
atomic_sub_uint32(uint32_t *p, uint32_t x)
{

	return (__sync_sub_and_fetch(p, x));
}
#else
# error "Missing implementation for 32-bit atomic operations"
#endif

/******************************************************************************/
/* size_t operations. */
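/*
 * LG_SIZEOF_PTR is lg(sizeof(void *)), so these wrappers map to the 64-bit
 * operations on 64-bit systems and to the 32-bit operations on 32-bit
 * systems.
 */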
JEMALLOC_INLINE size_t
atomic_add_z(size_t *p, size_t x)
{

#if (LG_SIZEOF_PTR == 3)
	return ((size_t)atomic_add_uint64((uint64_t *)p, (uint64_t)x));
#elif (LG_SIZEOF_PTR == 2)
	return ((size_t)atomic_add_uint32((uint32_t *)p, (uint32_t)x));
#endif
}

JEMALLOC_INLINE size_t
atomic_sub_z(size_t *p, size_t x)
{

#if (LG_SIZEOF_PTR == 3)
	return ((size_t)atomic_add_uint64((uint64_t *)p,
	    (uint64_t)-((int64_t)x)));
#elif (LG_SIZEOF_PTR == 2)
	return ((size_t)atomic_add_uint32((uint32_t *)p,
	    (uint32_t)-((int32_t)x)));
#endif
}
#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/