/*
 * Top-level internal header for jemalloc.  This file is a configure template
 * (@...@ placeholders are substituted at configure time) and is read in
 * multiple passes; see the pass description further below.
 */
#ifndef JEMALLOC_INTERNAL_H
#define JEMALLOC_INTERNAL_H

#include "jemalloc_internal_defs.h"
#include "jemalloc/internal/jemalloc_internal_decls.h"

/* utrace(2)-based tracing support. */
#ifdef JEMALLOC_UTRACE
#include <sys/ktrace.h>
#endif

/*
 * Pull in the public API header with internal (mangled) symbol names.  Under
 * JEMALLOC_JET (the unit-test build), public symbols are prefixed with
 * "jet_" and renaming is temporarily suppressed; otherwise they are prefixed
 * with the configured private namespace.
 */
#define JEMALLOC_NO_DEMANGLE
#ifdef JEMALLOC_JET
#  define JEMALLOC_N(n) jet_##n
#  include "jemalloc/internal/public_namespace.h"
#  define JEMALLOC_NO_RENAME
#  include "../jemalloc@install_suffix@.h"
#  undef JEMALLOC_NO_RENAME
#else
#  define JEMALLOC_N(n) @private_namespace@##n
#  include "../jemalloc@install_suffix@.h"
#endif
#include "jemalloc/internal/private_namespace.h"
Jason Evans | 746e77a | 2011-07-30 16:40:52 -0700 | [diff] [blame] | 23 | |
/*
 * Translate the JEMALLOC_* feature macros emitted by the configure script
 * into always-defined compile-time boolean constants.  Using
 * "static const bool" instead of raw cpp conditionals lets ordinary C code
 * test features with plain if statements (keeping all branches visible to
 * the compiler) while the optimizer still removes dead code.
 */
static const bool config_debug =
#ifdef JEMALLOC_DEBUG
    true
#else
    false
#endif
    ;
/* true iff sbrk()/DSS allocation support was compiled in. */
static const bool have_dss =
#ifdef JEMALLOC_DSS
    true
#else
    false
#endif
    ;
static const bool config_fill =
#ifdef JEMALLOC_FILL
    true
#else
    false
#endif
    ;
static const bool config_lazy_lock =
#ifdef JEMALLOC_LAZY_LOCK
    true
#else
    false
#endif
    ;
/* Heap profiling, with optional libgcc/libunwind backtracing backends. */
static const bool config_prof =
#ifdef JEMALLOC_PROF
    true
#else
    false
#endif
    ;
static const bool config_prof_libgcc =
#ifdef JEMALLOC_PROF_LIBGCC
    true
#else
    false
#endif
    ;
static const bool config_prof_libunwind =
#ifdef JEMALLOC_PROF_LIBUNWIND
    true
#else
    false
#endif
    ;
static const bool config_munmap =
#ifdef JEMALLOC_MUNMAP
    true
#else
    false
#endif
    ;
static const bool config_stats =
#ifdef JEMALLOC_STATS
    true
#else
    false
#endif
    ;
static const bool config_tcache =
#ifdef JEMALLOC_TCACHE
    true
#else
    false
#endif
    ;
static const bool config_tls =
#ifdef JEMALLOC_TLS
    true
#else
    false
#endif
    ;
static const bool config_utrace =
#ifdef JEMALLOC_UTRACE
    true
#else
    false
#endif
    ;
static const bool config_valgrind =
#ifdef JEMALLOC_VALGRIND
    true
#else
    false
#endif
    ;
static const bool config_xmalloc =
#ifdef JEMALLOC_XMALLOC
    true
#else
    false
#endif
    ;
static const bool config_ivsalloc =
#ifdef JEMALLOC_IVSALLOC
    true
#else
    false
#endif
    ;
| 129 | |
/* Atomic-operation backend headers, selected by the configure script. */
#ifdef JEMALLOC_C11ATOMICS
#include <stdatomic.h>
#endif

/* FreeBSD atomic(9) primitives. */
#ifdef JEMALLOC_ATOMIC9
#include <machine/atomic.h>
#endif

/* Darwin OSAtomic*()/OSSpinLock*() primitives. */
#if (defined(JEMALLOC_OSATOMIC) || defined(JEMALLOC_OSSPIN))
#include <libkern/OSAtomic.h>
#endif

/* Mach VM and malloc-zone headers for the Darwin zone allocator hooks. */
#ifdef JEMALLOC_ZONE
#include <mach/mach_error.h>
#include <mach/mach_init.h>
#include <mach/vm_map.h>
#include <malloc/malloc.h>
#endif

/*
 * Intrusive data structure templates: red-black trees (RB_COMPACT selects
 * rb.h's compact node representation), and the qr/ql ring/list macros.
 */
#define RB_COMPACT
#include "jemalloc/internal/rb.h"
#include "jemalloc/internal/qr.h"
#include "jemalloc/internal/ql.h"
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 153 | |
/*
 * jemalloc can conceptually be broken into components (arena, tcache, etc.),
 * but there are circular dependencies that cannot be broken without
 * substantial performance degradation.  In order to reduce the effect on
 * visual code flow, read the header files in multiple passes, with one of the
 * following cpp variables defined during each pass:
 *
 *   JEMALLOC_H_TYPES   : Preprocessor-defined constants and pseudo-opaque data
 *                        types.
 *   JEMALLOC_H_STRUCTS : Data structures.
 *   JEMALLOC_H_EXTERNS : Extern data declarations and function prototypes.
 *   JEMALLOC_H_INLINES : Inline functions.
 */
/******************************************************************************/
#define JEMALLOC_H_TYPES

#include "jemalloc/internal/jemalloc_internal_macros.h"

/* Size class index type. */
typedef unsigned index_t;

/*
 * MALLOCX_*() flags-word decoding.  As decoded below, the flags word packs:
 * bits 0..5 hold lg(alignment), MALLOCX_ZERO is tested as a boolean flag,
 * and bits 8 and up hold (arena index + 1), so 0 means "no arena specified".
 */
#define MALLOCX_ARENA_MASK ((int)~0xff)
#define MALLOCX_LG_ALIGN_MASK ((int)0x3f)
/* Use MALLOCX_ALIGN_GET() if alignment may not be specified in flags. */
#define MALLOCX_ALIGN_GET_SPECIFIED(flags) \
	(ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK))
/* Maps a zero lg(alignment) field (alignment 1) to 0: "no constraint". */
#define MALLOCX_ALIGN_GET(flags) \
	(MALLOCX_ALIGN_GET_SPECIFIED(flags) & (SIZE_T_MAX-1))
#define MALLOCX_ZERO_GET(flags) \
	((bool)(flags & MALLOCX_ZERO))
/* Recover the arena index; meaningful only if an arena was encoded. */
#define MALLOCX_ARENA_GET(flags) \
	(((unsigned)(flags >> 8)) - 1)
/* Smallest size class to support. */
#define TINY_MIN (1U << LG_TINY_MIN)

/*
 * Minimum allocation alignment is 2^LG_QUANTUM bytes (ignoring tiny size
 * classes).  If configure did not supply LG_QUANTUM, derive it from
 * architecture-specific predefined macros.
 */
#ifndef LG_QUANTUM
#  if (defined(__i386__) || defined(_M_IX86))
#    define LG_QUANTUM 4
#  endif
#  ifdef __ia64__
#    define LG_QUANTUM 4
#  endif
#  ifdef __alpha__
#    define LG_QUANTUM 4
#  endif
#  ifdef __sparc64__
#    define LG_QUANTUM 4
#  endif
#  if (defined(__amd64__) || defined(__x86_64__) || defined(_M_X64))
#    define LG_QUANTUM 4
#  endif
#  ifdef __arm__
#    define LG_QUANTUM 3
#  endif
#  ifdef __aarch64__
#    define LG_QUANTUM 4
#  endif
#  ifdef __hppa__
#    define LG_QUANTUM 4
#  endif
#  ifdef __mips__
#    define LG_QUANTUM 3
#  endif
#  ifdef __or1k__
#    define LG_QUANTUM 3
#  endif
#  ifdef __powerpc__
#    define LG_QUANTUM 4
#  endif
#  ifdef __s390__
#    define LG_QUANTUM 4
#  endif
#  ifdef __SH4__
#    define LG_QUANTUM 4
#  endif
#  ifdef __tile__
#    define LG_QUANTUM 4
#  endif
#  ifdef __le32__
#    define LG_QUANTUM 4
#  endif
#  ifndef LG_QUANTUM
     /*
      * The whole message must be on the #error line: #error does not
      * concatenate adjacent string literals, so a continuation line would be
      * a stray token sequence rather than part of the diagnostic.
      */
#    error "Unknown minimum alignment for architecture; specify via --with-lg-quantum"
#  endif
#endif
| 245 | |
/* Quantum size: 2^LG_QUANTUM bytes, the minimum allocation alignment. */
#define QUANTUM ((size_t)(1U << LG_QUANTUM))
#define QUANTUM_MASK (QUANTUM - 1)

/* Return the smallest quantum multiple that is >= a. */
#define QUANTUM_CEILING(a) \
	(((a) + QUANTUM_MASK) & ~QUANTUM_MASK)

/* sizeof(long), as configured via LG_SIZEOF_LONG. */
#define LONG ((size_t)(1U << LG_SIZEOF_LONG))
#define LONG_MASK (LONG - 1)

/* Return the smallest long multiple that is >= a. */
#define LONG_CEILING(a) \
	(((a) + LONG_MASK) & ~LONG_MASK)

/* sizeof(void *), as configured via LG_SIZEOF_PTR. */
#define SIZEOF_PTR (1U << LG_SIZEOF_PTR)
#define PTR_MASK (SIZEOF_PTR - 1)

/* Return the smallest (void *) multiple that is >= a. */
#define PTR_CEILING(a) \
	(((a) + PTR_MASK) & ~PTR_MASK)
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 266 | |
/*
 * Maximum size of L1 cache line.  This is used to avoid cache line aliasing.
 * In addition, this controls the spacing of cacheline-spaced size classes.
 *
 * CACHELINE cannot be based on LG_CACHELINE because __declspec(align()) can
 * only handle raw constants.
 */
#define LG_CACHELINE 6
#define CACHELINE 64
#define CACHELINE_MASK (CACHELINE - 1)

/* Return the smallest cacheline multiple that is >= s. */
#define CACHELINE_CEILING(s) \
	(((s) + CACHELINE_MASK) & ~CACHELINE_MASK)

/* Page size.  LG_PAGE is determined by the configure script. */
#ifdef PAGE_MASK
/* Some system headers define PAGE_MASK; our definition takes precedence. */
# undef PAGE_MASK
#endif
#define PAGE ((size_t)(1U << LG_PAGE))
#define PAGE_MASK ((size_t)(PAGE - 1))

/* Return the smallest pagesize multiple that is >= s. */
#define PAGE_CEILING(s) \
	(((s) + PAGE_MASK) & ~PAGE_MASK)
| 292 | |
/*
 * Alignment helpers.  "alignment" must be a power of two (the mask
 * arithmetic relies on it); every parameter use is parenthesized so that
 * compound arguments (e.g. "x << 1") expand correctly.
 */

/* Return the nearest aligned address at or below a. */
#define ALIGNMENT_ADDR2BASE(a, alignment) \
	((void *)((uintptr_t)(a) & (-(alignment))))

/* Return the offset between a and the nearest aligned address at or below a. */
#define ALIGNMENT_ADDR2OFFSET(a, alignment) \
	((size_t)((uintptr_t)(a) & ((alignment) - 1)))

/* Return the smallest alignment multiple that is >= s. */
#define ALIGNMENT_CEILING(s, alignment) \
	(((s) + ((alignment) - 1)) & (-(alignment)))
| 304 | |
/*
 * Declare a variable length array.  C99 and later use a real VLA; pre-C99
 * compilers (notably MSVC) fall back to alloca(), which has the same
 * automatic-storage lifetime for this use.
 */
#if __STDC_VERSION__ < 199901L
#  ifdef _MSC_VER
#    include <malloc.h>
#    define alloca _alloca
#  else
#    ifdef JEMALLOC_HAS_ALLOCA_H
#      include <alloca.h>
#    else
#      include <stdlib.h>
#    endif
#  endif
#  define VARIABLE_ARRAY(type, name, count) \
	type *name = alloca(sizeof(type) * (count))
#else
#  define VARIABLE_ARRAY(type, name, count) type name[(count)]
#endif
| 322 | |
/* Pass 1: constants and pseudo-opaque types from each component. */
#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/tsd.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/arena.h"
#include "jemalloc/internal/bitmap.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"
#include "jemalloc/internal/rtree.h"
#include "jemalloc/internal/tcache.h"
#include "jemalloc/internal/hash.h"
#include "jemalloc/internal/quarantine.h"
#include "jemalloc/internal/prof.h"

#undef JEMALLOC_H_TYPES
/******************************************************************************/
#define JEMALLOC_H_STRUCTS
/* Pass 2: concrete data structure definitions. */

#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/bitmap.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/arena.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"
#include "jemalloc/internal/rtree.h"
#include "jemalloc/internal/tcache.h"
#include "jemalloc/internal/hash.h"
#include "jemalloc/internal/quarantine.h"
#include "jemalloc/internal/prof.h"

/*
 * NOTE(review): tsd.h is deliberately last in this pass (unlike pass 1) —
 * presumably its structures reference types declared above; confirm before
 * reordering.
 */
#include "jemalloc/internal/tsd.h"

#undef JEMALLOC_H_STRUCTS
/******************************************************************************/
#define JEMALLOC_H_EXTERNS
/* Pass 3: extern data declarations and function prototypes. */

/* Runtime option variables (set during option parsing; defined in the .c). */
extern bool opt_abort;
extern bool opt_junk;
extern size_t opt_quarantine;
extern bool opt_redzone;
extern bool opt_utrace;
extern bool opt_xmalloc;
extern bool opt_zero;
extern size_t opt_narenas;

/* Whether the process is running under Valgrind. */
extern bool in_valgrind;

/* Number of CPUs. */
extern unsigned ncpus;

/*
 * index2size_tab encodes the same information as could be computed (at
 * unacceptable cost in some code paths) by index2size_compute().
 */
extern size_t const index2size_tab[NSIZES];
/*
 * size2index_tab is a compact lookup table that rounds request sizes up to
 * size classes.  In order to reduce cache footprint, the table is compressed,
 * and all accesses are via size2index().
 */
extern uint8_t const size2index_tab[];

/* Arena 0 ("a0") bootstrap allocation interface. */
arena_t	*a0get(void);
void	*a0malloc(size_t size);
void	*a0calloc(size_t num, size_t size);
void	a0free(void *ptr);
/* Arena management. */
arena_t	*arenas_extend(unsigned ind);
arena_t	*arena_init(unsigned ind);
unsigned	narenas_total_get(void);
arena_t	*arena_get_hard(tsd_t *tsd, unsigned ind, bool init_if_missing);
arena_t	*arena_choose_hard(tsd_t *tsd);
void	arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind);
unsigned	arena_nbound(unsigned ind);
/* Thread-specific-data cleanup hooks. */
void	thread_allocated_cleanup(tsd_t *tsd);
void	thread_deallocated_cleanup(tsd_t *tsd);
void	arena_cleanup(tsd_t *tsd);
void	arenas_cache_cleanup(tsd_t *tsd);
void	narenas_cache_cleanup(tsd_t *tsd);
void	arenas_cache_bypass_cleanup(tsd_t *tsd);
/* fork(2) synchronization hooks. */
void	jemalloc_prefork(void);
void	jemalloc_postfork_parent(void);
void	jemalloc_postfork_child(void);

#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/bitmap.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/arena.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"
#include "jemalloc/internal/rtree.h"
#include "jemalloc/internal/tcache.h"
#include "jemalloc/internal/hash.h"
#include "jemalloc/internal/quarantine.h"
#include "jemalloc/internal/prof.h"
#include "jemalloc/internal/tsd.h"

#undef JEMALLOC_H_EXTERNS
/******************************************************************************/
#define JEMALLOC_H_INLINES
/* Pass 4: inline functions. */

#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/tsd.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"

/*
 * When inlining is disabled, the functions defined below become ordinary
 * (non-inline) functions; declare their prototypes here.
 */
#ifndef JEMALLOC_ENABLE_INLINE
index_t	size2index_compute(size_t size);
index_t	size2index_lookup(size_t size);
index_t	size2index(size_t size);
size_t	index2size_compute(index_t index);
size_t	index2size_lookup(index_t index);
size_t	index2size(index_t index);
size_t	s2u_compute(size_t size);
size_t	s2u_lookup(size_t size);
size_t	s2u(size_t size);
size_t	sa2u(size_t size, size_t alignment);
arena_t	*arena_choose(tsd_t *tsd, arena_t *arena);
arena_t	*arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing,
    bool refresh_if_missing);
#endif
| 484 | #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_)) |
/*
 * Compute the size class index for a request of the given size, from first
 * principles (no lookup table).  Slow; used to bootstrap/verify the
 * table-driven paths.
 */
JEMALLOC_INLINE index_t
size2index_compute(size_t size)
{

#if (NTBINS != 0)
	/* Tiny size classes: one class per power of two below the quantum. */
	if (size <= (ZU(1) << LG_TINY_MAXCLASS)) {
		size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
		size_t lg_ceil = lg_floor(pow2_ceil(size));
		return (lg_ceil < lg_tmin ? 0 : lg_ceil - lg_tmin);
	} else
#endif
	{
		/* x = ceil(lg(size)), since lg_floor(2*size - 1) rounds up. */
		size_t x = lg_floor((size<<1)-1);
		/* Which group of 2^LG_SIZE_CLASS_GROUP classes size falls in. */
		size_t shift = (x < LG_SIZE_CLASS_GROUP + LG_QUANTUM) ? 0 :
		    x - (LG_SIZE_CLASS_GROUP + LG_QUANTUM);
		size_t grp = shift << LG_SIZE_CLASS_GROUP;

		/* lg of the spacing between classes within the group. */
		size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_QUANTUM + 1)
		    ? LG_QUANTUM : x - LG_SIZE_CLASS_GROUP - 1;

		/* Position within the group: which delta-multiple covers size. */
		size_t delta_inverse_mask = ZI(-1) << lg_delta;
		size_t mod = ((((size-1) & delta_inverse_mask) >> lg_delta)) &
		    ((ZU(1) << LG_SIZE_CLASS_GROUP) - 1);

		size_t index = NTBINS + grp + mod;
		return (index);
	}
}
| 513 | |
| 514 | JEMALLOC_ALWAYS_INLINE index_t |
| 515 | size2index_lookup(size_t size) |
| 516 | { |
| 517 | |
| 518 | assert(size <= LOOKUP_MAXCLASS); |
| 519 | { |
| 520 | size_t ret = ((size_t)(size2index_tab[(size-1) >> |
| 521 | LG_TINY_MIN])); |
| 522 | assert(ret == size2index_compute(size)); |
| 523 | return (ret); |
| 524 | } |
| 525 | } |
| 526 | |
| 527 | JEMALLOC_ALWAYS_INLINE index_t |
| 528 | size2index(size_t size) |
| 529 | { |
| 530 | |
| 531 | assert(size > 0); |
| 532 | if (likely(size <= LOOKUP_MAXCLASS)) |
| 533 | return (size2index_lookup(size)); |
| 534 | else |
| 535 | return (size2index_compute(size)); |
| 536 | } |
| 537 | |
/*
 * Compute the usable size for a size class index from first principles
 * (inverse of size2index_compute()).  Slow; the table-driven
 * index2size_lookup() is the normal path.
 */
JEMALLOC_INLINE size_t
index2size_compute(index_t index)
{

#if (NTBINS > 0)
	/* Tiny classes are plain powers of two. */
	if (index < NTBINS)
		return (ZU(1) << (LG_TINY_MAXCLASS - NTBINS + 1 + index));
	else
#endif
	{
		/* Split the non-tiny index into group and position-in-group. */
		size_t reduced_index = index - NTBINS;
		size_t grp = reduced_index >> LG_SIZE_CLASS_GROUP;
		size_t mod = reduced_index & ((ZU(1) << LG_SIZE_CLASS_GROUP) -
		    1);

		/* Base size of the group; masked to 0 for group 0. */
		size_t grp_size_mask = ~((!!grp)-1);
		size_t grp_size = ((ZU(1) << (LG_QUANTUM +
		    (LG_SIZE_CLASS_GROUP-1))) << grp) & grp_size_mask;

		/* Spacing between consecutive classes within the group. */
		size_t shift = (grp == 0) ? 1 : grp;
		size_t lg_delta = shift + (LG_QUANTUM-1);
		size_t mod_size = (mod+1) << lg_delta;

		size_t usize = grp_size + mod_size;
		return (usize);
	}
}
| 565 | |
| 566 | JEMALLOC_ALWAYS_INLINE size_t |
| 567 | index2size_lookup(index_t index) |
| 568 | { |
| 569 | size_t ret = (size_t)index2size_tab[index]; |
| 570 | assert(ret == index2size_compute(index)); |
| 571 | return (ret); |
| 572 | } |
| 573 | |
| 574 | JEMALLOC_ALWAYS_INLINE size_t |
| 575 | index2size(index_t index) |
| 576 | { |
| 577 | |
| 578 | assert(index < NSIZES); |
| 579 | return (index2size_lookup(index)); |
| 580 | } |
| 581 | |
/*
 * Compute the usable size for a request of the given size, i.e. round size up
 * to the smallest size class that can contain it, without using the lookup
 * table.
 */
JEMALLOC_ALWAYS_INLINE size_t
s2u_compute(size_t size)
{

#if (NTBINS > 0)
	if (size <= (ZU(1) << LG_TINY_MAXCLASS)) {
		/*
		 * Tiny classes: round up to a power of two, but never below
		 * the smallest tiny class.
		 */
		size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
		size_t lg_ceil = lg_floor(pow2_ceil(size));
		return (lg_ceil < lg_tmin ? (ZU(1) << lg_tmin) :
		    (ZU(1) << lg_ceil));
	} else
#endif
	{
		/* lg_floor((size<<1)-1) computes ceil(lg(size)). */
		size_t x = lg_floor((size<<1)-1);
		size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_QUANTUM + 1)
		    ? LG_QUANTUM : x - LG_SIZE_CLASS_GROUP - 1;
		size_t delta = ZU(1) << lg_delta;
		size_t delta_mask = delta - 1;
		/* Round size up to a multiple of the class spacing (delta). */
		size_t usize = (size + delta_mask) & ~delta_mask;
		return (usize);
	}
}
| 604 | |
| 605 | JEMALLOC_ALWAYS_INLINE size_t |
| 606 | s2u_lookup(size_t size) |
| 607 | { |
| 608 | size_t ret = index2size_lookup(size2index_lookup(size)); |
| 609 | |
| 610 | assert(ret == s2u_compute(size)); |
| 611 | return (ret); |
| 612 | } |
| 613 | |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 614 | /* |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 615 | * Compute usable size that would result from allocating an object with the |
| 616 | * specified size. |
| 617 | */ |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 618 | JEMALLOC_ALWAYS_INLINE size_t |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 619 | s2u(size_t size) |
| 620 | { |
| 621 | |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 622 | assert(size > 0); |
| 623 | if (likely(size <= LOOKUP_MAXCLASS)) |
| 624 | return (s2u_lookup(size)); |
| 625 | else |
| 626 | return (s2u_compute(size)); |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 627 | } |
| 628 | |
| 629 | /* |
| 630 | * Compute usable size that would result from allocating an object with the |
| 631 | * specified size and alignment. |
| 632 | */ |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 633 | JEMALLOC_ALWAYS_INLINE size_t |
Jason Evans | 5ff709c | 2012-04-11 18:13:45 -0700 | [diff] [blame] | 634 | sa2u(size_t size, size_t alignment) |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 635 | { |
| 636 | size_t usize; |
| 637 | |
Jason Evans | 5ff709c | 2012-04-11 18:13:45 -0700 | [diff] [blame] | 638 | assert(alignment != 0 && ((alignment - 1) & alignment) == 0); |
| 639 | |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 640 | /* Try for a small size class. */ |
| 641 | if (size <= SMALL_MAXCLASS && alignment < PAGE) { |
| 642 | /* |
| 643 | * Round size up to the nearest multiple of alignment. |
| 644 | * |
| 645 | * This done, we can take advantage of the fact that for each |
| 646 | * small size class, every object is aligned at the smallest |
| 647 | * power of two that is non-zero in the base two representation |
| 648 | * of the size. For example: |
| 649 | * |
| 650 | * Size | Base 2 | Minimum alignment |
| 651 | * -----+----------+------------------ |
| 652 | * 96 | 1100000 | 32 |
| 653 | * 144 | 10100000 | 32 |
| 654 | * 192 | 11000000 | 64 |
| 655 | */ |
| 656 | usize = s2u(ALIGNMENT_CEILING(size, alignment)); |
| 657 | if (usize < LARGE_MINCLASS) |
| 658 | return (usize); |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 659 | } |
| 660 | |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 661 | /* Try for a large size class. */ |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 662 | if (likely(size <= arena_maxclass) && likely(alignment < chunksize)) { |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 663 | /* |
| 664 | * We can't achieve subpage alignment, so round up alignment |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 665 | * to the minimum that can actually be supported. |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 666 | */ |
| 667 | alignment = PAGE_CEILING(alignment); |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 668 | |
| 669 | /* Make sure result is a large size class. */ |
| 670 | usize = (size <= LARGE_MINCLASS) ? LARGE_MINCLASS : s2u(size); |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 671 | |
| 672 | /* |
| 673 | * Calculate the size of the over-size run that arena_palloc() |
| 674 | * would need to allocate in order to guarantee the alignment. |
| 675 | */ |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 676 | if (usize + alignment - PAGE <= arena_maxrun) |
| 677 | return (usize); |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 678 | } |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 679 | |
| 680 | /* Huge size class. Beware of size_t overflow. */ |
| 681 | |
| 682 | /* |
| 683 | * We can't achieve subchunk alignment, so round up alignment to the |
| 684 | * minimum that can actually be supported. |
| 685 | */ |
| 686 | alignment = CHUNK_CEILING(alignment); |
| 687 | if (alignment == 0) { |
| 688 | /* size_t overflow. */ |
| 689 | return (0); |
| 690 | } |
| 691 | |
| 692 | /* Make sure result is a huge size class. */ |
| 693 | if (size <= chunksize) |
| 694 | usize = chunksize; |
| 695 | else { |
| 696 | usize = s2u(size); |
| 697 | if (usize < size) { |
| 698 | /* size_t overflow. */ |
| 699 | return (0); |
| 700 | } |
| 701 | } |
| 702 | |
| 703 | /* |
| 704 | * Calculate the multi-chunk mapping that huge_palloc() would need in |
| 705 | * order to guarantee the alignment. |
| 706 | */ |
| 707 | if (usize + alignment - PAGE < usize) { |
| 708 | /* size_t overflow. */ |
| 709 | return (0); |
| 710 | } |
| 711 | return (usize); |
Jason Evans | 9344368 | 2010-10-20 17:39:18 -0700 | [diff] [blame] | 712 | } |
| 713 | |
Jason Evans | 4c2faa8 | 2012-03-13 11:09:23 -0700 | [diff] [blame] | 714 | /* Choose an arena based on a per-thread value. */ |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 715 | JEMALLOC_INLINE arena_t * |
Jason Evans | 8bb3198 | 2014-10-07 23:14:57 -0700 | [diff] [blame] | 716 | arena_choose(tsd_t *tsd, arena_t *arena) |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 717 | { |
| 718 | arena_t *ret; |
| 719 | |
Jason Evans | 01b3fe5 | 2012-04-03 09:28:00 -0700 | [diff] [blame] | 720 | if (arena != NULL) |
| 721 | return (arena); |
| 722 | |
Jason Evans | 8bb3198 | 2014-10-07 23:14:57 -0700 | [diff] [blame] | 723 | if (unlikely((ret = tsd_arena_get(tsd)) == NULL)) |
| 724 | ret = arena_choose_hard(tsd); |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 725 | |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 726 | return (ret); |
| 727 | } |
Jason Evans | 8bb3198 | 2014-10-07 23:14:57 -0700 | [diff] [blame] | 728 | |
| 729 | JEMALLOC_INLINE arena_t * |
| 730 | arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing, |
| 731 | bool refresh_if_missing) |
| 732 | { |
| 733 | arena_t *arena; |
| 734 | arena_t **arenas_cache = tsd_arenas_cache_get(tsd); |
| 735 | |
| 736 | /* init_if_missing requires refresh_if_missing. */ |
| 737 | assert(!init_if_missing || refresh_if_missing); |
| 738 | |
| 739 | if (unlikely(arenas_cache == NULL)) { |
| 740 | /* arenas_cache hasn't been initialized yet. */ |
| 741 | return (arena_get_hard(tsd, ind, init_if_missing)); |
| 742 | } |
| 743 | if (unlikely(ind >= tsd_narenas_cache_get(tsd))) { |
| 744 | /* |
| 745 | * ind is invalid, cache is old (too small), or arena to be |
| 746 | * initialized. |
| 747 | */ |
| 748 | return (refresh_if_missing ? arena_get_hard(tsd, ind, |
| 749 | init_if_missing) : NULL); |
| 750 | } |
| 751 | arena = arenas_cache[ind]; |
| 752 | if (likely(arena != NULL) || !refresh_if_missing) |
| 753 | return (arena); |
| 754 | if (init_if_missing) |
| 755 | return (arena_get_hard(tsd, ind, init_if_missing)); |
| 756 | else |
| 757 | return (NULL); |
| 758 | } |
Jason Evans | 9dcad2d | 2011-02-13 18:11:54 -0800 | [diff] [blame] | 759 | #endif |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 760 | |
Jason Evans | 84c8eef | 2011-03-16 10:30:13 -0700 | [diff] [blame] | 761 | #include "jemalloc/internal/bitmap.h" |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 762 | #include "jemalloc/internal/rtree.h" |
Jason Evans | 203484e | 2012-05-02 00:30:36 -0700 | [diff] [blame] | 763 | /* |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 764 | * Include portions of arena.h interleaved with tcache.h in order to resolve |
| 765 | * circular dependencies. |
Jason Evans | 203484e | 2012-05-02 00:30:36 -0700 | [diff] [blame] | 766 | */ |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 767 | #define JEMALLOC_ARENA_INLINE_A |
| 768 | #include "jemalloc/internal/arena.h" |
| 769 | #undef JEMALLOC_ARENA_INLINE_A |
| 770 | #include "jemalloc/internal/tcache.h" |
Jason Evans | 203484e | 2012-05-02 00:30:36 -0700 | [diff] [blame] | 771 | #define JEMALLOC_ARENA_INLINE_B |
| 772 | #include "jemalloc/internal/arena.h" |
| 773 | #undef JEMALLOC_ARENA_INLINE_B |
Jason Evans | 376b152 | 2010-02-11 14:45:59 -0800 | [diff] [blame] | 774 | #include "jemalloc/internal/hash.h" |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 775 | #include "jemalloc/internal/quarantine.h" |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 776 | |
| 777 | #ifndef JEMALLOC_ENABLE_INLINE |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 778 | void *imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena); |
| 779 | void *imalloc(tsd_t *tsd, size_t size); |
| 780 | void *icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena); |
| 781 | void *icalloc(tsd_t *tsd, size_t size); |
| 782 | void *ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero, |
| 783 | bool try_tcache, arena_t *arena); |
| 784 | void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero); |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 785 | size_t isalloc(const void *ptr, bool demote); |
| 786 | size_t ivsalloc(const void *ptr, bool demote); |
| 787 | size_t u2rz(size_t usize); |
| 788 | size_t p2rz(const void *ptr); |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 789 | void idalloct(tsd_t *tsd, void *ptr, bool try_tcache); |
| 790 | void isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache); |
| 791 | void idalloc(tsd_t *tsd, void *ptr); |
| 792 | void iqalloc(tsd_t *tsd, void *ptr, bool try_tcache); |
| 793 | void isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache); |
| 794 | void *iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, |
| 795 | size_t extra, size_t alignment, bool zero, bool try_tcache_alloc, |
| 796 | bool try_tcache_dalloc, arena_t *arena); |
Daniel Micay | d33f834 | 2014-10-24 13:18:57 -0400 | [diff] [blame] | 797 | void *iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, |
| 798 | size_t alignment, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, |
| 799 | arena_t *arena); |
| 800 | void *iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, |
| 801 | size_t alignment, bool zero); |
| 802 | bool ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra, |
| 803 | size_t alignment, bool zero); |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 804 | #endif |
| 805 | |
| 806 | #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_)) |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 807 | JEMALLOC_ALWAYS_INLINE void * |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 808 | imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena) |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 809 | { |
| 810 | |
| 811 | assert(size != 0); |
| 812 | |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 813 | if (likely(size <= arena_maxclass)) |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 814 | return (arena_malloc(tsd, arena, size, false, try_tcache)); |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 815 | else |
Jason Evans | fc0b3b7 | 2014-10-09 17:54:06 -0700 | [diff] [blame] | 816 | return (huge_malloc(tsd, arena, size, false, try_tcache)); |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 817 | } |
| 818 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 819 | JEMALLOC_ALWAYS_INLINE void * |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 820 | imalloc(tsd_t *tsd, size_t size) |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 821 | { |
| 822 | |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 823 | return (imalloct(tsd, size, true, NULL)); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 824 | } |
| 825 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 826 | JEMALLOC_ALWAYS_INLINE void * |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 827 | icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena) |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 828 | { |
| 829 | |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 830 | if (likely(size <= arena_maxclass)) |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 831 | return (arena_malloc(tsd, arena, size, true, try_tcache)); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 832 | else |
Jason Evans | fc0b3b7 | 2014-10-09 17:54:06 -0700 | [diff] [blame] | 833 | return (huge_malloc(tsd, arena, size, true, try_tcache)); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 834 | } |
| 835 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 836 | JEMALLOC_ALWAYS_INLINE void * |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 837 | icalloc(tsd_t *tsd, size_t size) |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 838 | { |
| 839 | |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 840 | return (icalloct(tsd, size, true, NULL)); |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 841 | } |
| 842 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 843 | JEMALLOC_ALWAYS_INLINE void * |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 844 | ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero, bool try_tcache, |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 845 | arena_t *arena) |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 846 | { |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 847 | void *ret; |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 848 | |
Jason Evans | 38d9210 | 2011-03-23 00:37:29 -0700 | [diff] [blame] | 849 | assert(usize != 0); |
Jason Evans | 5ff709c | 2012-04-11 18:13:45 -0700 | [diff] [blame] | 850 | assert(usize == sa2u(usize, alignment)); |
Jason Evans | 38d9210 | 2011-03-23 00:37:29 -0700 | [diff] [blame] | 851 | |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 852 | if (usize <= SMALL_MAXCLASS && alignment < PAGE) |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 853 | ret = arena_malloc(tsd, arena, usize, zero, try_tcache); |
Jason Evans | 38d9210 | 2011-03-23 00:37:29 -0700 | [diff] [blame] | 854 | else { |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 855 | if (likely(usize <= arena_maxclass)) { |
Jason Evans | 8bb3198 | 2014-10-07 23:14:57 -0700 | [diff] [blame] | 856 | arena = arena_choose(tsd, arena); |
| 857 | if (unlikely(arena == NULL)) |
| 858 | return (NULL); |
| 859 | ret = arena_palloc(arena, usize, alignment, zero); |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 860 | } else if (likely(alignment <= chunksize)) |
Jason Evans | fc0b3b7 | 2014-10-09 17:54:06 -0700 | [diff] [blame] | 861 | ret = huge_malloc(tsd, arena, usize, zero, try_tcache); |
| 862 | else { |
| 863 | ret = huge_palloc(tsd, arena, usize, alignment, zero, |
| 864 | try_tcache); |
| 865 | } |
Jason Evans | 38d9210 | 2011-03-23 00:37:29 -0700 | [diff] [blame] | 866 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 867 | |
Jason Evans | 5ff709c | 2012-04-11 18:13:45 -0700 | [diff] [blame] | 868 | assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 869 | return (ret); |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 870 | } |
| 871 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 872 | JEMALLOC_ALWAYS_INLINE void * |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 873 | ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 874 | { |
| 875 | |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 876 | return (ipalloct(tsd, usize, alignment, zero, true, NULL)); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 877 | } |
| 878 | |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 879 | /* |
| 880 | * Typical usage: |
| 881 | * void *ptr = [...] |
| 882 | * size_t sz = isalloc(ptr, config_prof); |
| 883 | */ |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 884 | JEMALLOC_ALWAYS_INLINE size_t |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 885 | isalloc(const void *ptr, bool demote) |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 886 | { |
| 887 | size_t ret; |
| 888 | arena_chunk_t *chunk; |
| 889 | |
Jason Evans | f004737 | 2012-04-02 15:18:24 -0700 | [diff] [blame] | 890 | assert(ptr != NULL); |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 891 | /* Demotion only makes sense if config_prof is true. */ |
Jason Evans | 551ebc4 | 2014-10-03 10:16:09 -0700 | [diff] [blame] | 892 | assert(config_prof || !demote); |
Jason Evans | f004737 | 2012-04-02 15:18:24 -0700 | [diff] [blame] | 893 | |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 894 | chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr); |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 895 | if (likely(chunk != ptr)) |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 896 | ret = arena_salloc(ptr, demote); |
Jason Evans | f7088e6 | 2012-04-19 18:28:03 -0700 | [diff] [blame] | 897 | else |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 898 | ret = huge_salloc(ptr); |
| 899 | |
| 900 | return (ret); |
| 901 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 902 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 903 | JEMALLOC_ALWAYS_INLINE size_t |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 904 | ivsalloc(const void *ptr, bool demote) |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 905 | { |
| 906 | |
| 907 | /* Return 0 if ptr is not within a chunk managed by jemalloc. */ |
Jason Evans | b954bc5 | 2014-01-02 17:36:38 -0800 | [diff] [blame] | 908 | if (rtree_get(chunks_rtree, (uintptr_t)CHUNK_ADDR2BASE(ptr)) == 0) |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 909 | return (0); |
| 910 | |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 911 | return (isalloc(ptr, demote)); |
| 912 | } |
| 913 | |
| 914 | JEMALLOC_INLINE size_t |
| 915 | u2rz(size_t usize) |
| 916 | { |
| 917 | size_t ret; |
| 918 | |
| 919 | if (usize <= SMALL_MAXCLASS) { |
Jason Evans | 155bfa7 | 2014-10-05 17:54:10 -0700 | [diff] [blame] | 920 | index_t binind = size2index(usize); |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 921 | ret = arena_bin_info[binind].redzone_size; |
| 922 | } else |
| 923 | ret = 0; |
| 924 | |
| 925 | return (ret); |
| 926 | } |
| 927 | |
| 928 | JEMALLOC_INLINE size_t |
| 929 | p2rz(const void *ptr) |
| 930 | { |
| 931 | size_t usize = isalloc(ptr, false); |
| 932 | |
| 933 | return (u2rz(usize)); |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 934 | } |
Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 935 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 936 | JEMALLOC_ALWAYS_INLINE void |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 937 | idalloct(tsd_t *tsd, void *ptr, bool try_tcache) |
Jason Evans | 355b438 | 2010-09-20 19:20:48 -0700 | [diff] [blame] | 938 | { |
| 939 | arena_chunk_t *chunk; |
| 940 | |
Jason Evans | f004737 | 2012-04-02 15:18:24 -0700 | [diff] [blame] | 941 | assert(ptr != NULL); |
| 942 | |
Jason Evans | 355b438 | 2010-09-20 19:20:48 -0700 | [diff] [blame] | 943 | chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr); |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 944 | if (likely(chunk != ptr)) |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 945 | arena_dalloc(tsd, chunk, ptr, try_tcache); |
Jason Evans | f004737 | 2012-04-02 15:18:24 -0700 | [diff] [blame] | 946 | else |
Jason Evans | fc0b3b7 | 2014-10-09 17:54:06 -0700 | [diff] [blame] | 947 | huge_dalloc(tsd, ptr, try_tcache); |
Jason Evans | 355b438 | 2010-09-20 19:20:48 -0700 | [diff] [blame] | 948 | } |
| 949 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 950 | JEMALLOC_ALWAYS_INLINE void |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 951 | isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache) |
Daniel Micay | 4cfe551 | 2014-08-28 15:41:48 -0400 | [diff] [blame] | 952 | { |
| 953 | arena_chunk_t *chunk; |
| 954 | |
| 955 | assert(ptr != NULL); |
| 956 | |
| 957 | chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr); |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 958 | if (likely(chunk != ptr)) |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 959 | arena_sdalloc(tsd, chunk, ptr, size, try_tcache); |
Daniel Micay | 4cfe551 | 2014-08-28 15:41:48 -0400 | [diff] [blame] | 960 | else |
Jason Evans | fc0b3b7 | 2014-10-09 17:54:06 -0700 | [diff] [blame] | 961 | huge_dalloc(tsd, ptr, try_tcache); |
Daniel Micay | 4cfe551 | 2014-08-28 15:41:48 -0400 | [diff] [blame] | 962 | } |
| 963 | |
| 964 | JEMALLOC_ALWAYS_INLINE void |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 965 | idalloc(tsd_t *tsd, void *ptr) |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 966 | { |
| 967 | |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 968 | idalloct(tsd, ptr, true); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 969 | } |
| 970 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 971 | JEMALLOC_ALWAYS_INLINE void |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 972 | iqalloc(tsd_t *tsd, void *ptr, bool try_tcache) |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 973 | { |
| 974 | |
Jason Evans | 9c640bf | 2014-09-11 16:20:44 -0700 | [diff] [blame] | 975 | if (config_fill && unlikely(opt_quarantine)) |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 976 | quarantine(tsd, ptr); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 977 | else |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 978 | idalloct(tsd, ptr, try_tcache); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 979 | } |
| 980 | |
Daniel Micay | 4cfe551 | 2014-08-28 15:41:48 -0400 | [diff] [blame] | 981 | JEMALLOC_ALWAYS_INLINE void |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 982 | isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache) |
Daniel Micay | 4cfe551 | 2014-08-28 15:41:48 -0400 | [diff] [blame] | 983 | { |
| 984 | |
Jason Evans | 9c640bf | 2014-09-11 16:20:44 -0700 | [diff] [blame] | 985 | if (config_fill && unlikely(opt_quarantine)) |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 986 | quarantine(tsd, ptr); |
Daniel Micay | 4cfe551 | 2014-08-28 15:41:48 -0400 | [diff] [blame] | 987 | else |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 988 | isdalloct(tsd, ptr, size, try_tcache); |
Daniel Micay | 4cfe551 | 2014-08-28 15:41:48 -0400 | [diff] [blame] | 989 | } |
| 990 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 991 | JEMALLOC_ALWAYS_INLINE void * |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 992 | iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, |
| 993 | size_t extra, size_t alignment, bool zero, bool try_tcache_alloc, |
| 994 | bool try_tcache_dalloc, arena_t *arena) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 995 | { |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 996 | void *p; |
| 997 | size_t usize, copysize; |
| 998 | |
| 999 | usize = sa2u(size + extra, alignment); |
| 1000 | if (usize == 0) |
| 1001 | return (NULL); |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 1002 | p = ipalloct(tsd, usize, alignment, zero, try_tcache_alloc, arena); |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1003 | if (p == NULL) { |
| 1004 | if (extra == 0) |
| 1005 | return (NULL); |
| 1006 | /* Try again, without extra this time. */ |
| 1007 | usize = sa2u(size, alignment); |
| 1008 | if (usize == 0) |
| 1009 | return (NULL); |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 1010 | p = ipalloct(tsd, usize, alignment, zero, try_tcache_alloc, |
| 1011 | arena); |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1012 | if (p == NULL) |
| 1013 | return (NULL); |
| 1014 | } |
| 1015 | /* |
| 1016 | * Copy at most size bytes (not size+extra), since the caller has no |
| 1017 | * expectation that the extra bytes will be reliably preserved. |
| 1018 | */ |
| 1019 | copysize = (size < oldsize) ? size : oldsize; |
| 1020 | memcpy(p, ptr, copysize); |
Daniel Micay | d33f834 | 2014-10-24 13:18:57 -0400 | [diff] [blame] | 1021 | isqalloc(tsd, ptr, oldsize, try_tcache_dalloc); |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1022 | return (p); |
| 1023 | } |
| 1024 | |
| 1025 | JEMALLOC_ALWAYS_INLINE void * |
Daniel Micay | d33f834 | 2014-10-24 13:18:57 -0400 | [diff] [blame] | 1026 | iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment, |
| 1027 | bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena) |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1028 | { |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1029 | |
| 1030 | assert(ptr != NULL); |
| 1031 | assert(size != 0); |
| 1032 | |
Jason Evans | 8e3c3c6 | 2010-09-17 15:46:18 -0700 | [diff] [blame] | 1033 | if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1)) |
| 1034 | != 0) { |
Jason Evans | 8e3c3c6 | 2010-09-17 15:46:18 -0700 | [diff] [blame] | 1035 | /* |
Jason Evans | 122449b | 2012-04-06 00:35:09 -0700 | [diff] [blame] | 1036 | * Existing object alignment is inadequate; allocate new space |
Jason Evans | 8e3c3c6 | 2010-09-17 15:46:18 -0700 | [diff] [blame] | 1037 | * and copy. |
| 1038 | */ |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 1039 | return (iralloct_realign(tsd, ptr, oldsize, size, 0, alignment, |
| 1040 | zero, try_tcache_alloc, try_tcache_dalloc, arena)); |
Jason Evans | 8e3c3c6 | 2010-09-17 15:46:18 -0700 | [diff] [blame] | 1041 | } |
| 1042 | |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 1043 | if (likely(size <= arena_maxclass)) { |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 1044 | return (arena_ralloc(tsd, arena, ptr, oldsize, size, 0, |
| 1045 | alignment, zero, try_tcache_alloc, try_tcache_dalloc)); |
Jason Evans | 8e3c3c6 | 2010-09-17 15:46:18 -0700 | [diff] [blame] | 1046 | } else { |
Jason Evans | 5460aa6 | 2014-09-22 21:09:23 -0700 | [diff] [blame] | 1047 | return (huge_ralloc(tsd, arena, ptr, oldsize, size, 0, |
Jason Evans | fc0b3b7 | 2014-10-09 17:54:06 -0700 | [diff] [blame] | 1048 | alignment, zero, try_tcache_alloc, try_tcache_dalloc)); |
Jason Evans | 8e3c3c6 | 2010-09-17 15:46:18 -0700 | [diff] [blame] | 1049 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1050 | } |
Jason Evans | cd9a134 | 2012-03-21 18:33:03 -0700 | [diff] [blame] | 1051 | |
Jason Evans | 88393cb | 2013-01-22 08:45:43 -0800 | [diff] [blame] | 1052 | JEMALLOC_ALWAYS_INLINE void * |
Daniel Micay | d33f834 | 2014-10-24 13:18:57 -0400 | [diff] [blame] | 1053 | iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment, |
| 1054 | bool zero) |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 1055 | { |
| 1056 | |
Daniel Micay | d33f834 | 2014-10-24 13:18:57 -0400 | [diff] [blame] | 1057 | return (iralloct(tsd, ptr, oldsize, size, alignment, zero, true, true, |
| 1058 | NULL)); |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1059 | } |
| 1060 | |
| 1061 | JEMALLOC_ALWAYS_INLINE bool |
Daniel Micay | d33f834 | 2014-10-24 13:18:57 -0400 | [diff] [blame] | 1062 | ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra, size_t alignment, |
| 1063 | bool zero) |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1064 | { |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1065 | |
| 1066 | assert(ptr != NULL); |
| 1067 | assert(size != 0); |
| 1068 | |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1069 | if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1)) |
| 1070 | != 0) { |
| 1071 | /* Existing object alignment is inadequate. */ |
| 1072 | return (true); |
| 1073 | } |
| 1074 | |
Daniel Micay | 809b0ac | 2014-10-23 10:30:52 -0400 | [diff] [blame] | 1075 | if (likely(size <= arena_maxclass)) |
Jason Evans | b2c3166 | 2014-01-12 15:05:44 -0800 | [diff] [blame] | 1076 | return (arena_ralloc_no_move(ptr, oldsize, size, extra, zero)); |
| 1077 | else |
Daniel Micay | a95018e | 2014-10-04 01:39:32 -0400 | [diff] [blame] | 1078 | return (huge_ralloc_no_move(ptr, oldsize, size, extra, zero)); |
Jason Evans | 609ae59 | 2012-10-11 13:53:15 -0700 | [diff] [blame] | 1079 | } |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 1080 | #endif |
| 1081 | |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1082 | #include "jemalloc/internal/prof.h" |
| 1083 | |
Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 1084 | #undef JEMALLOC_H_INLINES |
| 1085 | /******************************************************************************/ |
Mike Hommey | 45f208e | 2012-04-16 16:30:26 +0200 | [diff] [blame] | 1086 | #endif /* JEMALLOC_INTERNAL_H */ |