#ifndef JEMALLOC_INTERNAL_H
#define JEMALLOC_INTERNAL_H

#include "jemalloc_internal_defs.h"
#include "jemalloc/internal/jemalloc_internal_decls.h"

#ifdef JEMALLOC_UTRACE
#include <sys/ktrace.h>
#endif

#define JEMALLOC_NO_DEMANGLE
#ifdef JEMALLOC_JET
#  define JEMALLOC_N(n) jet_##n
#  include "jemalloc/internal/public_namespace.h"
#  define JEMALLOC_NO_RENAME
#  include "../jemalloc@install_suffix@.h"
#  undef JEMALLOC_NO_RENAME
#else
#  define JEMALLOC_N(n) @private_namespace@##n
#  include "../jemalloc@install_suffix@.h"
#endif
#include "jemalloc/internal/private_namespace.h"

static const bool config_debug =
#ifdef JEMALLOC_DEBUG
    true
#else
    false
#endif
    ;
static const bool have_dss =
#ifdef JEMALLOC_DSS
    true
#else
    false
#endif
    ;
static const bool config_fill =
#ifdef JEMALLOC_FILL
    true
#else
    false
#endif
    ;
static const bool config_lazy_lock =
#ifdef JEMALLOC_LAZY_LOCK
    true
#else
    false
#endif
    ;
static const bool config_prof =
#ifdef JEMALLOC_PROF
    true
#else
    false
#endif
    ;
static const bool config_prof_libgcc =
#ifdef JEMALLOC_PROF_LIBGCC
    true
#else
    false
#endif
    ;
static const bool config_prof_libunwind =
#ifdef JEMALLOC_PROF_LIBUNWIND
    true
#else
    false
#endif
    ;
static const bool config_munmap =
#ifdef JEMALLOC_MUNMAP
    true
#else
    false
#endif
    ;
static const bool config_stats =
#ifdef JEMALLOC_STATS
    true
#else
    false
#endif
    ;
static const bool config_tcache =
#ifdef JEMALLOC_TCACHE
    true
#else
    false
#endif
    ;
static const bool config_tls =
#ifdef JEMALLOC_TLS
    true
#else
    false
#endif
    ;
static const bool config_utrace =
#ifdef JEMALLOC_UTRACE
    true
#else
    false
#endif
    ;
static const bool config_valgrind =
#ifdef JEMALLOC_VALGRIND
    true
#else
    false
#endif
    ;
static const bool config_xmalloc =
#ifdef JEMALLOC_XMALLOC
    true
#else
    false
#endif
    ;
static const bool config_ivsalloc =
#ifdef JEMALLOC_IVSALLOC
    true
#else
    false
#endif
    ;

#ifdef JEMALLOC_C11ATOMICS
#include <stdatomic.h>
#endif

#ifdef JEMALLOC_ATOMIC9
#include <machine/atomic.h>
#endif

#if (defined(JEMALLOC_OSATOMIC) || defined(JEMALLOC_OSSPIN))
#include <libkern/OSAtomic.h>
#endif

#ifdef JEMALLOC_ZONE
#include <mach/mach_error.h>
#include <mach/mach_init.h>
#include <mach/vm_map.h>
#include <malloc/malloc.h>
#endif

#define RB_COMPACT
#include "jemalloc/internal/rb.h"
#include "jemalloc/internal/qr.h"
#include "jemalloc/internal/ql.h"

/*
 * jemalloc can conceptually be broken into components (arena, tcache, etc.),
 * but there are circular dependencies that cannot be broken without
 * substantial performance degradation.  In order to reduce the effect on
 * visual code flow, read the header files in multiple passes, with one of the
 * following cpp variables defined during each pass:
 *
 *   JEMALLOC_H_TYPES   : Preprocessor-defined constants and pseudo-opaque data
 *                        types.
 *   JEMALLOC_H_STRUCTS : Data structures.
 *   JEMALLOC_H_EXTERNS : Extern data declarations and function prototypes.
 *   JEMALLOC_H_INLINES : Inline functions.
 */
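/*
 * Illustrative sketch only (not a verbatim excerpt from any component
 * header): each component header wraps its contents in the pass guards named
 * above, roughly as
 *
 *   #ifdef JEMALLOC_H_TYPES
 *   typedef struct example_s example_t;    (hypothetical component)
 *   #endif
 *   #ifdef JEMALLOC_H_STRUCTS
 *   struct example_s { ... };
 *   #endif
 *   #ifdef JEMALLOC_H_EXTERNS
 *   ... prototypes ...
 *   #endif
 *   #ifdef JEMALLOC_H_INLINES
 *   ... inline functions ...
 *   #endif
 *
 * so that this file can safely include every header once per pass.
 */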
/******************************************************************************/
#define JEMALLOC_H_TYPES

#include "jemalloc/internal/jemalloc_internal_macros.h"

/* Size class index type. */
typedef unsigned index_t;

#define MALLOCX_ARENA_MASK ((int)~0xff)
#define MALLOCX_LG_ALIGN_MASK ((int)0x3f)
/* Use MALLOCX_ALIGN_GET() if alignment may not be specified in flags. */
#define MALLOCX_ALIGN_GET_SPECIFIED(flags) \
    (ZU(1) << (flags & MALLOCX_LG_ALIGN_MASK))
#define MALLOCX_ALIGN_GET(flags) \
    (MALLOCX_ALIGN_GET_SPECIFIED(flags) & (SIZE_T_MAX-1))
#define MALLOCX_ZERO_GET(flags) \
    ((bool)(flags & MALLOCX_ZERO))
#define MALLOCX_ARENA_GET(flags) \
    (((unsigned)(flags >> 8)) - 1)
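/*
 * Worked example (hedged; derived only from the masks above, not from the
 * public MALLOCX_*() flag constructors): for flags == 0x304,
 *   MALLOCX_ALIGN_GET_SPECIFIED(0x304) == ZU(1) << 0x04 == 16
 *   MALLOCX_ARENA_GET(0x304)           == (0x304 >> 8) - 1 == 2
 * and for flags == 0 (no alignment specified),
 *   MALLOCX_ALIGN_GET(0) == (ZU(1) << 0) & (SIZE_T_MAX-1) == 0.
 */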

/* Smallest size class to support. */
#define TINY_MIN (1U << LG_TINY_MIN)

/*
 * Minimum allocation alignment is 2^LG_QUANTUM bytes (ignoring tiny size
 * classes).
 */
#ifndef LG_QUANTUM
#  if (defined(__i386__) || defined(_M_IX86))
#    define LG_QUANTUM 4
#  endif
#  ifdef __ia64__
#    define LG_QUANTUM 4
#  endif
#  ifdef __alpha__
#    define LG_QUANTUM 4
#  endif
#  ifdef __sparc64__
#    define LG_QUANTUM 4
#  endif
#  if (defined(__amd64__) || defined(__x86_64__) || defined(_M_X64))
#    define LG_QUANTUM 4
#  endif
#  ifdef __arm__
#    define LG_QUANTUM 3
#  endif
#  ifdef __aarch64__
#    define LG_QUANTUM 4
#  endif
#  ifdef __hppa__
#    define LG_QUANTUM 4
#  endif
#  ifdef __mips__
#    define LG_QUANTUM 3
#  endif
#  ifdef __or1k__
#    define LG_QUANTUM 3
#  endif
#  ifdef __powerpc__
#    define LG_QUANTUM 4
#  endif
#  ifdef __s390__
#    define LG_QUANTUM 4
#  endif
#  ifdef __SH4__
#    define LG_QUANTUM 4
#  endif
#  ifdef __tile__
#    define LG_QUANTUM 4
#  endif
#  ifdef __le32__
#    define LG_QUANTUM 4
#  endif
#  ifndef LG_QUANTUM
#    error "Unknown minimum alignment for architecture; specify via "
         "--with-lg-quantum"
#  endif
#endif

#define QUANTUM ((size_t)(1U << LG_QUANTUM))
#define QUANTUM_MASK (QUANTUM - 1)

/* Return the smallest quantum multiple that is >= a. */
#define QUANTUM_CEILING(a) \
    (((a) + QUANTUM_MASK) & ~QUANTUM_MASK)
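/*
 * Worked example (assuming LG_QUANTUM == 4, i.e. QUANTUM == 16):
 *   QUANTUM_CEILING(1)  == 16
 *   QUANTUM_CEILING(16) == 16
 *   QUANTUM_CEILING(17) == 32
 * LONG_CEILING(), PTR_CEILING(), CACHELINE_CEILING() and PAGE_CEILING() below
 * follow the same mask-and-round pattern.
 */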

#define LONG ((size_t)(1U << LG_SIZEOF_LONG))
#define LONG_MASK (LONG - 1)

/* Return the smallest long multiple that is >= a. */
#define LONG_CEILING(a) \
    (((a) + LONG_MASK) & ~LONG_MASK)

#define SIZEOF_PTR (1U << LG_SIZEOF_PTR)
#define PTR_MASK (SIZEOF_PTR - 1)

/* Return the smallest (void *) multiple that is >= a. */
#define PTR_CEILING(a) \
    (((a) + PTR_MASK) & ~PTR_MASK)

/*
 * Maximum size of L1 cache line.  This is used to avoid cache line aliasing.
 * In addition, this controls the spacing of cacheline-spaced size classes.
 *
 * CACHELINE cannot be based on LG_CACHELINE because __declspec(align()) can
 * only handle raw constants.
 */
#define LG_CACHELINE 6
#define CACHELINE 64
#define CACHELINE_MASK (CACHELINE - 1)

/* Return the smallest cacheline multiple that is >= s. */
#define CACHELINE_CEILING(s) \
    (((s) + CACHELINE_MASK) & ~CACHELINE_MASK)

/* Page size.  LG_PAGE is determined by the configure script. */
#ifdef PAGE_MASK
#  undef PAGE_MASK
#endif
#define PAGE ((size_t)(1U << LG_PAGE))
#define PAGE_MASK ((size_t)(PAGE - 1))

/* Return the smallest pagesize multiple that is >= s. */
#define PAGE_CEILING(s) \
    (((s) + PAGE_MASK) & ~PAGE_MASK)

/* Return the nearest aligned address at or below a. */
#define ALIGNMENT_ADDR2BASE(a, alignment) \
    ((void *)((uintptr_t)(a) & (-(alignment))))

/* Return the offset between a and the nearest aligned address at or below a. */
#define ALIGNMENT_ADDR2OFFSET(a, alignment) \
    ((size_t)((uintptr_t)(a) & (alignment - 1)))

/* Return the smallest alignment multiple that is >= s. */
#define ALIGNMENT_CEILING(s, alignment) \
    (((s) + (alignment - 1)) & (-(alignment)))
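/*
 * Worked example (hypothetical address, alignment == 64):
 *   ALIGNMENT_ADDR2BASE(0x1234, 64)   == 0x1200
 *   ALIGNMENT_ADDR2OFFSET(0x1234, 64) == 0x34
 *   ALIGNMENT_CEILING(0x1234, 64)     == 0x1240
 */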

/* Declare a variable length array. */
#if __STDC_VERSION__ < 199901L
#  ifdef _MSC_VER
#    include <malloc.h>
#    define alloca _alloca
#  else
#    ifdef JEMALLOC_HAS_ALLOCA_H
#      include <alloca.h>
#    else
#      include <stdlib.h>
#    endif
#  endif
#  define VARIABLE_ARRAY(type, name, count) \
    type *name = alloca(sizeof(type) * (count))
#else
#  define VARIABLE_ARRAY(type, name, count) type name[(count)]
#endif
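/*
 * Usage sketch (hypothetical caller): VARIABLE_ARRAY(size_t, sizes, n)
 * expands to "size_t sizes[(n)]" under C99 and later, and to
 * "size_t *sizes = alloca(sizeof(size_t) * (n))" otherwise; either way the
 * storage lives on the stack and must not outlive the enclosing function.
 */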

#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/tsd.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/arena.h"
#include "jemalloc/internal/bitmap.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"
#include "jemalloc/internal/rtree.h"
#include "jemalloc/internal/tcache.h"
#include "jemalloc/internal/hash.h"
#include "jemalloc/internal/quarantine.h"
#include "jemalloc/internal/prof.h"

#undef JEMALLOC_H_TYPES
/******************************************************************************/
#define JEMALLOC_H_STRUCTS

#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/bitmap.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/arena.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"
#include "jemalloc/internal/rtree.h"
#include "jemalloc/internal/tcache.h"
#include "jemalloc/internal/hash.h"
#include "jemalloc/internal/quarantine.h"
#include "jemalloc/internal/prof.h"

#include "jemalloc/internal/tsd.h"

#undef JEMALLOC_H_STRUCTS
/******************************************************************************/
#define JEMALLOC_H_EXTERNS

extern bool opt_abort;
extern bool opt_junk;
extern size_t opt_quarantine;
extern bool opt_redzone;
extern bool opt_utrace;
extern bool opt_xmalloc;
extern bool opt_zero;
extern size_t opt_narenas;

extern bool in_valgrind;

/* Number of CPUs. */
extern unsigned ncpus;

/*
 * index2size_tab encodes the same information as could be computed (at
 * unacceptable cost in some code paths) by index2size_compute().
 */
extern size_t const index2size_tab[NSIZES];
/*
 * size2index_tab is a compact lookup table that rounds request sizes up to
 * size classes.  In order to reduce cache footprint, the table is compressed,
 * and all accesses are via size2index().
 */
extern uint8_t const size2index_tab[];

arena_t *a0get(void);
void *a0malloc(size_t size);
void *a0calloc(size_t num, size_t size);
void a0free(void *ptr);
arena_t *arenas_extend(unsigned ind);
arena_t *arena_init(unsigned ind);
unsigned narenas_total_get(void);
arena_t *arena_get_hard(tsd_t *tsd, unsigned ind, bool init_if_missing);
arena_t *arena_choose_hard(tsd_t *tsd);
void arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind);
unsigned arena_nbound(unsigned ind);
void thread_allocated_cleanup(tsd_t *tsd);
void thread_deallocated_cleanup(tsd_t *tsd);
void arena_cleanup(tsd_t *tsd);
void arenas_cache_cleanup(tsd_t *tsd);
void narenas_cache_cleanup(tsd_t *tsd);
void arenas_cache_bypass_cleanup(tsd_t *tsd);
void jemalloc_prefork(void);
void jemalloc_postfork_parent(void);
void jemalloc_postfork_child(void);

#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/bitmap.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/arena.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"
#include "jemalloc/internal/rtree.h"
#include "jemalloc/internal/tcache.h"
#include "jemalloc/internal/hash.h"
#include "jemalloc/internal/quarantine.h"
#include "jemalloc/internal/prof.h"
#include "jemalloc/internal/tsd.h"

#undef JEMALLOC_H_EXTERNS
/******************************************************************************/
#define JEMALLOC_H_INLINES

#include "jemalloc/internal/valgrind.h"
#include "jemalloc/internal/util.h"
#include "jemalloc/internal/atomic.h"
#include "jemalloc/internal/prng.h"
#include "jemalloc/internal/ckh.h"
#include "jemalloc/internal/size_classes.h"
#include "jemalloc/internal/stats.h"
#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/mutex.h"
#include "jemalloc/internal/tsd.h"
#include "jemalloc/internal/mb.h"
#include "jemalloc/internal/extent.h"
#include "jemalloc/internal/base.h"
#include "jemalloc/internal/chunk.h"
#include "jemalloc/internal/huge.h"

#ifndef JEMALLOC_ENABLE_INLINE
index_t size2index_compute(size_t size);
index_t size2index_lookup(size_t size);
index_t size2index(size_t size);
size_t index2size_compute(index_t index);
size_t index2size_lookup(index_t index);
size_t index2size(index_t index);
size_t s2u_compute(size_t size);
size_t s2u_lookup(size_t size);
size_t s2u(size_t size);
size_t sa2u(size_t size, size_t alignment);
arena_t *arena_choose(tsd_t *tsd, arena_t *arena);
arena_t *arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing,
    bool refresh_if_missing);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
JEMALLOC_INLINE index_t
size2index_compute(size_t size)
{

#if (NTBINS != 0)
    if (size <= (ZU(1) << LG_TINY_MAXCLASS)) {
        size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
        size_t lg_ceil = lg_floor(pow2_ceil(size));
        return (lg_ceil < lg_tmin ? 0 : lg_ceil - lg_tmin);
    } else
#endif
    {
        size_t x = lg_floor((size<<1)-1);
        size_t shift = (x < LG_SIZE_CLASS_GROUP + LG_QUANTUM) ? 0 :
            x - (LG_SIZE_CLASS_GROUP + LG_QUANTUM);
        size_t grp = shift << LG_SIZE_CLASS_GROUP;

        size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_QUANTUM + 1)
            ? LG_QUANTUM : x - LG_SIZE_CLASS_GROUP - 1;

        size_t delta_inverse_mask = ZI(-1) << lg_delta;
        size_t mod = ((((size-1) & delta_inverse_mask) >> lg_delta)) &
            ((ZU(1) << LG_SIZE_CLASS_GROUP) - 1);

        size_t index = NTBINS + grp + mod;
        return (index);
    }
}
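/*
 * Worked example (hedged; assumes LG_QUANTUM == 4, LG_SIZE_CLASS_GROUP == 2,
 * NTBINS == 1, as in a common x86-64 configuration): size2index_compute(100)
 * gives x == 7, shift == 1, grp == 4, lg_delta == 4, mod == 2, so the result
 * is 1 + 4 + 2 == 7, the index of the 112-byte size class.
 */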

JEMALLOC_ALWAYS_INLINE index_t
size2index_lookup(size_t size)
{

    assert(size <= LOOKUP_MAXCLASS);
    {
        size_t ret = ((size_t)(size2index_tab[(size-1) >>
            LG_TINY_MIN]));
        assert(ret == size2index_compute(size));
        return (ret);
    }
}

JEMALLOC_ALWAYS_INLINE index_t
size2index(size_t size)
{

    assert(size > 0);
    if (likely(size <= LOOKUP_MAXCLASS))
        return (size2index_lookup(size));
    else
        return (size2index_compute(size));
}

JEMALLOC_INLINE size_t
index2size_compute(index_t index)
{

#if (NTBINS > 0)
    if (index < NTBINS)
        return (ZU(1) << (LG_TINY_MAXCLASS - NTBINS + 1 + index));
    else
#endif
    {
        size_t reduced_index = index - NTBINS;
        size_t grp = reduced_index >> LG_SIZE_CLASS_GROUP;
        size_t mod = reduced_index & ((ZU(1) << LG_SIZE_CLASS_GROUP) -
            1);

        size_t grp_size_mask = ~((!!grp)-1);
        size_t grp_size = ((ZU(1) << (LG_QUANTUM +
            (LG_SIZE_CLASS_GROUP-1))) << grp) & grp_size_mask;

        size_t shift = (grp == 0) ? 1 : grp;
        size_t lg_delta = shift + (LG_QUANTUM-1);
        size_t mod_size = (mod+1) << lg_delta;

        size_t usize = grp_size + mod_size;
        return (usize);
    }
}
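/*
 * Worked example (same hedged assumptions as above: LG_QUANTUM == 4,
 * LG_SIZE_CLASS_GROUP == 2, NTBINS == 1): index2size_compute(7) gives
 * reduced_index == 6, grp == 1, mod == 2, grp_size == 64, lg_delta == 4,
 * mod_size == 48, so the usable size is 64 + 48 == 112 bytes, the inverse of
 * the size2index_compute() example above.
 */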

JEMALLOC_ALWAYS_INLINE size_t
index2size_lookup(index_t index)
{
    size_t ret = (size_t)index2size_tab[index];
    assert(ret == index2size_compute(index));
    return (ret);
}

JEMALLOC_ALWAYS_INLINE size_t
index2size(index_t index)
{

    assert(index < NSIZES);
    return (index2size_lookup(index));
}

JEMALLOC_ALWAYS_INLINE size_t
s2u_compute(size_t size)
{

#if (NTBINS > 0)
    if (size <= (ZU(1) << LG_TINY_MAXCLASS)) {
        size_t lg_tmin = LG_TINY_MAXCLASS - NTBINS + 1;
        size_t lg_ceil = lg_floor(pow2_ceil(size));
        return (lg_ceil < lg_tmin ? (ZU(1) << lg_tmin) :
            (ZU(1) << lg_ceil));
    } else
#endif
    {
        size_t x = lg_floor((size<<1)-1);
        size_t lg_delta = (x < LG_SIZE_CLASS_GROUP + LG_QUANTUM + 1)
            ? LG_QUANTUM : x - LG_SIZE_CLASS_GROUP - 1;
        size_t delta = ZU(1) << lg_delta;
        size_t delta_mask = delta - 1;
        size_t usize = (size + delta_mask) & ~delta_mask;
        return (usize);
    }
}

JEMALLOC_ALWAYS_INLINE size_t
s2u_lookup(size_t size)
{
    size_t ret = index2size_lookup(size2index_lookup(size));

    assert(ret == s2u_compute(size));
    return (ret);
}

/*
 * Compute usable size that would result from allocating an object with the
 * specified size.
 */
JEMALLOC_ALWAYS_INLINE size_t
s2u(size_t size)
{

    assert(size > 0);
    if (likely(size <= LOOKUP_MAXCLASS))
        return (s2u_lookup(size));
    else
        return (s2u_compute(size));
}

/*
 * Compute usable size that would result from allocating an object with the
 * specified size and alignment.
 */
JEMALLOC_ALWAYS_INLINE size_t
sa2u(size_t size, size_t alignment)
{
    size_t usize;

    assert(alignment != 0 && ((alignment - 1) & alignment) == 0);

    /* Try for a small size class. */
    if (size <= SMALL_MAXCLASS && alignment < PAGE) {
        /*
         * Round size up to the nearest multiple of alignment.
         *
         * This done, we can take advantage of the fact that for each
         * small size class, every object is aligned at the smallest
         * power of two that is non-zero in the base two representation
         * of the size.  For example:
         *
         *   Size |   Base 2 | Minimum alignment
         *   -----+----------+------------------
         *     96 |  1100000 |                32
         *    144 | 10100000 |                32
         *    192 | 11000000 |                64
         */
        usize = s2u(ALIGNMENT_CEILING(size, alignment));
        if (usize < LARGE_MINCLASS)
            return (usize);
    }

    /* Try for a large size class. */
    if (likely(size <= arena_maxclass) && likely(alignment < chunksize)) {
        /*
         * We can't achieve subpage alignment, so round up alignment
         * to the minimum that can actually be supported.
         */
        alignment = PAGE_CEILING(alignment);

        /* Make sure result is a large size class. */
        usize = (size <= LARGE_MINCLASS) ? LARGE_MINCLASS : s2u(size);

        /*
         * Calculate the size of the over-size run that arena_palloc()
         * would need to allocate in order to guarantee the alignment.
         */
        if (usize + alignment - PAGE <= arena_maxrun)
            return (usize);
    }

    /* Huge size class.  Beware of size_t overflow. */

    /*
     * We can't achieve subchunk alignment, so round up alignment to the
     * minimum that can actually be supported.
     */
    alignment = CHUNK_CEILING(alignment);
    if (alignment == 0) {
        /* size_t overflow. */
        return (0);
    }

    /* Make sure result is a huge size class. */
    if (size <= chunksize)
        usize = chunksize;
    else {
        usize = s2u(size);
        if (usize < size) {
            /* size_t overflow. */
            return (0);
        }
    }

    /*
     * Calculate the multi-chunk mapping that huge_palloc() would need in
     * order to guarantee the alignment.
     */
    if (usize + alignment - PAGE < usize) {
        /* size_t overflow. */
        return (0);
    }
    return (usize);
}

/* Choose an arena based on a per-thread value. */
JEMALLOC_INLINE arena_t *
arena_choose(tsd_t *tsd, arena_t *arena)
{
    arena_t *ret;

    if (arena != NULL)
        return (arena);

    if (unlikely((ret = tsd_arena_get(tsd)) == NULL))
        ret = arena_choose_hard(tsd);

    return (ret);
}

JEMALLOC_INLINE arena_t *
arena_get(tsd_t *tsd, unsigned ind, bool init_if_missing,
    bool refresh_if_missing)
{
    arena_t *arena;
    arena_t **arenas_cache = tsd_arenas_cache_get(tsd);

    /* init_if_missing requires refresh_if_missing. */
    assert(!init_if_missing || refresh_if_missing);

    if (unlikely(arenas_cache == NULL)) {
        /* arenas_cache hasn't been initialized yet. */
        return (arena_get_hard(tsd, ind, init_if_missing));
    }
    if (unlikely(ind >= tsd_narenas_cache_get(tsd))) {
        /*
         * ind is invalid, cache is old (too small), or arena to be
         * initialized.
         */
        return (refresh_if_missing ? arena_get_hard(tsd, ind,
            init_if_missing) : NULL);
    }
    arena = arenas_cache[ind];
    if (likely(arena != NULL) || !refresh_if_missing)
        return (arena);
    if (init_if_missing)
        return (arena_get_hard(tsd, ind, init_if_missing));
    else
        return (NULL);
}
#endif

#include "jemalloc/internal/bitmap.h"
#include "jemalloc/internal/rtree.h"
/*
 * Include portions of arena.h interleaved with tcache.h in order to resolve
 * circular dependencies.
 */
#define JEMALLOC_ARENA_INLINE_A
#include "jemalloc/internal/arena.h"
#undef JEMALLOC_ARENA_INLINE_A
#include "jemalloc/internal/tcache.h"
#define JEMALLOC_ARENA_INLINE_B
#include "jemalloc/internal/arena.h"
#undef JEMALLOC_ARENA_INLINE_B
#include "jemalloc/internal/hash.h"
#include "jemalloc/internal/quarantine.h"

#ifndef JEMALLOC_ENABLE_INLINE
void *imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena);
void *imalloc(tsd_t *tsd, size_t size);
void *icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena);
void *icalloc(tsd_t *tsd, size_t size);
void *ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero,
    bool try_tcache, arena_t *arena);
void *ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero);
size_t isalloc(const void *ptr, bool demote);
size_t ivsalloc(const void *ptr, bool demote);
size_t u2rz(size_t usize);
size_t p2rz(const void *ptr);
void idalloct(tsd_t *tsd, void *ptr, bool try_tcache);
void isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
void idalloc(tsd_t *tsd, void *ptr);
void iqalloc(tsd_t *tsd, void *ptr, bool try_tcache);
void isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache);
void *iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
    size_t extra, size_t alignment, bool zero, bool try_tcache_alloc,
    bool try_tcache_dalloc, arena_t *arena);
void *iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
    size_t alignment, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
    arena_t *arena);
void *iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
    size_t alignment, bool zero);
bool ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra,
    size_t alignment, bool zero);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_C_))
JEMALLOC_ALWAYS_INLINE void *
imalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena)
{

    assert(size != 0);

    if (likely(size <= arena_maxclass))
        return (arena_malloc(tsd, arena, size, false, try_tcache));
    else
        return (huge_malloc(tsd, arena, size, false, try_tcache));
}

JEMALLOC_ALWAYS_INLINE void *
imalloc(tsd_t *tsd, size_t size)
{

    return (imalloct(tsd, size, true, NULL));
}

JEMALLOC_ALWAYS_INLINE void *
icalloct(tsd_t *tsd, size_t size, bool try_tcache, arena_t *arena)
{

    if (likely(size <= arena_maxclass))
        return (arena_malloc(tsd, arena, size, true, try_tcache));
    else
        return (huge_malloc(tsd, arena, size, true, try_tcache));
}

JEMALLOC_ALWAYS_INLINE void *
icalloc(tsd_t *tsd, size_t size)
{

    return (icalloct(tsd, size, true, NULL));
}

JEMALLOC_ALWAYS_INLINE void *
ipalloct(tsd_t *tsd, size_t usize, size_t alignment, bool zero, bool try_tcache,
    arena_t *arena)
{
    void *ret;

    assert(usize != 0);
    assert(usize == sa2u(usize, alignment));

    if (usize <= SMALL_MAXCLASS && alignment < PAGE)
        ret = arena_malloc(tsd, arena, usize, zero, try_tcache);
    else {
        if (likely(usize <= arena_maxclass)) {
            arena = arena_choose(tsd, arena);
            if (unlikely(arena == NULL))
                return (NULL);
            ret = arena_palloc(arena, usize, alignment, zero);
        } else if (likely(alignment <= chunksize))
            ret = huge_malloc(tsd, arena, usize, zero, try_tcache);
        else {
            ret = huge_palloc(tsd, arena, usize, alignment, zero,
                try_tcache);
        }
    }

    assert(ALIGNMENT_ADDR2BASE(ret, alignment) == ret);
    return (ret);
}

JEMALLOC_ALWAYS_INLINE void *
ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero)
{

    return (ipalloct(tsd, usize, alignment, zero, true, NULL));
}

/*
 * Typical usage:
 *   void *ptr = [...]
 *   size_t sz = isalloc(ptr, config_prof);
 */
JEMALLOC_ALWAYS_INLINE size_t
isalloc(const void *ptr, bool demote)
{
    size_t ret;
    arena_chunk_t *chunk;

    assert(ptr != NULL);
    /* Demotion only makes sense if config_prof is true. */
    assert(config_prof || !demote);

    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
    if (likely(chunk != ptr))
        ret = arena_salloc(ptr, demote);
    else
        ret = huge_salloc(ptr);

    return (ret);
}

JEMALLOC_ALWAYS_INLINE size_t
ivsalloc(const void *ptr, bool demote)
{

    /* Return 0 if ptr is not within a chunk managed by jemalloc. */
    if (rtree_get(chunks_rtree, (uintptr_t)CHUNK_ADDR2BASE(ptr)) == 0)
        return (0);

    return (isalloc(ptr, demote));
}

JEMALLOC_INLINE size_t
u2rz(size_t usize)
{
    size_t ret;

    if (usize <= SMALL_MAXCLASS) {
        index_t binind = size2index(usize);
        ret = arena_bin_info[binind].redzone_size;
    } else
        ret = 0;

    return (ret);
}

JEMALLOC_INLINE size_t
p2rz(const void *ptr)
{
    size_t usize = isalloc(ptr, false);

    return (u2rz(usize));
}

JEMALLOC_ALWAYS_INLINE void
idalloct(tsd_t *tsd, void *ptr, bool try_tcache)
{
    arena_chunk_t *chunk;

    assert(ptr != NULL);

    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
    if (likely(chunk != ptr))
        arena_dalloc(tsd, chunk, ptr, try_tcache);
    else
        huge_dalloc(tsd, ptr, try_tcache);
}

JEMALLOC_ALWAYS_INLINE void
isdalloct(tsd_t *tsd, void *ptr, size_t size, bool try_tcache)
{
    arena_chunk_t *chunk;

    assert(ptr != NULL);

    chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
    if (likely(chunk != ptr))
        arena_sdalloc(tsd, chunk, ptr, size, try_tcache);
    else
        huge_dalloc(tsd, ptr, try_tcache);
}

JEMALLOC_ALWAYS_INLINE void
idalloc(tsd_t *tsd, void *ptr)
{

    idalloct(tsd, ptr, true);
}

JEMALLOC_ALWAYS_INLINE void
iqalloc(tsd_t *tsd, void *ptr, bool try_tcache)
{

    if (config_fill && unlikely(opt_quarantine))
        quarantine(tsd, ptr);
    else
        idalloct(tsd, ptr, try_tcache);
}

JEMALLOC_ALWAYS_INLINE void
isqalloc(tsd_t *tsd, void *ptr, size_t size, bool try_tcache)
{

    if (config_fill && unlikely(opt_quarantine))
        quarantine(tsd, ptr);
    else
        isdalloct(tsd, ptr, size, try_tcache);
}

JEMALLOC_ALWAYS_INLINE void *
iralloct_realign(tsd_t *tsd, void *ptr, size_t oldsize, size_t size,
    size_t extra, size_t alignment, bool zero, bool try_tcache_alloc,
    bool try_tcache_dalloc, arena_t *arena)
{
    void *p;
    size_t usize, copysize;

    usize = sa2u(size + extra, alignment);
    if (usize == 0)
        return (NULL);
    p = ipalloct(tsd, usize, alignment, zero, try_tcache_alloc, arena);
    if (p == NULL) {
        if (extra == 0)
            return (NULL);
        /* Try again, without extra this time. */
        usize = sa2u(size, alignment);
        if (usize == 0)
            return (NULL);
        p = ipalloct(tsd, usize, alignment, zero, try_tcache_alloc,
            arena);
        if (p == NULL)
            return (NULL);
    }
    /*
     * Copy at most size bytes (not size+extra), since the caller has no
     * expectation that the extra bytes will be reliably preserved.
     */
    copysize = (size < oldsize) ? size : oldsize;
    memcpy(p, ptr, copysize);
    isqalloc(tsd, ptr, oldsize, try_tcache_dalloc);
    return (p);
}

JEMALLOC_ALWAYS_INLINE void *
iralloct(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
    bool zero, bool try_tcache_alloc, bool try_tcache_dalloc, arena_t *arena)
{

    assert(ptr != NULL);
    assert(size != 0);

    if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
        != 0) {
        /*
         * Existing object alignment is inadequate; allocate new space
         * and copy.
         */
        return (iralloct_realign(tsd, ptr, oldsize, size, 0, alignment,
            zero, try_tcache_alloc, try_tcache_dalloc, arena));
    }

    if (likely(size <= arena_maxclass)) {
        return (arena_ralloc(tsd, arena, ptr, oldsize, size, 0,
            alignment, zero, try_tcache_alloc, try_tcache_dalloc));
    } else {
        return (huge_ralloc(tsd, arena, ptr, oldsize, size, 0,
            alignment, zero, try_tcache_alloc, try_tcache_dalloc));
    }
}

JEMALLOC_ALWAYS_INLINE void *
iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment,
    bool zero)
{

    return (iralloct(tsd, ptr, oldsize, size, alignment, zero, true, true,
        NULL));
}

JEMALLOC_ALWAYS_INLINE bool
ixalloc(void *ptr, size_t oldsize, size_t size, size_t extra, size_t alignment,
    bool zero)
{

    assert(ptr != NULL);
    assert(size != 0);

    if (alignment != 0 && ((uintptr_t)ptr & ((uintptr_t)alignment-1))
        != 0) {
        /* Existing object alignment is inadequate. */
        return (true);
    }

    if (likely(size <= arena_maxclass))
        return (arena_ralloc_no_move(ptr, oldsize, size, extra, zero));
    else
        return (huge_ralloc_no_move(ptr, oldsize, size, extra, zero));
}
#endif

#include "jemalloc/internal/prof.h"

#undef JEMALLOC_H_INLINES
/******************************************************************************/
#endif /* JEMALLOC_INTERNAL_H */