#define	JEMALLOC_CHUNK_C_
#include "jemalloc/internal/jemalloc_internal.h"

/******************************************************************************/
/* Data. */

size_t		opt_lg_chunk = LG_CHUNK_DEFAULT;
bool		opt_overcommit = true;

malloc_mutex_t	chunks_mtx;
chunk_stats_t	stats_chunks;

rtree_t		*chunks_rtree;

/* Various chunk-related settings. */
size_t		chunksize;
size_t		chunksize_mask; /* (chunksize - 1). */
size_t		chunk_npages;
size_t		map_bias;
size_t		arena_maxclass; /* Max size class for arenas. */

/******************************************************************************/

/*
 * If the caller specifies (*zero == false), it is still possible to receive
 * zeroed memory, in which case *zero is toggled to true.  arena_chunk_alloc()
 * takes advantage of this to avoid demanding zeroed chunks, but taking
 * advantage of them if they are returned.
 */
void *
chunk_alloc(size_t size, bool base, bool *zero)
{
	void *ret;

	assert(size != 0);
	assert((size & chunksize_mask) == 0);

	if (config_swap && swap_enabled) {
		ret = chunk_alloc_swap(size, zero);
		if (ret != NULL)
			goto RETURN;
	}

	if (swap_enabled == false || opt_overcommit) {
		if (config_dss) {
			ret = chunk_alloc_dss(size, zero);
			if (ret != NULL)
				goto RETURN;
		}
		ret = chunk_alloc_mmap(size);
		if (ret != NULL) {
			*zero = true;
			goto RETURN;
		}
	}

	/* All strategies for allocation failed. */
	ret = NULL;
RETURN:
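	/*
	 * When ivsalloc is configured, record the chunk in chunks_rtree so
	 * that pointer queries (e.g. ivsalloc()) can determine whether an
	 * arbitrary pointer falls within an allocated chunk.  Base
	 * allocations are deliberately not recorded.
	 */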
	if (config_ivsalloc && base == false && ret != NULL) {
		if (rtree_set(chunks_rtree, (uintptr_t)ret, ret)) {
			chunk_dealloc(ret, size, true);
			return (NULL);
		}
	}
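	/*
	 * Update chunk statistics under chunks_mtx, and note whether the
	 * chunk high-water mark increased; if so, and gdump profiling is
	 * enabled, trigger a heap profile dump.
	 */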
	if ((config_stats || config_prof) && ret != NULL) {
		bool gdump;
		malloc_mutex_lock(&chunks_mtx);
		if (config_stats)
			stats_chunks.nchunks += (size / chunksize);
		stats_chunks.curchunks += (size / chunksize);
		if (stats_chunks.curchunks > stats_chunks.highchunks) {
			stats_chunks.highchunks = stats_chunks.curchunks;
			if (config_prof)
				gdump = true;
		} else if (config_prof)
			gdump = false;
		malloc_mutex_unlock(&chunks_mtx);
		if (config_prof && opt_prof && opt_prof_gdump && gdump)
			prof_gdump();
	}

	assert(CHUNK_ADDR2BASE(ret) == ret);
	return (ret);
}

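/*
 * Deallocate a chunk previously returned by chunk_alloc().  The chunk is
 * removed from chunks_rtree and chunk statistics are updated; the backing
 * memory is only returned to the system when unmap is true.
 */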
void
chunk_dealloc(void *chunk, size_t size, bool unmap)
{

	assert(chunk != NULL);
	assert(CHUNK_ADDR2BASE(chunk) == chunk);
	assert(size != 0);
	assert((size & chunksize_mask) == 0);

	if (config_ivsalloc)
		rtree_set(chunks_rtree, (uintptr_t)chunk, NULL);
	if (config_stats || config_prof) {
		malloc_mutex_lock(&chunks_mtx);
		stats_chunks.curchunks -= (size / chunksize);
		malloc_mutex_unlock(&chunks_mtx);
	}

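	/*
	 * Try to return the chunk to the swap file first, then to the DSS;
	 * fall back to unmapping mmap'ed memory if neither backend claims it.
	 */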
	if (unmap) {
		if (config_swap && swap_enabled && chunk_dealloc_swap(chunk,
		    size) == false)
			return;
		if (config_dss && chunk_dealloc_dss(chunk, size) == false)
			return;
		chunk_dealloc_mmap(chunk, size);
	}
}

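/*
 * Initialize chunk-related state: derive chunksize and chunk_npages from
 * opt_lg_chunk, and bootstrap the swap, mmap, DSS, and rtree subsystems as
 * configured.  Returns true on error.
 */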
bool
chunk_boot(void)
{

	/* Set variables according to the value of opt_lg_chunk. */
	chunksize = (ZU(1) << opt_lg_chunk);
	assert(chunksize >= PAGE_SIZE);
	chunksize_mask = chunksize - 1;
	chunk_npages = (chunksize >> PAGE_SHIFT);

	if (config_stats || config_prof) {
		if (malloc_mutex_init(&chunks_mtx))
			return (true);
		memset(&stats_chunks, 0, sizeof(chunk_stats_t));
	}
	if (config_swap && chunk_swap_boot())
		return (true);
	if (chunk_mmap_boot())
		return (true);
	if (config_dss && chunk_dss_boot())
		return (true);
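	/*
	 * The rtree is keyed by chunk base address, so it needs one bit per
	 * address bit above the chunk size (pointer bits minus lg_chunk).
	 */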
	if (config_ivsalloc) {
		chunks_rtree = rtree_new((ZU(1) << (LG_SIZEOF_PTR+3)) -
		    opt_lg_chunk);
		if (chunks_rtree == NULL)
			return (true);
	}

	return (false);
}