| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 1 | #define JEMALLOC_CHUNK_C_ |
| Jason Evans | 376b152 | 2010-02-11 14:45:59 -0800 | [diff] [blame] | 2 | #include "jemalloc/internal/jemalloc_internal.h" |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 3 | |
| 4 | /******************************************************************************/ |
| 5 | /* Data. */ |
| 6 | |
/* Chunk size as a power of two; boot-time configurable (see chunk_boot()). */
size_t opt_lg_chunk = LG_CHUNK_DEFAULT;
/*
 * If true, allow DSS/mmap allocation even when a swap file is enabled
 * (chunk_alloc() falls through to the other backends).
 */
bool opt_overcommit = true;

/* Protects stats_chunks; only initialized if config_stats || config_prof. */
malloc_mutex_t chunks_mtx;
/* Cumulative, current, and high-water chunk counts. */
chunk_stats_t stats_chunks;

/*
 * Radix tree mapping chunk addresses, used for pointer validation; only
 * created if config_ivsalloc.
 */
rtree_t *chunks_rtree;

/* Various chunk-related settings. */
size_t chunksize;      /* (ZU(1) << opt_lg_chunk). */
size_t chunksize_mask; /* (chunksize - 1). */
size_t chunk_npages;   /* chunksize >> PAGE_SHIFT. */
size_t map_bias;       /* Set elsewhere; NOTE(review): not written in this file. */
size_t arena_maxclass; /* Max size class for arenas. */
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 22 | /******************************************************************************/ |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 23 | |
| Jason Evans | 41631d0 | 2010-01-24 17:13:07 -0800 | [diff] [blame] | 24 | /* |
| 25 | * If the caller specifies (*zero == false), it is still possible to receive |
| 26 | * zeroed memory, in which case *zero is toggled to true. arena_chunk_alloc() |
| 27 | * takes advantage of this to avoid demanding zeroed chunks, but taking |
| 28 | * advantage of them if they are returned. |
| 29 | */ |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 30 | void * |
| Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 31 | chunk_alloc(size_t size, bool base, bool *zero) |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 32 | { |
| 33 | void *ret; |
| 34 | |
| 35 | assert(size != 0); |
| 36 | assert((size & chunksize_mask) == 0); |
| 37 | |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 38 | if (config_swap && swap_enabled) { |
| Jason Evans | 4201af0 | 2010-01-24 02:53:40 -0800 | [diff] [blame] | 39 | ret = chunk_alloc_swap(size, zero); |
| 40 | if (ret != NULL) |
| 41 | goto RETURN; |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 42 | } |
| 43 | |
| Jason Evans | 4201af0 | 2010-01-24 02:53:40 -0800 | [diff] [blame] | 44 | if (swap_enabled == false || opt_overcommit) { |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 45 | if (config_dss) { |
| 46 | ret = chunk_alloc_dss(size, zero); |
| 47 | if (ret != NULL) |
| 48 | goto RETURN; |
| 49 | } |
| Jason Evans | 4201af0 | 2010-01-24 02:53:40 -0800 | [diff] [blame] | 50 | ret = chunk_alloc_mmap(size); |
| Jason Evans | 41631d0 | 2010-01-24 17:13:07 -0800 | [diff] [blame] | 51 | if (ret != NULL) { |
| 52 | *zero = true; |
| Jason Evans | 4201af0 | 2010-01-24 02:53:40 -0800 | [diff] [blame] | 53 | goto RETURN; |
| Jason Evans | 41631d0 | 2010-01-24 17:13:07 -0800 | [diff] [blame] | 54 | } |
| Jason Evans | 4201af0 | 2010-01-24 02:53:40 -0800 | [diff] [blame] | 55 | } |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 56 | |
| 57 | /* All strategies for allocation failed. */ |
| 58 | ret = NULL; |
| 59 | RETURN: |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 60 | if (config_ivsalloc && base == false && ret != NULL) { |
| Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 61 | if (rtree_set(chunks_rtree, (uintptr_t)ret, ret)) { |
| Jason Evans | 12a4887 | 2011-11-11 14:41:59 -0800 | [diff] [blame] | 62 | chunk_dealloc(ret, size, true); |
| Jason Evans | 2dbecf1 | 2010-09-05 10:35:13 -0700 | [diff] [blame] | 63 | return (NULL); |
| 64 | } |
| 65 | } |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 66 | if ((config_stats || config_prof) && ret != NULL) { |
| Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 67 | bool gdump; |
| Jason Evans | 3c23435 | 2010-01-27 13:10:55 -0800 | [diff] [blame] | 68 | malloc_mutex_lock(&chunks_mtx); |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 69 | if (config_stats) |
| 70 | stats_chunks.nchunks += (size / chunksize); |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 71 | stats_chunks.curchunks += (size / chunksize); |
| Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 72 | if (stats_chunks.curchunks > stats_chunks.highchunks) { |
| Jason Evans | 3c23435 | 2010-01-27 13:10:55 -0800 | [diff] [blame] | 73 | stats_chunks.highchunks = stats_chunks.curchunks; |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 74 | if (config_prof) |
| 75 | gdump = true; |
| 76 | } else if (config_prof) |
| Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 77 | gdump = false; |
| Jason Evans | 3c23435 | 2010-01-27 13:10:55 -0800 | [diff] [blame] | 78 | malloc_mutex_unlock(&chunks_mtx); |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 79 | if (config_prof && opt_prof && opt_prof_gdump && gdump) |
| Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 80 | prof_gdump(); |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 81 | } |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 82 | |
| 83 | assert(CHUNK_ADDR2BASE(ret) == ret); |
| 84 | return (ret); |
| 85 | } |
| 86 | |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 87 | void |
| Jason Evans | 12a4887 | 2011-11-11 14:41:59 -0800 | [diff] [blame] | 88 | chunk_dealloc(void *chunk, size_t size, bool unmap) |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 89 | { |
| 90 | |
| 91 | assert(chunk != NULL); |
| 92 | assert(CHUNK_ADDR2BASE(chunk) == chunk); |
| 93 | assert(size != 0); |
| 94 | assert((size & chunksize_mask) == 0); |
| 95 | |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 96 | if (config_ivsalloc) |
| 97 | rtree_set(chunks_rtree, (uintptr_t)chunk, NULL); |
| 98 | if (config_stats || config_prof) { |
| 99 | malloc_mutex_lock(&chunks_mtx); |
| 100 | stats_chunks.curchunks -= (size / chunksize); |
| 101 | malloc_mutex_unlock(&chunks_mtx); |
| 102 | } |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 103 | |
| Jason Evans | 12a4887 | 2011-11-11 14:41:59 -0800 | [diff] [blame] | 104 | if (unmap) { |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 105 | if (config_swap && swap_enabled && chunk_dealloc_swap(chunk, |
| 106 | size) == false) |
| Jason Evans | 12a4887 | 2011-11-11 14:41:59 -0800 | [diff] [blame] | 107 | return; |
| Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame^] | 108 | if (config_dss && chunk_dealloc_dss(chunk, size) == false) |
| Jason Evans | 12a4887 | 2011-11-11 14:41:59 -0800 | [diff] [blame] | 109 | return; |
| Jason Evans | 12a4887 | 2011-11-11 14:41:59 -0800 | [diff] [blame] | 110 | chunk_dealloc_mmap(chunk, size); |
| 111 | } |
| Jason Evans | e476f8a | 2010-01-16 09:53:50 -0800 | [diff] [blame] | 112 | } |
| 113 | |
/*
 * One-time initialization of the chunk subsystem: derive chunk geometry from
 * opt_lg_chunk, then boot each backend.  Returns true on failure.  Boot order
 * matters: the stats mutex must exist before any backend can allocate, and
 * the rtree is created last since rtree_new() itself allocates.
 */
bool
chunk_boot(void)
{

	/* Set variables according to the value of opt_lg_chunk. */
	chunksize = (ZU(1) << opt_lg_chunk);
	assert(chunksize >= PAGE_SIZE);
	chunksize_mask = chunksize - 1;
	chunk_npages = (chunksize >> PAGE_SHIFT);

	if (config_stats || config_prof) {
		/* Chunk statistics are shared; guard them with chunks_mtx. */
		if (malloc_mutex_init(&chunks_mtx))
			return (true);
		memset(&stats_chunks, 0, sizeof(chunk_stats_t));
	}
	if (config_swap && chunk_swap_boot())
		return (true);
	if (chunk_mmap_boot())
		return (true);
	if (config_dss && chunk_dss_boot())
		return (true);
	if (config_ivsalloc) {
		/*
		 * Depth covers every possible chunk-aligned address:
		 * (pointer bits) - lg(chunk size).
		 */
		chunks_rtree = rtree_new((ZU(1) << (LG_SIZEOF_PTR+3)) -
		    opt_lg_chunk);
		if (chunks_rtree == NULL)
			return (true);
	}

	return (false);
}