#define JEMALLOC_TCACHE_C_
#include "jemalloc/internal/jemalloc_internal.h"

/******************************************************************************/
/* Data. */

malloc_tsd_data(, tcache, tcache_t *, NULL)
malloc_tsd_data(, tcache_enabled, tcache_enabled_t, tcache_enabled_default)

bool opt_tcache = true;
ssize_t opt_lg_tcache_max = LG_TCACHE_MAXCLASS_DEFAULT;

tcache_bin_info_t *tcache_bin_info;
static unsigned stack_nelms; /* Total stack elms per tcache. */

size_t nhbins;
size_t tcache_maxclass;

/******************************************************************************/

size_t
tcache_salloc(const void *ptr)
{

    return (arena_salloc(ptr, false));
}

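/*
 * Incremental GC hook, called from tcache_event() when the event counter
 * reaches its threshold.  Operates on one bin per call, in round-robin order:
 * flushes (ceiling) 3/4 of the objects that sat below the low water mark
 * since the last pass, and adjusts lg_fill_div so that future refills track
 * actual demand.
 */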
void
tcache_event_hard(tcache_t *tcache)
{
    size_t binind = tcache->next_gc_bin;
    tcache_bin_t *tbin = &tcache->tbins[binind];
    tcache_bin_info_t *tbin_info = &tcache_bin_info[binind];

    if (tbin->low_water > 0) {
        /*
         * Flush (ceiling) 3/4 of the objects below the low water mark.
         */
        if (binind < NBINS) {
            tcache_bin_flush_small(tbin, binind, tbin->ncached -
                tbin->low_water + (tbin->low_water >> 2), tcache);
        } else {
            tcache_bin_flush_large(tbin, binind, tbin->ncached -
                tbin->low_water + (tbin->low_water >> 2), tcache);
        }
        /*
         * Reduce fill count by 2X.  Limit lg_fill_div such that the
         * fill count is always at least 1.
         */
        if ((tbin_info->ncached_max >> (tbin->lg_fill_div+1)) >= 1)
            tbin->lg_fill_div++;
    } else if (tbin->low_water < 0) {
        /*
         * Increase fill count by 2X.  Make sure lg_fill_div stays
         * greater than 0.
         */
        if (tbin->lg_fill_div > 1)
            tbin->lg_fill_div--;
    }
    tbin->low_water = tbin->ncached;

    tcache->next_gc_bin++;
    if (tcache->next_gc_bin == nhbins)
        tcache->next_gc_bin = 0;
    tcache->ev_cnt = 0;
}

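/*
 * Slow path for small allocation: the bin was empty, so refill it from the
 * arena (handing over any accumulated profiling bytes) and retry the fast
 * path.
 */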
void *
tcache_alloc_small_hard(tcache_t *tcache, tcache_bin_t *tbin, size_t binind)
{
    void *ret;

    arena_tcache_fill_small(tcache->arena, tbin, binind,
        config_prof ? tcache->prof_accumbytes : 0);
    if (config_prof)
        tcache->prof_accumbytes = 0;
    ret = tcache_alloc_easy(tbin);

    return (ret);
}

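/*
 * Flush the small bin down to rem cached objects, returning each object to
 * the arena bin it came from.  Objects are processed in passes: each pass
 * locks the arena bin that owns the first remaining object, frees every
 * object belonging to that arena, and defers the rest to a later pass.
 * Thread cache stats are merged into the owning arena's bin stats along the
 * way.
 */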
void
tcache_bin_flush_small(tcache_bin_t *tbin, size_t binind, unsigned rem,
    tcache_t *tcache)
{
    void *ptr;
    unsigned i, nflush, ndeferred;
    bool merged_stats = false;

    assert(binind < NBINS);
    assert(rem <= tbin->ncached);

    for (nflush = tbin->ncached - rem; nflush > 0; nflush = ndeferred) {
        /* Lock the arena bin associated with the first object. */
        arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(
            tbin->avail[0]);
        arena_t *arena = chunk->arena;
        arena_bin_t *bin = &arena->bins[binind];

        if (config_prof && arena == tcache->arena) {
            if (arena_prof_accum(arena, tcache->prof_accumbytes))
                prof_idump();
            tcache->prof_accumbytes = 0;
        }

        malloc_mutex_lock(&bin->lock);
        if (config_stats && arena == tcache->arena) {
            assert(merged_stats == false);
            merged_stats = true;
            bin->stats.nflushes++;
            bin->stats.nrequests += tbin->tstats.nrequests;
            tbin->tstats.nrequests = 0;
        }
        ndeferred = 0;
        for (i = 0; i < nflush; i++) {
            ptr = tbin->avail[i];
            assert(ptr != NULL);
            chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
            if (chunk->arena == arena) {
                size_t pageind = ((uintptr_t)ptr -
                    (uintptr_t)chunk) >> LG_PAGE;
                arena_chunk_map_t *mapelm =
                    arena_mapp_get(chunk, pageind);
                if (config_fill && opt_junk) {
                    arena_alloc_junk_small(ptr,
                        &arena_bin_info[binind], true);
                }
                arena_dalloc_bin_locked(arena, chunk, ptr,
                    mapelm);
            } else {
                /*
                 * This object was allocated via a different
                 * arena bin than the one that is currently
                 * locked.  Stash the object, so that it can be
                 * handled in a future pass.
                 */
                tbin->avail[ndeferred] = ptr;
                ndeferred++;
            }
        }
        malloc_mutex_unlock(&bin->lock);
    }
    if (config_stats && merged_stats == false) {
        /*
         * The flush loop didn't happen to flush to this thread's
         * arena, so the stats didn't get merged.  Manually do so now.
         */
        arena_bin_t *bin = &tcache->arena->bins[binind];
        malloc_mutex_lock(&bin->lock);
        bin->stats.nflushes++;
        bin->stats.nrequests += tbin->tstats.nrequests;
        tbin->tstats.nrequests = 0;
        malloc_mutex_unlock(&bin->lock);
    }

    memmove(tbin->avail, &tbin->avail[tbin->ncached - rem],
        rem * sizeof(void *));
    tbin->ncached = rem;
    if ((int)tbin->ncached < tbin->low_water)
        tbin->low_water = tbin->ncached;
}

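/*
 * Large-object analogue of tcache_bin_flush_small(): flush the bin down to
 * rem cached objects, locking one arena per pass and deferring objects that
 * belong to other arenas.  Profiling bytes and large-allocation stats are
 * merged under the arena lock.
 */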
void
tcache_bin_flush_large(tcache_bin_t *tbin, size_t binind, unsigned rem,
    tcache_t *tcache)
{
    void *ptr;
    unsigned i, nflush, ndeferred;
    bool merged_stats = false;

    assert(binind < nhbins);
    assert(rem <= tbin->ncached);

    for (nflush = tbin->ncached - rem; nflush > 0; nflush = ndeferred) {
        /* Lock the arena associated with the first object. */
        arena_chunk_t *chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(
            tbin->avail[0]);
        arena_t *arena = chunk->arena;
        UNUSED bool idump;

        if (config_prof)
            idump = false;
        malloc_mutex_lock(&arena->lock);
        if ((config_prof || config_stats) && arena == tcache->arena) {
            if (config_prof) {
                idump = arena_prof_accum_locked(arena,
                    tcache->prof_accumbytes);
                tcache->prof_accumbytes = 0;
            }
            if (config_stats) {
                merged_stats = true;
                arena->stats.nrequests_large +=
                    tbin->tstats.nrequests;
                arena->stats.lstats[binind - NBINS].nrequests +=
                    tbin->tstats.nrequests;
                tbin->tstats.nrequests = 0;
            }
        }
        ndeferred = 0;
        for (i = 0; i < nflush; i++) {
            ptr = tbin->avail[i];
            assert(ptr != NULL);
            chunk = (arena_chunk_t *)CHUNK_ADDR2BASE(ptr);
            if (chunk->arena == arena)
                arena_dalloc_large_locked(arena, chunk, ptr);
            else {
                /*
                 * This object was allocated via a different
                 * arena than the one that is currently locked.
                 * Stash the object, so that it can be handled
                 * in a future pass.
                 */
                tbin->avail[ndeferred] = ptr;
                ndeferred++;
            }
        }
        malloc_mutex_unlock(&arena->lock);
        if (config_prof && idump)
            prof_idump();
    }
    if (config_stats && merged_stats == false) {
        /*
         * The flush loop didn't happen to flush to this thread's
         * arena, so the stats didn't get merged.  Manually do so now.
         */
        arena_t *arena = tcache->arena;
        malloc_mutex_lock(&arena->lock);
        arena->stats.nrequests_large += tbin->tstats.nrequests;
        arena->stats.lstats[binind - NBINS].nrequests +=
            tbin->tstats.nrequests;
        tbin->tstats.nrequests = 0;
        malloc_mutex_unlock(&arena->lock);
    }

    memmove(tbin->avail, &tbin->avail[tbin->ncached - rem],
        rem * sizeof(void *));
    tbin->ncached = rem;
    if ((int)tbin->ncached < tbin->low_water)
        tbin->low_water = tbin->ncached;
}

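/*
 * Bind the tcache to an arena; for stats builds, also register it on the
 * arena's list of extant tcaches.  tcache_arena_dissociate() undoes this and
 * merges the tcache's stats into the arena before the binding is dropped.
 */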
void
tcache_arena_associate(tcache_t *tcache, arena_t *arena)
{

    if (config_stats) {
        /* Link into list of extant tcaches. */
        malloc_mutex_lock(&arena->lock);
        ql_elm_new(tcache, link);
        ql_tail_insert(&arena->tcache_ql, tcache, link);
        malloc_mutex_unlock(&arena->lock);
    }
    tcache->arena = arena;
}

void
tcache_arena_dissociate(tcache_t *tcache)
{

    if (config_stats) {
        /* Unlink from list of extant tcaches. */
        malloc_mutex_lock(&tcache->arena->lock);
        ql_remove(&tcache->arena->tcache_ql, tcache, link);
        tcache_stats_merge(tcache, tcache->arena);
        malloc_mutex_unlock(&tcache->arena->lock);
    }
}

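/*
 * Slow path for tcache_get(): the TSD value is either NULL or one of the
 * TCACHE_STATE_* sentinels.  Create a tcache only when explicitly allowed
 * (never as a side effect of free()), and track cleanup-time state so that a
 * tcache is not re-created after the thread's destructor has run.
 */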
tcache_t *
tcache_get_hard(tcache_t *tcache, bool create)
{

    if (tcache == NULL) {
        if (create == false) {
            /*
             * Creating a tcache here would cause
             * allocation as a side effect of free().
             * Ordinarily that would be okay since
             * tcache_create() failure is a soft failure
             * that doesn't propagate.  However, if TLS
             * data are freed via free() as in glibc,
             * subtle corruption could result from setting
             * a TLS variable after its backing memory is
             * freed.
             */
            return (NULL);
        }
        if (tcache_enabled_get() == false) {
            tcache_enabled_set(false); /* Memoize. */
            return (NULL);
        }
        return (tcache_create(choose_arena(NULL)));
    }
    if (tcache == TCACHE_STATE_PURGATORY) {
        /*
         * Make a note that an allocator function was called
         * after tcache_thread_cleanup() was called.
         */
        tcache = TCACHE_STATE_REINCARNATED;
        tcache_tsd_set(&tcache);
        return (NULL);
    }
    if (tcache == TCACHE_STATE_REINCARNATED)
        return (NULL);
    not_reached();
    return (NULL);
}

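/*
 * Allocate and initialize a tcache for the calling thread.  The per-bin
 * pointer stacks are laid out immediately after the tcache_t, and the whole
 * allocation is rounded up to a cacheline multiple to avoid false sharing.
 */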
tcache_t *
tcache_create(arena_t *arena)
{
    tcache_t *tcache;
    size_t size, stack_offset;
    unsigned i;

    size = offsetof(tcache_t, tbins) + (sizeof(tcache_bin_t) * nhbins);
    /* Naturally align the pointer stacks. */
    size = PTR_CEILING(size);
    stack_offset = size;
    size += stack_nelms * sizeof(void *);
    /*
     * Round up to the nearest multiple of the cacheline size, in order to
     * avoid the possibility of false cacheline sharing.
     *
     * That this works relies on the same logic as in ipalloc(), but we
     * cannot directly call ipalloc() here due to tcache bootstrapping
     * issues.
     */
    size = (size + CACHELINE_MASK) & (-CACHELINE);

    if (size <= SMALL_MAXCLASS)
        tcache = (tcache_t *)arena_malloc_small(arena, size, true);
    else if (size <= tcache_maxclass)
        tcache = (tcache_t *)arena_malloc_large(arena, size, true);
    else
        tcache = (tcache_t *)icalloct(size, false, arena);

    if (tcache == NULL)
        return (NULL);

    tcache_arena_associate(tcache, arena);

    assert((TCACHE_NSLOTS_SMALL_MAX & 1U) == 0);
    for (i = 0; i < nhbins; i++) {
        tcache->tbins[i].lg_fill_div = 1;
        tcache->tbins[i].avail = (void **)((uintptr_t)tcache +
            (uintptr_t)stack_offset);
        stack_offset += tcache_bin_info[i].ncached_max * sizeof(void *);
    }

    tcache_tsd_set(&tcache);

    return (tcache);
}

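/*
 * Tear down a tcache: flush every bin, merge any stats and profiling counts
 * that remain, then deallocate the tcache itself via the path that matches
 * its size class.
 */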
void
tcache_destroy(tcache_t *tcache)
{
    unsigned i;
    size_t tcache_size;

    tcache_arena_dissociate(tcache);

    for (i = 0; i < NBINS; i++) {
        tcache_bin_t *tbin = &tcache->tbins[i];
        tcache_bin_flush_small(tbin, i, 0, tcache);

        if (config_stats && tbin->tstats.nrequests != 0) {
            arena_t *arena = tcache->arena;
            arena_bin_t *bin = &arena->bins[i];
            malloc_mutex_lock(&bin->lock);
            bin->stats.nrequests += tbin->tstats.nrequests;
            malloc_mutex_unlock(&bin->lock);
        }
    }

    for (; i < nhbins; i++) {
        tcache_bin_t *tbin = &tcache->tbins[i];
        tcache_bin_flush_large(tbin, i, 0, tcache);

        if (config_stats && tbin->tstats.nrequests != 0) {
            arena_t *arena = tcache->arena;
            malloc_mutex_lock(&arena->lock);
            arena->stats.nrequests_large += tbin->tstats.nrequests;
            arena->stats.lstats[i - NBINS].nrequests +=
                tbin->tstats.nrequests;
            malloc_mutex_unlock(&arena->lock);
        }
    }

    if (config_prof && tcache->prof_accumbytes > 0 &&
        arena_prof_accum(tcache->arena, tcache->prof_accumbytes))
        prof_idump();

    tcache_size = arena_salloc(tcache, false);
    if (tcache_size <= SMALL_MAXCLASS) {
        arena_chunk_t *chunk = CHUNK_ADDR2BASE(tcache);
        arena_t *arena = chunk->arena;
        size_t pageind = ((uintptr_t)tcache - (uintptr_t)chunk) >>
            LG_PAGE;
        arena_chunk_map_t *mapelm = arena_mapp_get(chunk, pageind);

        arena_dalloc_bin(arena, chunk, tcache, pageind, mapelm);
    } else if (tcache_size <= tcache_maxclass) {
        arena_chunk_t *chunk = CHUNK_ADDR2BASE(tcache);
        arena_t *arena = chunk->arena;

        arena_dalloc_large(arena, chunk, tcache);
    } else
        idalloct(tcache, false);
}

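/*
 * TSD destructor.  The PURGATORY/REINCARNATED sentinels cope with other TSD
 * destructors that call back into the allocator after this one has already
 * run.
 */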
void
tcache_thread_cleanup(void *arg)
{
    tcache_t *tcache = *(tcache_t **)arg;

    if (tcache == TCACHE_STATE_DISABLED) {
        /* Do nothing. */
    } else if (tcache == TCACHE_STATE_REINCARNATED) {
        /*
         * Another destructor called an allocator function after this
         * destructor was called.  Reset tcache to
         * TCACHE_STATE_PURGATORY in order to receive another callback.
         */
        tcache = TCACHE_STATE_PURGATORY;
        tcache_tsd_set(&tcache);
    } else if (tcache == TCACHE_STATE_PURGATORY) {
        /*
         * The previous time this destructor was called, we set the key
         * to TCACHE_STATE_PURGATORY so that other destructors wouldn't
         * cause re-creation of the tcache.  This time, do nothing, so
         * that the destructor will not be called again.
         */
    } else if (tcache != NULL) {
        assert(tcache != TCACHE_STATE_PURGATORY);
        tcache_destroy(tcache);
        tcache = TCACHE_STATE_PURGATORY;
        tcache_tsd_set(&tcache);
    }
}

/* Caller must own arena->lock. */
void
tcache_stats_merge(tcache_t *tcache, arena_t *arena)
{
    unsigned i;

    cassert(config_stats);

    /* Merge and reset tcache stats. */
    for (i = 0; i < NBINS; i++) {
        arena_bin_t *bin = &arena->bins[i];
        tcache_bin_t *tbin = &tcache->tbins[i];
        malloc_mutex_lock(&bin->lock);
        bin->stats.nrequests += tbin->tstats.nrequests;
        malloc_mutex_unlock(&bin->lock);
        tbin->tstats.nrequests = 0;
    }

    for (; i < nhbins; i++) {
        malloc_large_stats_t *lstats = &arena->stats.lstats[i - NBINS];
        tcache_bin_t *tbin = &tcache->tbins[i];
        arena->stats.nrequests_large += tbin->tstats.nrequests;
        lstats->nrequests += tbin->tstats.nrequests;
        tbin->tstats.nrequests = 0;
    }
}

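/*
 * First-stage bootstrap: clamp tcache_maxclass into [SMALL_MAXCLASS,
 * arena_maxclass], derive nhbins, and build tcache_bin_info (the per-bin
 * ncached_max limits), which together determine the size of every
 * subsequently created tcache.
 */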
bool
tcache_boot0(void)
{
    unsigned i;

    /*
     * If necessary, clamp opt_lg_tcache_max, now that arena_maxclass is
     * known.
     */
    if (opt_lg_tcache_max < 0 || (1U << opt_lg_tcache_max) < SMALL_MAXCLASS)
        tcache_maxclass = SMALL_MAXCLASS;
    else if ((1U << opt_lg_tcache_max) > arena_maxclass)
        tcache_maxclass = arena_maxclass;
    else
        tcache_maxclass = (1U << opt_lg_tcache_max);

    nhbins = NBINS + (tcache_maxclass >> LG_PAGE);

    /* Initialize tcache_bin_info. */
    tcache_bin_info = (tcache_bin_info_t *)base_alloc(nhbins *
        sizeof(tcache_bin_info_t));
    if (tcache_bin_info == NULL)
        return (true);
    stack_nelms = 0;
    for (i = 0; i < NBINS; i++) {
        if ((arena_bin_info[i].nregs << 1) <= TCACHE_NSLOTS_SMALL_MAX) {
            tcache_bin_info[i].ncached_max =
                (arena_bin_info[i].nregs << 1);
        } else {
            tcache_bin_info[i].ncached_max =
                TCACHE_NSLOTS_SMALL_MAX;
        }
        stack_nelms += tcache_bin_info[i].ncached_max;
    }
    for (; i < nhbins; i++) {
        tcache_bin_info[i].ncached_max = TCACHE_NSLOTS_LARGE;
        stack_nelms += tcache_bin_info[i].ncached_max;
    }

    return (false);
}

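/* Second-stage bootstrap: register the tcache and tcache_enabled TSD keys. */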
bool
tcache_boot1(void)
{

    if (tcache_tsd_boot() || tcache_enabled_tsd_boot())
        return (true);

    return (false);
}