Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1 | #define JEMALLOC_PROF_C_ |
Jason Evans | 376b152 | 2010-02-11 14:45:59 -0800 | [diff] [blame] | 2 | #include "jemalloc/internal/jemalloc_internal.h" |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 3 | /******************************************************************************/ |
| 4 | |
| 5 | #ifdef JEMALLOC_PROF_LIBUNWIND |
| 6 | #define UNW_LOCAL_ONLY |
| 7 | #include <libunwind.h> |
| 8 | #endif |
| 9 | |
Jason Evans | 77f350b | 2011-03-15 22:23:12 -0700 | [diff] [blame] | 10 | #ifdef JEMALLOC_PROF_LIBGCC |
| 11 | #include <unwind.h> |
| 12 | #endif |
| 13 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 14 | /******************************************************************************/ |
| 15 | /* Data. */ |
| 16 | |
/* Thread-specific-data slot holding each thread's prof_tdata_t (NULL until initialized). */
malloc_tsd_data(, prof_tdata, prof_tdata_t *, NULL)

/* Profiling option values; defaults here, possibly overridden at startup. */
bool		opt_prof = false;
bool		opt_prof_active = true;
size_t		opt_lg_prof_sample = LG_PROF_SAMPLE_DEFAULT;
ssize_t		opt_lg_prof_interval = LG_PROF_INTERVAL_DEFAULT;
bool		opt_prof_gdump = false;
bool		opt_prof_final = true;
bool		opt_prof_leak = false;
bool		opt_prof_accum = false;
char		opt_prof_prefix[
    /* Minimize memory bloat for non-prof builds. */
#ifdef JEMALLOC_PROF
    PATH_MAX +
#endif
    1
];

uint64_t	prof_interval = 0;
bool		prof_promote;

/*
 * Table of mutexes that are shared among ctx's. These are leaf locks, so
 * there is no problem with using them for more than one ctx at the same time.
 * The primary motivation for this sharing though is that ctx's are ephemeral,
 * and destroying mutexes causes complications for systems that allocate when
 * creating/destroying mutexes.
 */
static malloc_mutex_t	*ctx_locks;
static unsigned		cum_ctxs; /* Atomic counter. */

/*
 * Global hash of (prof_bt_t *)-->(prof_ctx_t *). This is the master data
 * structure that knows about all backtraces currently captured.
 */
static ckh_t		bt2ctx;
static malloc_mutex_t	bt2ctx_mtx;

/*
 * NOTE(review): sequence counters presumably used to compose unique dump file
 * names (interval/manual/usr variants) -- usage is outside this chunk; confirm.
 */
static malloc_mutex_t	prof_dump_seq_mtx;
static uint64_t		prof_dump_seq;
static uint64_t		prof_dump_iseq;
static uint64_t		prof_dump_mseq;
static uint64_t		prof_dump_useq;

/*
 * This buffer is rather large for stack allocation, so use a single buffer for
 * all profile dumps.
 */
static malloc_mutex_t	prof_dump_mtx;
static char		prof_dump_buf[
    /* Minimize memory bloat for non-prof builds. */
#ifdef JEMALLOC_PROF
    PROF_DUMP_BUFSIZE
#else
    1
#endif
];
static unsigned		prof_dump_buf_end;
static int		prof_dump_fd;

/* Do not dump any profiles until bootstrapping is complete. */
static bool		prof_booted = false;
| 79 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 80 | /******************************************************************************/ |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 81 | |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 82 | void |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 83 | bt_init(prof_bt_t *bt, void **vec) |
| 84 | { |
| 85 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 86 | cassert(config_prof); |
| 87 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 88 | bt->vec = vec; |
| 89 | bt->len = 0; |
| 90 | } |
| 91 | |
/* Release a backtrace previously heap-allocated via bt_dup(). */
static void
bt_destroy(prof_bt_t *bt)
{

	cassert(config_prof);

	/* bt and its frame vector are one contiguous allocation. */
	idalloc(bt);
}
| 100 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 101 | static prof_bt_t * |
| 102 | bt_dup(prof_bt_t *bt) |
| 103 | { |
| 104 | prof_bt_t *ret; |
| 105 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 106 | cassert(config_prof); |
| 107 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 108 | /* |
| 109 | * Create a single allocation that has space for vec immediately |
| 110 | * following the prof_bt_t structure. The backtraces that get |
| 111 | * stored in the backtrace caches are copied from stack-allocated |
| 112 | * temporary variables, so size is known at creation time. Making this |
| 113 | * a contiguous object improves cache locality. |
| 114 | */ |
| 115 | ret = (prof_bt_t *)imalloc(QUANTUM_CEILING(sizeof(prof_bt_t)) + |
| 116 | (bt->len * sizeof(void *))); |
| 117 | if (ret == NULL) |
| 118 | return (NULL); |
| 119 | ret->vec = (void **)((uintptr_t)ret + |
| 120 | QUANTUM_CEILING(sizeof(prof_bt_t))); |
| 121 | memcpy(ret->vec, bt->vec, bt->len * sizeof(void *)); |
| 122 | ret->len = bt->len; |
| 123 | |
| 124 | return (ret); |
| 125 | } |
| 126 | |
/*
 * Acquire the global bt2ctx lock for the calling thread.  The enq flag
 * is raised *before* taking the lock so that dump triggers firing while
 * the lock is held get deferred (see prof_leave()) instead of
 * re-entering the dump path.
 */
static inline void
prof_enter(prof_tdata_t *prof_tdata)
{

	cassert(config_prof);

	assert(prof_tdata->enq == false);
	prof_tdata->enq = true;

	malloc_mutex_lock(&bt2ctx_mtx);
}
| 138 | |
/*
 * Release the global bt2ctx lock, then perform any interval/gdump
 * requests that were deferred while this thread held it (recorded in
 * enq_idump/enq_gdump by the dump triggers).
 */
static inline void
prof_leave(prof_tdata_t *prof_tdata)
{
	bool idump, gdump;

	cassert(config_prof);

	malloc_mutex_unlock(&bt2ctx_mtx);

	/* Snapshot and clear the deferred-dump flags. */
	assert(prof_tdata->enq);
	prof_tdata->enq = false;
	idump = prof_tdata->enq_idump;
	prof_tdata->enq_idump = false;
	gdump = prof_tdata->enq_gdump;
	prof_tdata->enq_gdump = false;

	/* Safe to dump now that bt2ctx_mtx has been dropped. */
	if (idump)
		prof_idump();
	if (gdump)
		prof_gdump();
}
| 160 | |
Jason Evans | 77f350b | 2011-03-15 22:23:12 -0700 | [diff] [blame] | 161 | #ifdef JEMALLOC_PROF_LIBUNWIND |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 162 | void |
Jason Evans | 5389146 | 2012-02-13 18:23:41 -0800 | [diff] [blame] | 163 | prof_backtrace(prof_bt_t *bt, unsigned nignore) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 164 | { |
| 165 | unw_context_t uc; |
| 166 | unw_cursor_t cursor; |
| 167 | unsigned i; |
| 168 | int err; |
| 169 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 170 | cassert(config_prof); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 171 | assert(bt->len == 0); |
| 172 | assert(bt->vec != NULL); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 173 | |
| 174 | unw_getcontext(&uc); |
| 175 | unw_init_local(&cursor, &uc); |
| 176 | |
Jason Evans | 9f949f9 | 2011-03-22 20:44:40 -0700 | [diff] [blame] | 177 | /* Throw away (nignore+1) stack frames, if that many exist. */ |
| 178 | for (i = 0; i < nignore + 1; i++) { |
| 179 | err = unw_step(&cursor); |
| 180 | if (err <= 0) |
| 181 | return; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 182 | } |
| 183 | |
Jason Evans | 9f949f9 | 2011-03-22 20:44:40 -0700 | [diff] [blame] | 184 | /* |
| 185 | * Iterate over stack frames until there are no more, or until no space |
| 186 | * remains in bt. |
| 187 | */ |
Jason Evans | 5389146 | 2012-02-13 18:23:41 -0800 | [diff] [blame] | 188 | for (i = 0; i < PROF_BT_MAX; i++) { |
Jason Evans | 9f949f9 | 2011-03-22 20:44:40 -0700 | [diff] [blame] | 189 | unw_get_reg(&cursor, UNW_REG_IP, (unw_word_t *)&bt->vec[i]); |
| 190 | bt->len++; |
| 191 | err = unw_step(&cursor); |
| 192 | if (err <= 0) |
| 193 | break; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 194 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 195 | } |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 196 | #elif (defined(JEMALLOC_PROF_LIBGCC)) |
/*
 * Trace callback that records nothing and always continues unwinding.
 * NOTE(review): not referenced in this chunk; presumably passed to
 * _Unwind_Backtrace during bootstrap to force the unwinder's lazy
 * initialization -- confirm against the boot code.
 */
static _Unwind_Reason_Code
prof_unwind_init_callback(struct _Unwind_Context *context, void *arg)
{

	cassert(config_prof);

	return (_URC_NO_REASON);
}
| 205 | |
| 206 | static _Unwind_Reason_Code |
| 207 | prof_unwind_callback(struct _Unwind_Context *context, void *arg) |
| 208 | { |
| 209 | prof_unwind_data_t *data = (prof_unwind_data_t *)arg; |
| 210 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 211 | cassert(config_prof); |
| 212 | |
Jason Evans | 77f350b | 2011-03-15 22:23:12 -0700 | [diff] [blame] | 213 | if (data->nignore > 0) |
| 214 | data->nignore--; |
| 215 | else { |
| 216 | data->bt->vec[data->bt->len] = (void *)_Unwind_GetIP(context); |
| 217 | data->bt->len++; |
| 218 | if (data->bt->len == data->max) |
| 219 | return (_URC_END_OF_STACK); |
| 220 | } |
| 221 | |
| 222 | return (_URC_NO_REASON); |
| 223 | } |
| 224 | |
/*
 * Capture the calling thread's stack into bt via libgcc's
 * _Unwind_Backtrace(), skipping the innermost nignore frames and
 * recording at most PROF_BT_MAX frames (see prof_unwind_callback()).
 */
void
prof_backtrace(prof_bt_t *bt, unsigned nignore)
{
	prof_unwind_data_t data = {bt, nignore, PROF_BT_MAX};

	cassert(config_prof);

	_Unwind_Backtrace(prof_unwind_callback, &data);
}
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 234 | #elif (defined(JEMALLOC_PROF_GCC)) |
/*
 * Fallback backtrace implementation built on GCC's
 * __builtin_frame_address()/__builtin_return_address().  Those
 * builtins require compile-time-constant frame indices, hence the
 * fully unrolled BT_FRAME expansion below: frames 0..127 cover
 * nignore + PROF_BT_MAX slots, plus three extras (128..130) to
 * compensate for the maximum supported nignore.
 */
void
prof_backtrace(prof_bt_t *bt, unsigned nignore)
{
/*
 * Record frame i's return address if i is within range and past the
 * ignored prefix; bail out of the whole function as soon as the frame
 * chain ends (NULL frame or return address).
 */
#define BT_FRAME(i)							\
	if ((i) < nignore + PROF_BT_MAX) {				\
		void *p;						\
		if (__builtin_frame_address(i) == 0)			\
			return;						\
		p = __builtin_return_address(i);			\
		if (p == NULL)						\
			return;						\
		if (i >= nignore) {					\
			bt->vec[(i) - nignore] = p;			\
			bt->len = (i) - nignore + 1;			\
		}							\
	} else								\
		return;

	cassert(config_prof);
	assert(nignore <= 3);

	BT_FRAME(0)
	BT_FRAME(1)
	BT_FRAME(2)
	BT_FRAME(3)
	BT_FRAME(4)
	BT_FRAME(5)
	BT_FRAME(6)
	BT_FRAME(7)
	BT_FRAME(8)
	BT_FRAME(9)

	BT_FRAME(10)
	BT_FRAME(11)
	BT_FRAME(12)
	BT_FRAME(13)
	BT_FRAME(14)
	BT_FRAME(15)
	BT_FRAME(16)
	BT_FRAME(17)
	BT_FRAME(18)
	BT_FRAME(19)

	BT_FRAME(20)
	BT_FRAME(21)
	BT_FRAME(22)
	BT_FRAME(23)
	BT_FRAME(24)
	BT_FRAME(25)
	BT_FRAME(26)
	BT_FRAME(27)
	BT_FRAME(28)
	BT_FRAME(29)

	BT_FRAME(30)
	BT_FRAME(31)
	BT_FRAME(32)
	BT_FRAME(33)
	BT_FRAME(34)
	BT_FRAME(35)
	BT_FRAME(36)
	BT_FRAME(37)
	BT_FRAME(38)
	BT_FRAME(39)

	BT_FRAME(40)
	BT_FRAME(41)
	BT_FRAME(42)
	BT_FRAME(43)
	BT_FRAME(44)
	BT_FRAME(45)
	BT_FRAME(46)
	BT_FRAME(47)
	BT_FRAME(48)
	BT_FRAME(49)

	BT_FRAME(50)
	BT_FRAME(51)
	BT_FRAME(52)
	BT_FRAME(53)
	BT_FRAME(54)
	BT_FRAME(55)
	BT_FRAME(56)
	BT_FRAME(57)
	BT_FRAME(58)
	BT_FRAME(59)

	BT_FRAME(60)
	BT_FRAME(61)
	BT_FRAME(62)
	BT_FRAME(63)
	BT_FRAME(64)
	BT_FRAME(65)
	BT_FRAME(66)
	BT_FRAME(67)
	BT_FRAME(68)
	BT_FRAME(69)

	BT_FRAME(70)
	BT_FRAME(71)
	BT_FRAME(72)
	BT_FRAME(73)
	BT_FRAME(74)
	BT_FRAME(75)
	BT_FRAME(76)
	BT_FRAME(77)
	BT_FRAME(78)
	BT_FRAME(79)

	BT_FRAME(80)
	BT_FRAME(81)
	BT_FRAME(82)
	BT_FRAME(83)
	BT_FRAME(84)
	BT_FRAME(85)
	BT_FRAME(86)
	BT_FRAME(87)
	BT_FRAME(88)
	BT_FRAME(89)

	BT_FRAME(90)
	BT_FRAME(91)
	BT_FRAME(92)
	BT_FRAME(93)
	BT_FRAME(94)
	BT_FRAME(95)
	BT_FRAME(96)
	BT_FRAME(97)
	BT_FRAME(98)
	BT_FRAME(99)

	BT_FRAME(100)
	BT_FRAME(101)
	BT_FRAME(102)
	BT_FRAME(103)
	BT_FRAME(104)
	BT_FRAME(105)
	BT_FRAME(106)
	BT_FRAME(107)
	BT_FRAME(108)
	BT_FRAME(109)

	BT_FRAME(110)
	BT_FRAME(111)
	BT_FRAME(112)
	BT_FRAME(113)
	BT_FRAME(114)
	BT_FRAME(115)
	BT_FRAME(116)
	BT_FRAME(117)
	BT_FRAME(118)
	BT_FRAME(119)

	BT_FRAME(120)
	BT_FRAME(121)
	BT_FRAME(122)
	BT_FRAME(123)
	BT_FRAME(124)
	BT_FRAME(125)
	BT_FRAME(126)
	BT_FRAME(127)

	/* Extras to compensate for nignore. */
	BT_FRAME(128)
	BT_FRAME(129)
	BT_FRAME(130)
#undef BT_FRAME
}
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 403 | #else |
/*
 * Stub for builds with no backtrace backend configured; must never be
 * reached because profiling is unusable without one.
 */
void
prof_backtrace(prof_bt_t *bt, unsigned nignore)
{

	cassert(config_prof);
	not_reached();
}
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 411 | #endif |
| 412 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 413 | static malloc_mutex_t * |
| 414 | prof_ctx_mutex_choose(void) |
| 415 | { |
| 416 | unsigned nctxs = atomic_add_u(&cum_ctxs, 1); |
| 417 | |
| 418 | return (&ctx_locks[(nctxs - 1) % PROF_NCTX_LOCKS]); |
| 419 | } |
| 420 | |
| 421 | static void |
| 422 | prof_ctx_init(prof_ctx_t *ctx, prof_bt_t *bt) |
| 423 | { |
| 424 | |
| 425 | ctx->bt = bt; |
| 426 | ctx->lock = prof_ctx_mutex_choose(); |
| 427 | /* |
| 428 | * Set nlimbo to 1, in order to avoid a race condition with |
| 429 | * prof_ctx_merge()/prof_ctx_destroy(). |
| 430 | */ |
| 431 | ctx->nlimbo = 1; |
| 432 | ql_elm_new(ctx, dump_link); |
| 433 | memset(&ctx->cnt_merged, 0, sizeof(prof_cnt_t)); |
| 434 | ql_new(&ctx->cnts_ql); |
| 435 | } |
| 436 | |
/*
 * Destroy ctx if and only if no thread cache still references it.
 * Entered with ctx->nlimbo holding a pin (incremented by the caller's
 * path through prof_ctx_merge() or prof_lookup()) so ctx cannot be
 * freed out from under us.
 */
static void
prof_ctx_destroy(prof_ctx_t *ctx)
{
	prof_tdata_t *prof_tdata;

	cassert(config_prof);

	/*
	 * Check that ctx is still unused by any thread cache before destroying
	 * it. prof_lookup() increments ctx->nlimbo in order to avoid a race
	 * condition with this function, as does prof_ctx_merge() in order to
	 * avoid a race between the main body of prof_ctx_merge() and entry
	 * into this function.
	 */
	prof_tdata = prof_tdata_get(false);
	assert((uintptr_t)prof_tdata > (uintptr_t)PROF_TDATA_STATE_MAX);
	prof_enter(prof_tdata);
	malloc_mutex_lock(ctx->lock);
	if (ql_first(&ctx->cnts_ql) == NULL && ctx->cnt_merged.curobjs == 0 &&
	    ctx->nlimbo == 1) {
		/* Truly unreferenced: only our own pin remains. */
		assert(ctx->cnt_merged.curbytes == 0);
		assert(ctx->cnt_merged.accumobjs == 0);
		assert(ctx->cnt_merged.accumbytes == 0);
		/* Remove ctx from bt2ctx. */
		if (ckh_remove(&bt2ctx, ctx->bt, NULL, NULL))
			not_reached();
		prof_leave(prof_tdata);
		/* Destroy ctx. */
		malloc_mutex_unlock(ctx->lock);
		bt_destroy(ctx->bt);
		idalloc(ctx);
	} else {
		/*
		 * Compensate for increment in prof_ctx_merge() or
		 * prof_lookup().
		 */
		ctx->nlimbo--;
		malloc_mutex_unlock(ctx->lock);
		prof_leave(prof_tdata);
	}
}
| 478 | |
/*
 * Fold cnt's per-thread stats into ctx->cnt_merged and unlink cnt from
 * ctx.  If that leaves ctx with no live objects, no attached counters,
 * and no limbo pins (and accumulation is disabled), attempt to destroy
 * ctx.
 */
static void
prof_ctx_merge(prof_ctx_t *ctx, prof_thr_cnt_t *cnt)
{
	bool destroy;

	cassert(config_prof);

	/* Merge cnt stats and detach from ctx. */
	malloc_mutex_lock(ctx->lock);
	ctx->cnt_merged.curobjs += cnt->cnts.curobjs;
	ctx->cnt_merged.curbytes += cnt->cnts.curbytes;
	ctx->cnt_merged.accumobjs += cnt->cnts.accumobjs;
	ctx->cnt_merged.accumbytes += cnt->cnts.accumbytes;
	ql_remove(&ctx->cnts_ql, cnt, cnts_link);
	if (opt_prof_accum == false && ql_first(&ctx->cnts_ql) == NULL &&
	    ctx->cnt_merged.curobjs == 0 && ctx->nlimbo == 0) {
		/*
		 * Increment ctx->nlimbo in order to keep another thread from
		 * winning the race to destroy ctx while this one has ctx->lock
		 * dropped. Without this, it would be possible for another
		 * thread to:
		 *
		 * 1) Sample an allocation associated with ctx.
		 * 2) Deallocate the sampled object.
		 * 3) Successfully prof_ctx_destroy(ctx).
		 *
		 * The result would be that ctx no longer exists by the time
		 * this thread accesses it in prof_ctx_destroy().
		 */
		ctx->nlimbo++;
		destroy = true;
	} else
		destroy = false;
	malloc_mutex_unlock(ctx->lock);
	if (destroy)
		prof_ctx_destroy(ctx);
}
| 516 | |
/*
 * Look up bt in the global bt2ctx table, inserting a newly created ctx
 * (keyed by a heap duplicate of bt) if it is not yet present.  On
 * success returns false, sets *p_btkey/*p_ctx to the canonical
 * key/ctx, and sets *p_new_ctx to whether this call created the ctx; a
 * pre-existing ctx has had its nlimbo incremented to pin it against
 * concurrent destruction.  Returns true on OOM (nothing inserted).
 */
static bool
prof_lookup_global(prof_bt_t *bt, prof_tdata_t *prof_tdata, void **p_btkey,
    prof_ctx_t **p_ctx, bool *p_new_ctx)
{
	/* Unions bridge typed pointers and ckh's void * interface. */
	union {
		prof_ctx_t *p;
		void *v;
	} ctx;
	union {
		prof_bt_t *p;
		void *v;
	} btkey;
	bool new_ctx;

	prof_enter(prof_tdata);
	if (ckh_search(&bt2ctx, bt, &btkey.v, &ctx.v)) {
		/* bt has never been seen before. Insert it. */
		ctx.v = imalloc(sizeof(prof_ctx_t));
		if (ctx.v == NULL) {
			prof_leave(prof_tdata);
			return (true);
		}
		btkey.p = bt_dup(bt);
		if (btkey.v == NULL) {
			prof_leave(prof_tdata);
			idalloc(ctx.v);
			return (true);
		}
		prof_ctx_init(ctx.p, btkey.p);
		if (ckh_insert(&bt2ctx, btkey.v, ctx.v)) {
			/* OOM. */
			prof_leave(prof_tdata);
			idalloc(btkey.v);
			idalloc(ctx.v);
			return (true);
		}
		new_ctx = true;
	} else {
		/*
		 * Increment nlimbo, in order to avoid a race condition with
		 * prof_ctx_merge()/prof_ctx_destroy().
		 */
		malloc_mutex_lock(ctx.p->lock);
		ctx.p->nlimbo++;
		malloc_mutex_unlock(ctx.p->lock);
		new_ctx = false;
	}
	prof_leave(prof_tdata);

	*p_btkey = btkey.v;
	*p_ctx = ctx.p;
	*p_new_ctx = new_ctx;
	return (false);
}
| 571 | |
/*
 * Return this thread's counter object for backtrace bt, creating it
 * (and, if necessary, the global ctx) on first use.  The per-thread
 * bt2cnt cache is capped at PROF_TCMAX entries and kept in LRU order,
 * evicting (and merging) the least recently used counter when full.
 * Returns NULL on OOM or when this thread's tdata is unavailable.
 */
prof_thr_cnt_t *
prof_lookup(prof_bt_t *bt)
{
	union {
		prof_thr_cnt_t *p;
		void *v;
	} ret;
	prof_tdata_t *prof_tdata;

	cassert(config_prof);

	prof_tdata = prof_tdata_get(false);
	if ((uintptr_t)prof_tdata <= (uintptr_t)PROF_TDATA_STATE_MAX)
		return (NULL);

	if (ckh_search(&prof_tdata->bt2cnt, bt, NULL, &ret.v)) {
		void *btkey;
		prof_ctx_t *ctx;
		bool new_ctx;

		/*
		 * This thread's cache lacks bt. Look for it in the global
		 * cache.
		 */
		if (prof_lookup_global(bt, prof_tdata, &btkey, &ctx, &new_ctx))
			return (NULL);

		/* Link a prof_thr_cnt_t into ctx for this thread. */
		if (ckh_count(&prof_tdata->bt2cnt) == PROF_TCMAX) {
			assert(ckh_count(&prof_tdata->bt2cnt) > 0);
			/*
			 * Flush the least recently used cnt in order to keep
			 * bt2cnt from becoming too large.
			 */
			ret.p = ql_last(&prof_tdata->lru_ql, lru_link);
			assert(ret.v != NULL);
			if (ckh_remove(&prof_tdata->bt2cnt, ret.p->ctx->bt,
			    NULL, NULL))
				not_reached();
			ql_remove(&prof_tdata->lru_ql, ret.p, lru_link);
			prof_ctx_merge(ret.p->ctx, ret.p);
			/* ret can now be re-used. */
		} else {
			assert(ckh_count(&prof_tdata->bt2cnt) < PROF_TCMAX);
			/* Allocate and partially initialize a new cnt. */
			ret.v = imalloc(sizeof(prof_thr_cnt_t));
			if (ret.p == NULL) {
				/* Unwind the ctx created on our behalf. */
				if (new_ctx)
					prof_ctx_destroy(ctx);
				return (NULL);
			}
			ql_elm_new(ret.p, cnts_link);
			ql_elm_new(ret.p, lru_link);
		}
		/* Finish initializing ret. */
		ret.p->ctx = ctx;
		ret.p->epoch = 0;
		memset(&ret.p->cnts, 0, sizeof(prof_cnt_t));
		if (ckh_insert(&prof_tdata->bt2cnt, btkey, ret.v)) {
			if (new_ctx)
				prof_ctx_destroy(ctx);
			idalloc(ret.v);
			return (NULL);
		}
		ql_head_insert(&prof_tdata->lru_ql, ret.p, lru_link);
		/* Attach cnt to ctx and drop the limbo pin taken above. */
		malloc_mutex_lock(ctx->lock);
		ql_tail_insert(&ctx->cnts_ql, ret.p, cnts_link);
		ctx->nlimbo--;
		malloc_mutex_unlock(ctx->lock);
	} else {
		/* Move ret to the front of the LRU. */
		ql_remove(&prof_tdata->lru_ql, ret.p, lru_link);
		ql_head_insert(&prof_tdata->lru_ql, ret.p, lru_link);
	}

	return (ret.p);
}
| 649 | |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 650 | static bool |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 651 | prof_dump_open(bool propagate_err, const char *filename) |
| 652 | { |
| 653 | |
| 654 | prof_dump_fd = creat(filename, 0644); |
| 655 | if (prof_dump_fd == -1) { |
| 656 | if (propagate_err == false) { |
| 657 | malloc_printf( |
| 658 | "<jemalloc>: creat(\"%s\"), 0644) failed\n", |
| 659 | filename); |
| 660 | if (opt_abort) |
| 661 | abort(); |
| 662 | } |
| 663 | return (true); |
| 664 | } |
| 665 | |
| 666 | return (false); |
| 667 | } |
| 668 | |
/*
 * Write the buffered dump data to prof_dump_fd and reset the buffer.
 * Returns true on write error; unless propagate_err is set, the error
 * is also reported (and is fatal when opt_abort).
 */
static bool
prof_dump_flush(bool propagate_err)
{
	bool ret = false;
	ssize_t err;

	cassert(config_prof);

	/*
	 * NOTE(review): a short write (0 <= err < prof_dump_buf_end) is
	 * treated as success and the unwritten tail is silently dropped --
	 * confirm this is acceptable for the dump file format.
	 */
	err = write(prof_dump_fd, prof_dump_buf, prof_dump_buf_end);
	if (err == -1) {
		if (propagate_err == false) {
			malloc_write("<jemalloc>: write() failed during heap "
			    "profile flush\n");
			if (opt_abort)
				abort();
		}
		ret = true;
	}
	/* The buffer is considered drained even on error. */
	prof_dump_buf_end = 0;

	return (ret);
}
| 691 | |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 692 | static bool |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 693 | prof_dump_close(bool propagate_err) |
| 694 | { |
| 695 | bool ret; |
| 696 | |
| 697 | assert(prof_dump_fd != -1); |
| 698 | ret = prof_dump_flush(propagate_err); |
| 699 | close(prof_dump_fd); |
| 700 | prof_dump_fd = -1; |
| 701 | |
| 702 | return (ret); |
| 703 | } |
| 704 | |
| 705 | static bool |
| 706 | prof_dump_write(bool propagate_err, const char *s) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 707 | { |
| 708 | unsigned i, slen, n; |
| 709 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 710 | cassert(config_prof); |
| 711 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 712 | i = 0; |
| 713 | slen = strlen(s); |
| 714 | while (i < slen) { |
| 715 | /* Flush the buffer if it is full. */ |
Jason Evans | cd9a134 | 2012-03-21 18:33:03 -0700 | [diff] [blame] | 716 | if (prof_dump_buf_end == PROF_DUMP_BUFSIZE) |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 717 | if (prof_dump_flush(propagate_err) && propagate_err) |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 718 | return (true); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 719 | |
Jason Evans | cd9a134 | 2012-03-21 18:33:03 -0700 | [diff] [blame] | 720 | if (prof_dump_buf_end + slen <= PROF_DUMP_BUFSIZE) { |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 721 | /* Finish writing. */ |
| 722 | n = slen - i; |
| 723 | } else { |
| 724 | /* Write as much of s as will fit. */ |
Jason Evans | cd9a134 | 2012-03-21 18:33:03 -0700 | [diff] [blame] | 725 | n = PROF_DUMP_BUFSIZE - prof_dump_buf_end; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 726 | } |
| 727 | memcpy(&prof_dump_buf[prof_dump_buf_end], &s[i], n); |
| 728 | prof_dump_buf_end += n; |
| 729 | i += n; |
| 730 | } |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 731 | |
| 732 | return (false); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 733 | } |
| 734 | |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 735 | JEMALLOC_ATTR(format(printf, 2, 3)) |
| 736 | static bool |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 737 | prof_dump_printf(bool propagate_err, const char *format, ...) |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 738 | { |
| 739 | bool ret; |
| 740 | va_list ap; |
Jason Evans | cd9a134 | 2012-03-21 18:33:03 -0700 | [diff] [blame] | 741 | char buf[PROF_PRINTF_BUFSIZE]; |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 742 | |
| 743 | va_start(ap, format); |
Jason Evans | 6da5418 | 2012-03-23 18:05:51 -0700 | [diff] [blame] | 744 | malloc_vsnprintf(buf, sizeof(buf), format, ap); |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 745 | va_end(ap); |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 746 | ret = prof_dump_write(propagate_err, buf); |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 747 | |
| 748 | return (ret); |
| 749 | } |
| 750 | |
/*
 * Dump pass 1 helper: with ctx->lock held, pin ctx and queue it on ctx_ql
 * for pass 2, then sum the merged plus all per thread counters into
 * ctx->cnt_summed and accumulate the grand totals into *cnt_all.
 * *leak_nctx is incremented for each ctx that still has live objects.
 */
static void
prof_dump_ctx_prep(prof_ctx_t *ctx, prof_cnt_t *cnt_all, size_t *leak_nctx,
    prof_ctx_list_t *ctx_ql)
{
	prof_thr_cnt_t *thr_cnt;
	prof_cnt_t tcnt;

	cassert(config_prof);

	malloc_mutex_lock(ctx->lock);

	/*
	 * Increment nlimbo so that ctx won't go away before dump.
	 * Additionally, link ctx into the dump list so that it is included in
	 * prof_dump()'s second pass.
	 */
	ctx->nlimbo++;
	ql_tail_insert(ctx_ql, ctx, dump_link);

	/* Start from the merged counts, then fold in each thread's counts. */
	memcpy(&ctx->cnt_summed, &ctx->cnt_merged, sizeof(prof_cnt_t));
	ql_foreach(thr_cnt, &ctx->cnts_ql, cnts_link) {
		volatile unsigned *epoch = &thr_cnt->epoch;

		/*
		 * Seqlock-style read: snapshot cnts only while epoch is even
		 * (no writer active), and retry if epoch changed during the
		 * copy.
		 */
		while (true) {
			unsigned epoch0 = *epoch;

			/* Make sure epoch is even. */
			if (epoch0 & 1U)
				continue;

			memcpy(&tcnt, &thr_cnt->cnts, sizeof(prof_cnt_t));

			/* Terminate if epoch didn't change while reading. */
			if (*epoch == epoch0)
				break;
		}

		ctx->cnt_summed.curobjs += tcnt.curobjs;
		ctx->cnt_summed.curbytes += tcnt.curbytes;
		if (opt_prof_accum) {
			ctx->cnt_summed.accumobjs += tcnt.accumobjs;
			ctx->cnt_summed.accumbytes += tcnt.accumbytes;
		}
	}

	/* A ctx with live objects counts toward the leak report. */
	if (ctx->cnt_summed.curobjs != 0)
		(*leak_nctx)++;

	/* Add to cnt_all. */
	cnt_all->curobjs += ctx->cnt_summed.curobjs;
	cnt_all->curbytes += ctx->cnt_summed.curbytes;
	if (opt_prof_accum) {
		cnt_all->accumobjs += ctx->cnt_summed.accumobjs;
		cnt_all->accumbytes += ctx->cnt_summed.accumbytes;
	}

	malloc_mutex_unlock(ctx->lock);
}
| 809 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 810 | static bool |
| 811 | prof_dump_header(bool propagate_err, const prof_cnt_t *cnt_all) |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 812 | { |
| 813 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 814 | if (opt_lg_prof_sample == 0) { |
| 815 | if (prof_dump_printf(propagate_err, |
| 816 | "heap profile: %"PRId64": %"PRId64 |
| 817 | " [%"PRIu64": %"PRIu64"] @ heapprofile\n", |
| 818 | cnt_all->curobjs, cnt_all->curbytes, |
| 819 | cnt_all->accumobjs, cnt_all->accumbytes)) |
| 820 | return (true); |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 821 | } else { |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 822 | if (prof_dump_printf(propagate_err, |
| 823 | "heap profile: %"PRId64": %"PRId64 |
| 824 | " [%"PRIu64": %"PRIu64"] @ heap_v2/%"PRIu64"\n", |
| 825 | cnt_all->curobjs, cnt_all->curbytes, |
| 826 | cnt_all->accumobjs, cnt_all->accumbytes, |
| 827 | ((uint64_t)1U << opt_lg_prof_sample))) |
| 828 | return (true); |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 829 | } |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 830 | |
| 831 | return (false); |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 832 | } |
| 833 | |
| 834 | static void |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 835 | prof_dump_ctx_cleanup_locked(prof_ctx_t *ctx, prof_ctx_list_t *ctx_ql) |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 836 | { |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 837 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 838 | ctx->nlimbo--; |
| 839 | ql_remove(ctx_ql, ctx, dump_link); |
| 840 | } |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 841 | |
/*
 * Locking wrapper around prof_dump_ctx_cleanup_locked(), used on prof_dump()
 * error paths where ctx->lock is not already held.
 */
static void
prof_dump_ctx_cleanup(prof_ctx_t *ctx, prof_ctx_list_t *ctx_ql)
{

	malloc_mutex_lock(ctx->lock);
	prof_dump_ctx_cleanup_locked(ctx, ctx_ql);
	malloc_mutex_unlock(ctx->lock);
}
| 850 | |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 851 | static bool |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 852 | prof_dump_ctx(bool propagate_err, prof_ctx_t *ctx, const prof_bt_t *bt, |
| 853 | prof_ctx_list_t *ctx_ql) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 854 | { |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 855 | bool ret; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 856 | unsigned i; |
| 857 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 858 | cassert(config_prof); |
| 859 | |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 860 | /* |
| 861 | * Current statistics can sum to 0 as a result of unmerged per thread |
| 862 | * statistics. Additionally, interval- and growth-triggered dumps can |
| 863 | * occur between the time a ctx is created and when its statistics are |
| 864 | * filled in. Avoid dumping any ctx that is an artifact of either |
| 865 | * implementation detail. |
| 866 | */ |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 867 | malloc_mutex_lock(ctx->lock); |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 868 | if ((opt_prof_accum == false && ctx->cnt_summed.curobjs == 0) || |
| 869 | (opt_prof_accum && ctx->cnt_summed.accumobjs == 0)) { |
| 870 | assert(ctx->cnt_summed.curobjs == 0); |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 871 | assert(ctx->cnt_summed.curbytes == 0); |
| 872 | assert(ctx->cnt_summed.accumobjs == 0); |
| 873 | assert(ctx->cnt_summed.accumbytes == 0); |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 874 | ret = false; |
| 875 | goto label_return; |
Jason Evans | a881cd2 | 2010-10-02 15:18:50 -0700 | [diff] [blame] | 876 | } |
| 877 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 878 | if (prof_dump_printf(propagate_err, "%"PRId64": %"PRId64 |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 879 | " [%"PRIu64": %"PRIu64"] @", |
| 880 | ctx->cnt_summed.curobjs, ctx->cnt_summed.curbytes, |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 881 | ctx->cnt_summed.accumobjs, ctx->cnt_summed.accumbytes)) { |
| 882 | ret = true; |
| 883 | goto label_return; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 884 | } |
| 885 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 886 | for (i = 0; i < bt->len; i++) { |
| 887 | if (prof_dump_printf(propagate_err, " %#"PRIxPTR, |
| 888 | (uintptr_t)bt->vec[i])) { |
| 889 | ret = true; |
| 890 | goto label_return; |
| 891 | } |
| 892 | } |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 893 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 894 | if (prof_dump_write(propagate_err, "\n")) { |
| 895 | ret = true; |
| 896 | goto label_return; |
| 897 | } |
| 898 | |
| 899 | label_return: |
| 900 | prof_dump_ctx_cleanup_locked(ctx, ctx_ql); |
| 901 | malloc_mutex_unlock(ctx->lock); |
| 902 | return (ret); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 903 | } |
| 904 | |
/*
 * Append the contents of /proc/<pid>/maps to the dump (pprof uses the
 * MAPPED_LIBRARIES section to symbolize addresses).  Returns true on error,
 * including failure to open the maps file.
 */
static bool
prof_dump_maps(bool propagate_err)
{
	bool ret;
	int mfd;
	char filename[PATH_MAX + 1];

	cassert(config_prof);

	malloc_snprintf(filename, sizeof(filename), "/proc/%d/maps",
	    (int)getpid());
	mfd = open(filename, O_RDONLY);
	if (mfd != -1) {
		ssize_t nread;

		if (prof_dump_write(propagate_err, "\nMAPPED_LIBRARIES:\n") &&
		    propagate_err) {
			ret = true;
			goto label_return;
		}
		/*
		 * Copy the maps file straight through the shared dump buffer:
		 * each iteration first accounts for the bytes read last time,
		 * flushes if the buffer is full, then reads into the free
		 * tail of the buffer.
		 */
		nread = 0;
		do {
			prof_dump_buf_end += nread;
			if (prof_dump_buf_end == PROF_DUMP_BUFSIZE) {
				/* Make space in prof_dump_buf before read(). */
				if (prof_dump_flush(propagate_err) &&
				    propagate_err) {
					ret = true;
					goto label_return;
				}
			}
			nread = read(mfd, &prof_dump_buf[prof_dump_buf_end],
			    PROF_DUMP_BUFSIZE - prof_dump_buf_end);
		} while (nread > 0);
	} else {
		ret = true;
		goto label_return;
	}

	ret = false;
label_return:
	if (mfd != -1)
		close(mfd);
	return (ret);
}
| 950 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 951 | static void |
| 952 | prof_leakcheck(const prof_cnt_t *cnt_all, size_t leak_nctx, |
| 953 | const char *filename) |
| 954 | { |
| 955 | |
| 956 | if (cnt_all->curbytes != 0) { |
| 957 | malloc_printf("<jemalloc>: Leak summary: %"PRId64" byte%s, %" |
| 958 | PRId64" object%s, %zu context%s\n", |
| 959 | cnt_all->curbytes, (cnt_all->curbytes != 1) ? "s" : "", |
| 960 | cnt_all->curobjs, (cnt_all->curobjs != 1) ? "s" : "", |
| 961 | leak_nctx, (leak_nctx != 1) ? "s" : ""); |
| 962 | malloc_printf( |
| 963 | "<jemalloc>: Run pprof on \"%s\" for leak detail\n", |
| 964 | filename); |
| 965 | } |
| 966 | } |
| 967 | |
/*
 * Write a complete heap profile to filename.  Dumps are serialized via
 * prof_dump_mtx.  Pass 1 (prof_dump_ctx_prep, bracketed by
 * prof_enter()/prof_leave()) pins every ctx on ctx_ql and sums its counters
 * into cnt_all; pass 2 (prof_dump_ctx) emits each record and unpins as it
 * goes.  On error, any ctxs still pinned are released before returning
 * true.  propagate_err selects whether write errors are reported here or
 * returned to the caller; leakcheck additionally prints a leak summary.
 */
static bool
prof_dump(bool propagate_err, const char *filename, bool leakcheck)
{
	prof_tdata_t *prof_tdata;
	prof_cnt_t cnt_all;
	size_t tabind;
	union {
		prof_ctx_t *p;
		void *v;
	} ctx;
	size_t leak_nctx;
	prof_ctx_list_t ctx_ql;

	cassert(config_prof);

	prof_tdata = prof_tdata_get(false);
	if ((uintptr_t)prof_tdata <= (uintptr_t)PROF_TDATA_STATE_MAX)
		return (true);

	malloc_mutex_lock(&prof_dump_mtx);

	/* Merge per thread profile stats, and sum them in cnt_all. */
	memset(&cnt_all, 0, sizeof(prof_cnt_t));
	leak_nctx = 0;
	ql_new(&ctx_ql);
	prof_enter(prof_tdata);
	for (tabind = 0; ckh_iter(&bt2ctx, &tabind, NULL, &ctx.v) == false;)
		prof_dump_ctx_prep(ctx.p, &cnt_all, &leak_nctx, &ctx_ql);
	prof_leave(prof_tdata);

	/* Create dump file. */
	if (prof_dump_open(propagate_err, filename))
		goto label_open_close_error;

	/* Dump profile header. */
	if (prof_dump_header(propagate_err, &cnt_all))
		goto label_write_error;

	/* Dump per ctx profile stats. */
	while ((ctx.p = ql_first(&ctx_ql)) != NULL) {
		if (prof_dump_ctx(propagate_err, ctx.p, ctx.p->bt, &ctx_ql))
			goto label_write_error;
	}

	/* Dump /proc/<pid>/maps if possible. */
	if (prof_dump_maps(propagate_err))
		goto label_write_error;

	if (prof_dump_close(propagate_err))
		goto label_open_close_error;

	malloc_mutex_unlock(&prof_dump_mtx);

	if (leakcheck)
		prof_leakcheck(&cnt_all, leak_nctx, filename);

	return (false);
label_write_error:
	/* The file is open at this point; close it before unwinding. */
	prof_dump_close(propagate_err);
label_open_close_error:
	/* Release every ctx that pass 2 has not yet consumed. */
	while ((ctx.p = ql_first(&ctx_ql)) != NULL)
		prof_dump_ctx_cleanup(ctx.p, &ctx_ql);
	malloc_mutex_unlock(&prof_dump_mtx);
	return (true);
}
| 1033 | |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 1034 | #define DUMP_FILENAME_BUFSIZE (PATH_MAX + 1) |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 1035 | #define VSEQ_INVALID UINT64_C(0xffffffffffffffff) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1036 | static void |
| 1037 | prof_dump_filename(char *filename, char v, int64_t vseq) |
| 1038 | { |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1039 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1040 | cassert(config_prof); |
| 1041 | |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 1042 | if (vseq != VSEQ_INVALID) { |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 1043 | /* "<prefix>.<pid>.<seq>.v<vseq>.heap" */ |
| 1044 | malloc_snprintf(filename, DUMP_FILENAME_BUFSIZE, |
| 1045 | "%s.%d.%"PRIu64".%c%"PRId64".heap", |
| 1046 | opt_prof_prefix, (int)getpid(), prof_dump_seq, v, vseq); |
| 1047 | } else { |
| 1048 | /* "<prefix>.<pid>.<seq>.<v>.heap" */ |
| 1049 | malloc_snprintf(filename, DUMP_FILENAME_BUFSIZE, |
| 1050 | "%s.%d.%"PRIu64".%c.heap", |
| 1051 | opt_prof_prefix, (int)getpid(), prof_dump_seq, v); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1052 | } |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 1053 | prof_dump_seq++; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1054 | } |
| 1055 | |
| 1056 | static void |
| 1057 | prof_fdump(void) |
| 1058 | { |
| 1059 | char filename[DUMP_FILENAME_BUFSIZE]; |
| 1060 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1061 | cassert(config_prof); |
| 1062 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1063 | if (prof_booted == false) |
| 1064 | return; |
| 1065 | |
Jason Evans | 0b25fe7 | 2012-04-17 16:39:33 -0700 | [diff] [blame] | 1066 | if (opt_prof_final && opt_prof_prefix[0] != '\0') { |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1067 | malloc_mutex_lock(&prof_dump_seq_mtx); |
Jason Evans | 4f37ef6 | 2014-01-16 13:23:56 -0800 | [diff] [blame^] | 1068 | prof_dump_filename(filename, 'f', VSEQ_INVALID); |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1069 | malloc_mutex_unlock(&prof_dump_seq_mtx); |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 1070 | prof_dump(false, filename, opt_prof_leak); |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1071 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1072 | } |
| 1073 | |
| 1074 | void |
| 1075 | prof_idump(void) |
| 1076 | { |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 1077 | prof_tdata_t *prof_tdata; |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 1078 | char filename[PATH_MAX + 1]; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1079 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1080 | cassert(config_prof); |
| 1081 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1082 | if (prof_booted == false) |
| 1083 | return; |
Jason Evans | bbe29d3 | 2013-01-30 15:03:11 -0800 | [diff] [blame] | 1084 | prof_tdata = prof_tdata_get(false); |
Jason Evans | 0050a0f | 2012-04-28 18:14:24 -0700 | [diff] [blame] | 1085 | if ((uintptr_t)prof_tdata <= (uintptr_t)PROF_TDATA_STATE_MAX) |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 1086 | return; |
| 1087 | if (prof_tdata->enq) { |
| 1088 | prof_tdata->enq_idump = true; |
Jason Evans | d34f9e7 | 2010-02-11 13:19:21 -0800 | [diff] [blame] | 1089 | return; |
| 1090 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1091 | |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1092 | if (opt_prof_prefix[0] != '\0') { |
| 1093 | malloc_mutex_lock(&prof_dump_seq_mtx); |
| 1094 | prof_dump_filename(filename, 'i', prof_dump_iseq); |
| 1095 | prof_dump_iseq++; |
| 1096 | malloc_mutex_unlock(&prof_dump_seq_mtx); |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 1097 | prof_dump(false, filename, false); |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1098 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1099 | } |
| 1100 | |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 1101 | bool |
| 1102 | prof_mdump(const char *filename) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1103 | { |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 1104 | char filename_buf[DUMP_FILENAME_BUFSIZE]; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1105 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1106 | cassert(config_prof); |
| 1107 | |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 1108 | if (opt_prof == false || prof_booted == false) |
| 1109 | return (true); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1110 | |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 1111 | if (filename == NULL) { |
| 1112 | /* No filename specified, so automatically generate one. */ |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1113 | if (opt_prof_prefix[0] == '\0') |
| 1114 | return (true); |
Jason Evans | 22ca855 | 2010-03-02 11:57:30 -0800 | [diff] [blame] | 1115 | malloc_mutex_lock(&prof_dump_seq_mtx); |
| 1116 | prof_dump_filename(filename_buf, 'm', prof_dump_mseq); |
| 1117 | prof_dump_mseq++; |
| 1118 | malloc_mutex_unlock(&prof_dump_seq_mtx); |
| 1119 | filename = filename_buf; |
| 1120 | } |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 1121 | return (prof_dump(true, filename, false)); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1122 | } |
| 1123 | |
| 1124 | void |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1125 | prof_gdump(void) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1126 | { |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 1127 | prof_tdata_t *prof_tdata; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1128 | char filename[DUMP_FILENAME_BUFSIZE]; |
| 1129 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1130 | cassert(config_prof); |
| 1131 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1132 | if (prof_booted == false) |
| 1133 | return; |
Jason Evans | bbe29d3 | 2013-01-30 15:03:11 -0800 | [diff] [blame] | 1134 | prof_tdata = prof_tdata_get(false); |
Jason Evans | 0050a0f | 2012-04-28 18:14:24 -0700 | [diff] [blame] | 1135 | if ((uintptr_t)prof_tdata <= (uintptr_t)PROF_TDATA_STATE_MAX) |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 1136 | return; |
| 1137 | if (prof_tdata->enq) { |
| 1138 | prof_tdata->enq_gdump = true; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1139 | return; |
| 1140 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1141 | |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1142 | if (opt_prof_prefix[0] != '\0') { |
| 1143 | malloc_mutex_lock(&prof_dump_seq_mtx); |
| 1144 | prof_dump_filename(filename, 'u', prof_dump_useq); |
| 1145 | prof_dump_useq++; |
| 1146 | malloc_mutex_unlock(&prof_dump_seq_mtx); |
Jason Evans | d81e4bd | 2012-03-06 14:57:45 -0800 | [diff] [blame] | 1147 | prof_dump(false, filename, false); |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1148 | } |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1149 | } |
| 1150 | |
| 1151 | static void |
Jason Evans | ae03bf6 | 2013-01-22 12:02:08 -0800 | [diff] [blame] | 1152 | prof_bt_hash(const void *key, size_t r_hash[2]) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1153 | { |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1154 | prof_bt_t *bt = (prof_bt_t *)key; |
| 1155 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1156 | cassert(config_prof); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1157 | |
Jason Evans | ae03bf6 | 2013-01-22 12:02:08 -0800 | [diff] [blame] | 1158 | hash(bt->vec, bt->len * sizeof(void *), 0x94122f33U, r_hash); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1159 | } |
| 1160 | |
| 1161 | static bool |
| 1162 | prof_bt_keycomp(const void *k1, const void *k2) |
| 1163 | { |
| 1164 | const prof_bt_t *bt1 = (prof_bt_t *)k1; |
| 1165 | const prof_bt_t *bt2 = (prof_bt_t *)k2; |
| 1166 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1167 | cassert(config_prof); |
| 1168 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1169 | if (bt1->len != bt2->len) |
| 1170 | return (false); |
| 1171 | return (memcmp(bt1->vec, bt2->vec, bt1->len * sizeof(void *)) == 0); |
| 1172 | } |
| 1173 | |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1174 | prof_tdata_t * |
| 1175 | prof_tdata_init(void) |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1176 | { |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1177 | prof_tdata_t *prof_tdata; |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1178 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1179 | cassert(config_prof); |
| 1180 | |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1181 | /* Initialize an empty cache for this thread. */ |
| 1182 | prof_tdata = (prof_tdata_t *)imalloc(sizeof(prof_tdata_t)); |
| 1183 | if (prof_tdata == NULL) |
| 1184 | return (NULL); |
| 1185 | |
| 1186 | if (ckh_new(&prof_tdata->bt2cnt, PROF_CKH_MINITEMS, |
| 1187 | prof_bt_hash, prof_bt_keycomp)) { |
| 1188 | idalloc(prof_tdata); |
| 1189 | return (NULL); |
| 1190 | } |
| 1191 | ql_new(&prof_tdata->lru_ql); |
| 1192 | |
Jason Evans | 5389146 | 2012-02-13 18:23:41 -0800 | [diff] [blame] | 1193 | prof_tdata->vec = imalloc(sizeof(void *) * PROF_BT_MAX); |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1194 | if (prof_tdata->vec == NULL) { |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1195 | ckh_delete(&prof_tdata->bt2cnt); |
| 1196 | idalloc(prof_tdata); |
| 1197 | return (NULL); |
| 1198 | } |
| 1199 | |
Jason Evans | 84f7cdb | 2012-03-02 15:59:45 -0800 | [diff] [blame] | 1200 | prof_tdata->prng_state = 0; |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1201 | prof_tdata->threshold = 0; |
| 1202 | prof_tdata->accum = 0; |
| 1203 | |
Jason Evans | 52386b2 | 2012-04-22 16:00:11 -0700 | [diff] [blame] | 1204 | prof_tdata->enq = false; |
| 1205 | prof_tdata->enq_idump = false; |
| 1206 | prof_tdata->enq_gdump = false; |
| 1207 | |
Jason Evans | cd9a134 | 2012-03-21 18:33:03 -0700 | [diff] [blame] | 1208 | prof_tdata_tsd_set(&prof_tdata); |
Jason Evans | 4d6a134 | 2010-10-20 19:05:59 -0700 | [diff] [blame] | 1209 | |
| 1210 | return (prof_tdata); |
| 1211 | } |
| 1212 | |
/*
 * TSD destructor for the per-thread profiling data.  Merges this thread's
 * counters into the global stats and frees the per-thread state.  Uses the
 * REINCARNATED/PURGATORY sentinel protocol so that other TSD destructors
 * that allocate (and thereby re-create prof_tdata) after this destructor
 * has run still get cleaned up via an additional destructor callback.
 *
 * arg points at the thread's prof_tdata_t * TSD slot.
 */
void
prof_tdata_cleanup(void *arg)
{
	prof_thr_cnt_t *cnt;
	prof_tdata_t *prof_tdata = *(prof_tdata_t **)arg;

	cassert(config_prof);

	if (prof_tdata == PROF_TDATA_STATE_REINCARNATED) {
		/*
		 * Another destructor deallocated memory after this destructor
		 * was called.  Reset prof_tdata to PROF_TDATA_STATE_PURGATORY
		 * in order to receive another callback.
		 */
		prof_tdata = PROF_TDATA_STATE_PURGATORY;
		prof_tdata_tsd_set(&prof_tdata);
	} else if (prof_tdata == PROF_TDATA_STATE_PURGATORY) {
		/*
		 * The previous time this destructor was called, we set the key
		 * to PROF_TDATA_STATE_PURGATORY so that other destructors
		 * wouldn't cause re-creation of the prof_tdata.  This time, do
		 * nothing, so that the destructor will not be called again.
		 */
	} else if (prof_tdata != NULL) {
		/*
		 * Delete the hash table.  All of its contents can still be
		 * iterated over via the LRU.
		 */
		ckh_delete(&prof_tdata->bt2cnt);
		/*
		 * Iteratively merge cnt's into the global stats and delete
		 * them.
		 */
		while ((cnt = ql_last(&prof_tdata->lru_ql, lru_link)) != NULL) {
			ql_remove(&prof_tdata->lru_ql, cnt, lru_link);
			prof_ctx_merge(cnt->ctx, cnt);
			idalloc(cnt);
		}
		idalloc(prof_tdata->vec);
		idalloc(prof_tdata);
		/*
		 * Set the key to PURGATORY so that a later allocation by
		 * another destructor re-creates prof_tdata (as REINCARNATED)
		 * and triggers one more cleanup callback for it.
		 */
		prof_tdata = PROF_TDATA_STATE_PURGATORY;
		prof_tdata_tsd_set(&prof_tdata);
	}
}
| 1257 | |
| 1258 | void |
| 1259 | prof_boot0(void) |
| 1260 | { |
| 1261 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1262 | cassert(config_prof); |
| 1263 | |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1264 | memcpy(opt_prof_prefix, PROF_PREFIX_DEFAULT, |
| 1265 | sizeof(PROF_PREFIX_DEFAULT)); |
| 1266 | } |
| 1267 | |
| 1268 | void |
| 1269 | prof_boot1(void) |
| 1270 | { |
| 1271 | |
Jason Evans | 7372b15 | 2012-02-10 20:22:09 -0800 | [diff] [blame] | 1272 | cassert(config_prof); |
| 1273 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1274 | /* |
Jason Evans | 0b270a9 | 2010-03-31 16:45:04 -0700 | [diff] [blame] | 1275 | * opt_prof and prof_promote must be in their final state before any |
| 1276 | * arenas are initialized, so this function must be executed early. |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1277 | */ |
| 1278 | |
| 1279 | if (opt_prof_leak && opt_prof == false) { |
| 1280 | /* |
| 1281 | * Enable opt_prof, but in such a way that profiles are never |
| 1282 | * automatically dumped. |
| 1283 | */ |
| 1284 | opt_prof = true; |
Jason Evans | e733970 | 2010-10-23 18:37:06 -0700 | [diff] [blame] | 1285 | opt_prof_gdump = false; |
Jason Evans | a02fc08 | 2010-03-31 17:35:51 -0700 | [diff] [blame] | 1286 | } else if (opt_prof) { |
| 1287 | if (opt_lg_prof_interval >= 0) { |
| 1288 | prof_interval = (((uint64_t)1U) << |
| 1289 | opt_lg_prof_interval); |
Jason Evans | a3b3386 | 2012-11-13 12:56:27 -0800 | [diff] [blame] | 1290 | } |
Jason Evans | a02fc08 | 2010-03-31 17:35:51 -0700 | [diff] [blame] | 1291 | } |
Jason Evans | 0b270a9 | 2010-03-31 16:45:04 -0700 | [diff] [blame] | 1292 | |
Jason Evans | ae4c7b4 | 2012-04-02 07:04:34 -0700 | [diff] [blame] | 1293 | prof_promote = (opt_prof && opt_lg_prof_sample > LG_PAGE); |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1294 | } |
| 1295 | |
/*
 * Third-stage profiling bootstrap, run during malloc initialization.
 * Allocates the global backtrace-->context table and its locks, boots the
 * per-thread profiling TSD, and registers the exit-time dump handler.
 * Returns true on error, false on success.
 */
bool
prof_boot2(void)
{

	cassert(config_prof);

	if (opt_prof) {
		unsigned i;

		/* Global table mapping backtraces to prof ctx's. */
		if (ckh_new(&bt2ctx, PROF_CKH_MINITEMS, prof_bt_hash,
		    prof_bt_keycomp))
			return (true);
		if (malloc_mutex_init(&bt2ctx_mtx))
			return (true);
		/*
		 * Unlike the other failures here, TSD boot failure aborts
		 * outright rather than returning an error.
		 */
		if (prof_tdata_tsd_boot()) {
			malloc_write(
			    "<jemalloc>: Error in pthread_key_create()\n");
			abort();
		}

		if (malloc_mutex_init(&prof_dump_seq_mtx))
			return (true);
		if (malloc_mutex_init(&prof_dump_mtx))
			return (true);

		/* Dump a final profile at process exit (see prof_fdump). */
		if (atexit(prof_fdump) != 0) {
			malloc_write("<jemalloc>: Error in atexit()\n");
			if (opt_abort)
				abort();
		}

		/*
		 * Striped locks protecting individual ctx's; allocated from
		 * the base allocator, so they are never freed.
		 */
		ctx_locks = (malloc_mutex_t *)base_alloc(PROF_NCTX_LOCKS *
		    sizeof(malloc_mutex_t));
		if (ctx_locks == NULL)
			return (true);
		for (i = 0; i < PROF_NCTX_LOCKS; i++) {
			if (malloc_mutex_init(&ctx_locks[i]))
				return (true);
		}
	}

#ifdef JEMALLOC_PROF_LIBGCC
	/*
	 * Cause the backtracing machinery to allocate its internal state
	 * before enabling profiling.
	 */
	_Unwind_Backtrace(prof_unwind_init_callback, NULL);
#endif

	/* Must be last: signals the rest of the allocator that prof is up. */
	prof_booted = true;

	return (false);
}
| 1349 | |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 1350 | void |
| 1351 | prof_prefork(void) |
| 1352 | { |
| 1353 | |
| 1354 | if (opt_prof) { |
| 1355 | unsigned i; |
| 1356 | |
Jason Evans | f1c3da8 | 2013-10-21 14:59:10 -0700 | [diff] [blame] | 1357 | malloc_mutex_prefork(&bt2ctx_mtx); |
| 1358 | malloc_mutex_prefork(&prof_dump_seq_mtx); |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 1359 | for (i = 0; i < PROF_NCTX_LOCKS; i++) |
Jason Evans | f1c3da8 | 2013-10-21 14:59:10 -0700 | [diff] [blame] | 1360 | malloc_mutex_prefork(&ctx_locks[i]); |
Jason Evans | 20f1fc9 | 2012-10-09 14:46:22 -0700 | [diff] [blame] | 1361 | } |
| 1362 | } |
| 1363 | |
| 1364 | void |
| 1365 | prof_postfork_parent(void) |
| 1366 | { |
| 1367 | |
| 1368 | if (opt_prof) { |
| 1369 | unsigned i; |
| 1370 | |
| 1371 | for (i = 0; i < PROF_NCTX_LOCKS; i++) |
| 1372 | malloc_mutex_postfork_parent(&ctx_locks[i]); |
| 1373 | malloc_mutex_postfork_parent(&prof_dump_seq_mtx); |
| 1374 | malloc_mutex_postfork_parent(&bt2ctx_mtx); |
| 1375 | } |
| 1376 | } |
| 1377 | |
| 1378 | void |
| 1379 | prof_postfork_child(void) |
| 1380 | { |
| 1381 | |
| 1382 | if (opt_prof) { |
| 1383 | unsigned i; |
| 1384 | |
| 1385 | for (i = 0; i < PROF_NCTX_LOCKS; i++) |
| 1386 | malloc_mutex_postfork_child(&ctx_locks[i]); |
| 1387 | malloc_mutex_postfork_child(&prof_dump_seq_mtx); |
| 1388 | malloc_mutex_postfork_child(&bt2ctx_mtx); |
| 1389 | } |
| 1390 | } |
| 1391 | |
Jason Evans | 6109fe0 | 2010-02-10 10:37:56 -0800 | [diff] [blame] | 1392 | /******************************************************************************/ |