#include "util.h"
#include "build-id.h"
#include "hist.h"
#include "session.h"
#include "sort.h"
#include "evlist.h"
#include "evsel.h"
#include "annotate.h"
#include "ui/progress.h"
#include <math.h>

static bool hists__filter_entry_by_dso(struct hists *hists,
				       struct hist_entry *he);
static bool hists__filter_entry_by_thread(struct hists *hists,
					  struct hist_entry *he);
static bool hists__filter_entry_by_symbol(struct hists *hists,
					  struct hist_entry *he);
static bool hists__filter_entry_by_socket(struct hists *hists,
					  struct hist_entry *he);

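/*
 * Column width bookkeeping: hists->col_len[] records the widest string seen
 * so far for each output column, so the text UIs can align their output.
 * hists__new_col_len() only ever grows a column, it never shrinks one.
 */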
u16 hists__col_len(struct hists *hists, enum hist_column col)
{
	return hists->col_len[col];
}

void hists__set_col_len(struct hists *hists, enum hist_column col, u16 len)
{
	hists->col_len[col] = len;
}

bool hists__new_col_len(struct hists *hists, enum hist_column col, u16 len)
{
	if (len > hists__col_len(hists, col)) {
		hists__set_col_len(hists, col, len);
		return true;
	}
	return false;
}

void hists__reset_col_len(struct hists *hists)
{
	enum hist_column col;

	for (col = 0; col < HISTC_NR_COLS; ++col)
		hists__set_col_len(hists, col, 0);
}

static void hists__set_unres_dso_col_len(struct hists *hists, int dso)
{
	const unsigned int unresolved_col_width = BITS_PER_LONG / 4;

	if (hists__col_len(hists, dso) < unresolved_col_width &&
	    !symbol_conf.col_width_list_str && !symbol_conf.field_sep &&
	    !symbol_conf.dso_list)
		hists__set_col_len(hists, dso, unresolved_col_width);
}

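/*
 * Update the column widths for a single hist_entry: the symbol, comm, dso,
 * branch, memory and trace columns are all grown to fit this entry's strings.
 */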
void hists__calc_col_len(struct hists *hists, struct hist_entry *h)
{
	const unsigned int unresolved_col_width = BITS_PER_LONG / 4;
	int symlen;
	u16 len;

	/*
	 * +4 accounts for '[x] ' priv level info
	 * +2 accounts for 0x prefix on raw addresses
	 * +3 accounts for ' y ' symtab origin info
	 */
	if (h->ms.sym) {
		symlen = h->ms.sym->namelen + 4;
		if (verbose)
			symlen += BITS_PER_LONG / 4 + 2 + 3;
		hists__new_col_len(hists, HISTC_SYMBOL, symlen);
	} else {
		symlen = unresolved_col_width + 4 + 2;
		hists__new_col_len(hists, HISTC_SYMBOL, symlen);
		hists__set_unres_dso_col_len(hists, HISTC_DSO);
	}

	len = thread__comm_len(h->thread);
	if (hists__new_col_len(hists, HISTC_COMM, len))
		hists__set_col_len(hists, HISTC_THREAD, len + 6);

	if (h->ms.map) {
		len = dso__name_len(h->ms.map->dso);
		hists__new_col_len(hists, HISTC_DSO, len);
	}

	if (h->parent)
		hists__new_col_len(hists, HISTC_PARENT, h->parent->namelen);

	if (h->branch_info) {
		if (h->branch_info->from.sym) {
			symlen = (int)h->branch_info->from.sym->namelen + 4;
			if (verbose)
				symlen += BITS_PER_LONG / 4 + 2 + 3;
			hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen);

			symlen = dso__name_len(h->branch_info->from.map->dso);
			hists__new_col_len(hists, HISTC_DSO_FROM, symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen);
			hists__set_unres_dso_col_len(hists, HISTC_DSO_FROM);
		}

		if (h->branch_info->to.sym) {
			symlen = (int)h->branch_info->to.sym->namelen + 4;
			if (verbose)
				symlen += BITS_PER_LONG / 4 + 2 + 3;
			hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen);

			symlen = dso__name_len(h->branch_info->to.map->dso);
			hists__new_col_len(hists, HISTC_DSO_TO, symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen);
			hists__set_unres_dso_col_len(hists, HISTC_DSO_TO);
		}
	}

	if (h->mem_info) {
		if (h->mem_info->daddr.sym) {
			symlen = (int)h->mem_info->daddr.sym->namelen + 4
			       + unresolved_col_width + 2;
			hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL,
					   symlen);
			hists__new_col_len(hists, HISTC_MEM_DCACHELINE,
					   symlen + 1);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL,
					   symlen);
			hists__new_col_len(hists, HISTC_MEM_DCACHELINE,
					   symlen);
		}

		if (h->mem_info->iaddr.sym) {
			symlen = (int)h->mem_info->iaddr.sym->namelen + 4
			       + unresolved_col_width + 2;
			hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL,
					   symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL,
					   symlen);
		}

		if (h->mem_info->daddr.map) {
			symlen = dso__name_len(h->mem_info->daddr.map->dso);
			hists__new_col_len(hists, HISTC_MEM_DADDR_DSO,
					   symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO);
		}
	} else {
		symlen = unresolved_col_width + 4 + 2;
		hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL, symlen);
		hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL, symlen);
		hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO);
	}

	hists__new_col_len(hists, HISTC_CPU, 3);
	hists__new_col_len(hists, HISTC_SOCKET, 6);
	hists__new_col_len(hists, HISTC_MEM_LOCKED, 6);
	hists__new_col_len(hists, HISTC_MEM_TLB, 22);
	hists__new_col_len(hists, HISTC_MEM_SNOOP, 12);
	hists__new_col_len(hists, HISTC_MEM_LVL, 21 + 3);
	hists__new_col_len(hists, HISTC_LOCAL_WEIGHT, 12);
	hists__new_col_len(hists, HISTC_GLOBAL_WEIGHT, 12);

	if (h->srcline)
		hists__new_col_len(hists, HISTC_SRCLINE, strlen(h->srcline));

	if (h->srcfile)
		hists__new_col_len(hists, HISTC_SRCFILE, strlen(h->srcfile));

	if (h->transaction)
		hists__new_col_len(hists, HISTC_TRANSACTION,
				   hist_entry__transaction_len());

	if (h->trace_output)
		hists__new_col_len(hists, HISTC_TRACE, strlen(h->trace_output));
}

void hists__output_recalc_col_len(struct hists *hists, int max_rows)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;
	int row = 0;

	hists__reset_col_len(hists);

	while (next && row++ < max_rows) {
		n = rb_entry(next, struct hist_entry, rb_node);
		if (!n->filtered)
			hists__calc_col_len(hists, n);
		next = rb_next(&n->rb_node);
	}
}

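/* Attribute the period to the kernel/user/guest bucket matching the sample's cpumode. */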
static void he_stat__add_cpumode_period(struct he_stat *he_stat,
					unsigned int cpumode, u64 period)
{
	switch (cpumode) {
	case PERF_RECORD_MISC_KERNEL:
		he_stat->period_sys += period;
		break;
	case PERF_RECORD_MISC_USER:
		he_stat->period_us += period;
		break;
	case PERF_RECORD_MISC_GUEST_KERNEL:
		he_stat->period_guest_sys += period;
		break;
	case PERF_RECORD_MISC_GUEST_USER:
		he_stat->period_guest_us += period;
		break;
	default:
		break;
	}
}

static void he_stat__add_period(struct he_stat *he_stat, u64 period,
				u64 weight)
{
	he_stat->period += period;
	he_stat->weight += weight;
	he_stat->nr_events += 1;
}

static void he_stat__add_stat(struct he_stat *dest, struct he_stat *src)
{
	dest->period += src->period;
	dest->period_sys += src->period_sys;
	dest->period_us += src->period_us;
	dest->period_guest_sys += src->period_guest_sys;
	dest->period_guest_us += src->period_guest_us;
	dest->nr_events += src->nr_events;
	dest->weight += src->weight;
}

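/*
 * Exponential decay: each call keeps 7/8 of the period and event count,
 * so entries that stop receiving samples gradually fade out of the output.
 */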
static void he_stat__decay(struct he_stat *he_stat)
{
	he_stat->period = (he_stat->period * 7) / 8;
	he_stat->nr_events = (he_stat->nr_events * 7) / 8;
	/* XXX need decay for weight too? */
}

static void hists__delete_entry(struct hists *hists, struct hist_entry *he);

static bool hists__decay_entry(struct hists *hists, struct hist_entry *he)
{
	u64 prev_period = he->stat.period;
	u64 diff;

	if (prev_period == 0)
		return true;

	he_stat__decay(&he->stat);
	if (symbol_conf.cumulate_callchain)
		he_stat__decay(he->stat_acc);
	decay_callchain(he->callchain);

	diff = prev_period - he->stat.period;

	if (!he->depth) {
		hists->stats.total_period -= diff;
		if (!he->filtered)
			hists->stats.total_non_filtered_period -= diff;
	}

	if (!he->leaf) {
		struct hist_entry *child;
		struct rb_node *node = rb_first(&he->hroot_out);
		while (node) {
			child = rb_entry(node, struct hist_entry, rb_node);
			node = rb_next(node);

			if (hists__decay_entry(hists, child))
				hists__delete_entry(hists, child);
		}
	}

	return he->stat.period == 0;
}

static void hists__delete_entry(struct hists *hists, struct hist_entry *he)
{
	struct rb_root *root_in;
	struct rb_root *root_out;

	if (he->parent_he) {
		root_in  = &he->parent_he->hroot_in;
		root_out = &he->parent_he->hroot_out;
	} else {
		if (sort__need_collapse)
			root_in = &hists->entries_collapsed;
		else
			root_in = hists->entries_in;
		root_out = &hists->entries;
	}

	rb_erase(&he->rb_node_in, root_in);
	rb_erase(&he->rb_node, root_out);

	--hists->nr_entries;
	if (!he->filtered)
		--hists->nr_non_filtered_entries;

	hist_entry__delete(he);
}

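/*
 * Decay every entry in the tree; entries whose period decays to zero, or
 * that match the zap_user/zap_kernel request, are removed altogether.
 */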
void hists__decay_entries(struct hists *hists, bool zap_user, bool zap_kernel)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;

	while (next) {
		n = rb_entry(next, struct hist_entry, rb_node);
		next = rb_next(&n->rb_node);
		if (((zap_user && n->level == '.') ||
		     (zap_kernel && n->level != '.') ||
		     hists__decay_entry(hists, n))) {
			hists__delete_entry(hists, n);
		}
	}
}

void hists__delete_entries(struct hists *hists)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;

	while (next) {
		n = rb_entry(next, struct hist_entry, rb_node);
		next = rb_next(&n->rb_node);

		hists__delete_entry(hists, n);
	}
}

/*
 * histogram, sorted on item, collects periods
 */

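/*
 * Allocate a new hist_entry from a template, deep-copying the state the
 * template does not own outright (branch_info, raw_data) and grabbing
 * references on the maps and thread it points to.
 */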
static struct hist_entry *hist_entry__new(struct hist_entry *template,
					  bool sample_self)
{
	size_t callchain_size = 0;
	struct hist_entry *he;

	if (symbol_conf.use_callchain)
		callchain_size = sizeof(struct callchain_root);

	he = zalloc(sizeof(*he) + callchain_size);

	if (he != NULL) {
		*he = *template;

		if (symbol_conf.cumulate_callchain) {
			he->stat_acc = malloc(sizeof(he->stat));
			if (he->stat_acc == NULL) {
				free(he);
				return NULL;
			}
			memcpy(he->stat_acc, &he->stat, sizeof(he->stat));
			if (!sample_self)
				memset(&he->stat, 0, sizeof(he->stat));
		}

		map__get(he->ms.map);

		if (he->branch_info) {
			/*
			 * This branch info is (a part of) allocated from
			 * sample__resolve_bstack() and will be freed after
			 * adding new entries.  So we need to save a copy.
			 */
			he->branch_info = malloc(sizeof(*he->branch_info));
			if (he->branch_info == NULL) {
				map__zput(he->ms.map);
				free(he->stat_acc);
				free(he);
				return NULL;
			}

			memcpy(he->branch_info, template->branch_info,
			       sizeof(*he->branch_info));

			map__get(he->branch_info->from.map);
			map__get(he->branch_info->to.map);
		}

		if (he->mem_info) {
			map__get(he->mem_info->iaddr.map);
			map__get(he->mem_info->daddr.map);
		}

		if (symbol_conf.use_callchain)
			callchain_init(he->callchain);

		if (he->raw_data) {
			he->raw_data = memdup(he->raw_data, he->raw_size);

			if (he->raw_data == NULL) {
				map__put(he->ms.map);
				if (he->branch_info) {
					map__put(he->branch_info->from.map);
					map__put(he->branch_info->to.map);
					free(he->branch_info);
				}
				if (he->mem_info) {
					map__put(he->mem_info->iaddr.map);
					map__put(he->mem_info->daddr.map);
				}
				free(he->stat_acc);
				free(he);
				return NULL;
			}
		}
		INIT_LIST_HEAD(&he->pairs.node);
		thread__get(he->thread);

		if (!symbol_conf.report_hierarchy)
			he->leaf = true;
	}

	return he;
}

static u8 symbol__parent_filter(const struct symbol *parent)
{
	if (symbol_conf.exclude_other && parent == NULL)
		return 1 << HIST_FILTER__PARENT;
	return 0;
}

static void hist_entry__add_callchain_period(struct hist_entry *he, u64 period)
{
	if (!symbol_conf.use_callchain)
		return;

	he->hists->callchain_period += period;
	if (!he->filtered)
		he->hists->callchain_non_filtered_period += period;
}

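/*
 * Look up the entry in hists->entries_in using hist_entry__cmp() as the key;
 * if it already exists its stats are aggregated, otherwise a new entry is
 * created from the template and inserted into the rbtree.
 */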
static struct hist_entry *hists__findnew_entry(struct hists *hists,
					       struct hist_entry *entry,
					       struct addr_location *al,
					       bool sample_self)
{
	struct rb_node **p;
	struct rb_node *parent = NULL;
	struct hist_entry *he;
	int64_t cmp;
	u64 period = entry->stat.period;
	u64 weight = entry->stat.weight;

	p = &hists->entries_in->rb_node;

	while (*p != NULL) {
		parent = *p;
		he = rb_entry(parent, struct hist_entry, rb_node_in);

		/*
		 * Make sure that it receives arguments in the same order as
		 * hist_entry__collapse() so that we can use an appropriate
		 * function when searching an entry regardless of which sort
		 * keys were used.
		 */
		cmp = hist_entry__cmp(he, entry);

		if (!cmp) {
			if (sample_self) {
				he_stat__add_period(&he->stat, period, weight);
				hist_entry__add_callchain_period(he, period);
			}
			if (symbol_conf.cumulate_callchain)
				he_stat__add_period(he->stat_acc, period, weight);

			/*
			 * This mem info was allocated from sample__resolve_mem
			 * and will not be used anymore.
			 */
			zfree(&entry->mem_info);

			/* If the map of an existing hist_entry has
			 * become out-of-date due to an exec() or
			 * similar, update it.  Otherwise we will
			 * mis-adjust symbol addresses when computing
			 * the history counter to increment.
			 */
			if (he->ms.map != entry->ms.map) {
				map__put(he->ms.map);
				he->ms.map = map__get(entry->ms.map);
			}
			goto out;
		}

		if (cmp < 0)
			p = &(*p)->rb_left;
		else
			p = &(*p)->rb_right;
	}

	he = hist_entry__new(entry, sample_self);
	if (!he)
		return NULL;

	if (sample_self)
		hist_entry__add_callchain_period(he, period);
	hists->nr_entries++;

	rb_link_node(&he->rb_node_in, parent, p);
	rb_insert_color(&he->rb_node_in, hists->entries_in);
out:
	if (sample_self)
		he_stat__add_cpumode_period(&he->stat, al->cpumode, period);
	if (symbol_conf.cumulate_callchain)
		he_stat__add_cpumode_period(he->stat_acc, al->cpumode, period);
	return he;
}

struct hist_entry *__hists__add_entry(struct hists *hists,
				      struct addr_location *al,
				      struct symbol *sym_parent,
				      struct branch_info *bi,
				      struct mem_info *mi,
				      struct perf_sample *sample,
				      bool sample_self)
{
	struct hist_entry entry = {
		.thread = al->thread,
		.comm = thread__comm(al->thread),
		.ms = {
			.map = al->map,
			.sym = al->sym,
		},
		.socket = al->socket,
		.cpu = al->cpu,
		.cpumode = al->cpumode,
		.ip = al->addr,
		.level = al->level,
		.stat = {
			.nr_events = 1,
			.period = sample->period,
			.weight = sample->weight,
		},
		.parent = sym_parent,
		.filtered = symbol__parent_filter(sym_parent) | al->filtered,
		.hists = hists,
		.branch_info = bi,
		.mem_info = mi,
		.transaction = sample->transaction,
		.raw_data = sample->raw_data,
		.raw_size = sample->raw_size,
	};

	return hists__findnew_entry(hists, &entry, al, sample_self);
}

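/*
 * hist_entry_iter callbacks: each sample flavor (normal, mem, branch,
 * cumulative) provides prepare/add_single/next/add_next/finish hooks that
 * hist_entry_iter__add() drives below.
 */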
static int
iter_next_nop_entry(struct hist_entry_iter *iter __maybe_unused,
		    struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_add_next_nop_entry(struct hist_entry_iter *iter __maybe_unused,
			struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_prepare_mem_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct perf_sample *sample = iter->sample;
	struct mem_info *mi;

	mi = sample__resolve_mem(sample, al);
	if (mi == NULL)
		return -ENOMEM;

	iter->priv = mi;
	return 0;
}

static int
iter_add_single_mem_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	u64 cost;
	struct mem_info *mi = iter->priv;
	struct hists *hists = evsel__hists(iter->evsel);
	struct perf_sample *sample = iter->sample;
	struct hist_entry *he;

	if (mi == NULL)
		return -EINVAL;

	cost = sample->weight;
	if (!cost)
		cost = 1;

	/*
	 * We must pass period=weight in order to get the correct
	 * sorting from hists__collapse_resort(), which is based solely
	 * on periods. We want sorting to be done on nr_events * weight,
	 * and this is achieved indirectly by passing period=weight here
	 * and in the he_stat__add_period() function.
	 */
	sample->period = cost;

	he = __hists__add_entry(hists, al, iter->parent, NULL, mi,
				sample, true);
	if (!he)
		return -ENOMEM;

	iter->he = he;
	return 0;
}

static int
iter_finish_mem_entry(struct hist_entry_iter *iter,
		      struct addr_location *al __maybe_unused)
{
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct hist_entry *he = iter->he;
	int err = -EINVAL;

	if (he == NULL)
		goto out;

	hists__inc_nr_samples(hists, he->filtered);

	err = hist_entry__append_callchain(he, iter->sample);

out:
	/*
	 * We don't need to free iter->priv (mem_info) here since the mem info
	 * was either already freed in hists__findnew_entry() or passed to a
	 * new hist entry by hist_entry__new().
	 */
	iter->priv = NULL;

	iter->he = NULL;
	return err;
}

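/*
 * Branch mode: resolve the sample's branch stack once, then add one hist
 * entry per branch record, using a pseudo period of 1 per branch.
 */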
static int
iter_prepare_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi;
	struct perf_sample *sample = iter->sample;

	bi = sample__resolve_bstack(sample, al);
	if (!bi)
		return -ENOMEM;

	iter->curr = 0;
	iter->total = sample->branch_stack->nr;

	iter->priv = bi;
	return 0;
}

static int
iter_add_single_branch_entry(struct hist_entry_iter *iter,
			     struct addr_location *al __maybe_unused)
{
	/* to avoid calling callback function */
	iter->he = NULL;

	return 0;
}

static int
iter_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi = iter->priv;
	int i = iter->curr;

	if (bi == NULL)
		return 0;

	if (iter->curr >= iter->total)
		return 0;

	al->map = bi[i].to.map;
	al->sym = bi[i].to.sym;
	al->addr = bi[i].to.addr;
	return 1;
}

static int
iter_add_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi;
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct perf_sample *sample = iter->sample;
	struct hist_entry *he = NULL;
	int i = iter->curr;
	int err = 0;

	bi = iter->priv;

	if (iter->hide_unresolved && !(bi[i].from.sym && bi[i].to.sym))
		goto out;

	/*
	 * The report shows the percentage of total branches captured
	 * and not events sampled. Thus we use a pseudo period of 1.
	 */
	sample->period = 1;
	sample->weight = bi->flags.cycles ? bi->flags.cycles : 1;

	he = __hists__add_entry(hists, al, iter->parent, &bi[i], NULL,
				sample, true);
	if (he == NULL)
		return -ENOMEM;

	hists__inc_nr_samples(hists, he->filtered);

out:
	iter->he = he;
	iter->curr++;
	return err;
}

static int
iter_finish_branch_entry(struct hist_entry_iter *iter,
			 struct addr_location *al __maybe_unused)
{
	zfree(&iter->priv);
	iter->he = NULL;

	return iter->curr >= iter->total ? 0 : -1;
}

static int
iter_prepare_normal_entry(struct hist_entry_iter *iter __maybe_unused,
			  struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_add_single_normal_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;
	struct hist_entry *he;

	he = __hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL,
				sample, true);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	return 0;
}

static int
iter_finish_normal_entry(struct hist_entry_iter *iter,
			 struct addr_location *al __maybe_unused)
{
	struct hist_entry *he = iter->he;
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;

	if (he == NULL)
		return 0;

	iter->he = NULL;

	hists__inc_nr_samples(evsel__hists(evsel), he->filtered);

	return hist_entry__append_callchain(he, sample);
}

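/*
 * Cumulative mode: in addition to the sampled location itself, an entry is
 * added for every caller in the callchain with sample_self=false, so the
 * period only lands in stat_acc (the cumulated overhead) for those callers.
 */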
static int
iter_prepare_cumulative_entry(struct hist_entry_iter *iter,
			      struct addr_location *al __maybe_unused)
{
	struct hist_entry **he_cache;

	callchain_cursor_commit(&callchain_cursor);

	/*
	 * This is for detecting cycles or recursion so that they're
	 * accumulated only once, preventing entries from exceeding 100%
	 * overhead.
	 */
	he_cache = malloc(sizeof(*he_cache) * (iter->max_stack + 1));
	if (he_cache == NULL)
		return -ENOMEM;

	iter->priv = he_cache;
	iter->curr = 0;

	return 0;
}

static int
iter_add_single_cumulative_entry(struct hist_entry_iter *iter,
				 struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct perf_sample *sample = iter->sample;
	struct hist_entry **he_cache = iter->priv;
	struct hist_entry *he;
	int err = 0;

	he = __hists__add_entry(hists, al, iter->parent, NULL, NULL,
				sample, true);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	he_cache[iter->curr++] = he;

	hist_entry__append_callchain(he, sample);

	/*
	 * We need to re-initialize the cursor since callchain_append()
	 * advanced the cursor to the end.
	 */
	callchain_cursor_commit(&callchain_cursor);

	hists__inc_nr_samples(hists, he->filtered);

	return err;
}

static int
iter_next_cumulative_entry(struct hist_entry_iter *iter,
			   struct addr_location *al)
{
	struct callchain_cursor_node *node;

	node = callchain_cursor_current(&callchain_cursor);
	if (node == NULL)
		return 0;

	return fill_callchain_info(al, node, iter->hide_unresolved);
}

static int
iter_add_next_cumulative_entry(struct hist_entry_iter *iter,
			       struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;
	struct hist_entry **he_cache = iter->priv;
	struct hist_entry *he;
	struct hist_entry he_tmp = {
		.hists = evsel__hists(evsel),
		.cpu = al->cpu,
		.thread = al->thread,
		.comm = thread__comm(al->thread),
		.ip = al->addr,
		.ms = {
			.map = al->map,
			.sym = al->sym,
		},
		.parent = iter->parent,
		.raw_data = sample->raw_data,
		.raw_size = sample->raw_size,
	};
	int i;
	struct callchain_cursor cursor;

	callchain_cursor_snapshot(&cursor, &callchain_cursor);

	callchain_cursor_advance(&callchain_cursor);

	/*
	 * Check if there are duplicate entries in the callchain.
	 * It's possible that it has cycles or recursive calls.
	 */
	for (i = 0; i < iter->curr; i++) {
		if (hist_entry__cmp(he_cache[i], &he_tmp) == 0) {
			/* to avoid calling callback function */
			iter->he = NULL;
			return 0;
		}
	}

	he = __hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL,
				sample, false);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	he_cache[iter->curr++] = he;

	if (symbol_conf.use_callchain)
		callchain_append(he->callchain, &cursor, sample->period);
	return 0;
}

static int
iter_finish_cumulative_entry(struct hist_entry_iter *iter,
			     struct addr_location *al __maybe_unused)
{
	zfree(&iter->priv);
	iter->he = NULL;

	return 0;
}

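/* Callback tables for the four iteration modes, selected by the callers of hist_entry_iter__add(). */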
const struct hist_iter_ops hist_iter_mem = {
	.prepare_entry = iter_prepare_mem_entry,
	.add_single_entry = iter_add_single_mem_entry,
	.next_entry = iter_next_nop_entry,
	.add_next_entry = iter_add_next_nop_entry,
	.finish_entry = iter_finish_mem_entry,
};

const struct hist_iter_ops hist_iter_branch = {
	.prepare_entry = iter_prepare_branch_entry,
	.add_single_entry = iter_add_single_branch_entry,
	.next_entry = iter_next_branch_entry,
	.add_next_entry = iter_add_next_branch_entry,
	.finish_entry = iter_finish_branch_entry,
};

const struct hist_iter_ops hist_iter_normal = {
	.prepare_entry = iter_prepare_normal_entry,
	.add_single_entry = iter_add_single_normal_entry,
	.next_entry = iter_next_nop_entry,
	.add_next_entry = iter_add_next_nop_entry,
	.finish_entry = iter_finish_normal_entry,
};

const struct hist_iter_ops hist_iter_cumulative = {
	.prepare_entry = iter_prepare_cumulative_entry,
	.add_single_entry = iter_add_single_cumulative_entry,
	.next_entry = iter_next_cumulative_entry,
	.add_next_entry = iter_add_next_cumulative_entry,
	.finish_entry = iter_finish_cumulative_entry,
};

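/*
 * Driver for the callbacks above: resolve the callchain, add the single
 * entry for the sample, then keep adding further entries (branch records or
 * callchain parents) until next_entry() reports there is nothing left.
 */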
int hist_entry_iter__add(struct hist_entry_iter *iter, struct addr_location *al,
			 int max_stack_depth, void *arg)
{
	int err, err2;

	err = sample__resolve_callchain(iter->sample, &iter->parent,
					iter->evsel, al, max_stack_depth);
	if (err)
		return err;

	iter->max_stack = max_stack_depth;

	err = iter->ops->prepare_entry(iter, al);
	if (err)
		goto out;

	err = iter->ops->add_single_entry(iter, al);
	if (err)
		goto out;

	if (iter->he && iter->add_entry_cb) {
		err = iter->add_entry_cb(iter, al, true, arg);
		if (err)
			goto out;
	}

	while (iter->ops->next_entry(iter, al)) {
		err = iter->ops->add_next_entry(iter, al);
		if (err)
			break;

		if (iter->he && iter->add_entry_cb) {
			err = iter->add_entry_cb(iter, al, false, arg);
			if (err)
				goto out;
		}
	}

out:
	err2 = iter->ops->finish_entry(iter, al);
	if (!err)
		err = err2;

	return err;
}

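/*
 * Compare two entries by walking the configured sort keys in order; the
 * first key that differs decides the result.
 */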
int64_t
hist_entry__cmp(struct hist_entry *left, struct hist_entry *right)
{
	struct hists *hists = left->hists;
	struct perf_hpp_fmt *fmt;
	int64_t cmp = 0;

	hists__for_each_sort_list(hists, fmt) {
		cmp = fmt->cmp(fmt, left, right);
		if (cmp)
			break;
	}

	return cmp;
}

int64_t
hist_entry__collapse(struct hist_entry *left, struct hist_entry *right)
{
	struct hists *hists = left->hists;
	struct perf_hpp_fmt *fmt;
	int64_t cmp = 0;

	hists__for_each_sort_list(hists, fmt) {
		cmp = fmt->collapse(fmt, left, right);
		if (cmp)
			break;
	}

	return cmp;
}

Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 1029 | void hist_entry__delete(struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1030 | { |
Arnaldo Carvalho de Melo | f3b623b | 2015-03-02 22:21:35 -0300 | [diff] [blame] | 1031 | thread__zput(he->thread); |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 1032 | map__zput(he->ms.map); |
| 1033 | |
| 1034 | if (he->branch_info) { |
| 1035 | map__zput(he->branch_info->from.map); |
| 1036 | map__zput(he->branch_info->to.map); |
| 1037 | zfree(&he->branch_info); |
| 1038 | } |
| 1039 | |
| 1040 | if (he->mem_info) { |
| 1041 | map__zput(he->mem_info->iaddr.map); |
| 1042 | map__zput(he->mem_info->daddr.map); |
| 1043 | zfree(&he->mem_info); |
| 1044 | } |
| 1045 | |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 1046 | zfree(&he->stat_acc); |
Namhyung Kim | f048d54 | 2013-09-11 14:09:28 +0900 | [diff] [blame] | 1047 | free_srcline(he->srcline); |
Andi Kleen | 31191a8 | 2015-08-07 15:54:24 -0700 | [diff] [blame] | 1048 | if (he->srcfile && he->srcfile[0]) |
| 1049 | free(he->srcfile); |
Namhyung Kim | d114960 | 2014-12-30 14:38:13 +0900 | [diff] [blame] | 1050 | free_callchain(he->callchain); |
Namhyung Kim | 60517d2 | 2015-12-23 02:07:03 +0900 | [diff] [blame] | 1051 | free(he->trace_output); |
Namhyung Kim | 7239283 | 2015-12-24 11:16:17 +0900 | [diff] [blame] | 1052 | free(he->raw_data); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1053 | free(he); |
| 1054 | } |
| 1055 | |
| 1056 | /* |
Arnaldo Carvalho de Melo | 89fee70 | 2016-02-11 17:14:13 -0300 | [diff] [blame] | 1057 | * If this is not the last column, then we need to pad it according to the |
| 1058 | * pre-calculated max length for this column, otherwise don't bother adding
| 1059 | * spaces because that would break viewing this with, for instance, 'less', |
| 1060 | * that would show tons of trailing spaces when a long C++ demangled method |
| 1061 | * name is sampled.
| 1062 | */ |
| 1063 | int hist_entry__snprintf_alignment(struct hist_entry *he, struct perf_hpp *hpp, |
| 1064 | struct perf_hpp_fmt *fmt, int printed) |
| 1065 | { |
| 1066 | if (!list_is_last(&fmt->list, &he->hists->hpp_list->fields)) { |
| 1067 | const int width = fmt->width(fmt, hpp, hists_to_evsel(he->hists)); |
| 1068 | if (printed < width) { |
| 1069 | advance_hpp(hpp, printed); |
| 1070 | printed = scnprintf(hpp->buf, hpp->size, "%-*s", width - printed, " "); |
| 1071 | } |
| 1072 | } |
| 1073 | |
| 1074 | return printed; |
| 1075 | } |
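
A plausible way for the output code to use this helper when formatting one line column by column is sketched below; it assumes the hists__for_each_format() iterator and the fmt->entry() callback of the perf_hpp machinery, and leaves out the separator and color handling the real printers do:

	struct perf_hpp_fmt *fmt;
	int ret;

	hists__for_each_format(he->hists, fmt) {
		ret = fmt->entry(fmt, hpp, he);
		ret = hist_entry__snprintf_alignment(he, hpp, fmt, ret);
		advance_hpp(hpp, ret);
	}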
| 1076 | |
| 1077 | /* |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1078 | * collapse the histogram |
| 1079 | */ |
| 1080 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1081 | static void hists__apply_filters(struct hists *hists, struct hist_entry *he); |
| 1082 | |
| 1083 | static struct hist_entry *hierarchy_insert_entry(struct hists *hists, |
| 1084 | struct rb_root *root, |
| 1085 | struct hist_entry *he, |
| 1086 | struct perf_hpp_fmt *fmt) |
| 1087 | { |
| 1088 | struct rb_node **p = &root->rb_node; |
| 1089 | struct rb_node *parent = NULL; |
| 1090 | struct hist_entry *iter, *new; |
| 1091 | int64_t cmp; |
| 1092 | |
| 1093 | while (*p != NULL) { |
| 1094 | parent = *p; |
| 1095 | iter = rb_entry(parent, struct hist_entry, rb_node_in); |
| 1096 | |
| 1097 | cmp = fmt->collapse(fmt, iter, he); |
| 1098 | if (!cmp) { |
| 1099 | he_stat__add_stat(&iter->stat, &he->stat); |
| 1100 | return iter; |
| 1101 | } |
| 1102 | |
| 1103 | if (cmp < 0) |
| 1104 | p = &parent->rb_left; |
| 1105 | else |
| 1106 | p = &parent->rb_right; |
| 1107 | } |
| 1108 | |
| 1109 | new = hist_entry__new(he, true); |
| 1110 | if (new == NULL) |
| 1111 | return NULL; |
| 1112 | |
| 1113 | hists__apply_filters(hists, new); |
| 1114 | hists->nr_entries++; |
| 1115 | |
| 1116 | /* save related format for output */ |
| 1117 | new->fmt = fmt; |
| 1118 | |
| 1119 | /* some fields are now passed to 'new' */ |
| 1120 | if (perf_hpp__is_trace_entry(fmt)) |
| 1121 | he->trace_output = NULL; |
| 1122 | else |
| 1123 | new->trace_output = NULL; |
| 1124 | |
| 1125 | if (perf_hpp__is_srcline_entry(fmt)) |
| 1126 | he->srcline = NULL; |
| 1127 | else |
| 1128 | new->srcline = NULL; |
| 1129 | |
| 1130 | if (perf_hpp__is_srcfile_entry(fmt)) |
| 1131 | he->srcfile = NULL; |
| 1132 | else |
| 1133 | new->srcfile = NULL; |
| 1134 | |
| 1135 | rb_link_node(&new->rb_node_in, parent, p); |
| 1136 | rb_insert_color(&new->rb_node_in, root); |
| 1137 | return new; |
| 1138 | } |
| 1139 | |
| 1140 | static int hists__hierarchy_insert_entry(struct hists *hists, |
| 1141 | struct rb_root *root, |
| 1142 | struct hist_entry *he) |
| 1143 | { |
| 1144 | struct perf_hpp_fmt *fmt; |
| 1145 | struct hist_entry *new_he = NULL; |
| 1146 | struct hist_entry *parent = NULL; |
| 1147 | int depth = 0; |
| 1148 | int ret = 0; |
| 1149 | |
| 1150 | hists__for_each_sort_list(hists, fmt) { |
| 1151 | if (!perf_hpp__is_sort_entry(fmt) && |
| 1152 | !perf_hpp__is_dynamic_entry(fmt)) |
| 1153 | continue; |
| 1154 | if (perf_hpp__should_skip(fmt, hists)) |
| 1155 | continue; |
| 1156 | |
| 1157 | /* insert copy of 'he' for each fmt into the hierarchy */ |
| 1158 | new_he = hierarchy_insert_entry(hists, root, he, fmt); |
| 1159 | if (new_he == NULL) { |
| 1160 | ret = -1; |
| 1161 | break; |
| 1162 | } |
| 1163 | |
| 1164 | root = &new_he->hroot_in; |
| 1165 | new_he->parent_he = parent; |
| 1166 | new_he->depth = depth++; |
| 1167 | parent = new_he; |
| 1168 | } |
| 1169 | |
| 1170 | if (new_he) { |
| 1171 | new_he->leaf = true; |
| 1172 | |
| 1173 | if (symbol_conf.use_callchain) { |
| 1174 | callchain_cursor_reset(&callchain_cursor); |
| 1175 | if (callchain_merge(&callchain_cursor, |
| 1176 | new_he->callchain, |
| 1177 | he->callchain) < 0) |
| 1178 | ret = -1; |
| 1179 | } |
| 1180 | } |
| 1181 | |
| 1182 | /* 'he' is no longer used */ |
| 1183 | hist_entry__delete(he); |
| 1184 | |
| 1185 | /* return 0 (or -1 on error) since the filters were already applied above */
| 1186 | return ret; |
| 1187 | } |
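
In other words, with something like '-s comm,dso,sym' each collapsed entry becomes a chain of per-level copies: a 'comm' entry whose hroot_in holds a 'dso' entry, whose hroot_in in turn holds the 'sym' entry, linked back through parent_he and numbered by depth; only the deepest copy is marked as a leaf and has the sample's callchain merged into it.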
| 1188 | |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1189 | int hists__collapse_insert_entry(struct hists *hists, struct rb_root *root, |
| 1190 | struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1191 | { |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 1192 | struct rb_node **p = &root->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1193 | struct rb_node *parent = NULL; |
| 1194 | struct hist_entry *iter; |
| 1195 | int64_t cmp; |
| 1196 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1197 | if (symbol_conf.report_hierarchy) |
| 1198 | return hists__hierarchy_insert_entry(hists, root, he); |
| 1199 | |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1200 | while (*p != NULL) { |
| 1201 | parent = *p; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1202 | iter = rb_entry(parent, struct hist_entry, rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1203 | |
| 1204 | cmp = hist_entry__collapse(iter, he); |
| 1205 | |
| 1206 | if (!cmp) { |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1207 | int ret = 0; |
| 1208 | |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 1209 | he_stat__add_stat(&iter->stat, &he->stat); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 1210 | if (symbol_conf.cumulate_callchain) |
| 1211 | he_stat__add_stat(iter->stat_acc, he->stat_acc); |
Namhyung Kim | 9ec6097 | 2012-09-26 16:47:28 +0900 | [diff] [blame] | 1212 | |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 1213 | if (symbol_conf.use_callchain) { |
Namhyung Kim | 4726064 | 2012-05-31 14:43:26 +0900 | [diff] [blame] | 1214 | callchain_cursor_reset(&callchain_cursor); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1215 | if (callchain_merge(&callchain_cursor, |
| 1216 | iter->callchain, |
| 1217 | he->callchain) < 0) |
| 1218 | ret = -1; |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 1219 | } |
Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 1220 | hist_entry__delete(he); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1221 | return ret; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1222 | } |
| 1223 | |
| 1224 | if (cmp < 0) |
| 1225 | p = &(*p)->rb_left; |
| 1226 | else |
| 1227 | p = &(*p)->rb_right; |
| 1228 | } |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1229 | hists->nr_entries++; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1230 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1231 | rb_link_node(&he->rb_node_in, parent, p); |
| 1232 | rb_insert_color(&he->rb_node_in, root); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1233 | return 1; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1234 | } |
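
The return value encodes what happened to 'he': 1 means a new node was linked into the collapsed tree, 0 means it was merged into an existing entry (and freed), and -1 means merging the callchains failed. In the hierarchy case only 0 or -1 is returned, because the filters have already been applied to the per-level copies.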
| 1235 | |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 1236 | struct rb_root *hists__get_rotate_entries_in(struct hists *hists) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1237 | { |
| 1238 | struct rb_root *root; |
| 1239 | |
| 1240 | pthread_mutex_lock(&hists->lock); |
| 1241 | |
| 1242 | root = hists->entries_in; |
| 1243 | if (++hists->entries_in > &hists->entries_in_array[1]) |
| 1244 | hists->entries_in = &hists->entries_in_array[0]; |
| 1245 | |
| 1246 | pthread_mutex_unlock(&hists->lock); |
| 1247 | |
| 1248 | return root; |
| 1249 | } |
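
The two trees in entries_in_array act as a double buffer: this helper hands the current input tree to the collapse pass and flips hists->entries_in to the other one under hists->lock, so code that is still adding new entries on another thread keeps working against a stable tree.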
| 1250 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1251 | static void hists__apply_filters(struct hists *hists, struct hist_entry *he) |
| 1252 | { |
| 1253 | hists__filter_entry_by_dso(hists, he); |
| 1254 | hists__filter_entry_by_thread(hists, he); |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1255 | hists__filter_entry_by_symbol(hists, he); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1256 | hists__filter_entry_by_socket(hists, he); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1257 | } |
| 1258 | |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1259 | int hists__collapse_resort(struct hists *hists, struct ui_progress *prog) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1260 | { |
| 1261 | struct rb_root *root; |
| 1262 | struct rb_node *next; |
| 1263 | struct hist_entry *n; |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1264 | int ret; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1265 | |
Namhyung Kim | 3a5714f | 2013-05-14 11:09:01 +0900 | [diff] [blame] | 1266 | if (!sort__need_collapse) |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1267 | return 0; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1268 | |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1269 | hists->nr_entries = 0; |
| 1270 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1271 | root = hists__get_rotate_entries_in(hists); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1272 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1273 | next = rb_first(root); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1274 | |
| 1275 | while (next) { |
Arnaldo Carvalho de Melo | 33e940a | 2013-09-17 16:34:28 -0300 | [diff] [blame] | 1276 | if (session_done()) |
| 1277 | break; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1278 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1279 | next = rb_next(&n->rb_node_in); |
| 1280 | |
| 1281 | rb_erase(&n->rb_node_in, root); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1282 | ret = hists__collapse_insert_entry(hists, &hists->entries_collapsed, n); |
| 1283 | if (ret < 0) |
| 1284 | return -1; |
| 1285 | |
| 1286 | if (ret) { |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1287 | /* |
| 1288 | * If it wasn't combined with one of the entries already |
| 1289 | * collapsed, we need to apply the filters that may have |
| 1290 | * been set by, say, the hist_browser. |
| 1291 | */ |
| 1292 | hists__apply_filters(hists, n); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1293 | } |
Namhyung Kim | c1fb565 | 2013-10-11 14:15:38 +0900 | [diff] [blame] | 1294 | if (prog) |
| 1295 | ui_progress__update(prog, 1); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1296 | } |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1297 | return 0; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1298 | } |
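
A typical post-processing sequence, roughly as the report-style tools drive it, is to collapse first and then resort for output, feeding both passes the same progress bar; the ui_progress helpers are assumed to come from ui/progress.h:

	struct ui_progress prog;

	ui_progress__init(&prog, hists->stats.nr_events[PERF_RECORD_SAMPLE],
			  "Sorting events for output...");
	hists__collapse_resort(hists, &prog);
	perf_evsel__output_resort(evsel, &prog);
	ui_progress__finish();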
| 1299 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1300 | static int hist_entry__sort(struct hist_entry *a, struct hist_entry *b) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1301 | { |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1302 | struct hists *hists = a->hists; |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1303 | struct perf_hpp_fmt *fmt; |
| 1304 | int64_t cmp = 0; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1305 | |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1306 | hists__for_each_sort_list(hists, fmt) { |
Namhyung Kim | 361459f | 2015-12-23 02:07:08 +0900 | [diff] [blame] | 1307 | if (perf_hpp__should_skip(fmt, a->hists)) |
Namhyung Kim | e67d49a | 2014-03-18 13:00:59 +0900 | [diff] [blame] | 1308 | continue; |
| 1309 | |
Namhyung Kim | 87bbdf7 | 2015-01-08 09:45:46 +0900 | [diff] [blame] | 1310 | cmp = fmt->sort(fmt, a, b); |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1311 | if (cmp) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1312 | break; |
| 1313 | } |
| 1314 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1315 | return cmp; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1316 | } |
| 1317 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1318 | static void hists__reset_filter_stats(struct hists *hists) |
| 1319 | { |
| 1320 | hists->nr_non_filtered_entries = 0; |
| 1321 | hists->stats.total_non_filtered_period = 0; |
| 1322 | } |
| 1323 | |
| 1324 | void hists__reset_stats(struct hists *hists) |
| 1325 | { |
| 1326 | hists->nr_entries = 0; |
| 1327 | hists->stats.total_period = 0; |
| 1328 | |
| 1329 | hists__reset_filter_stats(hists); |
| 1330 | } |
| 1331 | |
| 1332 | static void hists__inc_filter_stats(struct hists *hists, struct hist_entry *h) |
| 1333 | { |
| 1334 | hists->nr_non_filtered_entries++; |
| 1335 | hists->stats.total_non_filtered_period += h->stat.period; |
| 1336 | } |
| 1337 | |
| 1338 | void hists__inc_stats(struct hists *hists, struct hist_entry *h) |
| 1339 | { |
| 1340 | if (!h->filtered) |
| 1341 | hists__inc_filter_stats(hists, h); |
| 1342 | |
| 1343 | hists->nr_entries++; |
| 1344 | hists->stats.total_period += h->stat.period; |
| 1345 | } |
| 1346 | |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1347 | static void hierarchy_insert_output_entry(struct rb_root *root, |
| 1348 | struct hist_entry *he) |
| 1349 | { |
| 1350 | struct rb_node **p = &root->rb_node; |
| 1351 | struct rb_node *parent = NULL; |
| 1352 | struct hist_entry *iter; |
| 1353 | |
| 1354 | while (*p != NULL) { |
| 1355 | parent = *p; |
| 1356 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1357 | |
| 1358 | if (hist_entry__sort(he, iter) > 0) |
| 1359 | p = &parent->rb_left; |
| 1360 | else |
| 1361 | p = &parent->rb_right; |
| 1362 | } |
| 1363 | |
| 1364 | rb_link_node(&he->rb_node, parent, p); |
| 1365 | rb_insert_color(&he->rb_node, root); |
| 1366 | } |
| 1367 | |
| 1368 | static void hists__hierarchy_output_resort(struct hists *hists, |
| 1369 | struct ui_progress *prog, |
| 1370 | struct rb_root *root_in, |
| 1371 | struct rb_root *root_out, |
| 1372 | u64 min_callchain_hits, |
| 1373 | bool use_callchain) |
| 1374 | { |
| 1375 | struct rb_node *node; |
| 1376 | struct hist_entry *he; |
| 1377 | |
| 1378 | *root_out = RB_ROOT; |
| 1379 | node = rb_first(root_in); |
| 1380 | |
| 1381 | while (node) { |
| 1382 | he = rb_entry(node, struct hist_entry, rb_node_in); |
| 1383 | node = rb_next(node); |
| 1384 | |
| 1385 | hierarchy_insert_output_entry(root_out, he); |
| 1386 | |
| 1387 | if (prog) |
| 1388 | ui_progress__update(prog, 1); |
| 1389 | |
| 1390 | if (!he->leaf) { |
| 1391 | hists__hierarchy_output_resort(hists, prog, |
| 1392 | &he->hroot_in, |
| 1393 | &he->hroot_out, |
| 1394 | min_callchain_hits, |
| 1395 | use_callchain); |
| 1396 | hists->nr_entries++; |
| 1397 | if (!he->filtered) { |
| 1398 | hists->nr_non_filtered_entries++; |
| 1399 | hists__calc_col_len(hists, he); |
| 1400 | } |
| 1401 | |
| 1402 | continue; |
| 1403 | } |
| 1404 | |
| 1405 | /* only update stat for leaf entries to avoid duplication */ |
| 1406 | hists__inc_stats(hists, he); |
| 1407 | if (!he->filtered) |
| 1408 | hists__calc_col_len(hists, he); |
| 1409 | |
| 1410 | if (!use_callchain) |
| 1411 | continue; |
| 1412 | |
| 1413 | if (callchain_param.mode == CHAIN_GRAPH_REL) { |
| 1414 | u64 total = he->stat.period; |
| 1415 | |
| 1416 | if (symbol_conf.cumulate_callchain) |
| 1417 | total = he->stat_acc->period; |
| 1418 | |
| 1419 | min_callchain_hits = total * (callchain_param.min_percent / 100); |
| 1420 | } |
| 1421 | |
| 1422 | callchain_param.sort(&he->sorted_chain, he->callchain, |
| 1423 | min_callchain_hits, &callchain_param); |
| 1424 | } |
| 1425 | } |
| 1426 | |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1427 | static void __hists__insert_output_entry(struct rb_root *entries, |
| 1428 | struct hist_entry *he, |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1429 | u64 min_callchain_hits, |
| 1430 | bool use_callchain) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1431 | { |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1432 | struct rb_node **p = &entries->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1433 | struct rb_node *parent = NULL; |
| 1434 | struct hist_entry *iter; |
| 1435 | |
Namhyung Kim | 744070e | 2016-01-28 00:40:48 +0900 | [diff] [blame] | 1436 | if (use_callchain) { |
| 1437 | if (callchain_param.mode == CHAIN_GRAPH_REL) { |
| 1438 | u64 total = he->stat.period; |
| 1439 | |
| 1440 | if (symbol_conf.cumulate_callchain) |
| 1441 | total = he->stat_acc->period; |
| 1442 | |
| 1443 | min_callchain_hits = total * (callchain_param.min_percent / 100); |
| 1444 | } |
Arnaldo Carvalho de Melo | b9fb930 | 2010-04-02 09:50:42 -0300 | [diff] [blame] | 1445 | callchain_param.sort(&he->sorted_chain, he->callchain, |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1446 | min_callchain_hits, &callchain_param); |
Namhyung Kim | 744070e | 2016-01-28 00:40:48 +0900 | [diff] [blame] | 1447 | } |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1448 | |
| 1449 | while (*p != NULL) { |
| 1450 | parent = *p; |
| 1451 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1452 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1453 | if (hist_entry__sort(he, iter) > 0) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1454 | p = &(*p)->rb_left; |
| 1455 | else |
| 1456 | p = &(*p)->rb_right; |
| 1457 | } |
| 1458 | |
| 1459 | rb_link_node(&he->rb_node, parent, p); |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1460 | rb_insert_color(&he->rb_node, entries); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1461 | } |
| 1462 | |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1463 | static void output_resort(struct hists *hists, struct ui_progress *prog, |
| 1464 | bool use_callchain) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1465 | { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1466 | struct rb_root *root; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1467 | struct rb_node *next; |
| 1468 | struct hist_entry *n; |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 1469 | u64 callchain_total; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1470 | u64 min_callchain_hits; |
| 1471 | |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 1472 | callchain_total = hists->callchain_period; |
| 1473 | if (symbol_conf.filter_relative) |
| 1474 | callchain_total = hists->callchain_non_filtered_period; |
| 1475 | |
| 1476 | min_callchain_hits = callchain_total * (callchain_param.min_percent / 100); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1477 | |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1478 | hists__reset_stats(hists); |
| 1479 | hists__reset_col_len(hists); |
| 1480 | |
| 1481 | if (symbol_conf.report_hierarchy) { |
| 1482 | return hists__hierarchy_output_resort(hists, prog, |
| 1483 | &hists->entries_collapsed, |
| 1484 | &hists->entries, |
| 1485 | min_callchain_hits, |
| 1486 | use_callchain); |
| 1487 | } |
| 1488 | |
Namhyung Kim | 3a5714f | 2013-05-14 11:09:01 +0900 | [diff] [blame] | 1489 | if (sort__need_collapse) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1490 | root = &hists->entries_collapsed; |
| 1491 | else |
| 1492 | root = hists->entries_in; |
| 1493 | |
| 1494 | next = rb_first(root); |
| 1495 | hists->entries = RB_ROOT; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1496 | |
| 1497 | while (next) { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1498 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1499 | next = rb_next(&n->rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1500 | |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1501 | __hists__insert_output_entry(&hists->entries, n, min_callchain_hits, use_callchain); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 1502 | hists__inc_stats(hists, n); |
Namhyung Kim | ae993ef | 2014-04-24 16:25:19 +0900 | [diff] [blame] | 1503 | |
| 1504 | if (!n->filtered) |
| 1505 | hists__calc_col_len(hists, n); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1506 | |
| 1507 | if (prog) |
| 1508 | ui_progress__update(prog, 1); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1509 | } |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1510 | } |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 1511 | |
Jiri Olsa | 452ce03 | 2016-01-18 10:24:00 +0100 | [diff] [blame] | 1512 | void perf_evsel__output_resort(struct perf_evsel *evsel, struct ui_progress *prog) |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1513 | { |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1514 | bool use_callchain; |
| 1515 | |
| 1516 | if (evsel && symbol_conf.use_callchain && !symbol_conf.show_ref_callgraph) |
| 1517 | use_callchain = evsel->attr.sample_type & PERF_SAMPLE_CALLCHAIN; |
| 1518 | else |
| 1519 | use_callchain = symbol_conf.use_callchain; |
| 1520 | |
Jiri Olsa | 452ce03 | 2016-01-18 10:24:00 +0100 | [diff] [blame] | 1521 | output_resort(evsel__hists(evsel), prog, use_callchain); |
| 1522 | } |
| 1523 | |
| 1524 | void hists__output_resort(struct hists *hists, struct ui_progress *prog) |
| 1525 | { |
| 1526 | output_resort(hists, prog, symbol_conf.use_callchain); |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1527 | } |
| 1528 | |
Namhyung Kim | 8c01872 | 2016-02-25 00:13:36 +0900 | [diff] [blame] | 1529 | static bool can_goto_child(struct hist_entry *he, enum hierarchy_move_dir hmd) |
| 1530 | { |
| 1531 | if (he->leaf || hmd == HMD_FORCE_SIBLING) |
| 1532 | return false; |
| 1533 | |
| 1534 | if (he->unfolded || hmd == HMD_FORCE_CHILD) |
| 1535 | return true; |
| 1536 | |
| 1537 | return false; |
| 1538 | } |
| 1539 | |
| 1540 | struct rb_node *rb_hierarchy_last(struct rb_node *node) |
| 1541 | { |
| 1542 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1543 | |
| 1544 | while (can_goto_child(he, HMD_NORMAL)) { |
| 1545 | node = rb_last(&he->hroot_out); |
| 1546 | he = rb_entry(node, struct hist_entry, rb_node); |
| 1547 | } |
| 1548 | return node; |
| 1549 | } |
| 1550 | |
| 1551 | struct rb_node *__rb_hierarchy_next(struct rb_node *node, enum hierarchy_move_dir hmd) |
| 1552 | { |
| 1553 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1554 | |
| 1555 | if (can_goto_child(he, hmd)) |
| 1556 | node = rb_first(&he->hroot_out); |
| 1557 | else |
| 1558 | node = rb_next(node); |
| 1559 | |
| 1560 | while (node == NULL) { |
| 1561 | he = he->parent_he; |
| 1562 | if (he == NULL) |
| 1563 | break; |
| 1564 | |
| 1565 | node = rb_next(&he->rb_node); |
| 1566 | } |
| 1567 | return node; |
| 1568 | } |
| 1569 | |
| 1570 | struct rb_node *rb_hierarchy_prev(struct rb_node *node) |
| 1571 | { |
| 1572 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1573 | |
| 1574 | node = rb_prev(node); |
| 1575 | if (node) |
| 1576 | return rb_hierarchy_last(node); |
| 1577 | |
| 1578 | he = he->parent_he; |
| 1579 | if (he == NULL) |
| 1580 | return NULL; |
| 1581 | |
| 1582 | return &he->rb_node; |
| 1583 | } |
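
With the hierarchy enabled, these helpers walk the output tree in on-screen order: descend into a node's children only when it is unfolded (or when forced via HMD_FORCE_CHILD), otherwise move to the next sibling and climb back to the parent once a subtree is exhausted. A browser can visit every visible row with a loop along these lines (sketch):

	struct rb_node *nd = rb_first(&hists->entries);

	while (nd) {
		struct hist_entry *he = rb_entry(nd, struct hist_entry, rb_node);

		/* ... render one row for 'he' ... */

		nd = __rb_hierarchy_next(&he->rb_node, HMD_NORMAL);
	}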
| 1584 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1585 | static void hists__remove_entry_filter(struct hists *hists, struct hist_entry *h, |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1586 | enum hist_filter filter) |
| 1587 | { |
| 1588 | h->filtered &= ~(1 << filter); |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1589 | |
| 1590 | if (symbol_conf.report_hierarchy) { |
| 1591 | struct hist_entry *parent = h->parent_he; |
| 1592 | |
| 1593 | while (parent) { |
| 1594 | he_stat__add_stat(&parent->stat, &h->stat); |
| 1595 | |
| 1596 | parent->filtered &= ~(1 << filter); |
| 1597 | |
| 1598 | if (parent->filtered) |
| 1599 | goto next; |
| 1600 | |
| 1601 | /* force fold unfiltered entry for simplicity */ |
| 1602 | parent->unfolded = false; |
| 1603 | parent->row_offset = 0; |
| 1604 | parent->nr_rows = 0; |
| 1605 | next: |
| 1606 | parent = parent->parent_he; |
| 1607 | } |
| 1608 | } |
| 1609 | |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1610 | if (h->filtered) |
| 1611 | return; |
| 1612 | |
Namhyung Kim | 87e90f4 | 2014-04-24 16:44:16 +0900 | [diff] [blame] | 1613 | /* force fold unfiltered entry for simplicity */ |
Namhyung Kim | 3698dab | 2015-05-05 23:55:46 +0900 | [diff] [blame] | 1614 | h->unfolded = false; |
Arnaldo Carvalho de Melo | 0f0cbf7 | 2010-07-26 17:13:40 -0300 | [diff] [blame] | 1615 | h->row_offset = 0; |
He Kuang | a8cd1f4 | 2015-03-11 20:36:03 +0800 | [diff] [blame] | 1616 | h->nr_rows = 0; |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1617 | |
Namhyung Kim | 1ab1fa5 | 2013-12-26 15:11:52 +0900 | [diff] [blame] | 1618 | hists->stats.nr_non_filtered_samples += h->stat.nr_events; |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1619 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1620 | hists__inc_filter_stats(hists, h); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1621 | hists__calc_col_len(hists, h); |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1622 | } |
| 1623 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1624 | |
| 1625 | static bool hists__filter_entry_by_dso(struct hists *hists, |
| 1626 | struct hist_entry *he) |
| 1627 | { |
| 1628 | if (hists->dso_filter != NULL && |
| 1629 | (he->ms.map == NULL || he->ms.map->dso != hists->dso_filter)) { |
| 1630 | he->filtered |= (1 << HIST_FILTER__DSO); |
| 1631 | return true; |
| 1632 | } |
| 1633 | |
| 1634 | return false; |
| 1635 | } |
| 1636 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1637 | static bool hists__filter_entry_by_thread(struct hists *hists, |
| 1638 | struct hist_entry *he) |
| 1639 | { |
| 1640 | if (hists->thread_filter != NULL && |
| 1641 | he->thread != hists->thread_filter) { |
| 1642 | he->filtered |= (1 << HIST_FILTER__THREAD); |
| 1643 | return true; |
| 1644 | } |
| 1645 | |
| 1646 | return false; |
| 1647 | } |
| 1648 | |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1649 | static bool hists__filter_entry_by_symbol(struct hists *hists, |
| 1650 | struct hist_entry *he) |
| 1651 | { |
| 1652 | if (hists->symbol_filter_str != NULL && |
| 1653 | (!he->ms.sym || strstr(he->ms.sym->name, |
| 1654 | hists->symbol_filter_str) == NULL)) { |
| 1655 | he->filtered |= (1 << HIST_FILTER__SYMBOL); |
| 1656 | return true; |
| 1657 | } |
| 1658 | |
| 1659 | return false; |
| 1660 | } |
| 1661 | |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1662 | static bool hists__filter_entry_by_socket(struct hists *hists, |
| 1663 | struct hist_entry *he) |
| 1664 | { |
| 1665 | if ((hists->socket_filter > -1) && |
| 1666 | (he->socket != hists->socket_filter)) { |
| 1667 | he->filtered |= (1 << HIST_FILTER__SOCKET); |
| 1668 | return true; |
| 1669 | } |
| 1670 | |
| 1671 | return false; |
| 1672 | } |
| 1673 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1674 | typedef bool (*filter_fn_t)(struct hists *hists, struct hist_entry *he); |
| 1675 | |
| 1676 | static void hists__filter_by_type(struct hists *hists, int type, filter_fn_t filter) |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1677 | { |
| 1678 | struct rb_node *nd; |
| 1679 | |
| 1680 | hists->stats.nr_non_filtered_samples = 0; |
| 1681 | |
| 1682 | hists__reset_filter_stats(hists); |
| 1683 | hists__reset_col_len(hists); |
| 1684 | |
| 1685 | for (nd = rb_first(&hists->entries); nd; nd = rb_next(nd)) { |
| 1686 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1687 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1688 | if (filter(hists, h)) |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1689 | continue; |
| 1690 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1691 | hists__remove_entry_filter(hists, h, type); |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1692 | } |
| 1693 | } |
| 1694 | |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1695 | static void resort_filtered_entry(struct rb_root *root, struct hist_entry *he) |
| 1696 | { |
| 1697 | struct rb_node **p = &root->rb_node; |
| 1698 | struct rb_node *parent = NULL; |
| 1699 | struct hist_entry *iter; |
| 1700 | struct rb_root new_root = RB_ROOT; |
| 1701 | struct rb_node *nd; |
| 1702 | |
| 1703 | while (*p != NULL) { |
| 1704 | parent = *p; |
| 1705 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1706 | |
| 1707 | if (hist_entry__sort(he, iter) > 0) |
| 1708 | p = &(*p)->rb_left; |
| 1709 | else |
| 1710 | p = &(*p)->rb_right; |
| 1711 | } |
| 1712 | |
| 1713 | rb_link_node(&he->rb_node, parent, p); |
| 1714 | rb_insert_color(&he->rb_node, root); |
| 1715 | |
| 1716 | if (he->leaf || he->filtered) |
| 1717 | return; |
| 1718 | |
| 1719 | nd = rb_first(&he->hroot_out); |
| 1720 | while (nd) { |
| 1721 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1722 | |
| 1723 | nd = rb_next(nd); |
| 1724 | rb_erase(&h->rb_node, &he->hroot_out); |
| 1725 | |
| 1726 | resort_filtered_entry(&new_root, h); |
| 1727 | } |
| 1728 | |
| 1729 | he->hroot_out = new_root; |
| 1730 | } |
| 1731 | |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1732 | static void hists__filter_hierarchy(struct hists *hists, int type, const void *arg) |
| 1733 | { |
| 1734 | struct rb_node *nd; |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1735 | struct rb_root new_root = RB_ROOT; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1736 | |
| 1737 | hists->stats.nr_non_filtered_samples = 0; |
| 1738 | |
| 1739 | hists__reset_filter_stats(hists); |
| 1740 | hists__reset_col_len(hists); |
| 1741 | |
| 1742 | nd = rb_first(&hists->entries); |
| 1743 | while (nd) { |
| 1744 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1745 | int ret; |
| 1746 | |
| 1747 | ret = hist_entry__filter(h, type, arg); |
| 1748 | |
| 1749 | /* |
| 1750 | * case 1. non-matching type |
| 1751 | * zero out the period, set filter marker and move to child |
| 1752 | */ |
| 1753 | if (ret < 0) { |
| 1754 | memset(&h->stat, 0, sizeof(h->stat)); |
| 1755 | h->filtered |= (1 << type); |
| 1756 | |
| 1757 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_CHILD); |
| 1758 | } |
| 1759 | /* |
| 1760 | * case 2. matched type (filter out) |
| 1761 | * set filter marker and move to next |
| 1762 | */ |
| 1763 | else if (ret == 1) { |
| 1764 | h->filtered |= (1 << type); |
| 1765 | |
| 1766 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_SIBLING); |
| 1767 | } |
| 1768 | /* |
| 1769 | * case 3. ok (not filtered) |
| 1770 | * add period to hists and parents, erase the filter marker |
| 1771 | * and move to next sibling |
| 1772 | */ |
| 1773 | else { |
| 1774 | hists__remove_entry_filter(hists, h, type); |
| 1775 | |
| 1776 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_SIBLING); |
| 1777 | } |
| 1778 | } |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1779 | |
| 1780 | /* |
| 1781 | * resort output after applying a new filter since a filter in a lower
| 1782 | * hierarchy can change periods in an upper hierarchy.
| 1783 | */ |
| 1784 | nd = rb_first(&hists->entries); |
| 1785 | while (nd) { |
| 1786 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1787 | |
| 1788 | nd = rb_next(nd); |
| 1789 | rb_erase(&h->rb_node, &hists->entries); |
| 1790 | |
| 1791 | resort_filtered_entry(&new_root, h); |
| 1792 | } |
| 1793 | |
| 1794 | hists->entries = new_root; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1795 | } |
| 1796 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1797 | void hists__filter_by_thread(struct hists *hists) |
| 1798 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1799 | if (symbol_conf.report_hierarchy) |
| 1800 | hists__filter_hierarchy(hists, HIST_FILTER__THREAD, |
| 1801 | hists->thread_filter); |
| 1802 | else |
| 1803 | hists__filter_by_type(hists, HIST_FILTER__THREAD, |
| 1804 | hists__filter_entry_by_thread); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1805 | } |
| 1806 | |
| 1807 | void hists__filter_by_dso(struct hists *hists) |
| 1808 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1809 | if (symbol_conf.report_hierarchy) |
| 1810 | hists__filter_hierarchy(hists, HIST_FILTER__DSO, |
| 1811 | hists->dso_filter); |
| 1812 | else |
| 1813 | hists__filter_by_type(hists, HIST_FILTER__DSO, |
| 1814 | hists__filter_entry_by_dso); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1815 | } |
| 1816 | |
| 1817 | void hists__filter_by_symbol(struct hists *hists) |
| 1818 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1819 | if (symbol_conf.report_hierarchy) |
| 1820 | hists__filter_hierarchy(hists, HIST_FILTER__SYMBOL, |
| 1821 | hists->symbol_filter_str); |
| 1822 | else |
| 1823 | hists__filter_by_type(hists, HIST_FILTER__SYMBOL, |
| 1824 | hists__filter_entry_by_symbol); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1825 | } |
| 1826 | |
| 1827 | void hists__filter_by_socket(struct hists *hists) |
| 1828 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1829 | if (symbol_conf.report_hierarchy) |
| 1830 | hists__filter_hierarchy(hists, HIST_FILTER__SOCKET, |
| 1831 | &hists->socket_filter); |
| 1832 | else |
| 1833 | hists__filter_by_type(hists, HIST_FILTER__SOCKET, |
| 1834 | hists__filter_entry_by_socket); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1835 | } |
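
All four entry points follow the same pattern: the caller stores the filter criterion in struct hists and then asks for the entries to be re-evaluated, for example (sketch; thread refcounting is left out):

	hists->thread_filter = thread;	/* NULL clears the filter */
	hists__filter_by_thread(hists);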
| 1836 | |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 1837 | void events_stats__inc(struct events_stats *stats, u32 type) |
| 1838 | { |
| 1839 | ++stats->nr_events[0]; |
| 1840 | ++stats->nr_events[type]; |
| 1841 | } |
| 1842 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1843 | void hists__inc_nr_events(struct hists *hists, u32 type) |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 1844 | { |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 1845 | events_stats__inc(&hists->stats, type); |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 1846 | } |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1847 | |
Namhyung Kim | 1844dbc | 2014-05-28 14:12:18 +0900 | [diff] [blame] | 1848 | void hists__inc_nr_samples(struct hists *hists, bool filtered) |
| 1849 | { |
| 1850 | events_stats__inc(&hists->stats, PERF_RECORD_SAMPLE); |
| 1851 | if (!filtered) |
| 1852 | hists->stats.nr_non_filtered_samples++; |
| 1853 | } |
| 1854 | |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1855 | static struct hist_entry *hists__add_dummy_entry(struct hists *hists, |
| 1856 | struct hist_entry *pair) |
| 1857 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1858 | struct rb_root *root; |
| 1859 | struct rb_node **p; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1860 | struct rb_node *parent = NULL; |
| 1861 | struct hist_entry *he; |
Andi Kleen | 354cc40 | 2013-10-01 07:22:15 -0700 | [diff] [blame] | 1862 | int64_t cmp; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1863 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1864 | if (sort__need_collapse) |
| 1865 | root = &hists->entries_collapsed; |
| 1866 | else |
| 1867 | root = hists->entries_in; |
| 1868 | |
| 1869 | p = &root->rb_node; |
| 1870 | |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1871 | while (*p != NULL) { |
| 1872 | parent = *p; |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1873 | he = rb_entry(parent, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1874 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1875 | cmp = hist_entry__collapse(he, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1876 | |
| 1877 | if (!cmp) |
| 1878 | goto out; |
| 1879 | |
| 1880 | if (cmp < 0) |
| 1881 | p = &(*p)->rb_left; |
| 1882 | else |
| 1883 | p = &(*p)->rb_right; |
| 1884 | } |
| 1885 | |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 1886 | he = hist_entry__new(pair, true); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1887 | if (he) { |
Arnaldo Carvalho de Melo | 30193d7 | 2012-11-12 13:20:03 -0300 | [diff] [blame] | 1888 | memset(&he->stat, 0, sizeof(he->stat)); |
| 1889 | he->hists = hists; |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1890 | rb_link_node(&he->rb_node_in, parent, p); |
| 1891 | rb_insert_color(&he->rb_node_in, root); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 1892 | hists__inc_stats(hists, he); |
Jiri Olsa | e0af43d | 2012-12-01 21:18:20 +0100 | [diff] [blame] | 1893 | he->dummy = true; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1894 | } |
| 1895 | out: |
| 1896 | return he; |
| 1897 | } |
| 1898 | |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1899 | static struct hist_entry *hists__find_entry(struct hists *hists, |
| 1900 | struct hist_entry *he) |
| 1901 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1902 | struct rb_node *n; |
| 1903 | |
| 1904 | if (sort__need_collapse) |
| 1905 | n = hists->entries_collapsed.rb_node; |
| 1906 | else |
| 1907 | n = hists->entries_in->rb_node; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1908 | |
| 1909 | while (n) { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1910 | struct hist_entry *iter = rb_entry(n, struct hist_entry, rb_node_in); |
| 1911 | int64_t cmp = hist_entry__collapse(iter, he); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1912 | |
| 1913 | if (cmp < 0) |
| 1914 | n = n->rb_left; |
| 1915 | else if (cmp > 0) |
| 1916 | n = n->rb_right; |
| 1917 | else |
| 1918 | return iter; |
| 1919 | } |
| 1920 | |
| 1921 | return NULL; |
| 1922 | } |
| 1923 | |
| 1924 | /* |
| 1925 | * Look for pairs to link to the leader buckets (hist_entries): |
| 1926 | */ |
| 1927 | void hists__match(struct hists *leader, struct hists *other) |
| 1928 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1929 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1930 | struct rb_node *nd; |
| 1931 | struct hist_entry *pos, *pair; |
| 1932 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1933 | if (sort__need_collapse) |
| 1934 | root = &leader->entries_collapsed; |
| 1935 | else |
| 1936 | root = leader->entries_in; |
| 1937 | |
| 1938 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 1939 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1940 | pair = hists__find_entry(other, pos); |
| 1941 | |
| 1942 | if (pair) |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 1943 | hist_entry__add_pair(pair, pos); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1944 | } |
| 1945 | } |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1946 | |
| 1947 | /* |
| 1948 | * Look for entries in the other hists that are not present in the leader, if |
| 1949 | * we find them, just add a dummy entry on the leader hists, with period=0, |
| 1950 | * nr_events=0, to serve as the list header. |
| 1951 | */ |
| 1952 | int hists__link(struct hists *leader, struct hists *other) |
| 1953 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1954 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1955 | struct rb_node *nd; |
| 1956 | struct hist_entry *pos, *pair; |
| 1957 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1958 | if (sort__need_collapse) |
| 1959 | root = &other->entries_collapsed; |
| 1960 | else |
| 1961 | root = other->entries_in; |
| 1962 | |
| 1963 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 1964 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1965 | |
| 1966 | if (!hist_entry__has_pairs(pos)) { |
| 1967 | pair = hists__add_dummy_entry(leader, pos); |
| 1968 | if (pair == NULL) |
| 1969 | return -1; |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 1970 | hist_entry__add_pair(pos, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1971 | } |
| 1972 | } |
| 1973 | |
| 1974 | return 0; |
| 1975 | } |
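
Pairing two histograms for comparison, as perf diff does, is a two-step affair: first match the entries that exist in both, then link dummy placeholders into the leader for entries only the other hists has (sketch):

	hists__match(leader_hists, other_hists);
	if (hists__link(leader_hists, other_hists) < 0)
		pr_err("failed to link histograms\n");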
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 1976 | |
Andi Kleen | 57849998 | 2015-07-18 08:24:49 -0700 | [diff] [blame] | 1977 | void hist__account_cycles(struct branch_stack *bs, struct addr_location *al, |
| 1978 | struct perf_sample *sample, bool nonany_branch_mode) |
| 1979 | { |
| 1980 | struct branch_info *bi; |
| 1981 | |
| 1982 | /* If we have branch cycles always annotate them. */ |
| 1983 | if (bs && bs->nr && bs->entries[0].flags.cycles) { |
| 1984 | int i; |
| 1985 | |
| 1986 | bi = sample__resolve_bstack(sample, al); |
| 1987 | if (bi) { |
| 1988 | struct addr_map_symbol *prev = NULL; |
| 1989 | |
| 1990 | /* |
| 1991 | * Ignore errors, still want to process the |
| 1992 | * other entries. |
| 1993 | * |
| 1994 | * For non-standard branch modes always
| 1995 | * force no IPC (prev == NULL) |
| 1996 | * |
| 1997 | * Note that perf stores branches reversed from |
| 1998 | * program order! |
| 1999 | */ |
| 2000 | for (i = bs->nr - 1; i >= 0; i--) { |
| 2001 | addr_map_symbol__account_cycles(&bi[i].from, |
| 2002 | nonany_branch_mode ? NULL : prev, |
| 2003 | bi[i].flags.cycles); |
| 2004 | prev = &bi[i].to; |
| 2005 | } |
| 2006 | free(bi); |
| 2007 | } |
| 2008 | } |
| 2009 | } |
Arnaldo Carvalho de Melo | 2a1731f | 2014-10-10 15:49:21 -0300 | [diff] [blame] | 2010 | |
| 2011 | size_t perf_evlist__fprintf_nr_events(struct perf_evlist *evlist, FILE *fp) |
| 2012 | { |
| 2013 | struct perf_evsel *pos; |
| 2014 | size_t ret = 0; |
| 2015 | |
| 2016 | evlist__for_each(evlist, pos) { |
| 2017 | ret += fprintf(fp, "%s stats:\n", perf_evsel__name(pos)); |
| 2018 | ret += events_stats__fprintf(&evsel__hists(pos)->stats, fp); |
| 2019 | } |
| 2020 | |
| 2021 | return ret; |
| 2022 | } |
| 2023 | |
| 2024 | |
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 2025 | u64 hists__total_period(struct hists *hists) |
| 2026 | { |
| 2027 | return symbol_conf.filter_relative ? hists->stats.total_non_filtered_period : |
| 2028 | hists->stats.total_period; |
| 2029 | } |
Namhyung Kim | 33db456 | 2014-02-07 12:06:07 +0900 | [diff] [blame] | 2030 | |
| 2031 | int parse_filter_percentage(const struct option *opt __maybe_unused, |
| 2032 | const char *arg, int unset __maybe_unused) |
| 2033 | { |
| 2034 | if (!strcmp(arg, "relative")) |
| 2035 | symbol_conf.filter_relative = true; |
| 2036 | else if (!strcmp(arg, "absolute")) |
| 2037 | symbol_conf.filter_relative = false; |
| 2038 | else |
| 2039 | return -1; |
| 2040 | |
| 2041 | return 0; |
| 2042 | } |
Namhyung Kim | 0b93da1 | 2014-01-14 12:02:15 +0900 | [diff] [blame] | 2043 | |
| 2044 | int perf_hist_config(const char *var, const char *value) |
| 2045 | { |
| 2046 | if (!strcmp(var, "hist.percentage")) |
| 2047 | return parse_filter_percentage(NULL, value, 0); |
| 2048 | |
| 2049 | return 0; |
| 2050 | } |
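
parse_filter_percentage() backs both the 'hist.percentage' config key handled above and, in the tools that expose it, a --percentage command line option; the option hookup looks roughly like this, using the perf parse-options macros:

	OPT_CALLBACK(0, "percentage", NULL, "relative|absolute",
		     "how to display percentage of filtered entries",
		     parse_filter_percentage),

and the same choice can be made persistent in ~/.perfconfig:

	[hist]
		percentage = relative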
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2051 | |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2052 | int __hists__init(struct hists *hists, struct perf_hpp_list *hpp_list) |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2053 | { |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2054 | memset(hists, 0, sizeof(*hists)); |
| 2055 | hists->entries_in_array[0] = hists->entries_in_array[1] = RB_ROOT; |
| 2056 | hists->entries_in = &hists->entries_in_array[0]; |
| 2057 | hists->entries_collapsed = RB_ROOT; |
| 2058 | hists->entries = RB_ROOT; |
| 2059 | pthread_mutex_init(&hists->lock, NULL); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 2060 | hists->socket_filter = -1; |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2061 | hists->hpp_list = hpp_list; |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2062 | return 0; |
| 2063 | } |
| 2064 | |
Namhyung Kim | 61fa0e9 | 2015-12-10 16:53:20 +0900 | [diff] [blame] | 2065 | static void hists__delete_remaining_entries(struct rb_root *root) |
| 2066 | { |
| 2067 | struct rb_node *node; |
| 2068 | struct hist_entry *he; |
| 2069 | |
| 2070 | while (!RB_EMPTY_ROOT(root)) { |
| 2071 | node = rb_first(root); |
| 2072 | rb_erase(node, root); |
| 2073 | |
| 2074 | he = rb_entry(node, struct hist_entry, rb_node_in); |
| 2075 | hist_entry__delete(he); |
| 2076 | } |
| 2077 | } |
| 2078 | |
| 2079 | static void hists__delete_all_entries(struct hists *hists) |
| 2080 | { |
| 2081 | hists__delete_entries(hists); |
| 2082 | hists__delete_remaining_entries(&hists->entries_in_array[0]); |
| 2083 | hists__delete_remaining_entries(&hists->entries_in_array[1]); |
| 2084 | hists__delete_remaining_entries(&hists->entries_collapsed); |
| 2085 | } |
| 2086 | |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2087 | static void hists_evsel__exit(struct perf_evsel *evsel) |
| 2088 | { |
| 2089 | struct hists *hists = evsel__hists(evsel); |
| 2090 | |
Namhyung Kim | 61fa0e9 | 2015-12-10 16:53:20 +0900 | [diff] [blame] | 2091 | hists__delete_all_entries(hists); |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2092 | } |
| 2093 | |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 2094 | static int hists_evsel__init(struct perf_evsel *evsel) |
| 2095 | { |
| 2096 | struct hists *hists = evsel__hists(evsel); |
| 2097 | |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2098 | __hists__init(hists, &perf_hpp_list); |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 2099 | return 0; |
| 2100 | } |
| 2101 | |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2102 | /* |
| 2103 | * The hist_entries stored in the rbtrees are freed by hists_evsel__exit()
| 2104 | * above, registered as the evsel destructor below.
| 2105 | */ |
| 2106 | |
| 2107 | int hists__init(void) |
| 2108 | { |
| 2109 | int err = perf_evsel__object_config(sizeof(struct hists_evsel), |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2110 | hists_evsel__init, |
| 2111 | hists_evsel__exit); |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2112 | if (err) |
| 2113 | fputs("FATAL ERROR: Couldn't setup hists class\n", stderr); |
| 2114 | |
| 2115 | return err; |
| 2116 | } |
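
hists__init() enlarges the evsel allocation to a hists_evsel and registers the constructor/destructor above, so a tool that keeps per-event histograms calls it once before creating any evsel and then reaches the histogram through evsel__hists(), e.g. (sketch):

	if (hists__init() < 0)
		return -1;
	/* ... create evsels, process samples ... */
	struct hists *hists = evsel__hists(evsel);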
Jiri Olsa | 94b3dc3 | 2016-01-18 10:24:13 +0100 | [diff] [blame] | 2117 | |
| 2118 | void perf_hpp_list__init(struct perf_hpp_list *list) |
| 2119 | { |
| 2120 | INIT_LIST_HEAD(&list->fields); |
| 2121 | INIT_LIST_HEAD(&list->sorts); |
| 2122 | } |