#include "util.h"
#include "build-id.h"
#include "hist.h"
#include "session.h"
#include "sort.h"
#include "evlist.h"
#include "evsel.h"
#include "annotate.h"
#include "ui/progress.h"
#include <math.h>

static bool hists__filter_entry_by_dso(struct hists *hists,
				       struct hist_entry *he);
static bool hists__filter_entry_by_thread(struct hists *hists,
					  struct hist_entry *he);
static bool hists__filter_entry_by_symbol(struct hists *hists,
					  struct hist_entry *he);
static bool hists__filter_entry_by_socket(struct hists *hists,
					  struct hist_entry *he);

u16 hists__col_len(struct hists *hists, enum hist_column col)
{
	return hists->col_len[col];
}

void hists__set_col_len(struct hists *hists, enum hist_column col, u16 len)
{
	hists->col_len[col] = len;
}

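/*
 * Widen column @col to @len if its current width is smaller.
 * Returns true if the column width was actually updated.
 */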
bool hists__new_col_len(struct hists *hists, enum hist_column col, u16 len)
{
	if (len > hists__col_len(hists, col)) {
		hists__set_col_len(hists, col, len);
		return true;
	}
	return false;
}

void hists__reset_col_len(struct hists *hists)
{
	enum hist_column col;

	for (col = 0; col < HISTC_NR_COLS; ++col)
		hists__set_col_len(hists, col, 0);
}

static void hists__set_unres_dso_col_len(struct hists *hists, int dso)
{
	const unsigned int unresolved_col_width = BITS_PER_LONG / 4;

	if (hists__col_len(hists, dso) < unresolved_col_width &&
	    !symbol_conf.col_width_list_str && !symbol_conf.field_sep &&
	    !symbol_conf.dso_list)
		hists__set_col_len(hists, dso, unresolved_col_width);
}

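/*
 * Update the output column widths so that every field of @h (symbol, comm,
 * dso, branch/mem addresses, srcline, etc.) fits when it gets printed.
 */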
void hists__calc_col_len(struct hists *hists, struct hist_entry *h)
{
	const unsigned int unresolved_col_width = BITS_PER_LONG / 4;
	int symlen;
	u16 len;

	/*
	 * +4 accounts for '[x] ' priv level info
	 * +2 accounts for 0x prefix on raw addresses
	 * +3 accounts for ' y ' symtab origin info
	 */
	if (h->ms.sym) {
		symlen = h->ms.sym->namelen + 4;
		if (verbose)
			symlen += BITS_PER_LONG / 4 + 2 + 3;
		hists__new_col_len(hists, HISTC_SYMBOL, symlen);
	} else {
		symlen = unresolved_col_width + 4 + 2;
		hists__new_col_len(hists, HISTC_SYMBOL, symlen);
		hists__set_unres_dso_col_len(hists, HISTC_DSO);
	}

	len = thread__comm_len(h->thread);
	if (hists__new_col_len(hists, HISTC_COMM, len))
		hists__set_col_len(hists, HISTC_THREAD, len + 8);

	if (h->ms.map) {
		len = dso__name_len(h->ms.map->dso);
		hists__new_col_len(hists, HISTC_DSO, len);
	}

	if (h->parent)
		hists__new_col_len(hists, HISTC_PARENT, h->parent->namelen);

	if (h->branch_info) {
		if (h->branch_info->from.sym) {
			symlen = (int)h->branch_info->from.sym->namelen + 4;
			if (verbose)
				symlen += BITS_PER_LONG / 4 + 2 + 3;
			hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen);

			symlen = dso__name_len(h->branch_info->from.map->dso);
			hists__new_col_len(hists, HISTC_DSO_FROM, symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen);
			hists__set_unres_dso_col_len(hists, HISTC_DSO_FROM);
		}

		if (h->branch_info->to.sym) {
			symlen = (int)h->branch_info->to.sym->namelen + 4;
			if (verbose)
				symlen += BITS_PER_LONG / 4 + 2 + 3;
			hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen);

			symlen = dso__name_len(h->branch_info->to.map->dso);
			hists__new_col_len(hists, HISTC_DSO_TO, symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen);
			hists__set_unres_dso_col_len(hists, HISTC_DSO_TO);
		}

		if (h->branch_info->srcline_from)
			hists__new_col_len(hists, HISTC_SRCLINE_FROM,
					   strlen(h->branch_info->srcline_from));
		if (h->branch_info->srcline_to)
			hists__new_col_len(hists, HISTC_SRCLINE_TO,
					   strlen(h->branch_info->srcline_to));
	}

	if (h->mem_info) {
		if (h->mem_info->daddr.sym) {
			symlen = (int)h->mem_info->daddr.sym->namelen + 4
			       + unresolved_col_width + 2;
			hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL,
					   symlen);
			hists__new_col_len(hists, HISTC_MEM_DCACHELINE,
					   symlen + 1);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL,
					   symlen);
			hists__new_col_len(hists, HISTC_MEM_DCACHELINE,
					   symlen);
		}

		if (h->mem_info->iaddr.sym) {
			symlen = (int)h->mem_info->iaddr.sym->namelen + 4
			       + unresolved_col_width + 2;
			hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL,
					   symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL,
					   symlen);
		}

		if (h->mem_info->daddr.map) {
			symlen = dso__name_len(h->mem_info->daddr.map->dso);
			hists__new_col_len(hists, HISTC_MEM_DADDR_DSO,
					   symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO);
		}
	} else {
		symlen = unresolved_col_width + 4 + 2;
		hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL, symlen);
		hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL, symlen);
		hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO);
	}

	hists__new_col_len(hists, HISTC_CPU, 3);
	hists__new_col_len(hists, HISTC_SOCKET, 6);
	hists__new_col_len(hists, HISTC_MEM_LOCKED, 6);
	hists__new_col_len(hists, HISTC_MEM_TLB, 22);
	hists__new_col_len(hists, HISTC_MEM_SNOOP, 12);
	hists__new_col_len(hists, HISTC_MEM_LVL, 21 + 3);
	hists__new_col_len(hists, HISTC_LOCAL_WEIGHT, 12);
	hists__new_col_len(hists, HISTC_GLOBAL_WEIGHT, 12);

	if (h->srcline)
		hists__new_col_len(hists, HISTC_SRCLINE, strlen(h->srcline));

	if (h->srcfile)
		hists__new_col_len(hists, HISTC_SRCFILE, strlen(h->srcfile));

	if (h->transaction)
		hists__new_col_len(hists, HISTC_TRANSACTION,
				   hist_entry__transaction_len());

	if (h->trace_output)
		hists__new_col_len(hists, HISTC_TRACE, strlen(h->trace_output));
}

void hists__output_recalc_col_len(struct hists *hists, int max_rows)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;
	int row = 0;

	hists__reset_col_len(hists);

	while (next && row++ < max_rows) {
		n = rb_entry(next, struct hist_entry, rb_node);
		if (!n->filtered)
			hists__calc_col_len(hists, n);
		next = rb_next(&n->rb_node);
	}
}

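/* Account @period to the counter matching the sample's cpumode (kernel/user/guest). */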
static void he_stat__add_cpumode_period(struct he_stat *he_stat,
					unsigned int cpumode, u64 period)
{
	switch (cpumode) {
	case PERF_RECORD_MISC_KERNEL:
		he_stat->period_sys += period;
		break;
	case PERF_RECORD_MISC_USER:
		he_stat->period_us += period;
		break;
	case PERF_RECORD_MISC_GUEST_KERNEL:
		he_stat->period_guest_sys += period;
		break;
	case PERF_RECORD_MISC_GUEST_USER:
		he_stat->period_guest_us += period;
		break;
	default:
		break;
	}
}

static void he_stat__add_period(struct he_stat *he_stat, u64 period,
				u64 weight)
{

	he_stat->period += period;
	he_stat->weight += weight;
	he_stat->nr_events += 1;
}

static void he_stat__add_stat(struct he_stat *dest, struct he_stat *src)
{
	dest->period += src->period;
	dest->period_sys += src->period_sys;
	dest->period_us += src->period_us;
	dest->period_guest_sys += src->period_guest_sys;
	dest->period_guest_us += src->period_guest_us;
	dest->nr_events += src->nr_events;
	dest->weight += src->weight;
}

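/*
 * Age the period and event counts by a factor of 7/8 so that stale
 * samples gradually lose weight when entries are decayed.
 */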
static void he_stat__decay(struct he_stat *he_stat)
{
	he_stat->period = (he_stat->period * 7) / 8;
	he_stat->nr_events = (he_stat->nr_events * 7) / 8;
	/* XXX need decay for weight too? */
}

static void hists__delete_entry(struct hists *hists, struct hist_entry *he);

static bool hists__decay_entry(struct hists *hists, struct hist_entry *he)
{
	u64 prev_period = he->stat.period;
	u64 diff;

	if (prev_period == 0)
		return true;

	he_stat__decay(&he->stat);
	if (symbol_conf.cumulate_callchain)
		he_stat__decay(he->stat_acc);
	decay_callchain(he->callchain);

	diff = prev_period - he->stat.period;

	if (!he->depth) {
		hists->stats.total_period -= diff;
		if (!he->filtered)
			hists->stats.total_non_filtered_period -= diff;
	}

	if (!he->leaf) {
		struct hist_entry *child;
		struct rb_node *node = rb_first(&he->hroot_out);
		while (node) {
			child = rb_entry(node, struct hist_entry, rb_node);
			node = rb_next(node);

			if (hists__decay_entry(hists, child))
				hists__delete_entry(hists, child);
		}
	}

	return he->stat.period == 0;
}

static void hists__delete_entry(struct hists *hists, struct hist_entry *he)
{
	struct rb_root *root_in;
	struct rb_root *root_out;

	if (he->parent_he) {
		root_in = &he->parent_he->hroot_in;
		root_out = &he->parent_he->hroot_out;
	} else {
		if (hists__has(hists, need_collapse))
			root_in = &hists->entries_collapsed;
		else
			root_in = hists->entries_in;
		root_out = &hists->entries;
	}

	rb_erase(&he->rb_node_in, root_in);
	rb_erase(&he->rb_node, root_out);

	--hists->nr_entries;
	if (!he->filtered)
		--hists->nr_non_filtered_entries;

	hist_entry__delete(he);
}

void hists__decay_entries(struct hists *hists, bool zap_user, bool zap_kernel)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;

	while (next) {
		n = rb_entry(next, struct hist_entry, rb_node);
		next = rb_next(&n->rb_node);
		if (((zap_user && n->level == '.') ||
		     (zap_kernel && n->level != '.') ||
		     hists__decay_entry(hists, n))) {
			hists__delete_entry(hists, n);
		}
	}
}

void hists__delete_entries(struct hists *hists)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;

	while (next) {
		n = rb_entry(next, struct hist_entry, rb_node);
		next = rb_next(&n->rb_node);

		hists__delete_entry(hists, n);
	}
}

/*
 * histogram, sorted on item, collects periods
 */

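/*
 * Allocate a new hist_entry from @template, taking private copies of the
 * resources (maps, branch info, raw data) that the template only borrows.
 */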
static struct hist_entry *hist_entry__new(struct hist_entry *template,
					  bool sample_self)
{
	size_t callchain_size = 0;
	struct hist_entry *he;

	if (symbol_conf.use_callchain)
		callchain_size = sizeof(struct callchain_root);

	he = zalloc(sizeof(*he) + callchain_size);

	if (he != NULL) {
		*he = *template;

		if (symbol_conf.cumulate_callchain) {
			he->stat_acc = malloc(sizeof(he->stat));
			if (he->stat_acc == NULL) {
				free(he);
				return NULL;
			}
			memcpy(he->stat_acc, &he->stat, sizeof(he->stat));
			if (!sample_self)
				memset(&he->stat, 0, sizeof(he->stat));
		}

		map__get(he->ms.map);

		if (he->branch_info) {
			/*
			 * This branch info is (a part of) allocated from
			 * sample__resolve_bstack() and will be freed after
			 * adding new entries.  So we need to save a copy.
			 */
			he->branch_info = malloc(sizeof(*he->branch_info));
			if (he->branch_info == NULL) {
				map__zput(he->ms.map);
				free(he->stat_acc);
				free(he);
				return NULL;
			}

			memcpy(he->branch_info, template->branch_info,
			       sizeof(*he->branch_info));

			map__get(he->branch_info->from.map);
			map__get(he->branch_info->to.map);
		}

		if (he->mem_info) {
			map__get(he->mem_info->iaddr.map);
			map__get(he->mem_info->daddr.map);
		}

		if (symbol_conf.use_callchain)
			callchain_init(he->callchain);

		if (he->raw_data) {
			he->raw_data = memdup(he->raw_data, he->raw_size);

			if (he->raw_data == NULL) {
				map__put(he->ms.map);
				if (he->branch_info) {
					map__put(he->branch_info->from.map);
					map__put(he->branch_info->to.map);
					free(he->branch_info);
				}
				if (he->mem_info) {
					map__put(he->mem_info->iaddr.map);
					map__put(he->mem_info->daddr.map);
				}
				free(he->stat_acc);
				free(he);
				return NULL;
			}
		}
		INIT_LIST_HEAD(&he->pairs.node);
		thread__get(he->thread);

		if (!symbol_conf.report_hierarchy)
			he->leaf = true;
	}

	return he;
}

static u8 symbol__parent_filter(const struct symbol *parent)
{
	if (symbol_conf.exclude_other && parent == NULL)
		return 1 << HIST_FILTER__PARENT;
	return 0;
}

static void hist_entry__add_callchain_period(struct hist_entry *he, u64 period)
{
	if (!symbol_conf.use_callchain)
		return;

	he->hists->callchain_period += period;
	if (!he->filtered)
		he->hists->callchain_non_filtered_period += period;
}

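/*
 * Look up @entry in the hists input tree and accumulate the sample into the
 * matching entry, or insert a newly allocated one if no match is found.
 */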
static struct hist_entry *hists__findnew_entry(struct hists *hists,
					       struct hist_entry *entry,
					       struct addr_location *al,
					       bool sample_self)
{
	struct rb_node **p;
	struct rb_node *parent = NULL;
	struct hist_entry *he;
	int64_t cmp;
	u64 period = entry->stat.period;
	u64 weight = entry->stat.weight;

	p = &hists->entries_in->rb_node;

	while (*p != NULL) {
		parent = *p;
		he = rb_entry(parent, struct hist_entry, rb_node_in);

		/*
		 * Make sure that it receives arguments in a same order as
		 * hist_entry__collapse() so that we can use an appropriate
		 * function when searching an entry regardless which sort
		 * keys were used.
		 */
		cmp = hist_entry__cmp(he, entry);

		if (!cmp) {
			if (sample_self) {
				he_stat__add_period(&he->stat, period, weight);
				hist_entry__add_callchain_period(he, period);
			}
			if (symbol_conf.cumulate_callchain)
				he_stat__add_period(he->stat_acc, period, weight);

			/*
			 * This mem info was allocated from sample__resolve_mem
			 * and will not be used anymore.
			 */
			zfree(&entry->mem_info);

			/* If the map of an existing hist_entry has
			 * become out-of-date due to an exec() or
			 * similar, update it.  Otherwise we will
			 * mis-adjust symbol addresses when computing
			 * the history counter to increment.
			 */
			if (he->ms.map != entry->ms.map) {
				map__put(he->ms.map);
				he->ms.map = map__get(entry->ms.map);
			}
			goto out;
		}

		if (cmp < 0)
			p = &(*p)->rb_left;
		else
			p = &(*p)->rb_right;
	}

	he = hist_entry__new(entry, sample_self);
	if (!he)
		return NULL;

	if (sample_self)
		hist_entry__add_callchain_period(he, period);
	hists->nr_entries++;

	rb_link_node(&he->rb_node_in, parent, p);
	rb_insert_color(&he->rb_node_in, hists->entries_in);
out:
	if (sample_self)
		he_stat__add_cpumode_period(&he->stat, al->cpumode, period);
	if (symbol_conf.cumulate_callchain)
		he_stat__add_cpumode_period(he->stat_acc, al->cpumode, period);
	return he;
}

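/*
 * Build a template hist_entry from the resolved sample data and hand it to
 * hists__findnew_entry() for accumulation or insertion.
 */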
struct hist_entry *hists__add_entry(struct hists *hists,
				    struct addr_location *al,
				    struct symbol *sym_parent,
				    struct branch_info *bi,
				    struct mem_info *mi,
				    struct perf_sample *sample,
				    bool sample_self)
{
	struct hist_entry entry = {
		.thread = al->thread,
		.comm = thread__comm(al->thread),
		.ms = {
			.map = al->map,
			.sym = al->sym,
		},
		.socket = al->socket,
		.cpu = al->cpu,
		.cpumode = al->cpumode,
		.ip = al->addr,
		.level = al->level,
		.stat = {
			.nr_events = 1,
			.period = sample->period,
			.weight = sample->weight,
		},
		.parent = sym_parent,
		.filtered = symbol__parent_filter(sym_parent) | al->filtered,
		.hists = hists,
		.branch_info = bi,
		.mem_info = mi,
		.transaction = sample->transaction,
		.raw_data = sample->raw_data,
		.raw_size = sample->raw_size,
	};

	return hists__findnew_entry(hists, &entry, al, sample_self);
}

static int
iter_next_nop_entry(struct hist_entry_iter *iter __maybe_unused,
		    struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_add_next_nop_entry(struct hist_entry_iter *iter __maybe_unused,
			struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_prepare_mem_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct perf_sample *sample = iter->sample;
	struct mem_info *mi;

	mi = sample__resolve_mem(sample, al);
	if (mi == NULL)
		return -ENOMEM;

	iter->priv = mi;
	return 0;
}

static int
iter_add_single_mem_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	u64 cost;
	struct mem_info *mi = iter->priv;
	struct hists *hists = evsel__hists(iter->evsel);
	struct perf_sample *sample = iter->sample;
	struct hist_entry *he;

	if (mi == NULL)
		return -EINVAL;

	cost = sample->weight;
	if (!cost)
		cost = 1;

	/*
	 * must pass period=weight in order to get the correct
	 * sorting from hists__collapse_resort() which is solely
	 * based on periods. We want sorting be done on nr_events * weight
	 * and this is indirectly achieved by passing period=weight here
	 * and the he_stat__add_period() function.
	 */
	sample->period = cost;

	he = hists__add_entry(hists, al, iter->parent, NULL, mi,
			      sample, true);
	if (!he)
		return -ENOMEM;

	iter->he = he;
	return 0;
}

static int
iter_finish_mem_entry(struct hist_entry_iter *iter,
		      struct addr_location *al __maybe_unused)
{
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct hist_entry *he = iter->he;
	int err = -EINVAL;

	if (he == NULL)
		goto out;

	hists__inc_nr_samples(hists, he->filtered);

	err = hist_entry__append_callchain(he, iter->sample);

out:
	/*
	 * We don't need to free iter->priv (mem_info) here since the mem info
	 * was either already freed in hists__findnew_entry() or passed to a
	 * new hist entry by hist_entry__new().
	 */
	iter->priv = NULL;

	iter->he = NULL;
	return err;
}

static int
iter_prepare_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi;
	struct perf_sample *sample = iter->sample;

	bi = sample__resolve_bstack(sample, al);
	if (!bi)
		return -ENOMEM;

	iter->curr = 0;
	iter->total = sample->branch_stack->nr;

	iter->priv = bi;
	return 0;
}

static int
iter_add_single_branch_entry(struct hist_entry_iter *iter,
			     struct addr_location *al __maybe_unused)
{
	/* to avoid calling callback function */
	iter->he = NULL;

	return 0;
}

static int
iter_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi = iter->priv;
	int i = iter->curr;

	if (bi == NULL)
		return 0;

	if (iter->curr >= iter->total)
		return 0;

	al->map = bi[i].to.map;
	al->sym = bi[i].to.sym;
	al->addr = bi[i].to.addr;
	return 1;
}

static int
iter_add_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi;
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct perf_sample *sample = iter->sample;
	struct hist_entry *he = NULL;
	int i = iter->curr;
	int err = 0;

	bi = iter->priv;

	if (iter->hide_unresolved && !(bi[i].from.sym && bi[i].to.sym))
		goto out;

	/*
	 * The report shows the percentage of total branches captured
	 * and not events sampled. Thus we use a pseudo period of 1.
	 */
	sample->period = 1;
	sample->weight = bi->flags.cycles ? bi->flags.cycles : 1;

	he = hists__add_entry(hists, al, iter->parent, &bi[i], NULL,
			      sample, true);
	if (he == NULL)
		return -ENOMEM;

	hists__inc_nr_samples(hists, he->filtered);

out:
	iter->he = he;
	iter->curr++;
	return err;
}

static int
iter_finish_branch_entry(struct hist_entry_iter *iter,
			 struct addr_location *al __maybe_unused)
{
	zfree(&iter->priv);
	iter->he = NULL;

	return iter->curr >= iter->total ? 0 : -1;
}

static int
iter_prepare_normal_entry(struct hist_entry_iter *iter __maybe_unused,
			  struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_add_single_normal_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;
	struct hist_entry *he;

	he = hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL,
			      sample, true);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	return 0;
}

static int
iter_finish_normal_entry(struct hist_entry_iter *iter,
			 struct addr_location *al __maybe_unused)
{
	struct hist_entry *he = iter->he;
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;

	if (he == NULL)
		return 0;

	iter->he = NULL;

	hists__inc_nr_samples(evsel__hists(evsel), he->filtered);

	return hist_entry__append_callchain(he, sample);
}

static int
iter_prepare_cumulative_entry(struct hist_entry_iter *iter,
			      struct addr_location *al __maybe_unused)
{
	struct hist_entry **he_cache;

	callchain_cursor_commit(&callchain_cursor);

	/*
	 * This is for detecting cycles or recursions so that they're
	 * cumulated only one time to prevent entries more than 100%
	 * overhead.
	 */
	he_cache = malloc(sizeof(*he_cache) * (iter->max_stack + 1));
	if (he_cache == NULL)
		return -ENOMEM;

	iter->priv = he_cache;
	iter->curr = 0;

	return 0;
}

static int
iter_add_single_cumulative_entry(struct hist_entry_iter *iter,
				 struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct perf_sample *sample = iter->sample;
	struct hist_entry **he_cache = iter->priv;
	struct hist_entry *he;
	int err = 0;

	he = hists__add_entry(hists, al, iter->parent, NULL, NULL,
			      sample, true);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	he_cache[iter->curr++] = he;

	hist_entry__append_callchain(he, sample);

	/*
	 * We need to re-initialize the cursor since callchain_append()
	 * advanced the cursor to the end.
	 */
	callchain_cursor_commit(&callchain_cursor);

	hists__inc_nr_samples(hists, he->filtered);

	return err;
}

static int
iter_next_cumulative_entry(struct hist_entry_iter *iter,
			   struct addr_location *al)
{
	struct callchain_cursor_node *node;

	node = callchain_cursor_current(&callchain_cursor);
	if (node == NULL)
		return 0;

	return fill_callchain_info(al, node, iter->hide_unresolved);
}

static int
iter_add_next_cumulative_entry(struct hist_entry_iter *iter,
			       struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;
	struct hist_entry **he_cache = iter->priv;
	struct hist_entry *he;
	struct hist_entry he_tmp = {
		.hists = evsel__hists(evsel),
		.cpu = al->cpu,
		.thread = al->thread,
		.comm = thread__comm(al->thread),
		.ip = al->addr,
		.ms = {
			.map = al->map,
			.sym = al->sym,
		},
		.parent = iter->parent,
		.raw_data = sample->raw_data,
		.raw_size = sample->raw_size,
	};
	int i;
	struct callchain_cursor cursor;

	callchain_cursor_snapshot(&cursor, &callchain_cursor);

	callchain_cursor_advance(&callchain_cursor);

	/*
	 * Check if there's duplicate entries in the callchain.
	 * It's possible that it has cycles or recursive calls.
	 */
	for (i = 0; i < iter->curr; i++) {
		if (hist_entry__cmp(he_cache[i], &he_tmp) == 0) {
			/* to avoid calling callback function */
			iter->he = NULL;
			return 0;
		}
	}

	he = hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL,
			      sample, false);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	he_cache[iter->curr++] = he;

	if (symbol_conf.use_callchain)
		callchain_append(he->callchain, &cursor, sample->period);
	return 0;
}

static int
iter_finish_cumulative_entry(struct hist_entry_iter *iter,
			     struct addr_location *al __maybe_unused)
{
	zfree(&iter->priv);
	iter->he = NULL;

	return 0;
}

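/*
 * Per-sample iteration strategies: each ops table below describes how one
 * sample is turned into hist entries (mem, branch stack, plain or
 * cumulative/children mode), driven by hist_entry_iter__add().
 */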
const struct hist_iter_ops hist_iter_mem = {
	.prepare_entry = iter_prepare_mem_entry,
	.add_single_entry = iter_add_single_mem_entry,
	.next_entry = iter_next_nop_entry,
	.add_next_entry = iter_add_next_nop_entry,
	.finish_entry = iter_finish_mem_entry,
};

const struct hist_iter_ops hist_iter_branch = {
	.prepare_entry = iter_prepare_branch_entry,
	.add_single_entry = iter_add_single_branch_entry,
	.next_entry = iter_next_branch_entry,
	.add_next_entry = iter_add_next_branch_entry,
	.finish_entry = iter_finish_branch_entry,
};

const struct hist_iter_ops hist_iter_normal = {
	.prepare_entry = iter_prepare_normal_entry,
	.add_single_entry = iter_add_single_normal_entry,
	.next_entry = iter_next_nop_entry,
	.add_next_entry = iter_add_next_nop_entry,
	.finish_entry = iter_finish_normal_entry,
};

const struct hist_iter_ops hist_iter_cumulative = {
	.prepare_entry = iter_prepare_cumulative_entry,
	.add_single_entry = iter_add_single_cumulative_entry,
	.next_entry = iter_next_cumulative_entry,
	.add_next_entry = iter_add_next_cumulative_entry,
	.finish_entry = iter_finish_cumulative_entry,
};

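/*
 * Drive one sample through the chosen iterator: resolve its callchain,
 * add the entries via the ops callbacks and invoke the optional
 * add_entry_cb for each entry that was added.
 */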
int hist_entry_iter__add(struct hist_entry_iter *iter, struct addr_location *al,
			 int max_stack_depth, void *arg)
{
	int err, err2;

	err = sample__resolve_callchain(iter->sample, &callchain_cursor, &iter->parent,
					iter->evsel, al, max_stack_depth);
	if (err)
		return err;

	iter->max_stack = max_stack_depth;

	err = iter->ops->prepare_entry(iter, al);
	if (err)
		goto out;

	err = iter->ops->add_single_entry(iter, al);
	if (err)
		goto out;

	if (iter->he && iter->add_entry_cb) {
		err = iter->add_entry_cb(iter, al, true, arg);
		if (err)
			goto out;
	}

	while (iter->ops->next_entry(iter, al)) {
		err = iter->ops->add_next_entry(iter, al);
		if (err)
			break;

		if (iter->he && iter->add_entry_cb) {
			err = iter->add_entry_cb(iter, al, false, arg);
			if (err)
				goto out;
		}
	}

out:
	err2 = iter->ops->finish_entry(iter, al);
	if (!err)
		err = err2;

	return err;
}

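/*
 * Compare two hist entries across all configured sort keys; the _collapse
 * variant below uses the collapse callbacks instead of the cmp ones.
 */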
int64_t
hist_entry__cmp(struct hist_entry *left, struct hist_entry *right)
{
	struct hists *hists = left->hists;
	struct perf_hpp_fmt *fmt;
	int64_t cmp = 0;

	hists__for_each_sort_list(hists, fmt) {
		if (perf_hpp__is_dynamic_entry(fmt) &&
		    !perf_hpp__defined_dynamic_entry(fmt, hists))
			continue;

		cmp = fmt->cmp(fmt, left, right);
		if (cmp)
			break;
	}

	return cmp;
}

int64_t
hist_entry__collapse(struct hist_entry *left, struct hist_entry *right)
{
	struct hists *hists = left->hists;
	struct perf_hpp_fmt *fmt;
	int64_t cmp = 0;

	hists__for_each_sort_list(hists, fmt) {
		if (perf_hpp__is_dynamic_entry(fmt) &&
		    !perf_hpp__defined_dynamic_entry(fmt, hists))
			continue;

		cmp = fmt->collapse(fmt, left, right);
		if (cmp)
			break;
	}

	return cmp;
}

Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 1044 | void hist_entry__delete(struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1045 | { |
Arnaldo Carvalho de Melo | f3b623b | 2015-03-02 22:21:35 -0300 | [diff] [blame] | 1046 | thread__zput(he->thread); |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 1047 | map__zput(he->ms.map); |
| 1048 | |
| 1049 | if (he->branch_info) { |
| 1050 | map__zput(he->branch_info->from.map); |
| 1051 | map__zput(he->branch_info->to.map); |
Andi Kleen | 508be0d | 2016-05-20 13:15:08 -0700 | [diff] [blame] | 1052 | free_srcline(he->branch_info->srcline_from); |
| 1053 | free_srcline(he->branch_info->srcline_to); |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 1054 | zfree(&he->branch_info); |
| 1055 | } |
| 1056 | |
| 1057 | if (he->mem_info) { |
| 1058 | map__zput(he->mem_info->iaddr.map); |
| 1059 | map__zput(he->mem_info->daddr.map); |
| 1060 | zfree(&he->mem_info); |
| 1061 | } |
| 1062 | |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 1063 | zfree(&he->stat_acc); |
Namhyung Kim | f048d54 | 2013-09-11 14:09:28 +0900 | [diff] [blame] | 1064 | free_srcline(he->srcline); |
Andi Kleen | 31191a8 | 2015-08-07 15:54:24 -0700 | [diff] [blame] | 1065 | if (he->srcfile && he->srcfile[0]) |
| 1066 | free(he->srcfile); |
Namhyung Kim | d114960 | 2014-12-30 14:38:13 +0900 | [diff] [blame] | 1067 | free_callchain(he->callchain); |
Namhyung Kim | 60517d2 | 2015-12-23 02:07:03 +0900 | [diff] [blame] | 1068 | free(he->trace_output); |
Namhyung Kim | 7239283 | 2015-12-24 11:16:17 +0900 | [diff] [blame] | 1069 | free(he->raw_data); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1070 | free(he); |
| 1071 | } |
| 1072 | |
| 1073 | /* |
Arnaldo Carvalho de Melo | 89fee70 | 2016-02-11 17:14:13 -0300 | [diff] [blame] | 1074 | * If this is not the last column, then we need to pad it according to the |
| 1075 |  * pre-calculated max length for this column, otherwise don't bother adding |
| 1076 |  * spaces because that would break viewing this with, for instance, 'less', |
| 1077 |  * which would show tons of trailing spaces when a long C++ demangled method |
| 1078 |  * name is sampled. |
| 1079 | */ |
| 1080 | int hist_entry__snprintf_alignment(struct hist_entry *he, struct perf_hpp *hpp, |
| 1081 | struct perf_hpp_fmt *fmt, int printed) |
| 1082 | { |
| 1083 | if (!list_is_last(&fmt->list, &he->hists->hpp_list->fields)) { |
Jiri Olsa | da1b040 | 2016-06-14 20:19:20 +0200 | [diff] [blame] | 1084 | const int width = fmt->width(fmt, hpp, he->hists); |
Arnaldo Carvalho de Melo | 89fee70 | 2016-02-11 17:14:13 -0300 | [diff] [blame] | 1085 | if (printed < width) { |
| 1086 | advance_hpp(hpp, printed); |
| 1087 | printed = scnprintf(hpp->buf, hpp->size, "%-*s", width - printed, " "); |
| 1088 | } |
| 1089 | } |
| 1090 | |
| 1091 | return printed; |
| 1092 | } |
| 1093 | |
| 1094 | /* |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1095 | * collapse the histogram |
| 1096 | */ |
| 1097 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1098 | static void hists__apply_filters(struct hists *hists, struct hist_entry *he); |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1099 | static void hists__remove_entry_filter(struct hists *hists, struct hist_entry *he, |
| 1100 | enum hist_filter type); |
| 1101 | |
| 1102 | typedef bool (*fmt_chk_fn)(struct perf_hpp_fmt *fmt); |
| 1103 | |
| 1104 | static bool check_thread_entry(struct perf_hpp_fmt *fmt) |
| 1105 | { |
| 1106 | return perf_hpp__is_thread_entry(fmt) || perf_hpp__is_comm_entry(fmt); |
| 1107 | } |
| 1108 | |
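| | /* |
| |  * With a filter of the given type active, check whether this hierarchy |
| |  * entry's own formats can match it; if so, clear the filter bit on its |
| |  * parents, otherwise inherit the parent's bit. |
| |  */ |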
| 1109 | static void hist_entry__check_and_remove_filter(struct hist_entry *he, |
| 1110 | enum hist_filter type, |
| 1111 | fmt_chk_fn check) |
| 1112 | { |
| 1113 | struct perf_hpp_fmt *fmt; |
| 1114 | bool type_match = false; |
| 1115 | struct hist_entry *parent = he->parent_he; |
| 1116 | |
| 1117 | switch (type) { |
| 1118 | case HIST_FILTER__THREAD: |
| 1119 | if (symbol_conf.comm_list == NULL && |
| 1120 | symbol_conf.pid_list == NULL && |
| 1121 | symbol_conf.tid_list == NULL) |
| 1122 | return; |
| 1123 | break; |
| 1124 | case HIST_FILTER__DSO: |
| 1125 | if (symbol_conf.dso_list == NULL) |
| 1126 | return; |
| 1127 | break; |
| 1128 | case HIST_FILTER__SYMBOL: |
| 1129 | if (symbol_conf.sym_list == NULL) |
| 1130 | return; |
| 1131 | break; |
| 1132 | case HIST_FILTER__PARENT: |
| 1133 | case HIST_FILTER__GUEST: |
| 1134 | case HIST_FILTER__HOST: |
| 1135 | case HIST_FILTER__SOCKET: |
| 1136 | default: |
| 1137 | return; |
| 1138 | } |
| 1139 | |
| 1140 | 	/* if it's filtered by its own fmt, it has to have filter bits */ |
| 1141 | perf_hpp_list__for_each_format(he->hpp_list, fmt) { |
| 1142 | if (check(fmt)) { |
| 1143 | type_match = true; |
| 1144 | break; |
| 1145 | } |
| 1146 | } |
| 1147 | |
| 1148 | if (type_match) { |
| 1149 | /* |
| 1150 | 		 * If the filter is for the current level entry, propagate |
| 1151 | 		 * the filter marker to its parents. The marker bit was |
| 1152 | 		 * already set by default, so it only needs to be cleared |
| 1153 | 		 * for non-filtered entries. |
| 1154 | */ |
| 1155 | if (!(he->filtered & (1 << type))) { |
| 1156 | while (parent) { |
| 1157 | parent->filtered &= ~(1 << type); |
| 1158 | parent = parent->parent_he; |
| 1159 | } |
| 1160 | } |
| 1161 | } else { |
| 1162 | /* |
| 1163 | 		 * If the current entry doesn't have matching formats, set |
| 1164 | 		 * the filter marker for upper level entries. It will be |
| 1165 | 		 * cleared if its lower level entries are not filtered. |
| 1166 | * |
| 1167 | * For lower-level entries, it inherits parent's |
| 1168 | * filter bit so that lower level entries of a |
| 1169 | * non-filtered entry won't set the filter marker. |
| 1170 | */ |
| 1171 | if (parent == NULL) |
| 1172 | he->filtered |= (1 << type); |
| 1173 | else |
| 1174 | he->filtered |= (parent->filtered & (1 << type)); |
| 1175 | } |
| 1176 | } |
| 1177 | |
| 1178 | static void hist_entry__apply_hierarchy_filters(struct hist_entry *he) |
| 1179 | { |
| 1180 | hist_entry__check_and_remove_filter(he, HIST_FILTER__THREAD, |
| 1181 | check_thread_entry); |
| 1182 | |
| 1183 | hist_entry__check_and_remove_filter(he, HIST_FILTER__DSO, |
| 1184 | perf_hpp__is_dso_entry); |
| 1185 | |
| 1186 | hist_entry__check_and_remove_filter(he, HIST_FILTER__SYMBOL, |
| 1187 | perf_hpp__is_sym_entry); |
| 1188 | |
| 1189 | hists__apply_filters(he->hists, he); |
| 1190 | } |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1191 | |
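| | /* |
| |  * Insert (or merge) a copy of 'he' at one level of the hierarchy tree, |
| |  * keyed only by the sort formats belonging to that level. |
| |  */ |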
| 1192 | static struct hist_entry *hierarchy_insert_entry(struct hists *hists, |
| 1193 | struct rb_root *root, |
| 1194 | struct hist_entry *he, |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1195 | struct hist_entry *parent_he, |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1196 | struct perf_hpp_list *hpp_list) |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1197 | { |
| 1198 | struct rb_node **p = &root->rb_node; |
| 1199 | struct rb_node *parent = NULL; |
| 1200 | struct hist_entry *iter, *new; |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1201 | struct perf_hpp_fmt *fmt; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1202 | int64_t cmp; |
| 1203 | |
| 1204 | while (*p != NULL) { |
| 1205 | parent = *p; |
| 1206 | iter = rb_entry(parent, struct hist_entry, rb_node_in); |
| 1207 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1208 | cmp = 0; |
| 1209 | perf_hpp_list__for_each_sort_list(hpp_list, fmt) { |
| 1210 | cmp = fmt->collapse(fmt, iter, he); |
| 1211 | if (cmp) |
| 1212 | break; |
| 1213 | } |
| 1214 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1215 | if (!cmp) { |
| 1216 | he_stat__add_stat(&iter->stat, &he->stat); |
| 1217 | return iter; |
| 1218 | } |
| 1219 | |
| 1220 | if (cmp < 0) |
| 1221 | p = &parent->rb_left; |
| 1222 | else |
| 1223 | p = &parent->rb_right; |
| 1224 | } |
| 1225 | |
| 1226 | new = hist_entry__new(he, true); |
| 1227 | if (new == NULL) |
| 1228 | return NULL; |
| 1229 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1230 | hists->nr_entries++; |
| 1231 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1232 | /* save related format list for output */ |
| 1233 | new->hpp_list = hpp_list; |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1234 | new->parent_he = parent_he; |
| 1235 | |
| 1236 | hist_entry__apply_hierarchy_filters(new); |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1237 | |
| 1238 | /* some fields are now passed to 'new' */ |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1239 | perf_hpp_list__for_each_sort_list(hpp_list, fmt) { |
| 1240 | if (perf_hpp__is_trace_entry(fmt) || perf_hpp__is_dynamic_entry(fmt)) |
| 1241 | he->trace_output = NULL; |
| 1242 | else |
| 1243 | new->trace_output = NULL; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1244 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1245 | if (perf_hpp__is_srcline_entry(fmt)) |
| 1246 | he->srcline = NULL; |
| 1247 | else |
| 1248 | new->srcline = NULL; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1249 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1250 | if (perf_hpp__is_srcfile_entry(fmt)) |
| 1251 | he->srcfile = NULL; |
| 1252 | else |
| 1253 | new->srcfile = NULL; |
| 1254 | } |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1255 | |
| 1256 | rb_link_node(&new->rb_node_in, parent, p); |
| 1257 | rb_insert_color(&new->rb_node_in, root); |
| 1258 | return new; |
| 1259 | } |
| 1260 | |
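| | /* |
| |  * In hierarchy mode an input entry expands into one entry per sort level; |
| |  * walk the per-level format lists and link each copy under its parent. |
| |  */ |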
| 1261 | static int hists__hierarchy_insert_entry(struct hists *hists, |
| 1262 | struct rb_root *root, |
| 1263 | struct hist_entry *he) |
| 1264 | { |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1265 | struct perf_hpp_list_node *node; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1266 | struct hist_entry *new_he = NULL; |
| 1267 | struct hist_entry *parent = NULL; |
| 1268 | int depth = 0; |
| 1269 | int ret = 0; |
| 1270 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1271 | list_for_each_entry(node, &hists->hpp_formats, list) { |
| 1272 | /* skip period (overhead) and elided columns */ |
| 1273 | if (node->level == 0 || node->skip) |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1274 | continue; |
| 1275 | |
| 1276 | /* insert copy of 'he' for each fmt into the hierarchy */ |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1277 | new_he = hierarchy_insert_entry(hists, root, he, parent, &node->hpp); |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1278 | if (new_he == NULL) { |
| 1279 | ret = -1; |
| 1280 | break; |
| 1281 | } |
| 1282 | |
| 1283 | root = &new_he->hroot_in; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1284 | new_he->depth = depth++; |
| 1285 | parent = new_he; |
| 1286 | } |
| 1287 | |
| 1288 | if (new_he) { |
| 1289 | new_he->leaf = true; |
| 1290 | |
| 1291 | if (symbol_conf.use_callchain) { |
| 1292 | callchain_cursor_reset(&callchain_cursor); |
| 1293 | if (callchain_merge(&callchain_cursor, |
| 1294 | new_he->callchain, |
| 1295 | he->callchain) < 0) |
| 1296 | ret = -1; |
| 1297 | } |
| 1298 | } |
| 1299 | |
| 1300 | /* 'he' is no longer used */ |
| 1301 | hist_entry__delete(he); |
| 1302 | |
| 1303 | /* return 0 (or -1) since it already applied filters */ |
| 1304 | return ret; |
| 1305 | } |
| 1306 | |
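| | /* |
| |  * Returns 1 if a new node was inserted, 0 if 'he' was merged into an |
| |  * existing entry (and freed), or -1 on callchain merge failure. |
| |  */ |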
Jiri Olsa | 592dac6 | 2016-03-24 13:52:17 +0100 | [diff] [blame] | 1307 | static int hists__collapse_insert_entry(struct hists *hists, |
| 1308 | struct rb_root *root, |
| 1309 | struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1310 | { |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 1311 | struct rb_node **p = &root->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1312 | struct rb_node *parent = NULL; |
| 1313 | struct hist_entry *iter; |
| 1314 | int64_t cmp; |
| 1315 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1316 | if (symbol_conf.report_hierarchy) |
| 1317 | return hists__hierarchy_insert_entry(hists, root, he); |
| 1318 | |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1319 | while (*p != NULL) { |
| 1320 | parent = *p; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1321 | iter = rb_entry(parent, struct hist_entry, rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1322 | |
| 1323 | cmp = hist_entry__collapse(iter, he); |
| 1324 | |
| 1325 | if (!cmp) { |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1326 | int ret = 0; |
| 1327 | |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 1328 | he_stat__add_stat(&iter->stat, &he->stat); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 1329 | if (symbol_conf.cumulate_callchain) |
| 1330 | he_stat__add_stat(iter->stat_acc, he->stat_acc); |
Namhyung Kim | 9ec6097 | 2012-09-26 16:47:28 +0900 | [diff] [blame] | 1331 | |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 1332 | if (symbol_conf.use_callchain) { |
Namhyung Kim | 4726064 | 2012-05-31 14:43:26 +0900 | [diff] [blame] | 1333 | callchain_cursor_reset(&callchain_cursor); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1334 | if (callchain_merge(&callchain_cursor, |
| 1335 | iter->callchain, |
| 1336 | he->callchain) < 0) |
| 1337 | ret = -1; |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 1338 | } |
Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 1339 | hist_entry__delete(he); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1340 | return ret; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1341 | } |
| 1342 | |
| 1343 | if (cmp < 0) |
| 1344 | p = &(*p)->rb_left; |
| 1345 | else |
| 1346 | p = &(*p)->rb_right; |
| 1347 | } |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1348 | hists->nr_entries++; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1349 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1350 | rb_link_node(&he->rb_node_in, parent, p); |
| 1351 | rb_insert_color(&he->rb_node_in, root); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1352 | return 1; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1353 | } |
| 1354 | |
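| | /* |
| |  * hists->entries_in is double buffered: hand back the tree that was being |
| |  * filled and flip new insertions over to the other root, so collapsing can |
| |  * proceed while new samples are still being added (e.g. in perf top). |
| |  */ |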
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 1355 | struct rb_root *hists__get_rotate_entries_in(struct hists *hists) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1356 | { |
| 1357 | struct rb_root *root; |
| 1358 | |
| 1359 | pthread_mutex_lock(&hists->lock); |
| 1360 | |
| 1361 | root = hists->entries_in; |
| 1362 | if (++hists->entries_in > &hists->entries_in_array[1]) |
| 1363 | hists->entries_in = &hists->entries_in_array[0]; |
| 1364 | |
| 1365 | pthread_mutex_unlock(&hists->lock); |
| 1366 | |
| 1367 | return root; |
| 1368 | } |
| 1369 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1370 | static void hists__apply_filters(struct hists *hists, struct hist_entry *he) |
| 1371 | { |
| 1372 | hists__filter_entry_by_dso(hists, he); |
| 1373 | hists__filter_entry_by_thread(hists, he); |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1374 | hists__filter_entry_by_symbol(hists, he); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1375 | hists__filter_entry_by_socket(hists, he); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1376 | } |
| 1377 | |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1378 | int hists__collapse_resort(struct hists *hists, struct ui_progress *prog) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1379 | { |
| 1380 | struct rb_root *root; |
| 1381 | struct rb_node *next; |
| 1382 | struct hist_entry *n; |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1383 | int ret; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1384 | |
Jiri Olsa | 5222503 | 2016-05-03 13:54:42 +0200 | [diff] [blame] | 1385 | if (!hists__has(hists, need_collapse)) |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1386 | return 0; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1387 | |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1388 | hists->nr_entries = 0; |
| 1389 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1390 | root = hists__get_rotate_entries_in(hists); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1391 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1392 | next = rb_first(root); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1393 | |
| 1394 | while (next) { |
Arnaldo Carvalho de Melo | 33e940a | 2013-09-17 16:34:28 -0300 | [diff] [blame] | 1395 | if (session_done()) |
| 1396 | break; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1397 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1398 | next = rb_next(&n->rb_node_in); |
| 1399 | |
| 1400 | rb_erase(&n->rb_node_in, root); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1401 | ret = hists__collapse_insert_entry(hists, &hists->entries_collapsed, n); |
| 1402 | if (ret < 0) |
| 1403 | return -1; |
| 1404 | |
| 1405 | if (ret) { |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1406 | /* |
| 1407 | * If it wasn't combined with one of the entries already |
| 1408 | * collapsed, we need to apply the filters that may have |
| 1409 | * been set by, say, the hist_browser. |
| 1410 | */ |
| 1411 | hists__apply_filters(hists, n); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1412 | } |
Namhyung Kim | c1fb565 | 2013-10-11 14:15:38 +0900 | [diff] [blame] | 1413 | if (prog) |
| 1414 | ui_progress__update(prog, 1); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1415 | } |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1416 | return 0; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1417 | } |
| 1418 | |
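| | /* Ordering used when building the final sorted output tree. */ |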
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1419 | static int hist_entry__sort(struct hist_entry *a, struct hist_entry *b) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1420 | { |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1421 | struct hists *hists = a->hists; |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1422 | struct perf_hpp_fmt *fmt; |
| 1423 | int64_t cmp = 0; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1424 | |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1425 | hists__for_each_sort_list(hists, fmt) { |
Namhyung Kim | 361459f | 2015-12-23 02:07:08 +0900 | [diff] [blame] | 1426 | if (perf_hpp__should_skip(fmt, a->hists)) |
Namhyung Kim | e67d49a | 2014-03-18 13:00:59 +0900 | [diff] [blame] | 1427 | continue; |
| 1428 | |
Namhyung Kim | 87bbdf7 | 2015-01-08 09:45:46 +0900 | [diff] [blame] | 1429 | cmp = fmt->sort(fmt, a, b); |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1430 | if (cmp) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1431 | break; |
| 1432 | } |
| 1433 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1434 | return cmp; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1435 | } |
| 1436 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1437 | static void hists__reset_filter_stats(struct hists *hists) |
| 1438 | { |
| 1439 | hists->nr_non_filtered_entries = 0; |
| 1440 | hists->stats.total_non_filtered_period = 0; |
| 1441 | } |
| 1442 | |
| 1443 | void hists__reset_stats(struct hists *hists) |
| 1444 | { |
| 1445 | hists->nr_entries = 0; |
| 1446 | hists->stats.total_period = 0; |
| 1447 | |
| 1448 | hists__reset_filter_stats(hists); |
| 1449 | } |
| 1450 | |
| 1451 | static void hists__inc_filter_stats(struct hists *hists, struct hist_entry *h) |
| 1452 | { |
| 1453 | hists->nr_non_filtered_entries++; |
| 1454 | hists->stats.total_non_filtered_period += h->stat.period; |
| 1455 | } |
| 1456 | |
| 1457 | void hists__inc_stats(struct hists *hists, struct hist_entry *h) |
| 1458 | { |
| 1459 | if (!h->filtered) |
| 1460 | hists__inc_filter_stats(hists, h); |
| 1461 | |
| 1462 | hists->nr_entries++; |
| 1463 | hists->stats.total_period += h->stat.period; |
| 1464 | } |
| 1465 | |
Namhyung Kim | f7fb538 | 2016-03-09 22:47:02 +0900 | [diff] [blame] | 1466 | static void hierarchy_recalc_total_periods(struct hists *hists) |
| 1467 | { |
| 1468 | struct rb_node *node; |
| 1469 | struct hist_entry *he; |
| 1470 | |
| 1471 | node = rb_first(&hists->entries); |
| 1472 | |
| 1473 | hists->stats.total_period = 0; |
| 1474 | hists->stats.total_non_filtered_period = 0; |
| 1475 | |
| 1476 | /* |
| 1477 | 	 * recalculate the total period using top-level entries only, |
| 1478 | 	 * since lower level entries only see non-filtered entries |
| 1479 | 	 * while upper level entries have the sum of both. |
| 1480 | */ |
| 1481 | while (node) { |
| 1482 | he = rb_entry(node, struct hist_entry, rb_node); |
| 1483 | node = rb_next(node); |
| 1484 | |
| 1485 | hists->stats.total_period += he->stat.period; |
| 1486 | if (!he->filtered) |
| 1487 | hists->stats.total_non_filtered_period += he->stat.period; |
| 1488 | } |
| 1489 | } |
| 1490 | |
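| | /* Link a collapsed hierarchy entry into the sorted output tree. */ |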
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1491 | static void hierarchy_insert_output_entry(struct rb_root *root, |
| 1492 | struct hist_entry *he) |
| 1493 | { |
| 1494 | struct rb_node **p = &root->rb_node; |
| 1495 | struct rb_node *parent = NULL; |
| 1496 | struct hist_entry *iter; |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1497 | struct perf_hpp_fmt *fmt; |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1498 | |
| 1499 | while (*p != NULL) { |
| 1500 | parent = *p; |
| 1501 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1502 | |
| 1503 | if (hist_entry__sort(he, iter) > 0) |
| 1504 | p = &parent->rb_left; |
| 1505 | else |
| 1506 | p = &parent->rb_right; |
| 1507 | } |
| 1508 | |
| 1509 | rb_link_node(&he->rb_node, parent, p); |
| 1510 | rb_insert_color(&he->rb_node, root); |
Namhyung Kim | abab5e7 | 2016-02-27 03:52:47 +0900 | [diff] [blame] | 1511 | |
| 1512 | /* update column width of dynamic entry */ |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1513 | perf_hpp_list__for_each_sort_list(he->hpp_list, fmt) { |
| 1514 | if (perf_hpp__is_dynamic_entry(fmt)) |
| 1515 | fmt->sort(fmt, he, NULL); |
| 1516 | } |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1517 | } |
| 1518 | |
| 1519 | static void hists__hierarchy_output_resort(struct hists *hists, |
| 1520 | struct ui_progress *prog, |
| 1521 | struct rb_root *root_in, |
| 1522 | struct rb_root *root_out, |
| 1523 | u64 min_callchain_hits, |
| 1524 | bool use_callchain) |
| 1525 | { |
| 1526 | struct rb_node *node; |
| 1527 | struct hist_entry *he; |
| 1528 | |
| 1529 | *root_out = RB_ROOT; |
| 1530 | node = rb_first(root_in); |
| 1531 | |
| 1532 | while (node) { |
| 1533 | he = rb_entry(node, struct hist_entry, rb_node_in); |
| 1534 | node = rb_next(node); |
| 1535 | |
| 1536 | hierarchy_insert_output_entry(root_out, he); |
| 1537 | |
| 1538 | if (prog) |
| 1539 | ui_progress__update(prog, 1); |
| 1540 | |
| 1541 | if (!he->leaf) { |
| 1542 | hists__hierarchy_output_resort(hists, prog, |
| 1543 | &he->hroot_in, |
| 1544 | &he->hroot_out, |
| 1545 | min_callchain_hits, |
| 1546 | use_callchain); |
| 1547 | hists->nr_entries++; |
| 1548 | if (!he->filtered) { |
| 1549 | hists->nr_non_filtered_entries++; |
| 1550 | hists__calc_col_len(hists, he); |
| 1551 | } |
| 1552 | |
| 1553 | continue; |
| 1554 | } |
| 1555 | |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1556 | if (!use_callchain) |
| 1557 | continue; |
| 1558 | |
| 1559 | if (callchain_param.mode == CHAIN_GRAPH_REL) { |
| 1560 | u64 total = he->stat.period; |
| 1561 | |
| 1562 | if (symbol_conf.cumulate_callchain) |
| 1563 | total = he->stat_acc->period; |
| 1564 | |
| 1565 | min_callchain_hits = total * (callchain_param.min_percent / 100); |
| 1566 | } |
| 1567 | |
| 1568 | callchain_param.sort(&he->sorted_chain, he->callchain, |
| 1569 | min_callchain_hits, &callchain_param); |
| 1570 | } |
| 1571 | } |
| 1572 | |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1573 | static void __hists__insert_output_entry(struct rb_root *entries, |
| 1574 | struct hist_entry *he, |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1575 | u64 min_callchain_hits, |
| 1576 | bool use_callchain) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1577 | { |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1578 | struct rb_node **p = &entries->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1579 | struct rb_node *parent = NULL; |
| 1580 | struct hist_entry *iter; |
Namhyung Kim | abab5e7 | 2016-02-27 03:52:47 +0900 | [diff] [blame] | 1581 | struct perf_hpp_fmt *fmt; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1582 | |
Namhyung Kim | 744070e | 2016-01-28 00:40:48 +0900 | [diff] [blame] | 1583 | if (use_callchain) { |
| 1584 | if (callchain_param.mode == CHAIN_GRAPH_REL) { |
| 1585 | u64 total = he->stat.period; |
| 1586 | |
| 1587 | if (symbol_conf.cumulate_callchain) |
| 1588 | total = he->stat_acc->period; |
| 1589 | |
| 1590 | min_callchain_hits = total * (callchain_param.min_percent / 100); |
| 1591 | } |
Arnaldo Carvalho de Melo | b9fb930 | 2010-04-02 09:50:42 -0300 | [diff] [blame] | 1592 | callchain_param.sort(&he->sorted_chain, he->callchain, |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1593 | min_callchain_hits, &callchain_param); |
Namhyung Kim | 744070e | 2016-01-28 00:40:48 +0900 | [diff] [blame] | 1594 | } |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1595 | |
| 1596 | while (*p != NULL) { |
| 1597 | parent = *p; |
| 1598 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1599 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1600 | if (hist_entry__sort(he, iter) > 0) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1601 | p = &(*p)->rb_left; |
| 1602 | else |
| 1603 | p = &(*p)->rb_right; |
| 1604 | } |
| 1605 | |
| 1606 | rb_link_node(&he->rb_node, parent, p); |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1607 | rb_insert_color(&he->rb_node, entries); |
Namhyung Kim | abab5e7 | 2016-02-27 03:52:47 +0900 | [diff] [blame] | 1608 | |
| 1609 | perf_hpp_list__for_each_sort_list(&perf_hpp_list, fmt) { |
| 1610 | if (perf_hpp__is_dynamic_entry(fmt) && |
| 1611 | perf_hpp__defined_dynamic_entry(fmt, he->hists)) |
| 1612 | fmt->sort(fmt, he, NULL); /* update column width */ |
| 1613 | } |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1614 | } |
| 1615 | |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1616 | static void output_resort(struct hists *hists, struct ui_progress *prog, |
| 1617 | bool use_callchain) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1618 | { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1619 | struct rb_root *root; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1620 | struct rb_node *next; |
| 1621 | struct hist_entry *n; |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 1622 | u64 callchain_total; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1623 | u64 min_callchain_hits; |
| 1624 | |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 1625 | callchain_total = hists->callchain_period; |
| 1626 | if (symbol_conf.filter_relative) |
| 1627 | callchain_total = hists->callchain_non_filtered_period; |
| 1628 | |
| 1629 | min_callchain_hits = callchain_total * (callchain_param.min_percent / 100); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1630 | |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1631 | hists__reset_stats(hists); |
| 1632 | hists__reset_col_len(hists); |
| 1633 | |
| 1634 | if (symbol_conf.report_hierarchy) { |
Namhyung Kim | f7fb538 | 2016-03-09 22:47:02 +0900 | [diff] [blame] | 1635 | hists__hierarchy_output_resort(hists, prog, |
| 1636 | &hists->entries_collapsed, |
| 1637 | &hists->entries, |
| 1638 | min_callchain_hits, |
| 1639 | use_callchain); |
| 1640 | hierarchy_recalc_total_periods(hists); |
| 1641 | return; |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1642 | } |
| 1643 | |
Jiri Olsa | 5222503 | 2016-05-03 13:54:42 +0200 | [diff] [blame] | 1644 | if (hists__has(hists, need_collapse)) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1645 | root = &hists->entries_collapsed; |
| 1646 | else |
| 1647 | root = hists->entries_in; |
| 1648 | |
| 1649 | next = rb_first(root); |
| 1650 | hists->entries = RB_ROOT; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1651 | |
| 1652 | while (next) { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1653 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1654 | next = rb_next(&n->rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1655 | |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1656 | __hists__insert_output_entry(&hists->entries, n, min_callchain_hits, use_callchain); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 1657 | hists__inc_stats(hists, n); |
Namhyung Kim | ae993ef | 2014-04-24 16:25:19 +0900 | [diff] [blame] | 1658 | |
| 1659 | if (!n->filtered) |
| 1660 | hists__calc_col_len(hists, n); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1661 | |
| 1662 | if (prog) |
| 1663 | ui_progress__update(prog, 1); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1664 | } |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1665 | } |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 1666 | |
Jiri Olsa | 452ce03 | 2016-01-18 10:24:00 +0100 | [diff] [blame] | 1667 | void perf_evsel__output_resort(struct perf_evsel *evsel, struct ui_progress *prog) |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1668 | { |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1669 | bool use_callchain; |
| 1670 | |
| 1671 | if (evsel && symbol_conf.use_callchain && !symbol_conf.show_ref_callgraph) |
| 1672 | use_callchain = evsel->attr.sample_type & PERF_SAMPLE_CALLCHAIN; |
| 1673 | else |
| 1674 | use_callchain = symbol_conf.use_callchain; |
| 1675 | |
Jiri Olsa | 452ce03 | 2016-01-18 10:24:00 +0100 | [diff] [blame] | 1676 | output_resort(evsel__hists(evsel), prog, use_callchain); |
| 1677 | } |
| 1678 | |
| 1679 | void hists__output_resort(struct hists *hists, struct ui_progress *prog) |
| 1680 | { |
| 1681 | output_resort(hists, prog, symbol_conf.use_callchain); |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1682 | } |
| 1683 | |
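| | /* |
| |  * Helpers for walking the hierarchy output tree in display order: descend |
| |  * into an unfolded entry's children, otherwise move to the next sibling, |
| |  * climbing back up when a subtree is exhausted. |
| |  */ |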
Namhyung Kim | 8c01872 | 2016-02-25 00:13:36 +0900 | [diff] [blame] | 1684 | static bool can_goto_child(struct hist_entry *he, enum hierarchy_move_dir hmd) |
| 1685 | { |
| 1686 | if (he->leaf || hmd == HMD_FORCE_SIBLING) |
| 1687 | return false; |
| 1688 | |
| 1689 | if (he->unfolded || hmd == HMD_FORCE_CHILD) |
| 1690 | return true; |
| 1691 | |
| 1692 | return false; |
| 1693 | } |
| 1694 | |
| 1695 | struct rb_node *rb_hierarchy_last(struct rb_node *node) |
| 1696 | { |
| 1697 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1698 | |
| 1699 | while (can_goto_child(he, HMD_NORMAL)) { |
| 1700 | node = rb_last(&he->hroot_out); |
| 1701 | he = rb_entry(node, struct hist_entry, rb_node); |
| 1702 | } |
| 1703 | return node; |
| 1704 | } |
| 1705 | |
| 1706 | struct rb_node *__rb_hierarchy_next(struct rb_node *node, enum hierarchy_move_dir hmd) |
| 1707 | { |
| 1708 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1709 | |
| 1710 | if (can_goto_child(he, hmd)) |
| 1711 | node = rb_first(&he->hroot_out); |
| 1712 | else |
| 1713 | node = rb_next(node); |
| 1714 | |
| 1715 | while (node == NULL) { |
| 1716 | he = he->parent_he; |
| 1717 | if (he == NULL) |
| 1718 | break; |
| 1719 | |
| 1720 | node = rb_next(&he->rb_node); |
| 1721 | } |
| 1722 | return node; |
| 1723 | } |
| 1724 | |
| 1725 | struct rb_node *rb_hierarchy_prev(struct rb_node *node) |
| 1726 | { |
| 1727 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1728 | |
| 1729 | node = rb_prev(node); |
| 1730 | if (node) |
| 1731 | return rb_hierarchy_last(node); |
| 1732 | |
| 1733 | he = he->parent_he; |
| 1734 | if (he == NULL) |
| 1735 | return NULL; |
| 1736 | |
| 1737 | return &he->rb_node; |
| 1738 | } |
| 1739 | |
Namhyung Kim | a7b5895 | 2016-02-26 21:13:16 +0900 | [diff] [blame] | 1740 | bool hist_entry__has_hierarchy_children(struct hist_entry *he, float limit) |
| 1741 | { |
| 1742 | struct rb_node *node; |
| 1743 | struct hist_entry *child; |
| 1744 | float percent; |
| 1745 | |
| 1746 | if (he->leaf) |
| 1747 | return false; |
| 1748 | |
| 1749 | node = rb_first(&he->hroot_out); |
| 1750 | child = rb_entry(node, struct hist_entry, rb_node); |
| 1751 | |
| 1752 | while (node && child->filtered) { |
| 1753 | node = rb_next(node); |
| 1754 | child = rb_entry(node, struct hist_entry, rb_node); |
| 1755 | } |
| 1756 | |
| 1757 | if (node) |
| 1758 | percent = hist_entry__get_percent_limit(child); |
| 1759 | else |
| 1760 | percent = 0; |
| 1761 | |
| 1762 | return node && percent >= limit; |
| 1763 | } |
| 1764 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1765 | static void hists__remove_entry_filter(struct hists *hists, struct hist_entry *h, |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1766 | enum hist_filter filter) |
| 1767 | { |
| 1768 | h->filtered &= ~(1 << filter); |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1769 | |
| 1770 | if (symbol_conf.report_hierarchy) { |
| 1771 | struct hist_entry *parent = h->parent_he; |
| 1772 | |
| 1773 | while (parent) { |
| 1774 | he_stat__add_stat(&parent->stat, &h->stat); |
| 1775 | |
| 1776 | parent->filtered &= ~(1 << filter); |
| 1777 | |
| 1778 | if (parent->filtered) |
| 1779 | goto next; |
| 1780 | |
| 1781 | /* force fold unfiltered entry for simplicity */ |
| 1782 | parent->unfolded = false; |
Namhyung Kim | 79dded8 | 2016-02-26 21:13:19 +0900 | [diff] [blame] | 1783 | parent->has_no_entry = false; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1784 | parent->row_offset = 0; |
| 1785 | parent->nr_rows = 0; |
| 1786 | next: |
| 1787 | parent = parent->parent_he; |
| 1788 | } |
| 1789 | } |
| 1790 | |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1791 | if (h->filtered) |
| 1792 | return; |
| 1793 | |
Namhyung Kim | 87e90f4 | 2014-04-24 16:44:16 +0900 | [diff] [blame] | 1794 | /* force fold unfiltered entry for simplicity */ |
Namhyung Kim | 3698dab | 2015-05-05 23:55:46 +0900 | [diff] [blame] | 1795 | h->unfolded = false; |
Namhyung Kim | 79dded8 | 2016-02-26 21:13:19 +0900 | [diff] [blame] | 1796 | h->has_no_entry = false; |
Arnaldo Carvalho de Melo | 0f0cbf7 | 2010-07-26 17:13:40 -0300 | [diff] [blame] | 1797 | h->row_offset = 0; |
He Kuang | a8cd1f4 | 2015-03-11 20:36:03 +0800 | [diff] [blame] | 1798 | h->nr_rows = 0; |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1799 | |
Namhyung Kim | 1ab1fa5 | 2013-12-26 15:11:52 +0900 | [diff] [blame] | 1800 | hists->stats.nr_non_filtered_samples += h->stat.nr_events; |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1801 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1802 | hists__inc_filter_stats(hists, h); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1803 | hists__calc_col_len(hists, h); |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1804 | } |
| 1805 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1806 | |
| 1807 | static bool hists__filter_entry_by_dso(struct hists *hists, |
| 1808 | struct hist_entry *he) |
| 1809 | { |
| 1810 | if (hists->dso_filter != NULL && |
| 1811 | (he->ms.map == NULL || he->ms.map->dso != hists->dso_filter)) { |
| 1812 | he->filtered |= (1 << HIST_FILTER__DSO); |
| 1813 | return true; |
| 1814 | } |
| 1815 | |
| 1816 | return false; |
| 1817 | } |
| 1818 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1819 | static bool hists__filter_entry_by_thread(struct hists *hists, |
| 1820 | struct hist_entry *he) |
| 1821 | { |
| 1822 | if (hists->thread_filter != NULL && |
| 1823 | he->thread != hists->thread_filter) { |
| 1824 | he->filtered |= (1 << HIST_FILTER__THREAD); |
| 1825 | return true; |
| 1826 | } |
| 1827 | |
| 1828 | return false; |
| 1829 | } |
| 1830 | |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1831 | static bool hists__filter_entry_by_symbol(struct hists *hists, |
| 1832 | struct hist_entry *he) |
| 1833 | { |
| 1834 | if (hists->symbol_filter_str != NULL && |
| 1835 | (!he->ms.sym || strstr(he->ms.sym->name, |
| 1836 | hists->symbol_filter_str) == NULL)) { |
| 1837 | he->filtered |= (1 << HIST_FILTER__SYMBOL); |
| 1838 | return true; |
| 1839 | } |
| 1840 | |
| 1841 | return false; |
| 1842 | } |
| 1843 | |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1844 | static bool hists__filter_entry_by_socket(struct hists *hists, |
| 1845 | struct hist_entry *he) |
| 1846 | { |
| 1847 | if ((hists->socket_filter > -1) && |
| 1848 | (he->socket != hists->socket_filter)) { |
| 1849 | he->filtered |= (1 << HIST_FILTER__SOCKET); |
| 1850 | return true; |
| 1851 | } |
| 1852 | |
| 1853 | return false; |
| 1854 | } |
| 1855 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1856 | typedef bool (*filter_fn_t)(struct hists *hists, struct hist_entry *he); |
| 1857 | |
| 1858 | static void hists__filter_by_type(struct hists *hists, int type, filter_fn_t filter) |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1859 | { |
| 1860 | struct rb_node *nd; |
| 1861 | |
| 1862 | hists->stats.nr_non_filtered_samples = 0; |
| 1863 | |
| 1864 | hists__reset_filter_stats(hists); |
| 1865 | hists__reset_col_len(hists); |
| 1866 | |
| 1867 | for (nd = rb_first(&hists->entries); nd; nd = rb_next(nd)) { |
| 1868 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1869 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1870 | if (filter(hists, h)) |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1871 | continue; |
| 1872 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1873 | hists__remove_entry_filter(hists, h, type); |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1874 | } |
| 1875 | } |
| 1876 | |
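| | /* |
| |  * Re-insert a hierarchy entry (and, recursively, its children) into a |
| |  * fresh tree so the output order reflects the post-filter periods. |
| |  */ |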
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1877 | static void resort_filtered_entry(struct rb_root *root, struct hist_entry *he) |
| 1878 | { |
| 1879 | struct rb_node **p = &root->rb_node; |
| 1880 | struct rb_node *parent = NULL; |
| 1881 | struct hist_entry *iter; |
| 1882 | struct rb_root new_root = RB_ROOT; |
| 1883 | struct rb_node *nd; |
| 1884 | |
| 1885 | while (*p != NULL) { |
| 1886 | parent = *p; |
| 1887 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1888 | |
| 1889 | if (hist_entry__sort(he, iter) > 0) |
| 1890 | p = &(*p)->rb_left; |
| 1891 | else |
| 1892 | p = &(*p)->rb_right; |
| 1893 | } |
| 1894 | |
| 1895 | rb_link_node(&he->rb_node, parent, p); |
| 1896 | rb_insert_color(&he->rb_node, root); |
| 1897 | |
| 1898 | if (he->leaf || he->filtered) |
| 1899 | return; |
| 1900 | |
| 1901 | nd = rb_first(&he->hroot_out); |
| 1902 | while (nd) { |
| 1903 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1904 | |
| 1905 | nd = rb_next(nd); |
| 1906 | rb_erase(&h->rb_node, &he->hroot_out); |
| 1907 | |
| 1908 | resort_filtered_entry(&new_root, h); |
| 1909 | } |
| 1910 | |
| 1911 | he->hroot_out = new_root; |
| 1912 | } |
| 1913 | |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1914 | static void hists__filter_hierarchy(struct hists *hists, int type, const void *arg) |
| 1915 | { |
| 1916 | struct rb_node *nd; |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1917 | struct rb_root new_root = RB_ROOT; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1918 | |
| 1919 | hists->stats.nr_non_filtered_samples = 0; |
| 1920 | |
| 1921 | hists__reset_filter_stats(hists); |
| 1922 | hists__reset_col_len(hists); |
| 1923 | |
| 1924 | nd = rb_first(&hists->entries); |
| 1925 | while (nd) { |
| 1926 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1927 | int ret; |
| 1928 | |
| 1929 | ret = hist_entry__filter(h, type, arg); |
| 1930 | |
| 1931 | /* |
| 1932 | * case 1. non-matching type |
| 1933 | * zero out the period, set filter marker and move to child |
| 1934 | */ |
| 1935 | if (ret < 0) { |
| 1936 | memset(&h->stat, 0, sizeof(h->stat)); |
| 1937 | h->filtered |= (1 << type); |
| 1938 | |
| 1939 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_CHILD); |
| 1940 | } |
| 1941 | /* |
| 1942 | * case 2. matched type (filter out) |
| 1943 | * set filter marker and move to next |
| 1944 | */ |
| 1945 | else if (ret == 1) { |
| 1946 | h->filtered |= (1 << type); |
| 1947 | |
| 1948 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_SIBLING); |
| 1949 | } |
| 1950 | /* |
| 1951 | * case 3. ok (not filtered) |
| 1952 | * add period to hists and parents, erase the filter marker |
| 1953 | * and move to next sibling |
| 1954 | */ |
| 1955 | else { |
| 1956 | hists__remove_entry_filter(hists, h, type); |
| 1957 | |
| 1958 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_SIBLING); |
| 1959 | } |
| 1960 | } |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1961 | |
Namhyung Kim | f7fb538 | 2016-03-09 22:47:02 +0900 | [diff] [blame] | 1962 | hierarchy_recalc_total_periods(hists); |
| 1963 | |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1964 | /* |
| 1965 | 	 * resort output after applying a new filter since a filter in a lower |
| 1966 | 	 * hierarchy can change periods in an upper hierarchy. |
| 1967 | */ |
| 1968 | nd = rb_first(&hists->entries); |
| 1969 | while (nd) { |
| 1970 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1971 | |
| 1972 | nd = rb_next(nd); |
| 1973 | rb_erase(&h->rb_node, &hists->entries); |
| 1974 | |
| 1975 | resort_filtered_entry(&new_root, h); |
| 1976 | } |
| 1977 | |
| 1978 | hists->entries = new_root; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1979 | } |
| 1980 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1981 | void hists__filter_by_thread(struct hists *hists) |
| 1982 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1983 | if (symbol_conf.report_hierarchy) |
| 1984 | hists__filter_hierarchy(hists, HIST_FILTER__THREAD, |
| 1985 | hists->thread_filter); |
| 1986 | else |
| 1987 | hists__filter_by_type(hists, HIST_FILTER__THREAD, |
| 1988 | hists__filter_entry_by_thread); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1989 | } |
| 1990 | |
| 1991 | void hists__filter_by_dso(struct hists *hists) |
| 1992 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1993 | if (symbol_conf.report_hierarchy) |
| 1994 | hists__filter_hierarchy(hists, HIST_FILTER__DSO, |
| 1995 | hists->dso_filter); |
| 1996 | else |
| 1997 | hists__filter_by_type(hists, HIST_FILTER__DSO, |
| 1998 | hists__filter_entry_by_dso); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1999 | } |
| 2000 | |
| 2001 | void hists__filter_by_symbol(struct hists *hists) |
| 2002 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 2003 | if (symbol_conf.report_hierarchy) |
| 2004 | hists__filter_hierarchy(hists, HIST_FILTER__SYMBOL, |
| 2005 | hists->symbol_filter_str); |
| 2006 | else |
| 2007 | hists__filter_by_type(hists, HIST_FILTER__SYMBOL, |
| 2008 | hists__filter_entry_by_symbol); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 2009 | } |
| 2010 | |
| 2011 | void hists__filter_by_socket(struct hists *hists) |
| 2012 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 2013 | if (symbol_conf.report_hierarchy) |
| 2014 | hists__filter_hierarchy(hists, HIST_FILTER__SOCKET, |
| 2015 | &hists->socket_filter); |
| 2016 | else |
| 2017 | hists__filter_by_type(hists, HIST_FILTER__SOCKET, |
| 2018 | hists__filter_entry_by_socket); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 2019 | } |
| 2020 | |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 2021 | void events_stats__inc(struct events_stats *stats, u32 type) |
| 2022 | { |
| 2023 | ++stats->nr_events[0]; |
| 2024 | ++stats->nr_events[type]; |
| 2025 | } |
| 2026 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 2027 | void hists__inc_nr_events(struct hists *hists, u32 type) |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 2028 | { |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 2029 | events_stats__inc(&hists->stats, type); |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 2030 | } |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2031 | |
Namhyung Kim | 1844dbc | 2014-05-28 14:12:18 +0900 | [diff] [blame] | 2032 | void hists__inc_nr_samples(struct hists *hists, bool filtered) |
| 2033 | { |
| 2034 | events_stats__inc(&hists->stats, PERF_RECORD_SAMPLE); |
| 2035 | if (!filtered) |
| 2036 | hists->stats.nr_non_filtered_samples++; |
| 2037 | } |
| 2038 | |
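| | /* |
| |  * Add a zero-period placeholder entry to 'hists' matching 'pair'; used by |
| |  * hists__link() so every entry in one hists has a counterpart in the other. |
| |  */ |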
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2039 | static struct hist_entry *hists__add_dummy_entry(struct hists *hists, |
| 2040 | struct hist_entry *pair) |
| 2041 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2042 | struct rb_root *root; |
| 2043 | struct rb_node **p; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2044 | struct rb_node *parent = NULL; |
| 2045 | struct hist_entry *he; |
Andi Kleen | 354cc40 | 2013-10-01 07:22:15 -0700 | [diff] [blame] | 2046 | int64_t cmp; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2047 | |
Jiri Olsa | 5222503 | 2016-05-03 13:54:42 +0200 | [diff] [blame] | 2048 | if (hists__has(hists, need_collapse)) |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2049 | root = &hists->entries_collapsed; |
| 2050 | else |
| 2051 | root = hists->entries_in; |
| 2052 | |
| 2053 | p = &root->rb_node; |
| 2054 | |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2055 | while (*p != NULL) { |
| 2056 | parent = *p; |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2057 | he = rb_entry(parent, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2058 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2059 | cmp = hist_entry__collapse(he, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2060 | |
| 2061 | if (!cmp) |
| 2062 | goto out; |
| 2063 | |
| 2064 | if (cmp < 0) |
| 2065 | p = &(*p)->rb_left; |
| 2066 | else |
| 2067 | p = &(*p)->rb_right; |
| 2068 | } |
| 2069 | |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 2070 | he = hist_entry__new(pair, true); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2071 | if (he) { |
Arnaldo Carvalho de Melo | 30193d7 | 2012-11-12 13:20:03 -0300 | [diff] [blame] | 2072 | memset(&he->stat, 0, sizeof(he->stat)); |
| 2073 | he->hists = hists; |
Kan Liang | 09623d7 | 2016-04-24 23:28:09 -0700 | [diff] [blame] | 2074 | if (symbol_conf.cumulate_callchain) |
| 2075 | memset(he->stat_acc, 0, sizeof(he->stat)); |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2076 | rb_link_node(&he->rb_node_in, parent, p); |
| 2077 | rb_insert_color(&he->rb_node_in, root); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 2078 | hists__inc_stats(hists, he); |
Jiri Olsa | e0af43d | 2012-12-01 21:18:20 +0100 | [diff] [blame] | 2079 | he->dummy = true; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2080 | } |
| 2081 | out: |
| 2082 | return he; |
| 2083 | } |
| 2084 | |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2085 | static struct hist_entry *hists__find_entry(struct hists *hists, |
| 2086 | struct hist_entry *he) |
| 2087 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2088 | struct rb_node *n; |
| 2089 | |
Jiri Olsa | 5222503 | 2016-05-03 13:54:42 +0200 | [diff] [blame] | 2090 | if (hists__has(hists, need_collapse)) |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2091 | n = hists->entries_collapsed.rb_node; |
| 2092 | else |
| 2093 | n = hists->entries_in->rb_node; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2094 | |
| 2095 | while (n) { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2096 | struct hist_entry *iter = rb_entry(n, struct hist_entry, rb_node_in); |
| 2097 | int64_t cmp = hist_entry__collapse(iter, he); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2098 | |
| 2099 | if (cmp < 0) |
| 2100 | n = n->rb_left; |
| 2101 | else if (cmp > 0) |
| 2102 | n = n->rb_right; |
| 2103 | else |
| 2104 | return iter; |
| 2105 | } |
| 2106 | |
| 2107 | return NULL; |
| 2108 | } |
| 2109 | |
| 2110 | /* |
| 2111 | * Look for pairs to link to the leader buckets (hist_entries): |
| 2112 | */ |
| 2113 | void hists__match(struct hists *leader, struct hists *other) |
| 2114 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2115 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2116 | struct rb_node *nd; |
| 2117 | struct hist_entry *pos, *pair; |
| 2118 | |
Jiri Olsa | 5222503 | 2016-05-03 13:54:42 +0200 | [diff] [blame] | 2119 | if (hists__has(leader, need_collapse)) |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2120 | root = &leader->entries_collapsed; |
| 2121 | else |
| 2122 | root = leader->entries_in; |
| 2123 | |
| 2124 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 2125 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2126 | pair = hists__find_entry(other, pos); |
| 2127 | |
| 2128 | if (pair) |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 2129 | hist_entry__add_pair(pair, pos); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2130 | } |
| 2131 | } |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2132 | |
| 2133 | /* |
| 2134 |  * Look for entries in the other hists that are not present in the leader. If |
| 2135 |  * we find them, just add a dummy entry on the leader hists, with period=0, |
| 2136 | * nr_events=0, to serve as the list header. |
| 2137 | */ |
| 2138 | int hists__link(struct hists *leader, struct hists *other) |
| 2139 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2140 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2141 | struct rb_node *nd; |
| 2142 | struct hist_entry *pos, *pair; |
| 2143 | |
Jiri Olsa | 5222503 | 2016-05-03 13:54:42 +0200 | [diff] [blame] | 2144 | if (hists__has(other, need_collapse)) |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2145 | root = &other->entries_collapsed; |
| 2146 | else |
| 2147 | root = other->entries_in; |
| 2148 | |
| 2149 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 2150 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2151 | |
| 2152 | if (!hist_entry__has_pairs(pos)) { |
| 2153 | pair = hists__add_dummy_entry(leader, pos); |
| 2154 | if (pair == NULL) |
| 2155 | return -1; |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 2156 | hist_entry__add_pair(pos, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2157 | } |
| 2158 | } |
| 2159 | |
| 2160 | return 0; |
| 2161 | } |
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 2162 | |
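| | /* |
| |  * Feed branch-stack cycle counts into the annotation histograms so that |
| |  * per-instruction cycles/IPC can be reported later (e.g. by perf annotate). |
| |  */ |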
Andi Kleen | 57849998 | 2015-07-18 08:24:49 -0700 | [diff] [blame] | 2163 | void hist__account_cycles(struct branch_stack *bs, struct addr_location *al, |
| 2164 | struct perf_sample *sample, bool nonany_branch_mode) |
| 2165 | { |
| 2166 | struct branch_info *bi; |
| 2167 | |
| 2168 | /* If we have branch cycles, always annotate them. */
| 2169 | if (bs && bs->nr && bs->entries[0].flags.cycles) { |
| 2170 | int i; |
| 2171 | |
| 2172 | bi = sample__resolve_bstack(sample, al); |
| 2173 | if (bi) { |
| 2174 | struct addr_map_symbol *prev = NULL; |
| 2175 | |
| 2176 | /*
| 2177 |  * Ignore errors, we still want to process the
| 2178 |  * other entries.
| 2179 |  *
| 2180 |  * For non-standard branch modes always
| 2181 |  * force no IPC (prev == NULL).
| 2182 |  *
| 2183 |  * Note that perf stores branches reversed from
| 2184 |  * program order!
| 2185 |  */
| 2186 | for (i = bs->nr - 1; i >= 0; i--) { |
| 2187 | addr_map_symbol__account_cycles(&bi[i].from, |
| 2188 | nonany_branch_mode ? NULL : prev, |
| 2189 | bi[i].flags.cycles); |
| 2190 | prev = &bi[i].to; |
| 2191 | } |
| 2192 | free(bi); |
| 2193 | } |
| 2194 | } |
| 2195 | } |
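/*
 * Illustrative only, assuming a typical tool: a sample-processing callback
 * would account branch cycles right after resolving the sample, roughly:
 *
 *	hist__account_cycles(sample->branch_stack, &al, sample, false);
 *
 * where 'al' is the resolved struct addr_location for the sample and the
 * last argument says whether the branches were recorded in a non-ANY branch
 * mode, in which case no IPC is computed, as noted in the comment above.
 */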
Arnaldo Carvalho de Melo | 2a1731f | 2014-10-10 15:49:21 -0300 | [diff] [blame] | 2196 | |
| 2197 | size_t perf_evlist__fprintf_nr_events(struct perf_evlist *evlist, FILE *fp) |
| 2198 | { |
| 2199 | struct perf_evsel *pos; |
| 2200 | size_t ret = 0; |
| 2201 | |
| 2202 | evlist__for_each(evlist, pos) { |
| 2203 | ret += fprintf(fp, "%s stats:\n", perf_evsel__name(pos)); |
| 2204 | ret += events_stats__fprintf(&evsel__hists(pos)->stats, fp); |
| 2205 | } |
| 2206 | |
| 2207 | return ret; |
| 2208 | } |
| 2209 | 
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 2211 | u64 hists__total_period(struct hists *hists) |
| 2212 | { |
| 2213 | return symbol_conf.filter_relative ? hists->stats.total_non_filtered_period : |
| 2214 | hists->stats.total_period; |
| 2215 | } |
Namhyung Kim | 33db456 | 2014-02-07 12:06:07 +0900 | [diff] [blame] | 2216 | |
| 2217 | int parse_filter_percentage(const struct option *opt __maybe_unused, |
| 2218 | const char *arg, int unset __maybe_unused) |
| 2219 | { |
| 2220 | if (!strcmp(arg, "relative")) |
| 2221 | symbol_conf.filter_relative = true; |
| 2222 | else if (!strcmp(arg, "absolute")) |
| 2223 | symbol_conf.filter_relative = false; |
| 2224 | else |
| 2225 | return -1; |
| 2226 | |
| 2227 | return 0; |
| 2228 | } |
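/*
 * Illustrative only: a builtin would typically expose this callback as a
 * --percentage option in its options table, along these lines (the help
 * string here is just an example):
 *
 *	OPT_CALLBACK(0, "percentage", NULL, "relative|absolute",
 *		     "how to display percentage of filtered entries",
 *		     parse_filter_percentage),
 */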
Namhyung Kim | 0b93da1 | 2014-01-14 12:02:15 +0900 | [diff] [blame] | 2229 | |
| 2230 | int perf_hist_config(const char *var, const char *value) |
| 2231 | { |
| 2232 | if (!strcmp(var, "hist.percentage")) |
| 2233 | return parse_filter_percentage(NULL, value, 0); |
| 2234 | |
| 2235 | return 0; |
| 2236 | } |
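/*
 * Example only: a ~/.perfconfig stanza like the one below reaches
 * perf_hist_config() with var == "hist.percentage" and value == "relative":
 *
 *	[hist]
 *		percentage = relative
 */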
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2237 | |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2238 | int __hists__init(struct hists *hists, struct perf_hpp_list *hpp_list) |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2239 | { |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2240 | memset(hists, 0, sizeof(*hists)); |
| 2241 | hists->entries_in_array[0] = hists->entries_in_array[1] = RB_ROOT; |
| 2242 | hists->entries_in = &hists->entries_in_array[0]; |
| 2243 | hists->entries_collapsed = RB_ROOT; |
| 2244 | hists->entries = RB_ROOT; |
| 2245 | pthread_mutex_init(&hists->lock, NULL); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 2246 | hists->socket_filter = -1; |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2247 | hists->hpp_list = hpp_list; |
Namhyung Kim | c3bc0c4 | 2016-03-07 16:44:45 -0300 | [diff] [blame] | 2248 | INIT_LIST_HEAD(&hists->hpp_formats); |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2249 | return 0; |
| 2250 | } |
| 2251 | |
Namhyung Kim | 61fa0e9 | 2015-12-10 16:53:20 +0900 | [diff] [blame] | 2252 | static void hists__delete_remaining_entries(struct rb_root *root) |
| 2253 | { |
| 2254 | struct rb_node *node; |
| 2255 | struct hist_entry *he; |
| 2256 | |
| 2257 | while (!RB_EMPTY_ROOT(root)) { |
| 2258 | node = rb_first(root); |
| 2259 | rb_erase(node, root); |
| 2260 | |
| 2261 | he = rb_entry(node, struct hist_entry, rb_node_in); |
| 2262 | hist_entry__delete(he); |
| 2263 | } |
| 2264 | } |
| 2265 | |
| 2266 | static void hists__delete_all_entries(struct hists *hists) |
| 2267 | { |
| 2268 | hists__delete_entries(hists); |
| 2269 | hists__delete_remaining_entries(&hists->entries_in_array[0]); |
| 2270 | hists__delete_remaining_entries(&hists->entries_in_array[1]); |
| 2271 | hists__delete_remaining_entries(&hists->entries_collapsed); |
| 2272 | } |
| 2273 | |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2274 | static void hists_evsel__exit(struct perf_evsel *evsel) |
| 2275 | { |
| 2276 | struct hists *hists = evsel__hists(evsel); |
Namhyung Kim | c3bc0c4 | 2016-03-07 16:44:45 -0300 | [diff] [blame] | 2277 | struct perf_hpp_fmt *fmt, *pos; |
| 2278 | struct perf_hpp_list_node *node, *tmp; |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2279 | |
Namhyung Kim | 61fa0e9 | 2015-12-10 16:53:20 +0900 | [diff] [blame] | 2280 | hists__delete_all_entries(hists); |
Namhyung Kim | c3bc0c4 | 2016-03-07 16:44:45 -0300 | [diff] [blame] | 2281 | |
| 2282 | list_for_each_entry_safe(node, tmp, &hists->hpp_formats, list) { |
| 2283 | perf_hpp_list__for_each_format_safe(&node->hpp, fmt, pos) { |
| 2284 | list_del(&fmt->list); |
| 2285 | free(fmt); |
| 2286 | } |
| 2287 | list_del(&node->list); |
| 2288 | free(node); |
| 2289 | } |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2290 | } |
| 2291 | |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 2292 | static int hists_evsel__init(struct perf_evsel *evsel) |
| 2293 | { |
| 2294 | struct hists *hists = evsel__hists(evsel); |
| 2295 | |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2296 | __hists__init(hists, &perf_hpp_list); |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 2297 | return 0; |
| 2298 | } |
| 2299 | |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2300 | /*
| 2301 |  * The hist_entries stored in the rbtrees are freed by hists_evsel__exit()
| 2302 |  * above, which is hooked up as the evsel destructor in hists__init() below.
| 2303 |  */
| 2304 | |
| 2305 | int hists__init(void) |
| 2306 | { |
| 2307 | int err = perf_evsel__object_config(sizeof(struct hists_evsel), |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2308 | hists_evsel__init, |
| 2309 | hists_evsel__exit); |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2310 | if (err) |
| 2311 | fputs("FATAL ERROR: Couldn't setup hists class\n", stderr); |
| 2312 | |
| 2313 | return err; |
| 2314 | } |
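/*
 * Illustrative only: a tool that wants a struct hists per evsel calls this
 * once at startup, before any evsel is created (the error handling here is
 * just a sketch):
 *
 *	if (hists__init() < 0)
 *		return -1;
 *
 * After that, every perf_evsel is allocated as a hists_evsel and its hists
 * are set up and torn down by hists_evsel__init()/hists_evsel__exit() above.
 */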
Jiri Olsa | 94b3dc3 | 2016-01-18 10:24:13 +0100 | [diff] [blame] | 2315 | |
| 2316 | void perf_hpp_list__init(struct perf_hpp_list *list) |
| 2317 | { |
| 2318 | INIT_LIST_HEAD(&list->fields); |
| 2319 | INIT_LIST_HEAD(&list->sorts); |
| 2320 | } |
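/*
 * Illustrative sketch: a caller of __hists__init() that wants its own
 * output-field list instead of the global perf_hpp_list. The my_hpp_list
 * and my_hists names are hypothetical.
 */
static struct perf_hpp_list my_hpp_list;
static struct hists my_hists;

static void setup_private_hists_example(void)
{
	/* Set up empty fields/sorts lists, then attach them to the hists. */
	perf_hpp_list__init(&my_hpp_list);
	__hists__init(&my_hists, &my_hpp_list);
}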