Frederic Weisbecker | 8a0ecfb | 2010-05-13 19:47:16 +0200 | [diff] [blame] | 1 | #include "util.h" |
Frederic Weisbecker | 598357e | 2010-05-21 12:48:39 +0200 | [diff] [blame] | 2 | #include "build-id.h" |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 3 | #include "hist.h" |
Arnaldo Carvalho de Melo | 4e4f06e | 2009-12-14 13:10:39 -0200 | [diff] [blame] | 4 | #include "session.h" |
| 5 | #include "sort.h" |
Arnaldo Carvalho de Melo | 2a1731f | 2014-10-10 15:49:21 -0300 | [diff] [blame] | 6 | #include "evlist.h" |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 7 | #include "evsel.h" |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 8 | #include "annotate.h" |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 9 | #include "ui/progress.h" |
Arnaldo Carvalho de Melo | 9b33827 | 2009-12-16 14:31:49 -0200 | [diff] [blame] | 10 | #include <math.h> |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 11 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 12 | static bool hists__filter_entry_by_dso(struct hists *hists, |
| 13 | struct hist_entry *he); |
| 14 | static bool hists__filter_entry_by_thread(struct hists *hists, |
| 15 | struct hist_entry *he); |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 16 | static bool hists__filter_entry_by_symbol(struct hists *hists, |
| 17 | struct hist_entry *he); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 18 | static bool hists__filter_entry_by_socket(struct hists *hists, |
| 19 | struct hist_entry *he); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 20 | |
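| | /* |
| | * Column width helpers: hists__new_col_len() only ever widens a column, |
| | * so once all entries have been added each column is wide enough for the |
| | * widest value it has to display. |
| | */ |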
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 21 | u16 hists__col_len(struct hists *hists, enum hist_column col) |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 22 | { |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 23 | return hists->col_len[col]; |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 24 | } |
| 25 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 26 | void hists__set_col_len(struct hists *hists, enum hist_column col, u16 len) |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 27 | { |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 28 | hists->col_len[col] = len; |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 29 | } |
| 30 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 31 | bool hists__new_col_len(struct hists *hists, enum hist_column col, u16 len) |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 32 | { |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 33 | if (len > hists__col_len(hists, col)) { |
| 34 | hists__set_col_len(hists, col, len); |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 35 | return true; |
| 36 | } |
| 37 | return false; |
| 38 | } |
| 39 | |
Namhyung Kim | 7ccf4f9 | 2012-08-20 13:52:05 +0900 | [diff] [blame] | 40 | void hists__reset_col_len(struct hists *hists) |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 41 | { |
| 42 | enum hist_column col; |
| 43 | |
| 44 | for (col = 0; col < HISTC_NR_COLS; ++col) |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 45 | hists__set_col_len(hists, col, 0); |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 46 | } |
| 47 | |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 48 | static void hists__set_unres_dso_col_len(struct hists *hists, int dso) |
| 49 | { |
| 50 | const unsigned int unresolved_col_width = BITS_PER_LONG / 4; |
| 51 | |
| 52 | if (hists__col_len(hists, dso) < unresolved_col_width && |
| 53 | !symbol_conf.col_width_list_str && !symbol_conf.field_sep && |
| 54 | !symbol_conf.dso_list) |
| 55 | hists__set_col_len(hists, dso, unresolved_col_width); |
| 56 | } |
| 57 | |
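| | /* |
| | * Widen the columns as needed for this entry's symbol, comm, dso, |
| | * branch, memory, srcline and trace fields. |
| | */ |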
Namhyung Kim | 7ccf4f9 | 2012-08-20 13:52:05 +0900 | [diff] [blame] | 58 | void hists__calc_col_len(struct hists *hists, struct hist_entry *h) |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 59 | { |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 60 | const unsigned int unresolved_col_width = BITS_PER_LONG / 4; |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 61 | int symlen; |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 62 | u16 len; |
| 63 | |
Namhyung Kim | ded19d5 | 2013-04-01 20:35:19 +0900 | [diff] [blame] | 64 | /* |
| 65 | * +4 accounts for '[x] ' priv level info |
| 66 | * +2 accounts for 0x prefix on raw addresses |
| 67 | * +3 accounts for ' y ' symtab origin info |
| 68 | */ |
| 69 | if (h->ms.sym) { |
| 70 | symlen = h->ms.sym->namelen + 4; |
| 71 | if (verbose) |
| 72 | symlen += BITS_PER_LONG / 4 + 2 + 3; |
| 73 | hists__new_col_len(hists, HISTC_SYMBOL, symlen); |
| 74 | } else { |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 75 | symlen = unresolved_col_width + 4 + 2; |
| 76 | hists__new_col_len(hists, HISTC_SYMBOL, symlen); |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 77 | hists__set_unres_dso_col_len(hists, HISTC_DSO); |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 78 | } |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 79 | |
| 80 | len = thread__comm_len(h->thread); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 81 | if (hists__new_col_len(hists, HISTC_COMM, len)) |
| 82 | hists__set_col_len(hists, HISTC_THREAD, len + 6); |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 83 | |
| 84 | if (h->ms.map) { |
| 85 | len = dso__name_len(h->ms.map->dso); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 86 | hists__new_col_len(hists, HISTC_DSO, len); |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 87 | } |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 88 | |
Namhyung Kim | cb99374 | 2012-12-27 18:11:42 +0900 | [diff] [blame] | 89 | if (h->parent) |
| 90 | hists__new_col_len(hists, HISTC_PARENT, h->parent->namelen); |
| 91 | |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 92 | if (h->branch_info) { |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 93 | if (h->branch_info->from.sym) { |
| 94 | symlen = (int)h->branch_info->from.sym->namelen + 4; |
Namhyung Kim | ded19d5 | 2013-04-01 20:35:19 +0900 | [diff] [blame] | 95 | if (verbose) |
| 96 | symlen += BITS_PER_LONG / 4 + 2 + 3; |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 97 | hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen); |
| 98 | |
| 99 | symlen = dso__name_len(h->branch_info->from.map->dso); |
| 100 | hists__new_col_len(hists, HISTC_DSO_FROM, symlen); |
| 101 | } else { |
| 102 | symlen = unresolved_col_width + 4 + 2; |
| 103 | hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen); |
| 104 | hists__set_unres_dso_col_len(hists, HISTC_DSO_FROM); |
| 105 | } |
| 106 | |
| 107 | if (h->branch_info->to.sym) { |
| 108 | symlen = (int)h->branch_info->to.sym->namelen + 4; |
Namhyung Kim | ded19d5 | 2013-04-01 20:35:19 +0900 | [diff] [blame] | 109 | if (verbose) |
| 110 | symlen += BITS_PER_LONG / 4 + 2 + 3; |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 111 | hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen); |
| 112 | |
| 113 | symlen = dso__name_len(h->branch_info->to.map->dso); |
| 114 | hists__new_col_len(hists, HISTC_DSO_TO, symlen); |
| 115 | } else { |
| 116 | symlen = unresolved_col_width + 4 + 2; |
| 117 | hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen); |
| 118 | hists__set_unres_dso_col_len(hists, HISTC_DSO_TO); |
| 119 | } |
| 120 | } |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 121 | |
| 122 | if (h->mem_info) { |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 123 | if (h->mem_info->daddr.sym) { |
| 124 | symlen = (int)h->mem_info->daddr.sym->namelen + 4 |
| 125 | + unresolved_col_width + 2; |
| 126 | hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL, |
| 127 | symlen); |
Don Zickus | 9b32ba7 | 2014-06-01 15:38:29 +0200 | [diff] [blame] | 128 | hists__new_col_len(hists, HISTC_MEM_DCACHELINE, |
| 129 | symlen + 1); |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 130 | } else { |
| 131 | symlen = unresolved_col_width + 4 + 2; |
| 132 | hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL, |
| 133 | symlen); |
Jiri Olsa | 0805909 | 2016-01-20 12:56:33 +0100 | [diff] [blame] | 134 | hists__new_col_len(hists, HISTC_MEM_DCACHELINE, |
| 135 | symlen); |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 136 | } |
Jiri Olsa | b34b3bf | 2015-10-05 20:06:08 +0200 | [diff] [blame] | 137 | |
| 138 | if (h->mem_info->iaddr.sym) { |
| 139 | symlen = (int)h->mem_info->iaddr.sym->namelen + 4 |
| 140 | + unresolved_col_width + 2; |
| 141 | hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL, |
| 142 | symlen); |
| 143 | } else { |
| 144 | symlen = unresolved_col_width + 4 + 2; |
| 145 | hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL, |
| 146 | symlen); |
| 147 | } |
| 148 | |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 149 | if (h->mem_info->daddr.map) { |
| 150 | symlen = dso__name_len(h->mem_info->daddr.map->dso); |
| 151 | hists__new_col_len(hists, HISTC_MEM_DADDR_DSO, |
| 152 | symlen); |
| 153 | } else { |
| 154 | symlen = unresolved_col_width + 4 + 2; |
| 155 | hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO); |
| 156 | } |
| 157 | } else { |
| 158 | symlen = unresolved_col_width + 4 + 2; |
| 159 | hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL, symlen); |
Jiri Olsa | b34b3bf | 2015-10-05 20:06:08 +0200 | [diff] [blame] | 160 | hists__new_col_len(hists, HISTC_MEM_IADDR_SYMBOL, symlen); |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 161 | hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO); |
| 162 | } |
| 163 | |
Arnaldo Carvalho de Melo | a4978ec | 2015-09-09 12:14:00 -0300 | [diff] [blame] | 164 | hists__new_col_len(hists, HISTC_CPU, 3); |
Kan Liang | 2e7ea3a | 2015-09-04 10:45:43 -0400 | [diff] [blame] | 165 | hists__new_col_len(hists, HISTC_SOCKET, 6); |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 166 | hists__new_col_len(hists, HISTC_MEM_LOCKED, 6); |
| 167 | hists__new_col_len(hists, HISTC_MEM_TLB, 22); |
| 168 | hists__new_col_len(hists, HISTC_MEM_SNOOP, 12); |
| 169 | hists__new_col_len(hists, HISTC_MEM_LVL, 21 + 3); |
| 170 | hists__new_col_len(hists, HISTC_LOCAL_WEIGHT, 12); |
| 171 | hists__new_col_len(hists, HISTC_GLOBAL_WEIGHT, 12); |
Andi Kleen | 475eeab | 2013-09-20 07:40:43 -0700 | [diff] [blame] | 172 | |
Arnaldo Carvalho de Melo | e8e6d37 | 2015-08-10 16:53:54 -0300 | [diff] [blame] | 173 | if (h->srcline) |
| 174 | hists__new_col_len(hists, HISTC_SRCLINE, strlen(h->srcline)); |
| 175 | |
Andi Kleen | 31191a8 | 2015-08-07 15:54:24 -0700 | [diff] [blame] | 176 | if (h->srcfile) |
| 177 | hists__new_col_len(hists, HISTC_SRCFILE, strlen(h->srcfile)); |
| 178 | |
Andi Kleen | 475eeab | 2013-09-20 07:40:43 -0700 | [diff] [blame] | 179 | if (h->transaction) |
| 180 | hists__new_col_len(hists, HISTC_TRANSACTION, |
| 181 | hist_entry__transaction_len()); |
Namhyung Kim | 0c0af78 | 2016-02-21 23:22:38 +0900 | [diff] [blame] | 182 | |
| 183 | if (h->trace_output) |
| 184 | hists__new_col_len(hists, HISTC_TRACE, strlen(h->trace_output)); |
Arnaldo Carvalho de Melo | 8a6c5b2 | 2010-07-20 14:42:52 -0300 | [diff] [blame] | 185 | } |
| 186 | |
Namhyung Kim | 7ccf4f9 | 2012-08-20 13:52:05 +0900 | [diff] [blame] | 187 | void hists__output_recalc_col_len(struct hists *hists, int max_rows) |
| 188 | { |
| 189 | struct rb_node *next = rb_first(&hists->entries); |
| 190 | struct hist_entry *n; |
| 191 | int row = 0; |
| 192 | |
| 193 | hists__reset_col_len(hists); |
| 194 | |
| 195 | while (next && row++ < max_rows) { |
| 196 | n = rb_entry(next, struct hist_entry, rb_node); |
| 197 | if (!n->filtered) |
| 198 | hists__calc_col_len(hists, n); |
| 199 | next = rb_next(&n->rb_node); |
| 200 | } |
| 201 | } |
| 202 | |
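| | /* Credit @period to the sys/user/guest bucket selected by @cpumode. */ |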
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 203 | static void he_stat__add_cpumode_period(struct he_stat *he_stat, |
| 204 | unsigned int cpumode, u64 period) |
Zhang, Yanmin | a1645ce | 2010-04-19 13:32:50 +0800 | [diff] [blame] | 205 | { |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 206 | switch (cpumode) { |
Zhang, Yanmin | a1645ce | 2010-04-19 13:32:50 +0800 | [diff] [blame] | 207 | case PERF_RECORD_MISC_KERNEL: |
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 208 | he_stat->period_sys += period; |
Zhang, Yanmin | a1645ce | 2010-04-19 13:32:50 +0800 | [diff] [blame] | 209 | break; |
| 210 | case PERF_RECORD_MISC_USER: |
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 211 | he_stat->period_us += period; |
Zhang, Yanmin | a1645ce | 2010-04-19 13:32:50 +0800 | [diff] [blame] | 212 | break; |
| 213 | case PERF_RECORD_MISC_GUEST_KERNEL: |
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 214 | he_stat->period_guest_sys += period; |
Zhang, Yanmin | a1645ce | 2010-04-19 13:32:50 +0800 | [diff] [blame] | 215 | break; |
| 216 | case PERF_RECORD_MISC_GUEST_USER: |
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 217 | he_stat->period_guest_us += period; |
Zhang, Yanmin | a1645ce | 2010-04-19 13:32:50 +0800 | [diff] [blame] | 218 | break; |
| 219 | default: |
| 220 | break; |
| 221 | } |
| 222 | } |
| 223 | |
Andi Kleen | 0548429 | 2013-01-24 16:10:29 +0100 | [diff] [blame] | 224 | static void he_stat__add_period(struct he_stat *he_stat, u64 period, |
| 225 | u64 weight) |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 226 | { |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 227 | |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 228 | he_stat->period += period; |
Andi Kleen | 0548429 | 2013-01-24 16:10:29 +0100 | [diff] [blame] | 229 | he_stat->weight += weight; |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 230 | he_stat->nr_events += 1; |
| 231 | } |
| 232 | |
| 233 | static void he_stat__add_stat(struct he_stat *dest, struct he_stat *src) |
| 234 | { |
| 235 | dest->period += src->period; |
| 236 | dest->period_sys += src->period_sys; |
| 237 | dest->period_us += src->period_us; |
| 238 | dest->period_guest_sys += src->period_guest_sys; |
| 239 | dest->period_guest_us += src->period_guest_us; |
| 240 | dest->nr_events += src->nr_events; |
Andi Kleen | 0548429 | 2013-01-24 16:10:29 +0100 | [diff] [blame] | 241 | dest->weight += src->weight; |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 242 | } |
| 243 | |
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 244 | static void he_stat__decay(struct he_stat *he_stat) |
Arnaldo Carvalho de Melo | ab81f3f | 2011-10-05 19:16:15 -0300 | [diff] [blame] | 245 | { |
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 246 | he_stat->period = (he_stat->period * 7) / 8; |
| 247 | he_stat->nr_events = (he_stat->nr_events * 7) / 8; |
Andi Kleen | 0548429 | 2013-01-24 16:10:29 +0100 | [diff] [blame] | 248 | /* XXX need decay for weight too? */ |
Arnaldo Carvalho de Melo | ab81f3f | 2011-10-05 19:16:15 -0300 | [diff] [blame] | 249 | } |
| 250 | |
Namhyung Kim | 5d8200a | 2016-02-25 00:13:49 +0900 | [diff] [blame] | 251 | static void hists__delete_entry(struct hists *hists, struct hist_entry *he); |
| 252 | |
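| | /* |
| | * Age an entry: scale its period down by 7/8, update the hists totals and |
| | * recurse into child entries in hierarchy mode. Returns true once the |
| | * period has decayed to zero and the entry can be removed. |
| | */ |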
Arnaldo Carvalho de Melo | ab81f3f | 2011-10-05 19:16:15 -0300 | [diff] [blame] | 253 | static bool hists__decay_entry(struct hists *hists, struct hist_entry *he) |
| 254 | { |
Namhyung Kim | b24c28f | 2012-10-04 21:49:41 +0900 | [diff] [blame] | 255 | u64 prev_period = he->stat.period; |
Namhyung Kim | 3186b68 | 2014-04-22 13:44:23 +0900 | [diff] [blame] | 256 | u64 diff; |
Arnaldo Carvalho de Melo | c64550c | 2011-10-20 06:45:44 -0200 | [diff] [blame] | 257 | |
| 258 | if (prev_period == 0) |
Arnaldo Carvalho de Melo | df71d95 | 2011-10-13 08:01:33 -0300 | [diff] [blame] | 259 | return true; |
Arnaldo Carvalho de Melo | c64550c | 2011-10-20 06:45:44 -0200 | [diff] [blame] | 260 | |
Namhyung Kim | f39056f | 2014-01-14 14:25:37 +0900 | [diff] [blame] | 261 | he_stat__decay(&he->stat); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 262 | if (symbol_conf.cumulate_callchain) |
| 263 | he_stat__decay(he->stat_acc); |
Namhyung Kim | 42b276a | 2016-01-05 12:06:00 +0900 | [diff] [blame] | 264 | decay_callchain(he->callchain); |
Arnaldo Carvalho de Melo | c64550c | 2011-10-20 06:45:44 -0200 | [diff] [blame] | 265 | |
Namhyung Kim | 3186b68 | 2014-04-22 13:44:23 +0900 | [diff] [blame] | 266 | diff = prev_period - he->stat.period; |
| 267 | |
Namhyung Kim | 5d8200a | 2016-02-25 00:13:49 +0900 | [diff] [blame] | 268 | if (!he->depth) { |
| 269 | hists->stats.total_period -= diff; |
| 270 | if (!he->filtered) |
| 271 | hists->stats.total_non_filtered_period -= diff; |
| 272 | } |
| 273 | |
| 274 | if (!he->leaf) { |
| 275 | struct hist_entry *child; |
| 276 | struct rb_node *node = rb_first(&he->hroot_out); |
| 277 | while (node) { |
| 278 | child = rb_entry(node, struct hist_entry, rb_node); |
| 279 | node = rb_next(node); |
| 280 | |
| 281 | if (hists__decay_entry(hists, child)) |
| 282 | hists__delete_entry(hists, child); |
| 283 | } |
| 284 | } |
Arnaldo Carvalho de Melo | c64550c | 2011-10-20 06:45:44 -0200 | [diff] [blame] | 285 | |
Namhyung Kim | b24c28f | 2012-10-04 21:49:41 +0900 | [diff] [blame] | 286 | return he->stat.period == 0; |
Arnaldo Carvalho de Melo | ab81f3f | 2011-10-05 19:16:15 -0300 | [diff] [blame] | 287 | } |
| 288 | |
Arnaldo Carvalho de Melo | 956b65e | 2014-12-19 12:41:28 -0300 | [diff] [blame] | 289 | static void hists__delete_entry(struct hists *hists, struct hist_entry *he) |
| 290 | { |
Namhyung Kim | 5d8200a | 2016-02-25 00:13:49 +0900 | [diff] [blame] | 291 | struct rb_root *root_in; |
| 292 | struct rb_root *root_out; |
Arnaldo Carvalho de Melo | 956b65e | 2014-12-19 12:41:28 -0300 | [diff] [blame] | 293 | |
Namhyung Kim | 5d8200a | 2016-02-25 00:13:49 +0900 | [diff] [blame] | 294 | if (he->parent_he) { |
| 295 | root_in = &he->parent_he->hroot_in; |
| 296 | root_out = &he->parent_he->hroot_out; |
| 297 | } else { |
| 298 | if (sort__need_collapse) |
| 299 | root_in = &hists->entries_collapsed; |
| 300 | else |
| 301 | root_in = hists->entries_in; |
| 302 | root_out = &hists->entries; |
| 303 | } |
| 304 | |
| 305 | rb_erase(&he->rb_node_in, root_in); |
| 306 | rb_erase(&he->rb_node, root_out); |
Arnaldo Carvalho de Melo | 956b65e | 2014-12-19 12:41:28 -0300 | [diff] [blame] | 307 | |
| 308 | --hists->nr_entries; |
| 309 | if (!he->filtered) |
| 310 | --hists->nr_non_filtered_entries; |
| 311 | |
| 312 | hist_entry__delete(he); |
| 313 | } |
| 314 | |
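| | /* |
| | * Decay every top-level entry. Entries whose period decays to zero, or |
| | * which match a zap_user/zap_kernel request for their privilege level, |
| | * are deleted. |
| | */ |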
Namhyung Kim | 3a5714f | 2013-05-14 11:09:01 +0900 | [diff] [blame] | 315 | void hists__decay_entries(struct hists *hists, bool zap_user, bool zap_kernel) |
Arnaldo Carvalho de Melo | ab81f3f | 2011-10-05 19:16:15 -0300 | [diff] [blame] | 316 | { |
| 317 | struct rb_node *next = rb_first(&hists->entries); |
| 318 | struct hist_entry *n; |
| 319 | |
| 320 | while (next) { |
| 321 | n = rb_entry(next, struct hist_entry, rb_node); |
| 322 | next = rb_next(&n->rb_node); |
Arnaldo Carvalho de Melo | b079d4e | 2011-10-17 09:05:04 -0200 | [diff] [blame] | 323 | if (((zap_user && n->level == '.') || |
| 324 | (zap_kernel && n->level != '.') || |
Arnaldo Carvalho de Melo | 4c47f4f | 2015-03-17 17:18:58 -0300 | [diff] [blame] | 325 | hists__decay_entry(hists, n))) { |
Arnaldo Carvalho de Melo | 956b65e | 2014-12-19 12:41:28 -0300 | [diff] [blame] | 326 | hists__delete_entry(hists, n); |
Arnaldo Carvalho de Melo | ab81f3f | 2011-10-05 19:16:15 -0300 | [diff] [blame] | 327 | } |
| 328 | } |
| 329 | } |
| 330 | |
Namhyung Kim | 701937b | 2014-08-12 17:16:05 +0900 | [diff] [blame] | 331 | void hists__delete_entries(struct hists *hists) |
| 332 | { |
| 333 | struct rb_node *next = rb_first(&hists->entries); |
| 334 | struct hist_entry *n; |
| 335 | |
| 336 | while (next) { |
| 337 | n = rb_entry(next, struct hist_entry, rb_node); |
| 338 | next = rb_next(&n->rb_node); |
| 339 | |
Arnaldo Carvalho de Melo | 956b65e | 2014-12-19 12:41:28 -0300 | [diff] [blame] | 340 | hists__delete_entry(hists, n); |
Namhyung Kim | 701937b | 2014-08-12 17:16:05 +0900 | [diff] [blame] | 341 | } |
| 342 | } |
| 343 | |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 344 | /* |
Arnaldo Carvalho de Melo | c82ee82 | 2010-05-14 14:19:35 -0300 | [diff] [blame] | 345 | * histogram, sorted on item, collects periods |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 346 | */ |
| 347 | |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 348 | static struct hist_entry *hist_entry__new(struct hist_entry *template, |
| 349 | bool sample_self) |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 350 | { |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 351 | size_t callchain_size = 0; |
| 352 | struct hist_entry *he; |
| 353 | |
Namhyung Kim | 82aa019 | 2014-12-22 13:44:14 +0900 | [diff] [blame] | 354 | if (symbol_conf.use_callchain) |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 355 | callchain_size = sizeof(struct callchain_root); |
| 356 | |
| 357 | he = zalloc(sizeof(*he) + callchain_size); |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 358 | |
Arnaldo Carvalho de Melo | 12c1427 | 2012-01-04 12:27:03 -0200 | [diff] [blame] | 359 | if (he != NULL) { |
| 360 | *he = *template; |
Namhyung Kim | c4b3535 | 2012-10-04 21:49:42 +0900 | [diff] [blame] | 361 | |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 362 | if (symbol_conf.cumulate_callchain) { |
| 363 | he->stat_acc = malloc(sizeof(he->stat)); |
| 364 | if (he->stat_acc == NULL) { |
| 365 | free(he); |
| 366 | return NULL; |
| 367 | } |
| 368 | memcpy(he->stat_acc, &he->stat, sizeof(he->stat)); |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 369 | if (!sample_self) |
| 370 | memset(&he->stat, 0, sizeof(he->stat)); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 371 | } |
| 372 | |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 373 | map__get(he->ms.map); |
Stephane Eranian | 3cf0cb1 | 2013-01-14 15:02:45 +0100 | [diff] [blame] | 374 | |
| 375 | if (he->branch_info) { |
Namhyung Kim | 26353a6 | 2013-04-01 20:35:17 +0900 | [diff] [blame] | 376 | /* |
| 377 | * This branch info is (part of) an array allocated by |
Arnaldo Carvalho de Melo | 644f2df | 2014-01-22 13:15:36 -0300 | [diff] [blame] | 378 | * sample__resolve_bstack() and will be freed after |
Namhyung Kim | 26353a6 | 2013-04-01 20:35:17 +0900 | [diff] [blame] | 379 | * adding new entries. So we need to save a copy. |
| 380 | */ |
| 381 | he->branch_info = malloc(sizeof(*he->branch_info)); |
| 382 | if (he->branch_info == NULL) { |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 383 | map__zput(he->ms.map); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 384 | free(he->stat_acc); |
Namhyung Kim | 26353a6 | 2013-04-01 20:35:17 +0900 | [diff] [blame] | 385 | free(he); |
| 386 | return NULL; |
| 387 | } |
| 388 | |
| 389 | memcpy(he->branch_info, template->branch_info, |
| 390 | sizeof(*he->branch_info)); |
| 391 | |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 392 | map__get(he->branch_info->from.map); |
| 393 | map__get(he->branch_info->to.map); |
Stephane Eranian | 3cf0cb1 | 2013-01-14 15:02:45 +0100 | [diff] [blame] | 394 | } |
| 395 | |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 396 | if (he->mem_info) { |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 397 | map__get(he->mem_info->iaddr.map); |
| 398 | map__get(he->mem_info->daddr.map); |
Stephane Eranian | 98a3b32 | 2013-01-24 16:10:35 +0100 | [diff] [blame] | 399 | } |
| 400 | |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 401 | if (symbol_conf.use_callchain) |
Arnaldo Carvalho de Melo | 12c1427 | 2012-01-04 12:27:03 -0200 | [diff] [blame] | 402 | callchain_init(he->callchain); |
Arnaldo Carvalho de Melo | b821c73 | 2012-10-25 14:42:45 -0200 | [diff] [blame] | 403 | |
Namhyung Kim | 7239283 | 2015-12-24 11:16:17 +0900 | [diff] [blame] | 404 | if (he->raw_data) { |
| 405 | he->raw_data = memdup(he->raw_data, he->raw_size); |
| 406 | |
| 407 | if (he->raw_data == NULL) { |
| 408 | map__put(he->ms.map); |
| 409 | if (he->branch_info) { |
| 410 | map__put(he->branch_info->from.map); |
| 411 | map__put(he->branch_info->to.map); |
| 412 | free(he->branch_info); |
| 413 | } |
| 414 | if (he->mem_info) { |
| 415 | map__put(he->mem_info->iaddr.map); |
| 416 | map__put(he->mem_info->daddr.map); |
| 417 | } |
| 418 | free(he->stat_acc); |
| 419 | free(he); |
| 420 | return NULL; |
| 421 | } |
| 422 | } |
Arnaldo Carvalho de Melo | b821c73 | 2012-10-25 14:42:45 -0200 | [diff] [blame] | 423 | INIT_LIST_HEAD(&he->pairs.node); |
Arnaldo Carvalho de Melo | f3b623b | 2015-03-02 22:21:35 -0300 | [diff] [blame] | 424 | thread__get(he->thread); |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 425 | |
| 426 | if (!symbol_conf.report_hierarchy) |
| 427 | he->leaf = true; |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 428 | } |
| 429 | |
Arnaldo Carvalho de Melo | 12c1427 | 2012-01-04 12:27:03 -0200 | [diff] [blame] | 430 | return he; |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 431 | } |
| 432 | |
Arnaldo Carvalho de Melo | 7a007ca | 2010-07-21 09:19:41 -0300 | [diff] [blame] | 433 | static u8 symbol__parent_filter(const struct symbol *parent) |
| 434 | { |
| 435 | if (symbol_conf.exclude_other && parent == NULL) |
| 436 | return 1 << HIST_FILTER__PARENT; |
| 437 | return 0; |
| 438 | } |
| 439 | |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 440 | static void hist_entry__add_callchain_period(struct hist_entry *he, u64 period) |
| 441 | { |
| 442 | if (!symbol_conf.use_callchain) |
| 443 | return; |
| 444 | |
| 445 | he->hists->callchain_period += period; |
| 446 | if (!he->filtered) |
| 447 | he->hists->callchain_non_filtered_period += period; |
| 448 | } |
| 449 | |
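| | /* |
| | * Search the input rbtree for an entry matching @entry: merge the period |
| | * and weight into an existing node, or allocate and insert a new one. |
| | * The per-cpumode period split is updated in either case. |
| | */ |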
Arnaldo Carvalho de Melo | e7e0efc | 2015-05-19 11:31:22 -0300 | [diff] [blame] | 450 | static struct hist_entry *hists__findnew_entry(struct hists *hists, |
| 451 | struct hist_entry *entry, |
| 452 | struct addr_location *al, |
| 453 | bool sample_self) |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 454 | { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 455 | struct rb_node **p; |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 456 | struct rb_node *parent = NULL; |
| 457 | struct hist_entry *he; |
Andi Kleen | 354cc40 | 2013-10-01 07:22:15 -0700 | [diff] [blame] | 458 | int64_t cmp; |
Namhyung Kim | f1cbf78 | 2013-12-18 14:21:11 +0900 | [diff] [blame] | 459 | u64 period = entry->stat.period; |
| 460 | u64 weight = entry->stat.weight; |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 461 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 462 | p = &hists->entries_in->rb_node; |
| 463 | |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 464 | while (*p != NULL) { |
| 465 | parent = *p; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 466 | he = rb_entry(parent, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 467 | |
Namhyung Kim | 9afcf93 | 2012-12-10 17:29:54 +0900 | [diff] [blame] | 468 | /* |
| 469 | * Make sure that it receives arguments in the same order as |
| 470 | * hist_entry__collapse() so that we can use an appropriate |
| 471 | * function when searching an entry regardless which sort |
| 472 | * keys were used. |
| 473 | */ |
| 474 | cmp = hist_entry__cmp(he, entry); |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 475 | |
| 476 | if (!cmp) { |
Namhyung Kim | 0f58474 | 2016-01-28 00:40:49 +0900 | [diff] [blame] | 477 | if (sample_self) { |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 478 | he_stat__add_period(&he->stat, period, weight); |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 479 | hist_entry__add_callchain_period(he, period); |
Namhyung Kim | 0f58474 | 2016-01-28 00:40:49 +0900 | [diff] [blame] | 480 | } |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 481 | if (symbol_conf.cumulate_callchain) |
| 482 | he_stat__add_period(he->stat_acc, period, weight); |
David Miller | 63fa471 | 2012-03-27 03:14:18 -0400 | [diff] [blame] | 483 | |
Namhyung Kim | ceb2acb | 2013-04-01 20:35:18 +0900 | [diff] [blame] | 484 | /* |
Arnaldo Carvalho de Melo | e80faac | 2014-01-22 13:05:06 -0300 | [diff] [blame] | 485 | * This mem info was allocated from sample__resolve_mem |
Namhyung Kim | ceb2acb | 2013-04-01 20:35:18 +0900 | [diff] [blame] | 486 | * and will not be used anymore. |
| 487 | */ |
Arnaldo Carvalho de Melo | 74cf249 | 2013-12-27 16:55:14 -0300 | [diff] [blame] | 488 | zfree(&entry->mem_info); |
Namhyung Kim | ceb2acb | 2013-04-01 20:35:18 +0900 | [diff] [blame] | 489 | |
David Miller | 63fa471 | 2012-03-27 03:14:18 -0400 | [diff] [blame] | 490 | /* If the map of an existing hist_entry has |
| 491 | * become out-of-date due to an exec() or |
| 492 | * similar, update it. Otherwise we will |
| 493 | * mis-adjust symbol addresses when computing |
| 494 | * the history counter to increment. |
| 495 | */ |
| 496 | if (he->ms.map != entry->ms.map) { |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 497 | map__put(he->ms.map); |
| 498 | he->ms.map = map__get(entry->ms.map); |
David Miller | 63fa471 | 2012-03-27 03:14:18 -0400 | [diff] [blame] | 499 | } |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 500 | goto out; |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 501 | } |
| 502 | |
| 503 | if (cmp < 0) |
| 504 | p = &(*p)->rb_left; |
| 505 | else |
| 506 | p = &(*p)->rb_right; |
| 507 | } |
| 508 | |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 509 | he = hist_entry__new(entry, sample_self); |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 510 | if (!he) |
Namhyung Kim | 27a0dcb | 2013-05-14 11:09:02 +0900 | [diff] [blame] | 511 | return NULL; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 512 | |
Namhyung Kim | 0f58474 | 2016-01-28 00:40:49 +0900 | [diff] [blame] | 513 | if (sample_self) |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 514 | hist_entry__add_callchain_period(he, period); |
| 515 | hists->nr_entries++; |
Namhyung Kim | 590cd34 | 2014-12-22 13:44:09 +0900 | [diff] [blame] | 516 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 517 | rb_link_node(&he->rb_node_in, parent, p); |
| 518 | rb_insert_color(&he->rb_node_in, hists->entries_in); |
Arnaldo Carvalho de Melo | 28e2a10 | 2010-05-09 13:02:23 -0300 | [diff] [blame] | 519 | out: |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 520 | if (sample_self) |
| 521 | he_stat__add_cpumode_period(&he->stat, al->cpumode, period); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 522 | if (symbol_conf.cumulate_callchain) |
| 523 | he_stat__add_cpumode_period(he->stat_acc, al->cpumode, period); |
Arnaldo Carvalho de Melo | 9735abf | 2009-10-03 10:42:45 -0300 | [diff] [blame] | 524 | return he; |
| 525 | } |
| 526 | |
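| | /* |
| | * Build an on-stack template hist_entry from the resolved addr_location |
| | * and sample, then add or merge it via hists__findnew_entry(). |
| | */ |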
Arnaldo Carvalho de Melo | c824c43 | 2013-10-22 19:01:31 -0300 | [diff] [blame] | 527 | struct hist_entry *__hists__add_entry(struct hists *hists, |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 528 | struct addr_location *al, |
Namhyung Kim | 41a4e6e | 2013-10-31 15:56:03 +0900 | [diff] [blame] | 529 | struct symbol *sym_parent, |
| 530 | struct branch_info *bi, |
| 531 | struct mem_info *mi, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 532 | struct perf_sample *sample, |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 533 | bool sample_self) |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 534 | { |
| 535 | struct hist_entry entry = { |
| 536 | .thread = al->thread, |
Namhyung Kim | 4dfced3 | 2013-09-13 16:28:57 +0900 | [diff] [blame] | 537 | .comm = thread__comm(al->thread), |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 538 | .ms = { |
| 539 | .map = al->map, |
| 540 | .sym = al->sym, |
| 541 | }, |
Kan Liang | 0c4c4de | 2015-09-04 10:45:42 -0400 | [diff] [blame] | 542 | .socket = al->socket, |
Don Zickus | 7365be5 | 2014-05-27 12:28:05 -0400 | [diff] [blame] | 543 | .cpu = al->cpu, |
| 544 | .cpumode = al->cpumode, |
| 545 | .ip = al->addr, |
| 546 | .level = al->level, |
Namhyung Kim | b24c28f | 2012-10-04 21:49:41 +0900 | [diff] [blame] | 547 | .stat = { |
Namhyung Kim | c4b3535 | 2012-10-04 21:49:42 +0900 | [diff] [blame] | 548 | .nr_events = 1, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 549 | .period = sample->period, |
| 550 | .weight = sample->weight, |
Namhyung Kim | b24c28f | 2012-10-04 21:49:41 +0900 | [diff] [blame] | 551 | }, |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 552 | .parent = sym_parent, |
Namhyung Kim | 2c86c7c | 2014-03-17 18:18:54 -0300 | [diff] [blame] | 553 | .filtered = symbol__parent_filter(sym_parent) | al->filtered, |
Arnaldo Carvalho de Melo | c824c43 | 2013-10-22 19:01:31 -0300 | [diff] [blame] | 554 | .hists = hists, |
Namhyung Kim | 41a4e6e | 2013-10-31 15:56:03 +0900 | [diff] [blame] | 555 | .branch_info = bi, |
| 556 | .mem_info = mi, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 557 | .transaction = sample->transaction, |
Namhyung Kim | 7239283 | 2015-12-24 11:16:17 +0900 | [diff] [blame] | 558 | .raw_data = sample->raw_data, |
| 559 | .raw_size = sample->raw_size, |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 560 | }; |
| 561 | |
Arnaldo Carvalho de Melo | e7e0efc | 2015-05-19 11:31:22 -0300 | [diff] [blame] | 562 | return hists__findnew_entry(hists, &entry, al, sample_self); |
Roberto Agostino Vitillo | b538752 | 2012-02-09 23:21:01 +0100 | [diff] [blame] | 563 | } |
| 564 | |
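| | /* |
| | * hist_entry_iter callbacks: each sampling mode (mem, branch, normal, |
| | * cumulative) supplies prepare/add_single/next/add_next/finish hooks, |
| | * which are driven by hist_entry_iter__add() below. |
| | */ |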
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 565 | static int |
| 566 | iter_next_nop_entry(struct hist_entry_iter *iter __maybe_unused, |
| 567 | struct addr_location *al __maybe_unused) |
| 568 | { |
| 569 | return 0; |
| 570 | } |
| 571 | |
| 572 | static int |
| 573 | iter_add_next_nop_entry(struct hist_entry_iter *iter __maybe_unused, |
| 574 | struct addr_location *al __maybe_unused) |
| 575 | { |
| 576 | return 0; |
| 577 | } |
| 578 | |
| 579 | static int |
| 580 | iter_prepare_mem_entry(struct hist_entry_iter *iter, struct addr_location *al) |
| 581 | { |
| 582 | struct perf_sample *sample = iter->sample; |
| 583 | struct mem_info *mi; |
| 584 | |
| 585 | mi = sample__resolve_mem(sample, al); |
| 586 | if (mi == NULL) |
| 587 | return -ENOMEM; |
| 588 | |
| 589 | iter->priv = mi; |
| 590 | return 0; |
| 591 | } |
| 592 | |
| 593 | static int |
| 594 | iter_add_single_mem_entry(struct hist_entry_iter *iter, struct addr_location *al) |
| 595 | { |
| 596 | u64 cost; |
| 597 | struct mem_info *mi = iter->priv; |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 598 | struct hists *hists = evsel__hists(iter->evsel); |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 599 | struct perf_sample *sample = iter->sample; |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 600 | struct hist_entry *he; |
| 601 | |
| 602 | if (mi == NULL) |
| 603 | return -EINVAL; |
| 604 | |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 605 | cost = sample->weight; |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 606 | if (!cost) |
| 607 | cost = 1; |
| 608 | |
| 609 | /* |
| 610 | * must pass period=weight in order to get the correct |
| 611 | * sorting from hists__collapse_resort() which is solely |
| 612 | * based on periods. We want sorting to be done on nr_events * weight |
| 613 | * and this is indirectly achieved by passing period=weight here |
| 614 | * and the he_stat__add_period() function. |
| 615 | */ |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 616 | sample->period = cost; |
| 617 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 618 | he = __hists__add_entry(hists, al, iter->parent, NULL, mi, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 619 | sample, true); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 620 | if (!he) |
| 621 | return -ENOMEM; |
| 622 | |
| 623 | iter->he = he; |
| 624 | return 0; |
| 625 | } |
| 626 | |
| 627 | static int |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 628 | iter_finish_mem_entry(struct hist_entry_iter *iter, |
| 629 | struct addr_location *al __maybe_unused) |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 630 | { |
| 631 | struct perf_evsel *evsel = iter->evsel; |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 632 | struct hists *hists = evsel__hists(evsel); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 633 | struct hist_entry *he = iter->he; |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 634 | int err = -EINVAL; |
| 635 | |
| 636 | if (he == NULL) |
| 637 | goto out; |
| 638 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 639 | hists__inc_nr_samples(hists, he->filtered); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 640 | |
| 641 | err = hist_entry__append_callchain(he, iter->sample); |
| 642 | |
| 643 | out: |
| 644 | /* |
Arnaldo Carvalho de Melo | e7e0efc | 2015-05-19 11:31:22 -0300 | [diff] [blame] | 645 | * We don't need to free iter->priv (mem_info) here since the mem info |
| 646 | * was either already freed in hists__findnew_entry() or passed to a |
| 647 | * new hist entry by hist_entry__new(). |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 648 | */ |
| 649 | iter->priv = NULL; |
| 650 | |
| 651 | iter->he = NULL; |
| 652 | return err; |
| 653 | } |
| 654 | |
| 655 | static int |
| 656 | iter_prepare_branch_entry(struct hist_entry_iter *iter, struct addr_location *al) |
| 657 | { |
| 658 | struct branch_info *bi; |
| 659 | struct perf_sample *sample = iter->sample; |
| 660 | |
| 661 | bi = sample__resolve_bstack(sample, al); |
| 662 | if (!bi) |
| 663 | return -ENOMEM; |
| 664 | |
| 665 | iter->curr = 0; |
| 666 | iter->total = sample->branch_stack->nr; |
| 667 | |
| 668 | iter->priv = bi; |
| 669 | return 0; |
| 670 | } |
| 671 | |
| 672 | static int |
| 673 | iter_add_single_branch_entry(struct hist_entry_iter *iter __maybe_unused, |
| 674 | struct addr_location *al __maybe_unused) |
| 675 | { |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 676 | /* to avoid calling the callback function */ |
| 677 | iter->he = NULL; |
| 678 | |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 679 | return 0; |
| 680 | } |
| 681 | |
| 682 | static int |
| 683 | iter_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al) |
| 684 | { |
| 685 | struct branch_info *bi = iter->priv; |
| 686 | int i = iter->curr; |
| 687 | |
| 688 | if (bi == NULL) |
| 689 | return 0; |
| 690 | |
| 691 | if (iter->curr >= iter->total) |
| 692 | return 0; |
| 693 | |
| 694 | al->map = bi[i].to.map; |
| 695 | al->sym = bi[i].to.sym; |
| 696 | al->addr = bi[i].to.addr; |
| 697 | return 1; |
| 698 | } |
| 699 | |
| 700 | static int |
| 701 | iter_add_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al) |
| 702 | { |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 703 | struct branch_info *bi; |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 704 | struct perf_evsel *evsel = iter->evsel; |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 705 | struct hists *hists = evsel__hists(evsel); |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 706 | struct perf_sample *sample = iter->sample; |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 707 | struct hist_entry *he = NULL; |
| 708 | int i = iter->curr; |
| 709 | int err = 0; |
| 710 | |
| 711 | bi = iter->priv; |
| 712 | |
| 713 | if (iter->hide_unresolved && !(bi[i].from.sym && bi[i].to.sym)) |
| 714 | goto out; |
| 715 | |
| 716 | /* |
| 717 | * The report shows the percentage of total branches captured |
| 718 | * and not events sampled. Thus we use a pseudo period of 1. |
| 719 | */ |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 720 | sample->period = 1; |
| 721 | sample->weight = bi->flags.cycles ? bi->flags.cycles : 1; |
| 722 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 723 | he = __hists__add_entry(hists, al, iter->parent, &bi[i], NULL, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 724 | sample, true); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 725 | if (he == NULL) |
| 726 | return -ENOMEM; |
| 727 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 728 | hists__inc_nr_samples(hists, he->filtered); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 729 | |
| 730 | out: |
| 731 | iter->he = he; |
| 732 | iter->curr++; |
| 733 | return err; |
| 734 | } |
| 735 | |
| 736 | static int |
| 737 | iter_finish_branch_entry(struct hist_entry_iter *iter, |
| 738 | struct addr_location *al __maybe_unused) |
| 739 | { |
| 740 | zfree(&iter->priv); |
| 741 | iter->he = NULL; |
| 742 | |
| 743 | return iter->curr >= iter->total ? 0 : -1; |
| 744 | } |
| 745 | |
| 746 | static int |
| 747 | iter_prepare_normal_entry(struct hist_entry_iter *iter __maybe_unused, |
| 748 | struct addr_location *al __maybe_unused) |
| 749 | { |
| 750 | return 0; |
| 751 | } |
| 752 | |
| 753 | static int |
| 754 | iter_add_single_normal_entry(struct hist_entry_iter *iter, struct addr_location *al) |
| 755 | { |
| 756 | struct perf_evsel *evsel = iter->evsel; |
| 757 | struct perf_sample *sample = iter->sample; |
| 758 | struct hist_entry *he; |
| 759 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 760 | he = __hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 761 | sample, true); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 762 | if (he == NULL) |
| 763 | return -ENOMEM; |
| 764 | |
| 765 | iter->he = he; |
| 766 | return 0; |
| 767 | } |
| 768 | |
| 769 | static int |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 770 | iter_finish_normal_entry(struct hist_entry_iter *iter, |
| 771 | struct addr_location *al __maybe_unused) |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 772 | { |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 773 | struct hist_entry *he = iter->he; |
| 774 | struct perf_evsel *evsel = iter->evsel; |
| 775 | struct perf_sample *sample = iter->sample; |
| 776 | |
| 777 | if (he == NULL) |
| 778 | return 0; |
| 779 | |
| 780 | iter->he = NULL; |
| 781 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 782 | hists__inc_nr_samples(evsel__hists(evsel), he->filtered); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 783 | |
| 784 | return hist_entry__append_callchain(he, sample); |
| 785 | } |
| 786 | |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 787 | static int |
Adrian Hunter | 96b40f3 | 2015-09-25 16:15:47 +0300 | [diff] [blame] | 788 | iter_prepare_cumulative_entry(struct hist_entry_iter *iter, |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 789 | struct addr_location *al __maybe_unused) |
| 790 | { |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 791 | struct hist_entry **he_cache; |
| 792 | |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 793 | callchain_cursor_commit(&callchain_cursor); |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 794 | |
| 795 | /* |
| 796 | * This is for detecting cycles or recursion so that entries are |
| 797 | * accumulated only once, preventing any entry from exceeding 100% |
| 798 | * overhead. |
| 799 | */ |
Adrian Hunter | 96b40f3 | 2015-09-25 16:15:47 +0300 | [diff] [blame] | 800 | he_cache = malloc(sizeof(*he_cache) * (iter->max_stack + 1)); |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 801 | if (he_cache == NULL) |
| 802 | return -ENOMEM; |
| 803 | |
| 804 | iter->priv = he_cache; |
| 805 | iter->curr = 0; |
| 806 | |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 807 | return 0; |
| 808 | } |
| 809 | |
| 810 | static int |
| 811 | iter_add_single_cumulative_entry(struct hist_entry_iter *iter, |
| 812 | struct addr_location *al) |
| 813 | { |
| 814 | struct perf_evsel *evsel = iter->evsel; |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 815 | struct hists *hists = evsel__hists(evsel); |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 816 | struct perf_sample *sample = iter->sample; |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 817 | struct hist_entry **he_cache = iter->priv; |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 818 | struct hist_entry *he; |
| 819 | int err = 0; |
| 820 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 821 | he = __hists__add_entry(hists, al, iter->parent, NULL, NULL, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 822 | sample, true); |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 823 | if (he == NULL) |
| 824 | return -ENOMEM; |
| 825 | |
| 826 | iter->he = he; |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 827 | he_cache[iter->curr++] = he; |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 828 | |
Namhyung Kim | 82aa019 | 2014-12-22 13:44:14 +0900 | [diff] [blame] | 829 | hist_entry__append_callchain(he, sample); |
Namhyung Kim | be7f855 | 2013-12-26 17:44:10 +0900 | [diff] [blame] | 830 | |
| 831 | /* |
| 832 | * We need to re-initialize the cursor since callchain_append() |
| 833 | * advanced the cursor to the end. |
| 834 | */ |
| 835 | callchain_cursor_commit(&callchain_cursor); |
| 836 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 837 | hists__inc_nr_samples(hists, he->filtered); |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 838 | |
| 839 | return err; |
| 840 | } |
| 841 | |
| 842 | static int |
| 843 | iter_next_cumulative_entry(struct hist_entry_iter *iter, |
| 844 | struct addr_location *al) |
| 845 | { |
| 846 | struct callchain_cursor_node *node; |
| 847 | |
| 848 | node = callchain_cursor_current(&callchain_cursor); |
| 849 | if (node == NULL) |
| 850 | return 0; |
| 851 | |
Namhyung Kim | c7405d8 | 2013-10-31 13:58:30 +0900 | [diff] [blame] | 852 | return fill_callchain_info(al, node, iter->hide_unresolved); |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 853 | } |
| 854 | |
| 855 | static int |
| 856 | iter_add_next_cumulative_entry(struct hist_entry_iter *iter, |
| 857 | struct addr_location *al) |
| 858 | { |
| 859 | struct perf_evsel *evsel = iter->evsel; |
| 860 | struct perf_sample *sample = iter->sample; |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 861 | struct hist_entry **he_cache = iter->priv; |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 862 | struct hist_entry *he; |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 863 | struct hist_entry he_tmp = { |
Arnaldo Carvalho de Melo | 5cef897 | 2015-08-10 15:45:55 -0300 | [diff] [blame] | 864 | .hists = evsel__hists(evsel), |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 865 | .cpu = al->cpu, |
| 866 | .thread = al->thread, |
| 867 | .comm = thread__comm(al->thread), |
| 868 | .ip = al->addr, |
| 869 | .ms = { |
| 870 | .map = al->map, |
| 871 | .sym = al->sym, |
| 872 | }, |
| 873 | .parent = iter->parent, |
Namhyung Kim | 7239283 | 2015-12-24 11:16:17 +0900 | [diff] [blame] | 874 | .raw_data = sample->raw_data, |
| 875 | .raw_size = sample->raw_size, |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 876 | }; |
| 877 | int i; |
Namhyung Kim | be7f855 | 2013-12-26 17:44:10 +0900 | [diff] [blame] | 878 | struct callchain_cursor cursor; |
| 879 | |
| 880 | callchain_cursor_snapshot(&cursor, &callchain_cursor); |
| 881 | |
| 882 | callchain_cursor_advance(&callchain_cursor); |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 883 | |
| 884 | /* |
| 885 | * Check if there are duplicate entries in the callchain. |
| 886 | * It's possible that it has cycles or recursive calls. |
| 887 | */ |
| 888 | for (i = 0; i < iter->curr; i++) { |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 889 | if (hist_entry__cmp(he_cache[i], &he_tmp) == 0) { |
| 890 | /* to avoid calling the callback function */ |
| 891 | iter->he = NULL; |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 892 | return 0; |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 893 | } |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 894 | } |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 895 | |
Arnaldo Carvalho de Melo | 4ea062ed | 2014-10-09 13:13:41 -0300 | [diff] [blame] | 896 | he = __hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL, |
Namhyung Kim | fd36f3d | 2015-12-23 02:06:58 +0900 | [diff] [blame] | 897 | sample, false); |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 898 | if (he == NULL) |
| 899 | return -ENOMEM; |
| 900 | |
| 901 | iter->he = he; |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 902 | he_cache[iter->curr++] = he; |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 903 | |
Namhyung Kim | 82aa019 | 2014-12-22 13:44:14 +0900 | [diff] [blame] | 904 | if (symbol_conf.use_callchain) |
| 905 | callchain_append(he->callchain, &cursor, sample->period); |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 906 | return 0; |
| 907 | } |
| 908 | |
| 909 | static int |
| 910 | iter_finish_cumulative_entry(struct hist_entry_iter *iter, |
| 911 | struct addr_location *al __maybe_unused) |
| 912 | { |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 913 | zfree(&iter->priv); |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 914 | iter->he = NULL; |
Namhyung Kim | b4d3c8b | 2013-10-31 10:05:29 +0900 | [diff] [blame] | 915 | |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 916 | return 0; |
| 917 | } |
| 918 | |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 919 | const struct hist_iter_ops hist_iter_mem = { |
| 920 | .prepare_entry = iter_prepare_mem_entry, |
| 921 | .add_single_entry = iter_add_single_mem_entry, |
| 922 | .next_entry = iter_next_nop_entry, |
| 923 | .add_next_entry = iter_add_next_nop_entry, |
| 924 | .finish_entry = iter_finish_mem_entry, |
| 925 | }; |
| 926 | |
| 927 | const struct hist_iter_ops hist_iter_branch = { |
| 928 | .prepare_entry = iter_prepare_branch_entry, |
| 929 | .add_single_entry = iter_add_single_branch_entry, |
| 930 | .next_entry = iter_next_branch_entry, |
| 931 | .add_next_entry = iter_add_next_branch_entry, |
| 932 | .finish_entry = iter_finish_branch_entry, |
| 933 | }; |
| 934 | |
| 935 | const struct hist_iter_ops hist_iter_normal = { |
| 936 | .prepare_entry = iter_prepare_normal_entry, |
| 937 | .add_single_entry = iter_add_single_normal_entry, |
| 938 | .next_entry = iter_next_nop_entry, |
| 939 | .add_next_entry = iter_add_next_nop_entry, |
| 940 | .finish_entry = iter_finish_normal_entry, |
| 941 | }; |
| 942 | |
Namhyung Kim | 7a13aa2 | 2012-09-11 14:13:04 +0900 | [diff] [blame] | 943 | const struct hist_iter_ops hist_iter_cumulative = { |
| 944 | .prepare_entry = iter_prepare_cumulative_entry, |
| 945 | .add_single_entry = iter_add_single_cumulative_entry, |
| 946 | .next_entry = iter_next_cumulative_entry, |
| 947 | .add_next_entry = iter_add_next_cumulative_entry, |
| 948 | .finish_entry = iter_finish_cumulative_entry, |
| 949 | }; |
| 950 | |
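| | /* |
| | * Feed one sample through the iterator ops: resolve its callchain, add |
| | * the initial entry, then walk any follow-up entries (branch targets or |
| | * callchain parents), invoking add_entry_cb for each entry created. |
| | */ |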
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 951 | int hist_entry_iter__add(struct hist_entry_iter *iter, struct addr_location *al, |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 952 | int max_stack_depth, void *arg) |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 953 | { |
| 954 | int err, err2; |
| 955 | |
Namhyung Kim | 063bd93 | 2015-05-19 17:04:10 +0900 | [diff] [blame] | 956 | err = sample__resolve_callchain(iter->sample, &iter->parent, |
| 957 | iter->evsel, al, max_stack_depth); |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 958 | if (err) |
| 959 | return err; |
| 960 | |
Adrian Hunter | 96b40f3 | 2015-09-25 16:15:47 +0300 | [diff] [blame] | 961 | iter->max_stack = max_stack_depth; |
| 962 | |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 963 | err = iter->ops->prepare_entry(iter, al); |
| 964 | if (err) |
| 965 | goto out; |
| 966 | |
| 967 | err = iter->ops->add_single_entry(iter, al); |
| 968 | if (err) |
| 969 | goto out; |
| 970 | |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 971 | if (iter->he && iter->add_entry_cb) { |
| 972 | err = iter->add_entry_cb(iter, al, true, arg); |
| 973 | if (err) |
| 974 | goto out; |
| 975 | } |
| 976 | |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 977 | while (iter->ops->next_entry(iter, al)) { |
| 978 | err = iter->ops->add_next_entry(iter, al); |
| 979 | if (err) |
| 980 | break; |
Namhyung Kim | 9d3c02d | 2014-01-07 17:02:25 +0900 | [diff] [blame] | 981 | |
| 982 | if (iter->he && iter->add_entry_cb) { |
| 983 | err = iter->add_entry_cb(iter, al, false, arg); |
| 984 | if (err) |
| 985 | goto out; |
| 986 | } |
Namhyung Kim | 69bcb01 | 2013-10-30 09:40:34 +0900 | [diff] [blame] | 987 | } |
| 988 | |
| 989 | out: |
| 990 | err2 = iter->ops->finish_entry(iter, al); |
| 991 | if (!err) |
| 992 | err = err2; |
| 993 | |
| 994 | return err; |
| 995 | } |
| 996 | |
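| | /* |
| | * Compare two entries using each configured sort key in turn; the first |
| | * key that reports a difference decides the ordering. Dynamic entries |
| | * not defined for these hists are skipped. |
| | */ |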
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 997 | int64_t |
| 998 | hist_entry__cmp(struct hist_entry *left, struct hist_entry *right) |
| 999 | { |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1000 | struct hists *hists = left->hists; |
Namhyung Kim | 093f0ef3 | 2014-03-03 12:07:47 +0900 | [diff] [blame] | 1001 | struct perf_hpp_fmt *fmt; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1002 | int64_t cmp = 0; |
| 1003 | |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1004 | hists__for_each_sort_list(hists, fmt) { |
Namhyung Kim | 84b6ee8 | 2016-02-27 03:52:43 +0900 | [diff] [blame] | 1005 | if (perf_hpp__is_dynamic_entry(fmt) && |
| 1006 | !perf_hpp__defined_dynamic_entry(fmt, hists)) |
| 1007 | continue; |
| 1008 | |
Namhyung Kim | 87bbdf7 | 2015-01-08 09:45:46 +0900 | [diff] [blame] | 1009 | cmp = fmt->cmp(fmt, left, right); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1010 | if (cmp) |
| 1011 | break; |
| 1012 | } |
| 1013 | |
| 1014 | return cmp; |
| 1015 | } |
| 1016 | |
| 1017 | int64_t |
| 1018 | hist_entry__collapse(struct hist_entry *left, struct hist_entry *right) |
| 1019 | { |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1020 | struct hists *hists = left->hists; |
Namhyung Kim | 093f0ef3 | 2014-03-03 12:07:47 +0900 | [diff] [blame] | 1021 | struct perf_hpp_fmt *fmt; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1022 | int64_t cmp = 0; |
| 1023 | |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1024 | hists__for_each_sort_list(hists, fmt) { |
Namhyung Kim | 84b6ee8 | 2016-02-27 03:52:43 +0900 | [diff] [blame] | 1025 | if (perf_hpp__is_dynamic_entry(fmt) && |
| 1026 | !perf_hpp__defined_dynamic_entry(fmt, hists)) |
| 1027 | continue; |
| 1028 | |
Namhyung Kim | 87bbdf7 | 2015-01-08 09:45:46 +0900 | [diff] [blame] | 1029 | cmp = fmt->collapse(fmt, left, right); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1030 | if (cmp) |
| 1031 | break; |
| 1032 | } |
| 1033 | |
| 1034 | return cmp; |
| 1035 | } |
| 1036 | |
Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 1037 | void hist_entry__delete(struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1038 | { |
Arnaldo Carvalho de Melo | f3b623b | 2015-03-02 22:21:35 -0300 | [diff] [blame] | 1039 | thread__zput(he->thread); |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 1040 | map__zput(he->ms.map); |
| 1041 | |
| 1042 | if (he->branch_info) { |
| 1043 | map__zput(he->branch_info->from.map); |
| 1044 | map__zput(he->branch_info->to.map); |
| 1045 | zfree(&he->branch_info); |
| 1046 | } |
| 1047 | |
| 1048 | if (he->mem_info) { |
| 1049 | map__zput(he->mem_info->iaddr.map); |
| 1050 | map__zput(he->mem_info->daddr.map); |
| 1051 | zfree(&he->mem_info); |
| 1052 | } |
| 1053 | |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 1054 | zfree(&he->stat_acc); |
Namhyung Kim | f048d54 | 2013-09-11 14:09:28 +0900 | [diff] [blame] | 1055 | free_srcline(he->srcline); |
Andi Kleen | 31191a8 | 2015-08-07 15:54:24 -0700 | [diff] [blame] | 1056 | if (he->srcfile && he->srcfile[0]) |
| 1057 | free(he->srcfile); |
Namhyung Kim | d114960 | 2014-12-30 14:38:13 +0900 | [diff] [blame] | 1058 | free_callchain(he->callchain); |
Namhyung Kim | 60517d2 | 2015-12-23 02:07:03 +0900 | [diff] [blame] | 1059 | free(he->trace_output); |
Namhyung Kim | 7239283 | 2015-12-24 11:16:17 +0900 | [diff] [blame] | 1060 | free(he->raw_data); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1061 | free(he); |
| 1062 | } |
| 1063 | |
| 1064 | /* |
Arnaldo Carvalho de Melo | 89fee70 | 2016-02-11 17:14:13 -0300 | [diff] [blame] | 1065 | * If this is not the last column, then we need to pad it according to the |
| 1066 | * pre-calculated max length for this column, otherwise don't bother adding
| 1067 | * spaces because that would break viewing this with, for instance, 'less',
| 1068 | * which would show tons of trailing spaces when a long C++ demangled method
| 1069 | * name is sampled.
| 1070 | */ |
| 1071 | int hist_entry__snprintf_alignment(struct hist_entry *he, struct perf_hpp *hpp, |
| 1072 | struct perf_hpp_fmt *fmt, int printed) |
| 1073 | { |
| 1074 | if (!list_is_last(&fmt->list, &he->hists->hpp_list->fields)) { |
| 1075 | const int width = fmt->width(fmt, hpp, hists_to_evsel(he->hists)); |
| 1076 | if (printed < width) { |
| 1077 | advance_hpp(hpp, printed); |
| 1078 | printed = scnprintf(hpp->buf, hpp->size, "%-*s", width - printed, " "); |
| 1079 | } |
| 1080 | } |
| 1081 | |
| 1082 | return printed; |
| 1083 | } |
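/*
 * Behavioural sketch (illustrative only): with a pre-calculated column width
 * of 10 and a value that took printed = 6 characters, the helper above
 * advances hpp past those 6 characters, appends 4 spaces and returns 4.  For
 * the last column it returns 'printed' untouched, so no trailing blanks are
 * emitted.  Roughly how the printers are assumed to use it:
 *
 *	printed = fmt->entry(fmt, hpp, he);		// e.g. 6 chars written
 *	printed = hist_entry__snprintf_alignment(he, hpp, fmt, printed);
 */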
| 1084 | |
| 1085 | /* |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1086 | * collapse the histogram |
| 1087 | */ |
| 1088 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1089 | static void hists__apply_filters(struct hists *hists, struct hist_entry *he); |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1090 | static void hists__remove_entry_filter(struct hists *hists, struct hist_entry *he, |
| 1091 | enum hist_filter type); |
| 1092 | |
| 1093 | typedef bool (*fmt_chk_fn)(struct perf_hpp_fmt *fmt); |
| 1094 | |
| 1095 | static bool check_thread_entry(struct perf_hpp_fmt *fmt) |
| 1096 | { |
| 1097 | return perf_hpp__is_thread_entry(fmt) || perf_hpp__is_comm_entry(fmt); |
| 1098 | } |
| 1099 | |
| 1100 | static void hist_entry__check_and_remove_filter(struct hist_entry *he, |
| 1101 | enum hist_filter type, |
| 1102 | fmt_chk_fn check) |
| 1103 | { |
| 1104 | struct perf_hpp_fmt *fmt; |
| 1105 | bool type_match = false; |
| 1106 | struct hist_entry *parent = he->parent_he; |
| 1107 | |
| 1108 | switch (type) { |
| 1109 | case HIST_FILTER__THREAD: |
| 1110 | if (symbol_conf.comm_list == NULL && |
| 1111 | symbol_conf.pid_list == NULL && |
| 1112 | symbol_conf.tid_list == NULL) |
| 1113 | return; |
| 1114 | break; |
| 1115 | case HIST_FILTER__DSO: |
| 1116 | if (symbol_conf.dso_list == NULL) |
| 1117 | return; |
| 1118 | break; |
| 1119 | case HIST_FILTER__SYMBOL: |
| 1120 | if (symbol_conf.sym_list == NULL) |
| 1121 | return; |
| 1122 | break; |
| 1123 | case HIST_FILTER__PARENT: |
| 1124 | case HIST_FILTER__GUEST: |
| 1125 | case HIST_FILTER__HOST: |
| 1126 | case HIST_FILTER__SOCKET: |
| 1127 | default: |
| 1128 | return; |
| 1129 | } |
| 1130 | |
| 1131 | /* if it's filtered by its own fmt, it has to have filter bits */
| 1132 | perf_hpp_list__for_each_format(he->hpp_list, fmt) { |
| 1133 | if (check(fmt)) { |
| 1134 | type_match = true; |
| 1135 | break; |
| 1136 | } |
| 1137 | } |
| 1138 | |
| 1139 | if (type_match) { |
| 1140 | /* |
| 1141 | * If the filter is for the current level's entry, propagate
| 1142 | * the filter marker to its parents. The marker bit was
| 1143 | * already set by default, so it only needs to be cleared
| 1144 | * on non-filtered entries.
| 1145 | */ |
| 1146 | if (!(he->filtered & (1 << type))) { |
| 1147 | while (parent) { |
| 1148 | parent->filtered &= ~(1 << type); |
| 1149 | parent = parent->parent_he; |
| 1150 | } |
| 1151 | } |
| 1152 | } else { |
| 1153 | /* |
| 1154 | * If the current entry doesn't have matching formats, set
| 1155 | * the filter marker for upper level entries. It will be
| 1156 | * cleared if its lower level entries are not filtered.
| 1157 | *
| 1158 | * For lower-level entries, the parent's filter bit is
| 1159 | * inherited so that lower level entries of a
| 1160 | * non-filtered entry won't set the filter marker.
| 1161 | */ |
| 1162 | if (parent == NULL) |
| 1163 | he->filtered |= (1 << type); |
| 1164 | else |
| 1165 | he->filtered |= (parent->filtered & (1 << type)); |
| 1166 | } |
| 1167 | } |
| 1168 | |
| 1169 | static void hist_entry__apply_hierarchy_filters(struct hist_entry *he) |
| 1170 | { |
| 1171 | hist_entry__check_and_remove_filter(he, HIST_FILTER__THREAD, |
| 1172 | check_thread_entry); |
| 1173 | |
| 1174 | hist_entry__check_and_remove_filter(he, HIST_FILTER__DSO, |
| 1175 | perf_hpp__is_dso_entry); |
| 1176 | |
| 1177 | hist_entry__check_and_remove_filter(he, HIST_FILTER__SYMBOL, |
| 1178 | perf_hpp__is_sym_entry); |
| 1179 | |
| 1180 | hists__apply_filters(he->hists, he); |
| 1181 | } |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1182 | |
| 1183 | static struct hist_entry *hierarchy_insert_entry(struct hists *hists, |
| 1184 | struct rb_root *root, |
| 1185 | struct hist_entry *he, |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1186 | struct hist_entry *parent_he, |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1187 | struct perf_hpp_list *hpp_list) |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1188 | { |
| 1189 | struct rb_node **p = &root->rb_node; |
| 1190 | struct rb_node *parent = NULL; |
| 1191 | struct hist_entry *iter, *new; |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1192 | struct perf_hpp_fmt *fmt; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1193 | int64_t cmp; |
| 1194 | |
| 1195 | while (*p != NULL) { |
| 1196 | parent = *p; |
| 1197 | iter = rb_entry(parent, struct hist_entry, rb_node_in); |
| 1198 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1199 | cmp = 0; |
| 1200 | perf_hpp_list__for_each_sort_list(hpp_list, fmt) { |
| 1201 | cmp = fmt->collapse(fmt, iter, he); |
| 1202 | if (cmp) |
| 1203 | break; |
| 1204 | } |
| 1205 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1206 | if (!cmp) { |
| 1207 | he_stat__add_stat(&iter->stat, &he->stat); |
| 1208 | return iter; |
| 1209 | } |
| 1210 | |
| 1211 | if (cmp < 0) |
| 1212 | p = &parent->rb_left; |
| 1213 | else |
| 1214 | p = &parent->rb_right; |
| 1215 | } |
| 1216 | |
| 1217 | new = hist_entry__new(he, true); |
| 1218 | if (new == NULL) |
| 1219 | return NULL; |
| 1220 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1221 | hists->nr_entries++; |
| 1222 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1223 | /* save related format list for output */ |
| 1224 | new->hpp_list = hpp_list; |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1225 | new->parent_he = parent_he; |
| 1226 | |
| 1227 | hist_entry__apply_hierarchy_filters(new); |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1228 | |
| 1229 | /* the string fields are kept by either 'he' or 'new', depending on the format */
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1230 | perf_hpp_list__for_each_sort_list(hpp_list, fmt) { |
| 1231 | if (perf_hpp__is_trace_entry(fmt) || perf_hpp__is_dynamic_entry(fmt)) |
| 1232 | he->trace_output = NULL; |
| 1233 | else |
| 1234 | new->trace_output = NULL; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1235 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1236 | if (perf_hpp__is_srcline_entry(fmt)) |
| 1237 | he->srcline = NULL; |
| 1238 | else |
| 1239 | new->srcline = NULL; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1240 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1241 | if (perf_hpp__is_srcfile_entry(fmt)) |
| 1242 | he->srcfile = NULL; |
| 1243 | else |
| 1244 | new->srcfile = NULL; |
| 1245 | } |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1246 | |
| 1247 | rb_link_node(&new->rb_node_in, parent, p); |
| 1248 | rb_insert_color(&new->rb_node_in, root); |
| 1249 | return new; |
| 1250 | } |
| 1251 | |
| 1252 | static int hists__hierarchy_insert_entry(struct hists *hists, |
| 1253 | struct rb_root *root, |
| 1254 | struct hist_entry *he) |
| 1255 | { |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1256 | struct perf_hpp_list_node *node; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1257 | struct hist_entry *new_he = NULL; |
| 1258 | struct hist_entry *parent = NULL; |
| 1259 | int depth = 0; |
| 1260 | int ret = 0; |
| 1261 | |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1262 | list_for_each_entry(node, &hists->hpp_formats, list) { |
| 1263 | /* skip period (overhead) and elided columns */ |
| 1264 | if (node->level == 0 || node->skip) |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1265 | continue; |
| 1266 | |
| 1267 | /* insert copy of 'he' for each fmt into the hierarchy */ |
Namhyung Kim | aec13a7 | 2016-03-09 22:46:58 +0900 | [diff] [blame] | 1268 | new_he = hierarchy_insert_entry(hists, root, he, parent, &node->hpp); |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1269 | if (new_he == NULL) { |
| 1270 | ret = -1; |
| 1271 | break; |
| 1272 | } |
| 1273 | |
| 1274 | root = &new_he->hroot_in; |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1275 | new_he->depth = depth++; |
| 1276 | parent = new_he; |
| 1277 | } |
| 1278 | |
| 1279 | if (new_he) { |
| 1280 | new_he->leaf = true; |
| 1281 | |
| 1282 | if (symbol_conf.use_callchain) { |
| 1283 | callchain_cursor_reset(&callchain_cursor); |
| 1284 | if (callchain_merge(&callchain_cursor, |
| 1285 | new_he->callchain, |
| 1286 | he->callchain) < 0) |
| 1287 | ret = -1; |
| 1288 | } |
| 1289 | } |
| 1290 | |
| 1291 | /* 'he' is no longer used */ |
| 1292 | hist_entry__delete(he); |
| 1293 | |
| 1294 | /* return 0 (or -1) since it already applied filters */ |
| 1295 | return ret; |
| 1296 | } |
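/*
 * Illustrative result of the hierarchy insertion above, assuming hierarchy
 * levels 'comm,dso,sym' (the names below are made up): a single incoming
 * entry is split into one copy per level, chained via parent_he, and only
 * the last copy is marked as a leaf:
 *
 *	"firefox"			depth 0
 *	    "libxul.so"			depth 1
 *	        "js::RunScript"		depth 2, ->leaf = true
 *
 * The period is added at every level, so a parent always carries the sum of
 * its children.
 */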
| 1297 | |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1298 | int hists__collapse_insert_entry(struct hists *hists, struct rb_root *root, |
| 1299 | struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1300 | { |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 1301 | struct rb_node **p = &root->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1302 | struct rb_node *parent = NULL; |
| 1303 | struct hist_entry *iter; |
| 1304 | int64_t cmp; |
| 1305 | |
Namhyung Kim | aef810e | 2016-02-25 00:13:34 +0900 | [diff] [blame] | 1306 | if (symbol_conf.report_hierarchy) |
| 1307 | return hists__hierarchy_insert_entry(hists, root, he); |
| 1308 | |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1309 | while (*p != NULL) { |
| 1310 | parent = *p; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1311 | iter = rb_entry(parent, struct hist_entry, rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1312 | |
| 1313 | cmp = hist_entry__collapse(iter, he); |
| 1314 | |
| 1315 | if (!cmp) { |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1316 | int ret = 0; |
| 1317 | |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 1318 | he_stat__add_stat(&iter->stat, &he->stat); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 1319 | if (symbol_conf.cumulate_callchain) |
| 1320 | he_stat__add_stat(iter->stat_acc, he->stat_acc); |
Namhyung Kim | 9ec6097 | 2012-09-26 16:47:28 +0900 | [diff] [blame] | 1321 | |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 1322 | if (symbol_conf.use_callchain) { |
Namhyung Kim | 4726064 | 2012-05-31 14:43:26 +0900 | [diff] [blame] | 1323 | callchain_cursor_reset(&callchain_cursor); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1324 | if (callchain_merge(&callchain_cursor, |
| 1325 | iter->callchain, |
| 1326 | he->callchain) < 0) |
| 1327 | ret = -1; |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 1328 | } |
Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 1329 | hist_entry__delete(he); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1330 | return ret; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1331 | } |
| 1332 | |
| 1333 | if (cmp < 0) |
| 1334 | p = &(*p)->rb_left; |
| 1335 | else |
| 1336 | p = &(*p)->rb_right; |
| 1337 | } |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1338 | hists->nr_entries++; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1339 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1340 | rb_link_node(&he->rb_node_in, parent, p); |
| 1341 | rb_insert_color(&he->rb_node_in, root); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1342 | return 1; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1343 | } |
| 1344 | |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 1345 | struct rb_root *hists__get_rotate_entries_in(struct hists *hists) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1346 | { |
| 1347 | struct rb_root *root; |
| 1348 | |
| 1349 | pthread_mutex_lock(&hists->lock); |
| 1350 | |
| 1351 | root = hists->entries_in; |
| 1352 | if (++hists->entries_in > &hists->entries_in_array[1]) |
| 1353 | hists->entries_in = &hists->entries_in_array[0]; |
| 1354 | |
| 1355 | pthread_mutex_unlock(&hists->lock); |
| 1356 | |
| 1357 | return root; |
| 1358 | } |
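/*
 * Rough sketch of the double buffering above (illustrative): while the
 * collapse pass drains the tree returned here, newly added hist entries go
 * into the other element of entries_in_array[], and the next call swaps the
 * roles again.
 *
 *	root = hists__get_rotate_entries_in(hists);	// e.g. [0]
 *	// ... collapse the entries hanging off 'root' ...
 *	// meanwhile the entry-adding path inserts into entries_in == [1]
 */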
| 1359 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1360 | static void hists__apply_filters(struct hists *hists, struct hist_entry *he) |
| 1361 | { |
| 1362 | hists__filter_entry_by_dso(hists, he); |
| 1363 | hists__filter_entry_by_thread(hists, he); |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1364 | hists__filter_entry_by_symbol(hists, he); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1365 | hists__filter_entry_by_socket(hists, he); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1366 | } |
| 1367 | |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1368 | int hists__collapse_resort(struct hists *hists, struct ui_progress *prog) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1369 | { |
| 1370 | struct rb_root *root; |
| 1371 | struct rb_node *next; |
| 1372 | struct hist_entry *n; |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1373 | int ret; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1374 | |
Namhyung Kim | 3a5714f | 2013-05-14 11:09:01 +0900 | [diff] [blame] | 1375 | if (!sort__need_collapse) |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1376 | return 0; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1377 | |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1378 | hists->nr_entries = 0; |
| 1379 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1380 | root = hists__get_rotate_entries_in(hists); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1381 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1382 | next = rb_first(root); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1383 | |
| 1384 | while (next) { |
Arnaldo Carvalho de Melo | 33e940a | 2013-09-17 16:34:28 -0300 | [diff] [blame] | 1385 | if (session_done()) |
| 1386 | break; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1387 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1388 | next = rb_next(&n->rb_node_in); |
| 1389 | |
| 1390 | rb_erase(&n->rb_node_in, root); |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1391 | ret = hists__collapse_insert_entry(hists, &hists->entries_collapsed, n); |
| 1392 | if (ret < 0) |
| 1393 | return -1; |
| 1394 | |
| 1395 | if (ret) { |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1396 | /* |
| 1397 | * If it wasn't combined with one of the entries already |
| 1398 | * collapsed, we need to apply the filters that may have |
| 1399 | * been set by, say, the hist_browser. |
| 1400 | */ |
| 1401 | hists__apply_filters(hists, n); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1402 | } |
Namhyung Kim | c1fb565 | 2013-10-11 14:15:38 +0900 | [diff] [blame] | 1403 | if (prog) |
| 1404 | ui_progress__update(prog, 1); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1405 | } |
Namhyung Kim | bba58cd | 2016-02-16 23:08:25 +0900 | [diff] [blame] | 1406 | return 0; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1407 | } |
| 1408 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1409 | static int hist_entry__sort(struct hist_entry *a, struct hist_entry *b) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1410 | { |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1411 | struct hists *hists = a->hists; |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1412 | struct perf_hpp_fmt *fmt; |
| 1413 | int64_t cmp = 0; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1414 | |
Jiri Olsa | aa6f50a | 2016-01-18 10:24:24 +0100 | [diff] [blame] | 1415 | hists__for_each_sort_list(hists, fmt) { |
Namhyung Kim | 361459f | 2015-12-23 02:07:08 +0900 | [diff] [blame] | 1416 | if (perf_hpp__should_skip(fmt, a->hists)) |
Namhyung Kim | e67d49a | 2014-03-18 13:00:59 +0900 | [diff] [blame] | 1417 | continue; |
| 1418 | |
Namhyung Kim | 87bbdf7 | 2015-01-08 09:45:46 +0900 | [diff] [blame] | 1419 | cmp = fmt->sort(fmt, a, b); |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1420 | if (cmp) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1421 | break; |
| 1422 | } |
| 1423 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1424 | return cmp; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1425 | } |
| 1426 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1427 | static void hists__reset_filter_stats(struct hists *hists) |
| 1428 | { |
| 1429 | hists->nr_non_filtered_entries = 0; |
| 1430 | hists->stats.total_non_filtered_period = 0; |
| 1431 | } |
| 1432 | |
| 1433 | void hists__reset_stats(struct hists *hists) |
| 1434 | { |
| 1435 | hists->nr_entries = 0; |
| 1436 | hists->stats.total_period = 0; |
| 1437 | |
| 1438 | hists__reset_filter_stats(hists); |
| 1439 | } |
| 1440 | |
| 1441 | static void hists__inc_filter_stats(struct hists *hists, struct hist_entry *h) |
| 1442 | { |
| 1443 | hists->nr_non_filtered_entries++; |
| 1444 | hists->stats.total_non_filtered_period += h->stat.period; |
| 1445 | } |
| 1446 | |
| 1447 | void hists__inc_stats(struct hists *hists, struct hist_entry *h) |
| 1448 | { |
| 1449 | if (!h->filtered) |
| 1450 | hists__inc_filter_stats(hists, h); |
| 1451 | |
| 1452 | hists->nr_entries++; |
| 1453 | hists->stats.total_period += h->stat.period; |
| 1454 | } |
| 1455 | |
Namhyung Kim | f7fb538 | 2016-03-09 22:47:02 +0900 | [diff] [blame] | 1456 | static void hierarchy_recalc_total_periods(struct hists *hists) |
| 1457 | { |
| 1458 | struct rb_node *node; |
| 1459 | struct hist_entry *he; |
| 1460 | |
| 1461 | node = rb_first(&hists->entries); |
| 1462 | |
| 1463 | hists->stats.total_period = 0; |
| 1464 | hists->stats.total_non_filtered_period = 0; |
| 1465 | |
| 1466 | /* |
| 1467 | * Recalculate the total period using top-level entries only,
| 1468 | * since lower level entries only see non-filtered entries
| 1469 | * but upper level entries have the sum of both.
| 1470 | */ |
| 1471 | while (node) { |
| 1472 | he = rb_entry(node, struct hist_entry, rb_node); |
| 1473 | node = rb_next(node); |
| 1474 | |
| 1475 | hists->stats.total_period += he->stat.period; |
| 1476 | if (!he->filtered) |
| 1477 | hists->stats.total_non_filtered_period += he->stat.period; |
| 1478 | } |
| 1479 | } |
| 1480 | |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1481 | static void hierarchy_insert_output_entry(struct rb_root *root, |
| 1482 | struct hist_entry *he) |
| 1483 | { |
| 1484 | struct rb_node **p = &root->rb_node; |
| 1485 | struct rb_node *parent = NULL; |
| 1486 | struct hist_entry *iter; |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1487 | struct perf_hpp_fmt *fmt; |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1488 | |
| 1489 | while (*p != NULL) { |
| 1490 | parent = *p; |
| 1491 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1492 | |
| 1493 | if (hist_entry__sort(he, iter) > 0) |
| 1494 | p = &parent->rb_left; |
| 1495 | else |
| 1496 | p = &parent->rb_right; |
| 1497 | } |
| 1498 | |
| 1499 | rb_link_node(&he->rb_node, parent, p); |
| 1500 | rb_insert_color(&he->rb_node, root); |
Namhyung Kim | abab5e7 | 2016-02-27 03:52:47 +0900 | [diff] [blame] | 1501 | |
| 1502 | /* update column width of dynamic entry */ |
Namhyung Kim | 1b2dbbf | 2016-03-07 16:44:46 -0300 | [diff] [blame] | 1503 | perf_hpp_list__for_each_sort_list(he->hpp_list, fmt) { |
| 1504 | if (perf_hpp__is_dynamic_entry(fmt)) |
| 1505 | fmt->sort(fmt, he, NULL); |
| 1506 | } |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1507 | } |
| 1508 | |
| 1509 | static void hists__hierarchy_output_resort(struct hists *hists, |
| 1510 | struct ui_progress *prog, |
| 1511 | struct rb_root *root_in, |
| 1512 | struct rb_root *root_out, |
| 1513 | u64 min_callchain_hits, |
| 1514 | bool use_callchain) |
| 1515 | { |
| 1516 | struct rb_node *node; |
| 1517 | struct hist_entry *he; |
| 1518 | |
| 1519 | *root_out = RB_ROOT; |
| 1520 | node = rb_first(root_in); |
| 1521 | |
| 1522 | while (node) { |
| 1523 | he = rb_entry(node, struct hist_entry, rb_node_in); |
| 1524 | node = rb_next(node); |
| 1525 | |
| 1526 | hierarchy_insert_output_entry(root_out, he); |
| 1527 | |
| 1528 | if (prog) |
| 1529 | ui_progress__update(prog, 1); |
| 1530 | |
| 1531 | if (!he->leaf) { |
| 1532 | hists__hierarchy_output_resort(hists, prog, |
| 1533 | &he->hroot_in, |
| 1534 | &he->hroot_out, |
| 1535 | min_callchain_hits, |
| 1536 | use_callchain); |
| 1537 | hists->nr_entries++; |
| 1538 | if (!he->filtered) { |
| 1539 | hists->nr_non_filtered_entries++; |
| 1540 | hists__calc_col_len(hists, he); |
| 1541 | } |
| 1542 | |
| 1543 | continue; |
| 1544 | } |
| 1545 | |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1546 | if (!use_callchain) |
| 1547 | continue; |
| 1548 | |
| 1549 | if (callchain_param.mode == CHAIN_GRAPH_REL) { |
| 1550 | u64 total = he->stat.period; |
| 1551 | |
| 1552 | if (symbol_conf.cumulate_callchain) |
| 1553 | total = he->stat_acc->period; |
| 1554 | |
| 1555 | min_callchain_hits = total * (callchain_param.min_percent / 100); |
| 1556 | } |
| 1557 | |
| 1558 | callchain_param.sort(&he->sorted_chain, he->callchain, |
| 1559 | min_callchain_hits, &callchain_param); |
| 1560 | } |
| 1561 | } |
| 1562 | |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1563 | static void __hists__insert_output_entry(struct rb_root *entries, |
| 1564 | struct hist_entry *he, |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1565 | u64 min_callchain_hits, |
| 1566 | bool use_callchain) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1567 | { |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1568 | struct rb_node **p = &entries->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1569 | struct rb_node *parent = NULL; |
| 1570 | struct hist_entry *iter; |
Namhyung Kim | abab5e7 | 2016-02-27 03:52:47 +0900 | [diff] [blame] | 1571 | struct perf_hpp_fmt *fmt; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1572 | |
Namhyung Kim | 744070e | 2016-01-28 00:40:48 +0900 | [diff] [blame] | 1573 | if (use_callchain) { |
| 1574 | if (callchain_param.mode == CHAIN_GRAPH_REL) { |
| 1575 | u64 total = he->stat.period; |
| 1576 | |
| 1577 | if (symbol_conf.cumulate_callchain) |
| 1578 | total = he->stat_acc->period; |
| 1579 | |
| 1580 | min_callchain_hits = total * (callchain_param.min_percent / 100); |
| 1581 | } |
Arnaldo Carvalho de Melo | b9fb930 | 2010-04-02 09:50:42 -0300 | [diff] [blame] | 1582 | callchain_param.sort(&he->sorted_chain, he->callchain, |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1583 | min_callchain_hits, &callchain_param); |
Namhyung Kim | 744070e | 2016-01-28 00:40:48 +0900 | [diff] [blame] | 1584 | } |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1585 | |
| 1586 | while (*p != NULL) { |
| 1587 | parent = *p; |
| 1588 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1589 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1590 | if (hist_entry__sort(he, iter) > 0) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1591 | p = &(*p)->rb_left; |
| 1592 | else |
| 1593 | p = &(*p)->rb_right; |
| 1594 | } |
| 1595 | |
| 1596 | rb_link_node(&he->rb_node, parent, p); |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1597 | rb_insert_color(&he->rb_node, entries); |
Namhyung Kim | abab5e7 | 2016-02-27 03:52:47 +0900 | [diff] [blame] | 1598 | |
| 1599 | perf_hpp_list__for_each_sort_list(&perf_hpp_list, fmt) { |
| 1600 | if (perf_hpp__is_dynamic_entry(fmt) && |
| 1601 | perf_hpp__defined_dynamic_entry(fmt, he->hists)) |
| 1602 | fmt->sort(fmt, he, NULL); /* update column width */ |
| 1603 | } |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1604 | } |
| 1605 | |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1606 | static void output_resort(struct hists *hists, struct ui_progress *prog, |
| 1607 | bool use_callchain) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1608 | { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1609 | struct rb_root *root; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1610 | struct rb_node *next; |
| 1611 | struct hist_entry *n; |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 1612 | u64 callchain_total; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1613 | u64 min_callchain_hits; |
| 1614 | |
Namhyung Kim | 467ef10 | 2016-02-16 23:08:19 +0900 | [diff] [blame] | 1615 | callchain_total = hists->callchain_period; |
| 1616 | if (symbol_conf.filter_relative) |
| 1617 | callchain_total = hists->callchain_non_filtered_period; |
| 1618 | |
| 1619 | min_callchain_hits = callchain_total * (callchain_param.min_percent / 100); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1620 | |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1621 | hists__reset_stats(hists); |
| 1622 | hists__reset_col_len(hists); |
| 1623 | |
| 1624 | if (symbol_conf.report_hierarchy) { |
Namhyung Kim | f7fb538 | 2016-03-09 22:47:02 +0900 | [diff] [blame] | 1625 | hists__hierarchy_output_resort(hists, prog, |
| 1626 | &hists->entries_collapsed, |
| 1627 | &hists->entries, |
| 1628 | min_callchain_hits, |
| 1629 | use_callchain); |
| 1630 | hierarchy_recalc_total_periods(hists); |
| 1631 | return; |
Namhyung Kim | 1a3906a | 2016-02-25 00:13:35 +0900 | [diff] [blame] | 1632 | } |
| 1633 | |
Namhyung Kim | 3a5714f | 2013-05-14 11:09:01 +0900 | [diff] [blame] | 1634 | if (sort__need_collapse) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1635 | root = &hists->entries_collapsed; |
| 1636 | else |
| 1637 | root = hists->entries_in; |
| 1638 | |
| 1639 | next = rb_first(root); |
| 1640 | hists->entries = RB_ROOT; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1641 | |
| 1642 | while (next) { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1643 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1644 | next = rb_next(&n->rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1645 | |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1646 | __hists__insert_output_entry(&hists->entries, n, min_callchain_hits, use_callchain); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 1647 | hists__inc_stats(hists, n); |
Namhyung Kim | ae993ef | 2014-04-24 16:25:19 +0900 | [diff] [blame] | 1648 | |
| 1649 | if (!n->filtered) |
| 1650 | hists__calc_col_len(hists, n); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1651 | |
| 1652 | if (prog) |
| 1653 | ui_progress__update(prog, 1); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1654 | } |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1655 | } |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 1656 | |
Jiri Olsa | 452ce03 | 2016-01-18 10:24:00 +0100 | [diff] [blame] | 1657 | void perf_evsel__output_resort(struct perf_evsel *evsel, struct ui_progress *prog) |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1658 | { |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1659 | bool use_callchain; |
| 1660 | |
| 1661 | if (evsel && symbol_conf.use_callchain && !symbol_conf.show_ref_callgraph) |
| 1662 | use_callchain = evsel->attr.sample_type & PERF_SAMPLE_CALLCHAIN; |
| 1663 | else |
| 1664 | use_callchain = symbol_conf.use_callchain; |
| 1665 | |
Jiri Olsa | 452ce03 | 2016-01-18 10:24:00 +0100 | [diff] [blame] | 1666 | output_resort(evsel__hists(evsel), prog, use_callchain); |
| 1667 | } |
| 1668 | |
| 1669 | void hists__output_resort(struct hists *hists, struct ui_progress *prog) |
| 1670 | { |
| 1671 | output_resort(hists, prog, symbol_conf.use_callchain); |
Jiri Olsa | 01441af | 2016-01-18 10:23:59 +0100 | [diff] [blame] | 1672 | } |
| 1673 | |
Namhyung Kim | 8c01872 | 2016-02-25 00:13:36 +0900 | [diff] [blame] | 1674 | static bool can_goto_child(struct hist_entry *he, enum hierarchy_move_dir hmd) |
| 1675 | { |
| 1676 | if (he->leaf || hmd == HMD_FORCE_SIBLING) |
| 1677 | return false; |
| 1678 | |
| 1679 | if (he->unfolded || hmd == HMD_FORCE_CHILD) |
| 1680 | return true; |
| 1681 | |
| 1682 | return false; |
| 1683 | } |
| 1684 | |
| 1685 | struct rb_node *rb_hierarchy_last(struct rb_node *node) |
| 1686 | { |
| 1687 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1688 | |
| 1689 | while (can_goto_child(he, HMD_NORMAL)) { |
| 1690 | node = rb_last(&he->hroot_out); |
| 1691 | he = rb_entry(node, struct hist_entry, rb_node); |
| 1692 | } |
| 1693 | return node; |
| 1694 | } |
| 1695 | |
| 1696 | struct rb_node *__rb_hierarchy_next(struct rb_node *node, enum hierarchy_move_dir hmd) |
| 1697 | { |
| 1698 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1699 | |
| 1700 | if (can_goto_child(he, hmd)) |
| 1701 | node = rb_first(&he->hroot_out); |
| 1702 | else |
| 1703 | node = rb_next(node); |
| 1704 | |
| 1705 | while (node == NULL) { |
| 1706 | he = he->parent_he; |
| 1707 | if (he == NULL) |
| 1708 | break; |
| 1709 | |
| 1710 | node = rb_next(&he->rb_node); |
| 1711 | } |
| 1712 | return node; |
| 1713 | } |
| 1714 | |
| 1715 | struct rb_node *rb_hierarchy_prev(struct rb_node *node) |
| 1716 | { |
| 1717 | struct hist_entry *he = rb_entry(node, struct hist_entry, rb_node); |
| 1718 | |
| 1719 | node = rb_prev(node); |
| 1720 | if (node) |
| 1721 | return rb_hierarchy_last(node); |
| 1722 | |
| 1723 | he = he->parent_he; |
| 1724 | if (he == NULL) |
| 1725 | return NULL; |
| 1726 | |
| 1727 | return &he->rb_node; |
| 1728 | } |
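/*
 * Illustrative walk over the hierarchy output tree using the helpers above
 * (assumed caller-side code, similar in spirit to what the TUI/stdio
 * browsers do):
 *
 *	struct rb_node *nd = rb_first(&hists->entries);
 *
 *	while (nd) {
 *		struct hist_entry *he = rb_entry(nd, struct hist_entry, rb_node);
 *
 *		// ... print or account 'he' here ...
 *
 *		nd = __rb_hierarchy_next(&he->rb_node, HMD_NORMAL);
 *	}
 */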
| 1729 | |
Namhyung Kim | a7b5895 | 2016-02-26 21:13:16 +0900 | [diff] [blame] | 1730 | bool hist_entry__has_hierarchy_children(struct hist_entry *he, float limit) |
| 1731 | { |
| 1732 | struct rb_node *node; |
| 1733 | struct hist_entry *child; |
| 1734 | float percent; |
| 1735 | |
| 1736 | if (he->leaf) |
| 1737 | return false; |
| 1738 | |
| 1739 | node = rb_first(&he->hroot_out); |
| 1740 | child = rb_entry(node, struct hist_entry, rb_node); |
| 1741 | |
| 1742 | while (node && child->filtered) { |
| 1743 | node = rb_next(node); |
| 1744 | child = rb_entry(node, struct hist_entry, rb_node); |
| 1745 | } |
| 1746 | |
| 1747 | if (node) |
| 1748 | percent = hist_entry__get_percent_limit(child); |
| 1749 | else |
| 1750 | percent = 0; |
| 1751 | |
| 1752 | return node && percent >= limit; |
| 1753 | } |
| 1754 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1755 | static void hists__remove_entry_filter(struct hists *hists, struct hist_entry *h, |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1756 | enum hist_filter filter) |
| 1757 | { |
| 1758 | h->filtered &= ~(1 << filter); |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1759 | |
| 1760 | if (symbol_conf.report_hierarchy) { |
| 1761 | struct hist_entry *parent = h->parent_he; |
| 1762 | |
| 1763 | while (parent) { |
| 1764 | he_stat__add_stat(&parent->stat, &h->stat); |
| 1765 | |
| 1766 | parent->filtered &= ~(1 << filter); |
| 1767 | |
| 1768 | if (parent->filtered) |
| 1769 | goto next; |
| 1770 | |
| 1771 | /* force fold unfiltered entry for simplicity */ |
| 1772 | parent->unfolded = false; |
Namhyung Kim | 79dded8 | 2016-02-26 21:13:19 +0900 | [diff] [blame] | 1773 | parent->has_no_entry = false; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1774 | parent->row_offset = 0; |
| 1775 | parent->nr_rows = 0; |
| 1776 | next: |
| 1777 | parent = parent->parent_he; |
| 1778 | } |
| 1779 | } |
| 1780 | |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1781 | if (h->filtered) |
| 1782 | return; |
| 1783 | |
Namhyung Kim | 87e90f4 | 2014-04-24 16:44:16 +0900 | [diff] [blame] | 1784 | /* force fold unfiltered entry for simplicity */ |
Namhyung Kim | 3698dab | 2015-05-05 23:55:46 +0900 | [diff] [blame] | 1785 | h->unfolded = false; |
Namhyung Kim | 79dded8 | 2016-02-26 21:13:19 +0900 | [diff] [blame] | 1786 | h->has_no_entry = false; |
Arnaldo Carvalho de Melo | 0f0cbf7 | 2010-07-26 17:13:40 -0300 | [diff] [blame] | 1787 | h->row_offset = 0; |
He Kuang | a8cd1f4 | 2015-03-11 20:36:03 +0800 | [diff] [blame] | 1788 | h->nr_rows = 0; |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1789 | |
Namhyung Kim | 1ab1fa5 | 2013-12-26 15:11:52 +0900 | [diff] [blame] | 1790 | hists->stats.nr_non_filtered_samples += h->stat.nr_events; |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1791 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1792 | hists__inc_filter_stats(hists, h); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1793 | hists__calc_col_len(hists, h); |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1794 | } |
| 1795 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1796 | |
| 1797 | static bool hists__filter_entry_by_dso(struct hists *hists, |
| 1798 | struct hist_entry *he) |
| 1799 | { |
| 1800 | if (hists->dso_filter != NULL && |
| 1801 | (he->ms.map == NULL || he->ms.map->dso != hists->dso_filter)) { |
| 1802 | he->filtered |= (1 << HIST_FILTER__DSO); |
| 1803 | return true; |
| 1804 | } |
| 1805 | |
| 1806 | return false; |
| 1807 | } |
| 1808 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1809 | static bool hists__filter_entry_by_thread(struct hists *hists, |
| 1810 | struct hist_entry *he) |
| 1811 | { |
| 1812 | if (hists->thread_filter != NULL && |
| 1813 | he->thread != hists->thread_filter) { |
| 1814 | he->filtered |= (1 << HIST_FILTER__THREAD); |
| 1815 | return true; |
| 1816 | } |
| 1817 | |
| 1818 | return false; |
| 1819 | } |
| 1820 | |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1821 | static bool hists__filter_entry_by_symbol(struct hists *hists, |
| 1822 | struct hist_entry *he) |
| 1823 | { |
| 1824 | if (hists->symbol_filter_str != NULL && |
| 1825 | (!he->ms.sym || strstr(he->ms.sym->name, |
| 1826 | hists->symbol_filter_str) == NULL)) { |
| 1827 | he->filtered |= (1 << HIST_FILTER__SYMBOL); |
| 1828 | return true; |
| 1829 | } |
| 1830 | |
| 1831 | return false; |
| 1832 | } |
| 1833 | |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1834 | static bool hists__filter_entry_by_socket(struct hists *hists, |
| 1835 | struct hist_entry *he) |
| 1836 | { |
| 1837 | if ((hists->socket_filter > -1) && |
| 1838 | (he->socket != hists->socket_filter)) { |
| 1839 | he->filtered |= (1 << HIST_FILTER__SOCKET); |
| 1840 | return true; |
| 1841 | } |
| 1842 | |
| 1843 | return false; |
| 1844 | } |
| 1845 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1846 | typedef bool (*filter_fn_t)(struct hists *hists, struct hist_entry *he); |
| 1847 | |
| 1848 | static void hists__filter_by_type(struct hists *hists, int type, filter_fn_t filter) |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1849 | { |
| 1850 | struct rb_node *nd; |
| 1851 | |
| 1852 | hists->stats.nr_non_filtered_samples = 0; |
| 1853 | |
| 1854 | hists__reset_filter_stats(hists); |
| 1855 | hists__reset_col_len(hists); |
| 1856 | |
| 1857 | for (nd = rb_first(&hists->entries); nd; nd = rb_next(nd)) { |
| 1858 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1859 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1860 | if (filter(hists, h)) |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1861 | continue; |
| 1862 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1863 | hists__remove_entry_filter(hists, h, type); |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1864 | } |
| 1865 | } |
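/*
 * Assumed usage from the UI side (sketch): a browser stores the filter in
 * the hists object and then re-runs the matching pass, e.g. for the DSO
 * filter:
 *
 *	hists->dso_filter = map->dso;		// or NULL to clear it
 *	hists__filter_by_dso(hists);
 */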
| 1866 | |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1867 | static void resort_filtered_entry(struct rb_root *root, struct hist_entry *he) |
| 1868 | { |
| 1869 | struct rb_node **p = &root->rb_node; |
| 1870 | struct rb_node *parent = NULL; |
| 1871 | struct hist_entry *iter; |
| 1872 | struct rb_root new_root = RB_ROOT; |
| 1873 | struct rb_node *nd; |
| 1874 | |
| 1875 | while (*p != NULL) { |
| 1876 | parent = *p; |
| 1877 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1878 | |
| 1879 | if (hist_entry__sort(he, iter) > 0) |
| 1880 | p = &(*p)->rb_left; |
| 1881 | else |
| 1882 | p = &(*p)->rb_right; |
| 1883 | } |
| 1884 | |
| 1885 | rb_link_node(&he->rb_node, parent, p); |
| 1886 | rb_insert_color(&he->rb_node, root); |
| 1887 | |
| 1888 | if (he->leaf || he->filtered) |
| 1889 | return; |
| 1890 | |
| 1891 | nd = rb_first(&he->hroot_out); |
| 1892 | while (nd) { |
| 1893 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1894 | |
| 1895 | nd = rb_next(nd); |
| 1896 | rb_erase(&h->rb_node, &he->hroot_out); |
| 1897 | |
| 1898 | resort_filtered_entry(&new_root, h); |
| 1899 | } |
| 1900 | |
| 1901 | he->hroot_out = new_root; |
| 1902 | } |
| 1903 | |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1904 | static void hists__filter_hierarchy(struct hists *hists, int type, const void *arg) |
| 1905 | { |
| 1906 | struct rb_node *nd; |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1907 | struct rb_root new_root = RB_ROOT; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1908 | |
| 1909 | hists->stats.nr_non_filtered_samples = 0; |
| 1910 | |
| 1911 | hists__reset_filter_stats(hists); |
| 1912 | hists__reset_col_len(hists); |
| 1913 | |
| 1914 | nd = rb_first(&hists->entries); |
| 1915 | while (nd) { |
| 1916 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1917 | int ret; |
| 1918 | |
| 1919 | ret = hist_entry__filter(h, type, arg); |
| 1920 | |
| 1921 | /* |
| 1922 | * case 1. non-matching type |
| 1923 | * zero out the period, set filter marker and move to child |
| 1924 | */ |
| 1925 | if (ret < 0) { |
| 1926 | memset(&h->stat, 0, sizeof(h->stat)); |
| 1927 | h->filtered |= (1 << type); |
| 1928 | |
| 1929 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_CHILD); |
| 1930 | } |
| 1931 | /* |
| 1932 | * case 2. matched type (filter out) |
| 1933 | * set filter marker and move to next |
| 1934 | */ |
| 1935 | else if (ret == 1) { |
| 1936 | h->filtered |= (1 << type); |
| 1937 | |
| 1938 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_SIBLING); |
| 1939 | } |
| 1940 | /* |
| 1941 | * case 3. ok (not filtered) |
| 1942 | * add period to hists and parents, erase the filter marker |
| 1943 | * and move to next sibling |
| 1944 | */ |
| 1945 | else { |
| 1946 | hists__remove_entry_filter(hists, h, type); |
| 1947 | |
| 1948 | nd = __rb_hierarchy_next(&h->rb_node, HMD_FORCE_SIBLING); |
| 1949 | } |
| 1950 | } |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1951 | |
Namhyung Kim | f7fb538 | 2016-03-09 22:47:02 +0900 | [diff] [blame] | 1952 | hierarchy_recalc_total_periods(hists); |
| 1953 | |
Namhyung Kim | 7064285 | 2016-02-25 00:13:39 +0900 | [diff] [blame] | 1954 | /* |
| 1955 | * Resort the output after applying a new filter, since a filter in a lower
| 1956 | * hierarchy level can change periods in an upper level.
| 1957 | */ |
| 1958 | nd = rb_first(&hists->entries); |
| 1959 | while (nd) { |
| 1960 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1961 | |
| 1962 | nd = rb_next(nd); |
| 1963 | rb_erase(&h->rb_node, &hists->entries); |
| 1964 | |
| 1965 | resort_filtered_entry(&new_root, h); |
| 1966 | } |
| 1967 | |
| 1968 | hists->entries = new_root; |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1969 | } |
| 1970 | |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1971 | void hists__filter_by_thread(struct hists *hists) |
| 1972 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1973 | if (symbol_conf.report_hierarchy) |
| 1974 | hists__filter_hierarchy(hists, HIST_FILTER__THREAD, |
| 1975 | hists->thread_filter); |
| 1976 | else |
| 1977 | hists__filter_by_type(hists, HIST_FILTER__THREAD, |
| 1978 | hists__filter_entry_by_thread); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1979 | } |
| 1980 | |
| 1981 | void hists__filter_by_dso(struct hists *hists) |
| 1982 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1983 | if (symbol_conf.report_hierarchy) |
| 1984 | hists__filter_hierarchy(hists, HIST_FILTER__DSO, |
| 1985 | hists->dso_filter); |
| 1986 | else |
| 1987 | hists__filter_by_type(hists, HIST_FILTER__DSO, |
| 1988 | hists__filter_entry_by_dso); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1989 | } |
| 1990 | |
| 1991 | void hists__filter_by_symbol(struct hists *hists) |
| 1992 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 1993 | if (symbol_conf.report_hierarchy) |
| 1994 | hists__filter_hierarchy(hists, HIST_FILTER__SYMBOL, |
| 1995 | hists->symbol_filter_str); |
| 1996 | else |
| 1997 | hists__filter_by_type(hists, HIST_FILTER__SYMBOL, |
| 1998 | hists__filter_entry_by_symbol); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 1999 | } |
| 2000 | |
| 2001 | void hists__filter_by_socket(struct hists *hists) |
| 2002 | { |
Namhyung Kim | 155e9af | 2016-02-25 00:13:38 +0900 | [diff] [blame] | 2003 | if (symbol_conf.report_hierarchy) |
| 2004 | hists__filter_hierarchy(hists, HIST_FILTER__SOCKET, |
| 2005 | &hists->socket_filter); |
| 2006 | else |
| 2007 | hists__filter_by_type(hists, HIST_FILTER__SOCKET, |
| 2008 | hists__filter_entry_by_socket); |
Namhyung Kim | 1f7c254 | 2016-01-20 10:15:21 +0900 | [diff] [blame] | 2009 | } |
| 2010 | |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 2011 | void events_stats__inc(struct events_stats *stats, u32 type) |
| 2012 | { |
| 2013 | ++stats->nr_events[0]; |
| 2014 | ++stats->nr_events[type]; |
| 2015 | } |
| 2016 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 2017 | void hists__inc_nr_events(struct hists *hists, u32 type) |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 2018 | { |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 2019 | events_stats__inc(&hists->stats, type); |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 2020 | } |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2021 | |
Namhyung Kim | 1844dbc | 2014-05-28 14:12:18 +0900 | [diff] [blame] | 2022 | void hists__inc_nr_samples(struct hists *hists, bool filtered) |
| 2023 | { |
| 2024 | events_stats__inc(&hists->stats, PERF_RECORD_SAMPLE); |
| 2025 | if (!filtered) |
| 2026 | hists->stats.nr_non_filtered_samples++; |
| 2027 | } |
| 2028 | |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2029 | static struct hist_entry *hists__add_dummy_entry(struct hists *hists, |
| 2030 | struct hist_entry *pair) |
| 2031 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2032 | struct rb_root *root; |
| 2033 | struct rb_node **p; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2034 | struct rb_node *parent = NULL; |
| 2035 | struct hist_entry *he; |
Andi Kleen | 354cc40 | 2013-10-01 07:22:15 -0700 | [diff] [blame] | 2036 | int64_t cmp; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2037 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2038 | if (sort__need_collapse) |
| 2039 | root = &hists->entries_collapsed; |
| 2040 | else |
| 2041 | root = hists->entries_in; |
| 2042 | |
| 2043 | p = &root->rb_node; |
| 2044 | |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2045 | while (*p != NULL) { |
| 2046 | parent = *p; |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2047 | he = rb_entry(parent, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2048 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2049 | cmp = hist_entry__collapse(he, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2050 | |
| 2051 | if (!cmp) |
| 2052 | goto out; |
| 2053 | |
| 2054 | if (cmp < 0) |
| 2055 | p = &(*p)->rb_left; |
| 2056 | else |
| 2057 | p = &(*p)->rb_right; |
| 2058 | } |
| 2059 | |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 2060 | he = hist_entry__new(pair, true); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2061 | if (he) { |
Arnaldo Carvalho de Melo | 30193d7 | 2012-11-12 13:20:03 -0300 | [diff] [blame] | 2062 | memset(&he->stat, 0, sizeof(he->stat)); |
| 2063 | he->hists = hists; |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2064 | rb_link_node(&he->rb_node_in, parent, p); |
| 2065 | rb_insert_color(&he->rb_node_in, root); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 2066 | hists__inc_stats(hists, he); |
Jiri Olsa | e0af43d | 2012-12-01 21:18:20 +0100 | [diff] [blame] | 2067 | he->dummy = true; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2068 | } |
| 2069 | out: |
| 2070 | return he; |
| 2071 | } |
| 2072 | |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2073 | static struct hist_entry *hists__find_entry(struct hists *hists, |
| 2074 | struct hist_entry *he) |
| 2075 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2076 | struct rb_node *n; |
| 2077 | |
| 2078 | if (sort__need_collapse) |
| 2079 | n = hists->entries_collapsed.rb_node; |
| 2080 | else |
| 2081 | n = hists->entries_in->rb_node; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2082 | |
| 2083 | while (n) { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2084 | struct hist_entry *iter = rb_entry(n, struct hist_entry, rb_node_in); |
| 2085 | int64_t cmp = hist_entry__collapse(iter, he); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2086 | |
| 2087 | if (cmp < 0) |
| 2088 | n = n->rb_left; |
| 2089 | else if (cmp > 0) |
| 2090 | n = n->rb_right; |
| 2091 | else |
| 2092 | return iter; |
| 2093 | } |
| 2094 | |
| 2095 | return NULL; |
| 2096 | } |
| 2097 | |
| 2098 | /* |
| 2099 | * Look for pairs to link to the leader buckets (hist_entries): |
| 2100 | */ |
| 2101 | void hists__match(struct hists *leader, struct hists *other) |
| 2102 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2103 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2104 | struct rb_node *nd; |
| 2105 | struct hist_entry *pos, *pair; |
| 2106 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2107 | if (sort__need_collapse) |
| 2108 | root = &leader->entries_collapsed; |
| 2109 | else |
| 2110 | root = leader->entries_in; |
| 2111 | |
| 2112 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 2113 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2114 | pair = hists__find_entry(other, pos); |
| 2115 | |
| 2116 | if (pair) |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 2117 | hist_entry__add_pair(pair, pos); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 2118 | } |
| 2119 | } |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2120 | |
| 2121 | /* |
| 2122 | * Look for entries in the other hists that are not present in the leader; if
| 2123 | * we find them, just add a dummy entry on the leader hists, with period=0, |
| 2124 | * nr_events=0, to serve as the list header. |
| 2125 | */ |
| 2126 | int hists__link(struct hists *leader, struct hists *other) |
| 2127 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2128 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2129 | struct rb_node *nd; |
| 2130 | struct hist_entry *pos, *pair; |
| 2131 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 2132 | if (sort__need_collapse) |
| 2133 | root = &other->entries_collapsed; |
| 2134 | else |
| 2135 | root = other->entries_in; |
| 2136 | |
| 2137 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 2138 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2139 | |
| 2140 | if (!hist_entry__has_pairs(pos)) { |
| 2141 | pair = hists__add_dummy_entry(leader, pos); |
| 2142 | if (pair == NULL) |
| 2143 | return -1; |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 2144 | hist_entry__add_pair(pos, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 2145 | } |
| 2146 | } |
| 2147 | |
| 2148 | return 0; |
| 2149 | } |
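/*
 * Typical pairing sequence (illustrative, 'perf diff' style usage assumed):
 *
 *	hists__match(leader_hists, other_hists);  // link shared buckets
 *	hists__link(leader_hists, other_hists);   // add dummies for the rest
 */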
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 2150 | |
Andi Kleen | 57849998 | 2015-07-18 08:24:49 -0700 | [diff] [blame] | 2151 | void hist__account_cycles(struct branch_stack *bs, struct addr_location *al, |
| 2152 | struct perf_sample *sample, bool nonany_branch_mode) |
| 2153 | { |
| 2154 | struct branch_info *bi; |
| 2155 | |
| 2156 | /* If we have branch cycles, always annotate them. */
| 2157 | if (bs && bs->nr && bs->entries[0].flags.cycles) { |
| 2158 | int i; |
| 2159 | |
| 2160 | bi = sample__resolve_bstack(sample, al); |
| 2161 | if (bi) { |
| 2162 | struct addr_map_symbol *prev = NULL; |
| 2163 | |
| 2164 | /* |
| 2165 | * Ignore errors, still want to process the |
| 2166 | * other entries. |
| 2167 | * |
| 2168 | * For non-standard branch modes, always
| 2169 | * force no IPC (prev == NULL).
| 2170 | * |
| 2171 | * Note that perf stores branches reversed from |
| 2172 | * program order! |
| 2173 | */ |
| 2174 | for (i = bs->nr - 1; i >= 0; i--) { |
| 2175 | addr_map_symbol__account_cycles(&bi[i].from, |
| 2176 | nonany_branch_mode ? NULL : prev, |
| 2177 | bi[i].flags.cycles); |
| 2178 | prev = &bi[i].to; |
| 2179 | } |
| 2180 | free(bi); |
| 2181 | } |
| 2182 | } |
| 2183 | } |
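/*
 * Sketch of an assumed call site (mirrors how a sample processing path
 * would use it): the raw branch stack is handed over before the hist entry
 * is added, so cycle counts get accumulated per branch source address.
 *
 *	hist__account_cycles(sample->branch_stack, al, sample,
 *			     nonany_branch_mode);
 *
 * where 'nonany_branch_mode' is a flag the caller derives from how the
 * branches were recorded.
 */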
Arnaldo Carvalho de Melo | 2a1731f | 2014-10-10 15:49:21 -0300 | [diff] [blame] | 2184 | |
| 2185 | size_t perf_evlist__fprintf_nr_events(struct perf_evlist *evlist, FILE *fp) |
| 2186 | { |
| 2187 | struct perf_evsel *pos; |
| 2188 | size_t ret = 0; |
| 2189 | |
| 2190 | evlist__for_each(evlist, pos) { |
| 2191 | ret += fprintf(fp, "%s stats:\n", perf_evsel__name(pos)); |
| 2192 | ret += events_stats__fprintf(&evsel__hists(pos)->stats, fp); |
| 2193 | } |
| 2194 | |
| 2195 | return ret; |
| 2196 | } |
| 2197 | 
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 2199 | u64 hists__total_period(struct hists *hists) |
| 2200 | { |
| 2201 | return symbol_conf.filter_relative ? hists->stats.total_non_filtered_period : |
| 2202 | hists->stats.total_period; |
| 2203 | } |
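/*
 * Hedged sketch of how a column formatter might turn an entry's period into
 * a percentage: using hists__total_period() as the denominator means the
 * "relative"/"absolute" percentage setting is honoured automatically.  The
 * helper name is illustrative.
 */
static double entry_percent_example(struct hist_entry *he)
{
	u64 total = hists__total_period(he->hists);

	return total ? 100.0 * he->stat.period / total : 0.0;
}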
Namhyung Kim | 33db456 | 2014-02-07 12:06:07 +0900 | [diff] [blame] | 2204 | |
| 2205 | int parse_filter_percentage(const struct option *opt __maybe_unused, |
| 2206 | const char *arg, int unset __maybe_unused) |
| 2207 | { |
| 2208 | if (!strcmp(arg, "relative")) |
| 2209 | symbol_conf.filter_relative = true; |
| 2210 | else if (!strcmp(arg, "absolute")) |
| 2211 | symbol_conf.filter_relative = false; |
| 2212 | else |
| 2213 | return -1; |
| 2214 | |
| 2215 | return 0; |
| 2216 | } |
Namhyung Kim | 0b93da1 | 2014-01-14 12:02:15 +0900 | [diff] [blame] | 2217 | |
| 2218 | int perf_hist_config(const char *var, const char *value) |
| 2219 | { |
| 2220 | if (!strcmp(var, "hist.percentage")) |
| 2221 | return parse_filter_percentage(NULL, value, 0); |
| 2222 | |
| 2223 | return 0; |
| 2224 | } |
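/*
 * Hedged example: with a perf config file containing
 *
 *	[hist]
 *		percentage = relative
 *
 * the config walker ends up calling
 * perf_hist_config("hist.percentage", "relative"), which flips
 * symbol_conf.filter_relative via parse_filter_percentage() above.  The
 * direct call below only illustrates that path; the wrapper is an assumption.
 */
static void hist_config_example(void)
{
	/* Equivalent to "percentage = absolute" under the [hist] section. */
	perf_hist_config("hist.percentage", "absolute");
}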
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2225 | |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2226 | int __hists__init(struct hists *hists, struct perf_hpp_list *hpp_list) |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2227 | { |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2228 | memset(hists, 0, sizeof(*hists)); |
| 2229 | hists->entries_in_array[0] = hists->entries_in_array[1] = RB_ROOT; |
| 2230 | hists->entries_in = &hists->entries_in_array[0]; |
| 2231 | hists->entries_collapsed = RB_ROOT; |
| 2232 | hists->entries = RB_ROOT; |
| 2233 | pthread_mutex_init(&hists->lock, NULL); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 2234 | hists->socket_filter = -1; |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2235 | hists->hpp_list = hpp_list; |
Namhyung Kim | c3bc0c4 | 2016-03-07 16:44:45 -0300 | [diff] [blame] | 2236 | INIT_LIST_HEAD(&hists->hpp_formats); |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2237 | return 0; |
| 2238 | } |
| 2239 | |
Namhyung Kim | 61fa0e9 | 2015-12-10 16:53:20 +0900 | [diff] [blame] | 2240 | static void hists__delete_remaining_entries(struct rb_root *root) |
| 2241 | { |
| 2242 | struct rb_node *node; |
| 2243 | struct hist_entry *he; |
| 2244 | |
| 2245 | while (!RB_EMPTY_ROOT(root)) { |
| 2246 | node = rb_first(root); |
| 2247 | rb_erase(node, root); |
| 2248 | |
| 2249 | he = rb_entry(node, struct hist_entry, rb_node_in); |
| 2250 | hist_entry__delete(he); |
| 2251 | } |
| 2252 | } |
| 2253 | |
| 2254 | static void hists__delete_all_entries(struct hists *hists) |
| 2255 | { |
| 2256 | hists__delete_entries(hists); |
| 2257 | hists__delete_remaining_entries(&hists->entries_in_array[0]); |
| 2258 | hists__delete_remaining_entries(&hists->entries_in_array[1]); |
| 2259 | hists__delete_remaining_entries(&hists->entries_collapsed); |
| 2260 | } |
| 2261 | |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2262 | static void hists_evsel__exit(struct perf_evsel *evsel) |
| 2263 | { |
| 2264 | struct hists *hists = evsel__hists(evsel); |
Namhyung Kim | c3bc0c4 | 2016-03-07 16:44:45 -0300 | [diff] [blame] | 2265 | struct perf_hpp_fmt *fmt, *pos; |
| 2266 | struct perf_hpp_list_node *node, *tmp; |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2267 | |
Namhyung Kim | 61fa0e9 | 2015-12-10 16:53:20 +0900 | [diff] [blame] | 2268 | hists__delete_all_entries(hists); |
Namhyung Kim | c3bc0c4 | 2016-03-07 16:44:45 -0300 | [diff] [blame] | 2269 | |
| 2270 | list_for_each_entry_safe(node, tmp, &hists->hpp_formats, list) { |
| 2271 | perf_hpp_list__for_each_format_safe(&node->hpp, fmt, pos) { |
| 2272 | list_del(&fmt->list); |
| 2273 | free(fmt); |
| 2274 | } |
| 2275 | list_del(&node->list); |
| 2276 | free(node); |
| 2277 | } |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2278 | } |
| 2279 | |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 2280 | static int hists_evsel__init(struct perf_evsel *evsel) |
| 2281 | { |
| 2282 | struct hists *hists = evsel__hists(evsel); |
| 2283 | |
Jiri Olsa | 5b65855 | 2016-01-18 10:24:22 +0100 | [diff] [blame] | 2284 | __hists__init(hists, &perf_hpp_list); |
Namhyung Kim | fc284be | 2016-01-07 10:14:10 +0100 | [diff] [blame] | 2285 | return 0; |
| 2286 | } |
| 2287 | |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2288 | /* 
| 2289 | * The hist_entries stored in the rbtrees are freed by hists_evsel__exit() 
| 2290 | * above, which is registered as the evsel destructor in hists__init() below. 
| 2291 | */ 
| 2292 | |
| 2293 | int hists__init(void) |
| 2294 | { |
| 2295 | int err = perf_evsel__object_config(sizeof(struct hists_evsel), |
Masami Hiramatsu | 17577de | 2015-12-09 11:11:29 +0900 | [diff] [blame] | 2296 | hists_evsel__init, |
| 2297 | hists_evsel__exit); |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 2298 | if (err) |
| 2299 | fputs("FATAL ERROR: Couldn't setup hists class\n", stderr); |
| 2300 | |
| 2301 | return err; |
| 2302 | } |
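/*
 * Hedged setup sketch: a builtin that wants per-evsel histograms is expected
 * to call hists__init() once, before any evsel is allocated, so that
 * perf_evsel__object_config() can size struct hists_evsel and register the
 * init/exit hooks above.  The command function below is an illustrative
 * assumption.
 */
int cmd_example(int argc __maybe_unused, const char **argv __maybe_unused)
{
	if (hists__init())
		return -1;

	/* ... set up the session, process samples, report ... */
	return 0;
}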
Jiri Olsa | 94b3dc3 | 2016-01-18 10:24:13 +0100 | [diff] [blame] | 2303 | |
| 2304 | void perf_hpp_list__init(struct perf_hpp_list *list) |
| 2305 | { |
| 2306 | INIT_LIST_HEAD(&list->fields); |
| 2307 | INIT_LIST_HEAD(&list->sorts); |
| 2308 | } |
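/*
 * Hedged sketch tying the two init paths together: a caller wanting its own
 * column/sort layout could initialize a private perf_hpp_list and hand it to
 * __hists__init(), instead of relying on the global perf_hpp_list used by
 * hists_evsel__init() above.  The static objects here are illustrative only.
 */
static struct perf_hpp_list example_hpp_list;
static struct hists example_hists;

static void custom_hists_setup_example(void)
{
	perf_hpp_list__init(&example_hpp_list);

	/* example_hists will now sort/format entries via example_hpp_list. */
	__hists__init(&example_hists, &example_hpp_list);
}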