#include "util.h"
#include "build-id.h"
#include "hist.h"
#include "session.h"
#include "sort.h"
#include "evlist.h"
#include "evsel.h"
#include "annotate.h"
#include "ui/progress.h"
#include <math.h>

static bool hists__filter_entry_by_dso(struct hists *hists,
				       struct hist_entry *he);
static bool hists__filter_entry_by_thread(struct hists *hists,
					  struct hist_entry *he);
static bool hists__filter_entry_by_symbol(struct hists *hists,
					  struct hist_entry *he);
static bool hists__filter_entry_by_socket(struct hists *hists,
					  struct hist_entry *he);

u16 hists__col_len(struct hists *hists, enum hist_column col)
{
	return hists->col_len[col];
}

void hists__set_col_len(struct hists *hists, enum hist_column col, u16 len)
{
	hists->col_len[col] = len;
}

bool hists__new_col_len(struct hists *hists, enum hist_column col, u16 len)
{
	if (len > hists__col_len(hists, col)) {
		hists__set_col_len(hists, col, len);
		return true;
	}
	return false;
}

void hists__reset_col_len(struct hists *hists)
{
	enum hist_column col;

	for (col = 0; col < HISTC_NR_COLS; ++col)
		hists__set_col_len(hists, col, 0);
}

static void hists__set_unres_dso_col_len(struct hists *hists, int dso)
{
	const unsigned int unresolved_col_width = BITS_PER_LONG / 4;

	if (hists__col_len(hists, dso) < unresolved_col_width &&
	    !symbol_conf.col_width_list_str && !symbol_conf.field_sep &&
	    !symbol_conf.dso_list)
		hists__set_col_len(hists, dso, unresolved_col_width);
}

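/*
 * Update the column widths of @hists so that every field of @h (symbol,
 * comm, dso, branch and memory info, srcline, etc.) fits in its column.
 */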
void hists__calc_col_len(struct hists *hists, struct hist_entry *h)
{
	const unsigned int unresolved_col_width = BITS_PER_LONG / 4;
	int symlen;
	u16 len;

	/*
	 * +4 accounts for '[x] ' priv level info
	 * +2 accounts for 0x prefix on raw addresses
	 * +3 accounts for ' y ' symtab origin info
	 */
	if (h->ms.sym) {
		symlen = h->ms.sym->namelen + 4;
		if (verbose)
			symlen += BITS_PER_LONG / 4 + 2 + 3;
		hists__new_col_len(hists, HISTC_SYMBOL, symlen);
	} else {
		symlen = unresolved_col_width + 4 + 2;
		hists__new_col_len(hists, HISTC_SYMBOL, symlen);
		hists__set_unres_dso_col_len(hists, HISTC_DSO);
	}

	len = thread__comm_len(h->thread);
	if (hists__new_col_len(hists, HISTC_COMM, len))
		hists__set_col_len(hists, HISTC_THREAD, len + 6);

	if (h->ms.map) {
		len = dso__name_len(h->ms.map->dso);
		hists__new_col_len(hists, HISTC_DSO, len);
	}

	if (h->parent)
		hists__new_col_len(hists, HISTC_PARENT, h->parent->namelen);

	if (h->branch_info) {
		if (h->branch_info->from.sym) {
			symlen = (int)h->branch_info->from.sym->namelen + 4;
			if (verbose)
				symlen += BITS_PER_LONG / 4 + 2 + 3;
			hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen);

			symlen = dso__name_len(h->branch_info->from.map->dso);
			hists__new_col_len(hists, HISTC_DSO_FROM, symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_SYMBOL_FROM, symlen);
			hists__set_unres_dso_col_len(hists, HISTC_DSO_FROM);
		}

		if (h->branch_info->to.sym) {
			symlen = (int)h->branch_info->to.sym->namelen + 4;
			if (verbose)
				symlen += BITS_PER_LONG / 4 + 2 + 3;
			hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen);

			symlen = dso__name_len(h->branch_info->to.map->dso);
			hists__new_col_len(hists, HISTC_DSO_TO, symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_SYMBOL_TO, symlen);
			hists__set_unres_dso_col_len(hists, HISTC_DSO_TO);
		}
	}

	if (h->mem_info) {
		if (h->mem_info->daddr.sym) {
			symlen = (int)h->mem_info->daddr.sym->namelen + 4
			       + unresolved_col_width + 2;
			hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL,
					   symlen);
			hists__new_col_len(hists, HISTC_MEM_DCACHELINE,
					   symlen + 1);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL,
					   symlen);
		}
		if (h->mem_info->daddr.map) {
			symlen = dso__name_len(h->mem_info->daddr.map->dso);
			hists__new_col_len(hists, HISTC_MEM_DADDR_DSO,
					   symlen);
		} else {
			symlen = unresolved_col_width + 4 + 2;
			hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO);
		}
	} else {
		symlen = unresolved_col_width + 4 + 2;
		hists__new_col_len(hists, HISTC_MEM_DADDR_SYMBOL, symlen);
		hists__set_unres_dso_col_len(hists, HISTC_MEM_DADDR_DSO);
	}

	hists__new_col_len(hists, HISTC_CPU, 3);
	hists__new_col_len(hists, HISTC_SOCKET, 6);
	hists__new_col_len(hists, HISTC_MEM_LOCKED, 6);
	hists__new_col_len(hists, HISTC_MEM_TLB, 22);
	hists__new_col_len(hists, HISTC_MEM_SNOOP, 12);
	hists__new_col_len(hists, HISTC_MEM_LVL, 21 + 3);
	hists__new_col_len(hists, HISTC_LOCAL_WEIGHT, 12);
	hists__new_col_len(hists, HISTC_GLOBAL_WEIGHT, 12);

	if (h->srcline)
		hists__new_col_len(hists, HISTC_SRCLINE, strlen(h->srcline));

	if (h->srcfile)
		hists__new_col_len(hists, HISTC_SRCFILE, strlen(h->srcfile));

	if (h->transaction)
		hists__new_col_len(hists, HISTC_TRANSACTION,
				   hist_entry__transaction_len());
}

void hists__output_recalc_col_len(struct hists *hists, int max_rows)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;
	int row = 0;

	hists__reset_col_len(hists);

	while (next && row++ < max_rows) {
		n = rb_entry(next, struct hist_entry, rb_node);
		if (!n->filtered)
			hists__calc_col_len(hists, n);
		next = rb_next(&n->rb_node);
	}
}

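/*
 * Accumulate @period into the per-cpumode bucket (kernel, user, guest
 * kernel, guest user) that matches the sample's @cpumode.
 */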
static void he_stat__add_cpumode_period(struct he_stat *he_stat,
					unsigned int cpumode, u64 period)
{
	switch (cpumode) {
	case PERF_RECORD_MISC_KERNEL:
		he_stat->period_sys += period;
		break;
	case PERF_RECORD_MISC_USER:
		he_stat->period_us += period;
		break;
	case PERF_RECORD_MISC_GUEST_KERNEL:
		he_stat->period_guest_sys += period;
		break;
	case PERF_RECORD_MISC_GUEST_USER:
		he_stat->period_guest_us += period;
		break;
	default:
		break;
	}
}

static void he_stat__add_period(struct he_stat *he_stat, u64 period,
				u64 weight)
{
	he_stat->period += period;
	he_stat->weight += weight;
	he_stat->nr_events += 1;
}

static void he_stat__add_stat(struct he_stat *dest, struct he_stat *src)
{
	dest->period += src->period;
	dest->period_sys += src->period_sys;
	dest->period_us += src->period_us;
	dest->period_guest_sys += src->period_guest_sys;
	dest->period_guest_us += src->period_guest_us;
	dest->nr_events += src->nr_events;
	dest->weight += src->weight;
}

static void he_stat__decay(struct he_stat *he_stat)
{
	he_stat->period = (he_stat->period * 7) / 8;
	he_stat->nr_events = (he_stat->nr_events * 7) / 8;
	/* XXX need decay for weight too? */
}

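/*
 * Decay the stats of @he, adjust the hists totals by the amount removed and
 * report whether the entry's period has dropped to zero (i.e. it can go).
 */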
static bool hists__decay_entry(struct hists *hists, struct hist_entry *he)
{
	u64 prev_period = he->stat.period;
	u64 diff;

	if (prev_period == 0)
		return true;

	he_stat__decay(&he->stat);
	if (symbol_conf.cumulate_callchain)
		he_stat__decay(he->stat_acc);

	diff = prev_period - he->stat.period;

	hists->stats.total_period -= diff;
	if (!he->filtered)
		hists->stats.total_non_filtered_period -= diff;

	return he->stat.period == 0;
}

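/*
 * Unlink @he from the output and collapsed rbtrees, update the entry
 * counters of @hists and free the entry.
 */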
static void hists__delete_entry(struct hists *hists, struct hist_entry *he)
{
	rb_erase(&he->rb_node, &hists->entries);

	if (sort__need_collapse)
		rb_erase(&he->rb_node_in, &hists->entries_collapsed);

	--hists->nr_entries;
	if (!he->filtered)
		--hists->nr_non_filtered_entries;

	hist_entry__delete(he);
}

void hists__decay_entries(struct hists *hists, bool zap_user, bool zap_kernel)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;

	while (next) {
		n = rb_entry(next, struct hist_entry, rb_node);
		next = rb_next(&n->rb_node);
		if (((zap_user && n->level == '.') ||
		     (zap_kernel && n->level != '.') ||
		     hists__decay_entry(hists, n))) {
			hists__delete_entry(hists, n);
		}
	}
}

void hists__delete_entries(struct hists *hists)
{
	struct rb_node *next = rb_first(&hists->entries);
	struct hist_entry *n;

	while (next) {
		n = rb_entry(next, struct hist_entry, rb_node);
		next = rb_next(&n->rb_node);

		hists__delete_entry(hists, n);
	}
}

/*
 * histogram, sorted on item, collects periods
 */

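/*
 * Allocate a new hist_entry from @template, duplicating the branch info the
 * template only borrows and grabbing references on the maps and thread it
 * points to.
 */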
static struct hist_entry *hist_entry__new(struct hist_entry *template,
					  bool sample_self)
{
	size_t callchain_size = 0;
	struct hist_entry *he;

	if (symbol_conf.use_callchain)
		callchain_size = sizeof(struct callchain_root);

	he = zalloc(sizeof(*he) + callchain_size);

	if (he != NULL) {
		*he = *template;

		if (symbol_conf.cumulate_callchain) {
			he->stat_acc = malloc(sizeof(he->stat));
			if (he->stat_acc == NULL) {
				free(he);
				return NULL;
			}
			memcpy(he->stat_acc, &he->stat, sizeof(he->stat));
			if (!sample_self)
				memset(&he->stat, 0, sizeof(he->stat));
		}

		map__get(he->ms.map);

		if (he->branch_info) {
			/*
			 * This branch info is (a part of) allocated from
			 * sample__resolve_bstack() and will be freed after
			 * adding new entries. So we need to save a copy.
			 */
			he->branch_info = malloc(sizeof(*he->branch_info));
			if (he->branch_info == NULL) {
				map__zput(he->ms.map);
				free(he->stat_acc);
				free(he);
				return NULL;
			}

			memcpy(he->branch_info, template->branch_info,
			       sizeof(*he->branch_info));

			map__get(he->branch_info->from.map);
			map__get(he->branch_info->to.map);
		}

		if (he->mem_info) {
			map__get(he->mem_info->iaddr.map);
			map__get(he->mem_info->daddr.map);
		}

		if (symbol_conf.use_callchain)
			callchain_init(he->callchain);

		INIT_LIST_HEAD(&he->pairs.node);
		thread__get(he->thread);
	}

	return he;
}

static u8 symbol__parent_filter(const struct symbol *parent)
{
	if (symbol_conf.exclude_other && parent == NULL)
		return 1 << HIST_FILTER__PARENT;
	return 0;
}

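/*
 * Find the entry matching @entry in the hists' input tree and accumulate
 * the new period into it, or insert a freshly allocated entry if none
 * exists yet.
 */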
static struct hist_entry *hists__findnew_entry(struct hists *hists,
					       struct hist_entry *entry,
					       struct addr_location *al,
					       bool sample_self)
{
	struct rb_node **p;
	struct rb_node *parent = NULL;
	struct hist_entry *he;
	int64_t cmp;
	u64 period = entry->stat.period;
	u64 weight = entry->stat.weight;

	p = &hists->entries_in->rb_node;

	while (*p != NULL) {
		parent = *p;
		he = rb_entry(parent, struct hist_entry, rb_node_in);

		/*
		 * Make sure that it receives arguments in a same order as
		 * hist_entry__collapse() so that we can use an appropriate
		 * function when searching an entry regardless which sort
		 * keys were used.
		 */
		cmp = hist_entry__cmp(he, entry);

		if (!cmp) {
			if (sample_self)
				he_stat__add_period(&he->stat, period, weight);
			if (symbol_conf.cumulate_callchain)
				he_stat__add_period(he->stat_acc, period, weight);

			/*
			 * This mem info was allocated from sample__resolve_mem
			 * and will not be used anymore.
			 */
			zfree(&entry->mem_info);

			/* If the map of an existing hist_entry has
			 * become out-of-date due to an exec() or
			 * similar, update it. Otherwise we will
			 * mis-adjust symbol addresses when computing
			 * the history counter to increment.
			 */
			if (he->ms.map != entry->ms.map) {
				map__put(he->ms.map);
				he->ms.map = map__get(entry->ms.map);
			}
			goto out;
		}

		if (cmp < 0)
			p = &(*p)->rb_left;
		else
			p = &(*p)->rb_right;
	}

	he = hist_entry__new(entry, sample_self);
	if (!he)
		return NULL;

	hists->nr_entries++;

	rb_link_node(&he->rb_node_in, parent, p);
	rb_insert_color(&he->rb_node_in, hists->entries_in);
out:
	if (sample_self)
		he_stat__add_cpumode_period(&he->stat, al->cpumode, period);
	if (symbol_conf.cumulate_callchain)
		he_stat__add_cpumode_period(he->stat_acc, al->cpumode, period);
	return he;
}

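/*
 * Build a template hist_entry on the stack from the resolved sample
 * location and hand it to hists__findnew_entry().
 */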
struct hist_entry *__hists__add_entry(struct hists *hists,
				      struct addr_location *al,
				      struct symbol *sym_parent,
				      struct branch_info *bi,
				      struct mem_info *mi,
				      u64 period, u64 weight, u64 transaction,
				      bool sample_self)
{
	struct hist_entry entry = {
		.thread = al->thread,
		.comm = thread__comm(al->thread),
		.ms = {
			.map = al->map,
			.sym = al->sym,
		},
		.socket = al->socket,
		.cpu = al->cpu,
		.cpumode = al->cpumode,
		.ip = al->addr,
		.level = al->level,
		.stat = {
			.nr_events = 1,
			.period = period,
			.weight = weight,
		},
		.parent = sym_parent,
		.filtered = symbol__parent_filter(sym_parent) | al->filtered,
		.hists = hists,
		.branch_info = bi,
		.mem_info = mi,
		.transaction = transaction,
	};

	return hists__findnew_entry(hists, &entry, al, sample_self);
}

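/*
 * The iter_*_{mem,branch,normal,cumulative}_entry callbacks below implement
 * the hist_iter_ops used by hist_entry_iter__add(): prepare_entry,
 * add_single_entry, next_entry, add_next_entry and finish_entry for each
 * kind of sample.
 */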
static int
iter_next_nop_entry(struct hist_entry_iter *iter __maybe_unused,
		    struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_add_next_nop_entry(struct hist_entry_iter *iter __maybe_unused,
			struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_prepare_mem_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct perf_sample *sample = iter->sample;
	struct mem_info *mi;

	mi = sample__resolve_mem(sample, al);
	if (mi == NULL)
		return -ENOMEM;

	iter->priv = mi;
	return 0;
}

static int
iter_add_single_mem_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	u64 cost;
	struct mem_info *mi = iter->priv;
	struct hists *hists = evsel__hists(iter->evsel);
	struct hist_entry *he;

	if (mi == NULL)
		return -EINVAL;

	cost = iter->sample->weight;
	if (!cost)
		cost = 1;

	/*
	 * must pass period=weight in order to get the correct
	 * sorting from hists__collapse_resort() which is solely
	 * based on periods. We want sorting be done on nr_events * weight
	 * and this is indirectly achieved by passing period=weight here
	 * and the he_stat__add_period() function.
	 */
	he = __hists__add_entry(hists, al, iter->parent, NULL, mi,
				cost, cost, 0, true);
	if (!he)
		return -ENOMEM;

	iter->he = he;
	return 0;
}

static int
iter_finish_mem_entry(struct hist_entry_iter *iter,
		      struct addr_location *al __maybe_unused)
{
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct hist_entry *he = iter->he;
	int err = -EINVAL;

	if (he == NULL)
		goto out;

	hists__inc_nr_samples(hists, he->filtered);

	err = hist_entry__append_callchain(he, iter->sample);

out:
	/*
	 * We don't need to free iter->priv (mem_info) here since the mem info
	 * was either already freed in hists__findnew_entry() or passed to a
	 * new hist entry by hist_entry__new().
	 */
	iter->priv = NULL;

	iter->he = NULL;
	return err;
}

static int
iter_prepare_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi;
	struct perf_sample *sample = iter->sample;

	bi = sample__resolve_bstack(sample, al);
	if (!bi)
		return -ENOMEM;

	iter->curr = 0;
	iter->total = sample->branch_stack->nr;

	iter->priv = bi;
	return 0;
}

static int
iter_add_single_branch_entry(struct hist_entry_iter *iter __maybe_unused,
			     struct addr_location *al __maybe_unused)
{
	/* to avoid calling callback function */
	iter->he = NULL;

	return 0;
}

static int
iter_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi = iter->priv;
	int i = iter->curr;

	if (bi == NULL)
		return 0;

	if (iter->curr >= iter->total)
		return 0;

	al->map = bi[i].to.map;
	al->sym = bi[i].to.sym;
	al->addr = bi[i].to.addr;
	return 1;
}

static int
iter_add_next_branch_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct branch_info *bi;
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct hist_entry *he = NULL;
	int i = iter->curr;
	int err = 0;

	bi = iter->priv;

	if (iter->hide_unresolved && !(bi[i].from.sym && bi[i].to.sym))
		goto out;

	/*
	 * The report shows the percentage of total branches captured
	 * and not events sampled. Thus we use a pseudo period of 1.
	 */
	he = __hists__add_entry(hists, al, iter->parent, &bi[i], NULL,
				1, bi->flags.cycles ? bi->flags.cycles : 1,
				0, true);
	if (he == NULL)
		return -ENOMEM;

	hists__inc_nr_samples(hists, he->filtered);

out:
	iter->he = he;
	iter->curr++;
	return err;
}

static int
iter_finish_branch_entry(struct hist_entry_iter *iter,
			 struct addr_location *al __maybe_unused)
{
	zfree(&iter->priv);
	iter->he = NULL;

	return iter->curr >= iter->total ? 0 : -1;
}

static int
iter_prepare_normal_entry(struct hist_entry_iter *iter __maybe_unused,
			  struct addr_location *al __maybe_unused)
{
	return 0;
}

static int
iter_add_single_normal_entry(struct hist_entry_iter *iter, struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;
	struct hist_entry *he;

	he = __hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL,
				sample->period, sample->weight,
				sample->transaction, true);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	return 0;
}

static int
iter_finish_normal_entry(struct hist_entry_iter *iter,
			 struct addr_location *al __maybe_unused)
{
	struct hist_entry *he = iter->he;
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;

	if (he == NULL)
		return 0;

	iter->he = NULL;

	hists__inc_nr_samples(evsel__hists(evsel), he->filtered);

	return hist_entry__append_callchain(he, sample);
}

static int
iter_prepare_cumulative_entry(struct hist_entry_iter *iter,
			      struct addr_location *al __maybe_unused)
{
	struct hist_entry **he_cache;

	callchain_cursor_commit(&callchain_cursor);

	/*
	 * This is for detecting cycles or recursions so that they're
	 * cumulated only one time to prevent entries more than 100%
	 * overhead.
	 */
	he_cache = malloc(sizeof(*he_cache) * (iter->max_stack + 1));
	if (he_cache == NULL)
		return -ENOMEM;

	iter->priv = he_cache;
	iter->curr = 0;

	return 0;
}

static int
iter_add_single_cumulative_entry(struct hist_entry_iter *iter,
				 struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct hists *hists = evsel__hists(evsel);
	struct perf_sample *sample = iter->sample;
	struct hist_entry **he_cache = iter->priv;
	struct hist_entry *he;
	int err = 0;

	he = __hists__add_entry(hists, al, iter->parent, NULL, NULL,
				sample->period, sample->weight,
				sample->transaction, true);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	he_cache[iter->curr++] = he;

	hist_entry__append_callchain(he, sample);

	/*
	 * We need to re-initialize the cursor since callchain_append()
	 * advanced the cursor to the end.
	 */
	callchain_cursor_commit(&callchain_cursor);

	hists__inc_nr_samples(hists, he->filtered);

	return err;
}

static int
iter_next_cumulative_entry(struct hist_entry_iter *iter,
			   struct addr_location *al)
{
	struct callchain_cursor_node *node;

	node = callchain_cursor_current(&callchain_cursor);
	if (node == NULL)
		return 0;

	return fill_callchain_info(al, node, iter->hide_unresolved);
}

static int
iter_add_next_cumulative_entry(struct hist_entry_iter *iter,
			       struct addr_location *al)
{
	struct perf_evsel *evsel = iter->evsel;
	struct perf_sample *sample = iter->sample;
	struct hist_entry **he_cache = iter->priv;
	struct hist_entry *he;
	struct hist_entry he_tmp = {
		.hists = evsel__hists(evsel),
		.cpu = al->cpu,
		.thread = al->thread,
		.comm = thread__comm(al->thread),
		.ip = al->addr,
		.ms = {
			.map = al->map,
			.sym = al->sym,
		},
		.parent = iter->parent,
	};
	int i;
	struct callchain_cursor cursor;

	callchain_cursor_snapshot(&cursor, &callchain_cursor);

	callchain_cursor_advance(&callchain_cursor);

	/*
	 * Check if there's duplicate entries in the callchain.
	 * It's possible that it has cycles or recursive calls.
	 */
	for (i = 0; i < iter->curr; i++) {
		if (hist_entry__cmp(he_cache[i], &he_tmp) == 0) {
			/* to avoid calling callback function */
			iter->he = NULL;
			return 0;
		}
	}

	he = __hists__add_entry(evsel__hists(evsel), al, iter->parent, NULL, NULL,
				sample->period, sample->weight,
				sample->transaction, false);
	if (he == NULL)
		return -ENOMEM;

	iter->he = he;
	he_cache[iter->curr++] = he;

	if (symbol_conf.use_callchain)
		callchain_append(he->callchain, &cursor, sample->period);
	return 0;
}

static int
iter_finish_cumulative_entry(struct hist_entry_iter *iter,
			     struct addr_location *al __maybe_unused)
{
	zfree(&iter->priv);
	iter->he = NULL;

	return 0;
}

const struct hist_iter_ops hist_iter_mem = {
	.prepare_entry		= iter_prepare_mem_entry,
	.add_single_entry	= iter_add_single_mem_entry,
	.next_entry		= iter_next_nop_entry,
	.add_next_entry		= iter_add_next_nop_entry,
	.finish_entry		= iter_finish_mem_entry,
};

const struct hist_iter_ops hist_iter_branch = {
	.prepare_entry		= iter_prepare_branch_entry,
	.add_single_entry	= iter_add_single_branch_entry,
	.next_entry		= iter_next_branch_entry,
	.add_next_entry		= iter_add_next_branch_entry,
	.finish_entry		= iter_finish_branch_entry,
};

const struct hist_iter_ops hist_iter_normal = {
	.prepare_entry		= iter_prepare_normal_entry,
	.add_single_entry	= iter_add_single_normal_entry,
	.next_entry		= iter_next_nop_entry,
	.add_next_entry		= iter_add_next_nop_entry,
	.finish_entry		= iter_finish_normal_entry,
};

const struct hist_iter_ops hist_iter_cumulative = {
	.prepare_entry		= iter_prepare_cumulative_entry,
	.add_single_entry	= iter_add_single_cumulative_entry,
	.next_entry		= iter_next_cumulative_entry,
	.add_next_entry		= iter_add_next_cumulative_entry,
	.finish_entry		= iter_finish_cumulative_entry,
};

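/*
 * Resolve the sample's callchain and walk it with the iterator ops: add a
 * single entry first, then one entry per callchain/branch record, invoking
 * iter->add_entry_cb (if set) for each entry added.
 */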
int hist_entry_iter__add(struct hist_entry_iter *iter, struct addr_location *al,
			 int max_stack_depth, void *arg)
{
	int err, err2;

	err = sample__resolve_callchain(iter->sample, &iter->parent,
					iter->evsel, al, max_stack_depth);
	if (err)
		return err;

	iter->max_stack = max_stack_depth;

	err = iter->ops->prepare_entry(iter, al);
	if (err)
		goto out;

	err = iter->ops->add_single_entry(iter, al);
	if (err)
		goto out;

	if (iter->he && iter->add_entry_cb) {
		err = iter->add_entry_cb(iter, al, true, arg);
		if (err)
			goto out;
	}

	while (iter->ops->next_entry(iter, al)) {
		err = iter->ops->add_next_entry(iter, al);
		if (err)
			break;

		if (iter->he && iter->add_entry_cb) {
			err = iter->add_entry_cb(iter, al, false, arg);
			if (err)
				goto out;
		}
	}

out:
	err2 = iter->ops->finish_entry(iter, al);
	if (!err)
		err = err2;

	return err;
}

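/*
 * Compare two entries using the configured sort keys; the first key that
 * differs decides the order.
 */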
int64_t
hist_entry__cmp(struct hist_entry *left, struct hist_entry *right)
{
	struct perf_hpp_fmt *fmt;
	int64_t cmp = 0;

	perf_hpp__for_each_sort_list(fmt) {
		if (perf_hpp__should_skip(fmt))
			continue;

		cmp = fmt->cmp(fmt, left, right);
		if (cmp)
			break;
	}

	return cmp;
}

int64_t
hist_entry__collapse(struct hist_entry *left, struct hist_entry *right)
{
	struct perf_hpp_fmt *fmt;
	int64_t cmp = 0;

	perf_hpp__for_each_sort_list(fmt) {
		if (perf_hpp__should_skip(fmt))
			continue;

		cmp = fmt->collapse(fmt, left, right);
		if (cmp)
			break;
	}

	return cmp;
}

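/*
 * Drop the references held by @he (thread, maps, branch/mem info) and free
 * the entry together with its srcline/srcfile and callchain storage.
 */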
Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 943 | void hist_entry__delete(struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 944 | { |
Arnaldo Carvalho de Melo | f3b623b | 2015-03-02 22:21:35 -0300 | [diff] [blame] | 945 | thread__zput(he->thread); |
Arnaldo Carvalho de Melo | 5c24b67 | 2015-06-15 23:29:51 -0300 | [diff] [blame] | 946 | map__zput(he->ms.map); |
| 947 | |
| 948 | if (he->branch_info) { |
| 949 | map__zput(he->branch_info->from.map); |
| 950 | map__zput(he->branch_info->to.map); |
| 951 | zfree(&he->branch_info); |
| 952 | } |
| 953 | |
| 954 | if (he->mem_info) { |
| 955 | map__zput(he->mem_info->iaddr.map); |
| 956 | map__zput(he->mem_info->daddr.map); |
| 957 | zfree(&he->mem_info); |
| 958 | } |
| 959 | |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 960 | zfree(&he->stat_acc); |
Namhyung Kim | f048d54 | 2013-09-11 14:09:28 +0900 | [diff] [blame] | 961 | free_srcline(he->srcline); |
Andi Kleen | 31191a8 | 2015-08-07 15:54:24 -0700 | [diff] [blame] | 962 | if (he->srcfile && he->srcfile[0]) |
| 963 | free(he->srcfile); |
Namhyung Kim | d114960 | 2014-12-30 14:38:13 +0900 | [diff] [blame] | 964 | free_callchain(he->callchain); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 965 | free(he); |
| 966 | } |
| 967 | |
| 968 | /* |
| 969 | * collapse the histogram |
| 970 | */ |
| 971 | |
Irina Tirdea | 1d037ca | 2012-09-11 01:15:03 +0300 | [diff] [blame] | 972 | static bool hists__collapse_insert_entry(struct hists *hists __maybe_unused, |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 973 | struct rb_root *root, |
| 974 | struct hist_entry *he) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 975 | { |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 976 | struct rb_node **p = &root->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 977 | struct rb_node *parent = NULL; |
| 978 | struct hist_entry *iter; |
| 979 | int64_t cmp; |
| 980 | |
| 981 | while (*p != NULL) { |
| 982 | parent = *p; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 983 | iter = rb_entry(parent, struct hist_entry, rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 984 | |
| 985 | cmp = hist_entry__collapse(iter, he); |
| 986 | |
| 987 | if (!cmp) { |
Namhyung Kim | 139c081 | 2012-10-04 21:49:43 +0900 | [diff] [blame] | 988 | he_stat__add_stat(&iter->stat, &he->stat); |
Namhyung Kim | f8be1c8 | 2012-09-11 13:15:07 +0900 | [diff] [blame] | 989 | if (symbol_conf.cumulate_callchain) |
| 990 | he_stat__add_stat(iter->stat_acc, he->stat_acc); |
Namhyung Kim | 9ec6097 | 2012-09-26 16:47:28 +0900 | [diff] [blame] | 991 | |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 992 | if (symbol_conf.use_callchain) { |
Namhyung Kim | 4726064 | 2012-05-31 14:43:26 +0900 | [diff] [blame] | 993 | callchain_cursor_reset(&callchain_cursor); |
| 994 | callchain_merge(&callchain_cursor, |
| 995 | iter->callchain, |
Frederic Weisbecker | 1b3a0e9 | 2011-01-14 04:51:58 +0100 | [diff] [blame] | 996 | he->callchain); |
| 997 | } |
Arnaldo Carvalho de Melo | 6733d1b | 2014-12-19 12:31:40 -0300 | [diff] [blame] | 998 | hist_entry__delete(he); |
Arnaldo Carvalho de Melo | fefb0b9 | 2010-05-10 13:57:51 -0300 | [diff] [blame] | 999 | return false; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1000 | } |
| 1001 | |
| 1002 | if (cmp < 0) |
| 1003 | p = &(*p)->rb_left; |
| 1004 | else |
| 1005 | p = &(*p)->rb_right; |
| 1006 | } |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1007 | hists->nr_entries++; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1008 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1009 | rb_link_node(&he->rb_node_in, parent, p); |
| 1010 | rb_insert_color(&he->rb_node_in, root); |
Arnaldo Carvalho de Melo | fefb0b9 | 2010-05-10 13:57:51 -0300 | [diff] [blame] | 1011 | return true; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1012 | } |
| 1013 | |
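/*
 * hists__collapse_insert_entry() walks the rbtree keyed by the collapse sort
 * keys: on an equal key the new entry's stats (and callchain) are merged into
 * the existing node and the duplicate is deleted (return false); otherwise it
 * is linked in as a new bucket (return true).  A self-contained sketch of the
 * same insert-or-merge pattern on a plain binary search tree (struct node and
 * tree_insert_or_merge() are hypothetical, not the perf rbtree code):
 */
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

struct node {
	struct node *left, *right;
	char key[32];
	unsigned long long period;	/* accumulated on key collision */
};

/* Returns true if 'n' was linked in, false if it was merged and freed. */
static bool tree_insert_or_merge(struct node **root, struct node *n)
{
	struct node **p = root;

	while (*p) {
		int cmp = strcmp(n->key, (*p)->key);

		if (!cmp) {			/* same key: collapse into it */
			(*p)->period += n->period;
			free(n);
			return false;
		}
		p = cmp < 0 ? &(*p)->left : &(*p)->right;
	}

	n->left = n->right = NULL;
	*p = n;					/* new bucket */
	return true;
}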
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1014 | static struct rb_root *hists__get_rotate_entries_in(struct hists *hists) |
| 1015 | { |
| 1016 | struct rb_root *root; |
| 1017 | |
| 1018 | pthread_mutex_lock(&hists->lock); |
| 1019 | |
| 1020 | root = hists->entries_in; |
| 1021 | if (++hists->entries_in > &hists->entries_in_array[1]) |
| 1022 | hists->entries_in = &hists->entries_in_array[0]; |
| 1023 | |
| 1024 | pthread_mutex_unlock(&hists->lock); |
| 1025 | |
| 1026 | return root; |
| 1027 | } |
| 1028 | |
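/*
 * hists__get_rotate_entries_in() hands the caller the input tree filled so
 * far and, under hists->lock, points new insertions at the other element of
 * entries_in_array, so collapsing can proceed while more samples are still
 * being added.  A self-contained sketch of that double-buffer rotation
 * (struct twin_bufs is hypothetical; the real code rotates two struct rb_root
 * buffers):
 */
#include <pthread.h>

struct twin_bufs {
	int bufs[2];		/* stand-ins for the two input buffers */
	int *in;		/* buffer producers currently insert into */
	pthread_mutex_t lock;
};

/* Hand the filled buffer to the consumer and switch producers to the other one. */
static int *twin_bufs__rotate(struct twin_bufs *tb)
{
	int *filled;

	pthread_mutex_lock(&tb->lock);
	filled = tb->in;
	tb->in = (tb->in == &tb->bufs[0]) ? &tb->bufs[1] : &tb->bufs[0];
	pthread_mutex_unlock(&tb->lock);

	return filled;
}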
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1029 | static void hists__apply_filters(struct hists *hists, struct hist_entry *he) |
| 1030 | { |
| 1031 | hists__filter_entry_by_dso(hists, he); |
| 1032 | hists__filter_entry_by_thread(hists, he); |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1033 | hists__filter_entry_by_symbol(hists, he); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1034 | hists__filter_entry_by_socket(hists, he); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1035 | } |
| 1036 | |
Namhyung Kim | c1fb565 | 2013-10-11 14:15:38 +0900 | [diff] [blame] | 1037 | void hists__collapse_resort(struct hists *hists, struct ui_progress *prog) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1038 | { |
| 1039 | struct rb_root *root; |
| 1040 | struct rb_node *next; |
| 1041 | struct hist_entry *n; |
| 1042 | |
Namhyung Kim | 3a5714f | 2013-05-14 11:09:01 +0900 | [diff] [blame] | 1043 | if (!sort__need_collapse) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1044 | return; |
| 1045 | |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1046 | hists->nr_entries = 0; |
| 1047 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1048 | root = hists__get_rotate_entries_in(hists); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1049 | |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1050 | next = rb_first(root); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1051 | |
| 1052 | while (next) { |
Arnaldo Carvalho de Melo | 33e940a | 2013-09-17 16:34:28 -0300 | [diff] [blame] | 1053 | if (session_done()) |
| 1054 | break; |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1055 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1056 | next = rb_next(&n->rb_node_in); |
| 1057 | |
| 1058 | rb_erase(&n->rb_node_in, root); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1059 | if (hists__collapse_insert_entry(hists, &hists->entries_collapsed, n)) { |
| 1060 | /* |
| 1061 | * If it wasn't combined with one of the entries already |
| 1062 | * collapsed, we need to apply the filters that may have |
| 1063 | * been set by, say, the hist_browser. |
| 1064 | */ |
| 1065 | hists__apply_filters(hists, n); |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1066 | } |
Namhyung Kim | c1fb565 | 2013-10-11 14:15:38 +0900 | [diff] [blame] | 1067 | if (prog) |
| 1068 | ui_progress__update(prog, 1); |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1069 | } |
| 1070 | } |
| 1071 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1072 | static int hist_entry__sort(struct hist_entry *a, struct hist_entry *b) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1073 | { |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1074 | struct perf_hpp_fmt *fmt; |
| 1075 | int64_t cmp = 0; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1076 | |
Namhyung Kim | 26d8b33 | 2014-03-03 16:16:20 +0900 | [diff] [blame] | 1077 | perf_hpp__for_each_sort_list(fmt) { |
Namhyung Kim | e67d49a | 2014-03-18 13:00:59 +0900 | [diff] [blame] | 1078 | if (perf_hpp__should_skip(fmt)) |
| 1079 | continue; |
| 1080 | |
Namhyung Kim | 87bbdf7 | 2015-01-08 09:45:46 +0900 | [diff] [blame] | 1081 | cmp = fmt->sort(fmt, a, b); |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1082 | if (cmp) |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1083 | break; |
| 1084 | } |
| 1085 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1086 | return cmp; |
Namhyung Kim | 29d720e | 2013-01-22 18:09:33 +0900 | [diff] [blame] | 1087 | } |
| 1088 | |
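/*
 * hist_entry__sort() asks each configured sort/output format to compare the
 * two entries and takes the first non-zero result, so earlier keys in the
 * sort list dominate later ones.  A self-contained sketch of that
 * first-difference-wins comparison (struct rec, cmp_* and rec__sort() are
 * hypothetical stand-ins for the perf_hpp_fmt machinery):
 */
#include <stdint.h>
#include <string.h>

struct rec {
	const char *comm;
	uint64_t ip;
};

typedef int64_t (*sort_fn)(const struct rec *a, const struct rec *b);

static int64_t cmp_comm(const struct rec *a, const struct rec *b)
{
	return strcmp(a->comm, b->comm);
}

static int64_t cmp_ip(const struct rec *a, const struct rec *b)
{
	return a->ip < b->ip ? -1 : a->ip > b->ip ? 1 : 0;
}

/* The first key that differs decides the ordering. */
static int64_t rec__sort(const struct rec *a, const struct rec *b,
			 const sort_fn *keys, int nr_keys)
{
	int64_t cmp = 0;
	int i;

	for (i = 0; i < nr_keys; i++) {
		cmp = keys[i](a, b);
		if (cmp)
			break;
	}

	return cmp;
}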
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1089 | static void hists__reset_filter_stats(struct hists *hists) |
| 1090 | { |
| 1091 | hists->nr_non_filtered_entries = 0; |
| 1092 | hists->stats.total_non_filtered_period = 0; |
| 1093 | } |
| 1094 | |
| 1095 | void hists__reset_stats(struct hists *hists) |
| 1096 | { |
| 1097 | hists->nr_entries = 0; |
| 1098 | hists->stats.total_period = 0; |
| 1099 | |
| 1100 | hists__reset_filter_stats(hists); |
| 1101 | } |
| 1102 | |
| 1103 | static void hists__inc_filter_stats(struct hists *hists, struct hist_entry *h) |
| 1104 | { |
| 1105 | hists->nr_non_filtered_entries++; |
| 1106 | hists->stats.total_non_filtered_period += h->stat.period; |
| 1107 | } |
| 1108 | |
| 1109 | void hists__inc_stats(struct hists *hists, struct hist_entry *h) |
| 1110 | { |
| 1111 | if (!h->filtered) |
| 1112 | hists__inc_filter_stats(hists, h); |
| 1113 | |
| 1114 | hists->nr_entries++; |
| 1115 | hists->stats.total_period += h->stat.period; |
| 1116 | } |
| 1117 | |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1118 | static void __hists__insert_output_entry(struct rb_root *entries, |
| 1119 | struct hist_entry *he, |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1120 | u64 min_callchain_hits, |
| 1121 | bool use_callchain) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1122 | { |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1123 | struct rb_node **p = &entries->rb_node; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1124 | struct rb_node *parent = NULL; |
| 1125 | struct hist_entry *iter; |
| 1126 | |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1127 | if (use_callchain) |
Arnaldo Carvalho de Melo | b9fb930 | 2010-04-02 09:50:42 -0300 | [diff] [blame] | 1128 | callchain_param.sort(&he->sorted_chain, he->callchain, |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1129 | min_callchain_hits, &callchain_param); |
| 1130 | |
| 1131 | while (*p != NULL) { |
| 1132 | parent = *p; |
| 1133 | iter = rb_entry(parent, struct hist_entry, rb_node); |
| 1134 | |
Namhyung Kim | 043ca389 | 2014-03-03 14:18:00 +0900 | [diff] [blame] | 1135 | if (hist_entry__sort(he, iter) > 0) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1136 | p = &(*p)->rb_left; |
| 1137 | else |
| 1138 | p = &(*p)->rb_right; |
| 1139 | } |
| 1140 | |
| 1141 | rb_link_node(&he->rb_node, parent, p); |
Arnaldo Carvalho de Melo | 1c02c4d | 2010-05-10 13:04:11 -0300 | [diff] [blame] | 1142 | rb_insert_color(&he->rb_node, entries); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1143 | } |
| 1144 | |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1145 | void hists__output_resort(struct hists *hists, struct ui_progress *prog) |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1146 | { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1147 | struct rb_root *root; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1148 | struct rb_node *next; |
| 1149 | struct hist_entry *n; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1150 | u64 min_callchain_hits; |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1151 | struct perf_evsel *evsel = hists_to_evsel(hists); |
Kan Liang | 9e207dd | 2015-08-11 06:30:49 -0400 | [diff] [blame] | 1152 | bool use_callchain; |
| 1153 | |
| 1154 | if (evsel && !symbol_conf.show_ref_callgraph) |
| 1155 | use_callchain = evsel->attr.sample_type & PERF_SAMPLE_CALLCHAIN; |
| 1156 | else |
| 1157 | use_callchain = symbol_conf.use_callchain; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1158 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1159 | min_callchain_hits = hists->stats.total_period * (callchain_param.min_percent / 100); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1160 | |
Namhyung Kim | 3a5714f | 2013-05-14 11:09:01 +0900 | [diff] [blame] | 1161 | if (sort__need_collapse) |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1162 | root = &hists->entries_collapsed; |
| 1163 | else |
| 1164 | root = hists->entries_in; |
| 1165 | |
| 1166 | next = rb_first(root); |
| 1167 | hists->entries = RB_ROOT; |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1168 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1169 | hists__reset_stats(hists); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1170 | hists__reset_col_len(hists); |
Arnaldo Carvalho de Melo | fefb0b9 | 2010-05-10 13:57:51 -0300 | [diff] [blame] | 1171 | |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1172 | while (next) { |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1173 | n = rb_entry(next, struct hist_entry, rb_node_in); |
| 1174 | next = rb_next(&n->rb_node_in); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1175 | |
Kan Liang | f9db0d0 | 2015-08-11 06:30:48 -0400 | [diff] [blame] | 1176 | __hists__insert_output_entry(&hists->entries, n, min_callchain_hits, use_callchain); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 1177 | hists__inc_stats(hists, n); |
Namhyung Kim | ae993ef | 2014-04-24 16:25:19 +0900 | [diff] [blame] | 1178 | |
| 1179 | if (!n->filtered) |
| 1180 | hists__calc_col_len(hists, n); |
Namhyung Kim | 740b97f | 2014-12-22 13:44:10 +0900 | [diff] [blame] | 1181 | |
| 1182 | if (prog) |
| 1183 | ui_progress__update(prog, 1); |
John Kacur | 3d1d07e | 2009-09-28 15:32:55 +0200 | [diff] [blame] | 1184 | } |
Arnaldo Carvalho de Melo | 1980c2eb | 2011-10-05 17:50:23 -0300 | [diff] [blame] | 1185 | } |
Arnaldo Carvalho de Melo | b9bf089 | 2009-12-14 11:37:11 -0200 | [diff] [blame] | 1186 | |
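/*
 * The callchain threshold computed at the top of hists__output_resort() is
 * simply "min_percent of the total period": with a total period of 2,000,000
 * and callchain_param.min_percent = 0.5, only chains accounting for at least
 * 10,000 are kept for display.  A small sketch of that arithmetic (the helper
 * name is hypothetical):
 */
#include <stdint.h>

static uint64_t callchain_min_hits(uint64_t total_period, double min_percent)
{
	return (uint64_t)(total_period * (min_percent / 100.0));
}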
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1187 | static void hists__remove_entry_filter(struct hists *hists, struct hist_entry *h, |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1188 | enum hist_filter filter) |
| 1189 | { |
| 1190 | h->filtered &= ~(1 << filter); |
| 1191 | if (h->filtered) |
| 1192 | return; |
| 1193 | |
Namhyung Kim | 87e90f4 | 2014-04-24 16:44:16 +0900 | [diff] [blame] | 1194 | /* force-fold the now-unfiltered entry, for simplicity */
Namhyung Kim | 3698dab | 2015-05-05 23:55:46 +0900 | [diff] [blame] | 1195 | h->unfolded = false; |
Arnaldo Carvalho de Melo | 0f0cbf7 | 2010-07-26 17:13:40 -0300 | [diff] [blame] | 1196 | h->row_offset = 0; |
He Kuang | a8cd1f4 | 2015-03-11 20:36:03 +0800 | [diff] [blame] | 1197 | h->nr_rows = 0; |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1198 | |
Namhyung Kim | 1ab1fa5 | 2013-12-26 15:11:52 +0900 | [diff] [blame] | 1199 | hists->stats.nr_non_filtered_samples += h->stat.nr_events; |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1200 | |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1201 | hists__inc_filter_stats(hists, h); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1202 | hists__calc_col_len(hists, h); |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1203 | } |
| 1204 | |
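/*
 * Each entry carries a bitmask of the filters that currently hide it;
 * hists__remove_entry_filter() clears one bit and only re-accounts the entry
 * once no filter bit is left.  A self-contained sketch of that bitmask
 * bookkeeping (the my_* names are hypothetical):
 */
#include <stdbool.h>

enum my_filter {
	MY_FILTER_DSO,
	MY_FILTER_THREAD,
	MY_FILTER_SYMBOL,
	MY_FILTER_SOCKET,
};

struct my_entry {
	unsigned int filtered;		/* one bit per filter hiding the entry */
};

static void my_entry__hide(struct my_entry *e, enum my_filter f)
{
	e->filtered |= 1U << f;
}

/* Returns true once the entry is no longer hidden by any filter. */
static bool my_entry__unhide(struct my_entry *e, enum my_filter f)
{
	e->filtered &= ~(1U << f);
	return e->filtered == 0;
}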
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1205 | |
| 1206 | static bool hists__filter_entry_by_dso(struct hists *hists, |
| 1207 | struct hist_entry *he) |
| 1208 | { |
| 1209 | if (hists->dso_filter != NULL && |
| 1210 | (he->ms.map == NULL || he->ms.map->dso != hists->dso_filter)) { |
| 1211 | he->filtered |= (1 << HIST_FILTER__DSO); |
| 1212 | return true; |
| 1213 | } |
| 1214 | |
| 1215 | return false; |
| 1216 | } |
| 1217 | |
Arnaldo Carvalho de Melo | d7b76f0 | 2011-10-18 19:07:34 -0200 | [diff] [blame] | 1218 | void hists__filter_by_dso(struct hists *hists) |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1219 | { |
| 1220 | struct rb_node *nd; |
| 1221 | |
Namhyung Kim | 1ab1fa5 | 2013-12-26 15:11:52 +0900 | [diff] [blame] | 1222 | hists->stats.nr_non_filtered_samples = 0; |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1223 | |
| 1224 | hists__reset_filter_stats(hists); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1225 | hists__reset_col_len(hists); |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1226 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1227 | for (nd = rb_first(&hists->entries); nd; nd = rb_next(nd)) { |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1228 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1229 | |
| 1230 | if (symbol_conf.exclude_other && !h->parent) |
| 1231 | continue; |
| 1232 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1233 | if (hists__filter_entry_by_dso(hists, h)) |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1234 | continue; |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1235 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1236 | hists__remove_entry_filter(hists, h, HIST_FILTER__DSO); |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1237 | } |
| 1238 | } |
| 1239 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1240 | static bool hists__filter_entry_by_thread(struct hists *hists, |
| 1241 | struct hist_entry *he) |
| 1242 | { |
| 1243 | if (hists->thread_filter != NULL && |
| 1244 | he->thread != hists->thread_filter) { |
| 1245 | he->filtered |= (1 << HIST_FILTER__THREAD); |
| 1246 | return true; |
| 1247 | } |
| 1248 | |
| 1249 | return false; |
| 1250 | } |
| 1251 | |
Arnaldo Carvalho de Melo | d7b76f0 | 2011-10-18 19:07:34 -0200 | [diff] [blame] | 1252 | void hists__filter_by_thread(struct hists *hists) |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1253 | { |
| 1254 | struct rb_node *nd; |
| 1255 | |
Namhyung Kim | 1ab1fa5 | 2013-12-26 15:11:52 +0900 | [diff] [blame] | 1256 | hists->stats.nr_non_filtered_samples = 0; |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1257 | |
| 1258 | hists__reset_filter_stats(hists); |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1259 | hists__reset_col_len(hists); |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1260 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1261 | for (nd = rb_first(&hists->entries); nd; nd = rb_next(nd)) { |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1262 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1263 | |
Arnaldo Carvalho de Melo | 90cf1fb | 2011-10-19 13:09:10 -0200 | [diff] [blame] | 1264 | if (hists__filter_entry_by_thread(hists, h)) |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1265 | continue; |
Arnaldo Carvalho de Melo | cc5edb0 | 2010-07-16 12:35:07 -0300 | [diff] [blame] | 1266 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1267 | hists__remove_entry_filter(hists, h, HIST_FILTER__THREAD); |
Arnaldo Carvalho de Melo | b09e019 | 2010-05-11 11:10:15 -0300 | [diff] [blame] | 1268 | } |
| 1269 | } |
Arnaldo Carvalho de Melo | ef7b93a | 2010-05-11 23:18:06 -0300 | [diff] [blame] | 1270 | |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1271 | static bool hists__filter_entry_by_symbol(struct hists *hists, |
| 1272 | struct hist_entry *he) |
| 1273 | { |
| 1274 | if (hists->symbol_filter_str != NULL && |
| 1275 | (!he->ms.sym || strstr(he->ms.sym->name, |
| 1276 | hists->symbol_filter_str) == NULL)) { |
| 1277 | he->filtered |= (1 << HIST_FILTER__SYMBOL); |
| 1278 | return true; |
| 1279 | } |
| 1280 | |
| 1281 | return false; |
| 1282 | } |
| 1283 | |
| 1284 | void hists__filter_by_symbol(struct hists *hists) |
| 1285 | { |
| 1286 | struct rb_node *nd; |
| 1287 | |
Namhyung Kim | 1ab1fa5 | 2013-12-26 15:11:52 +0900 | [diff] [blame] | 1288 | hists->stats.nr_non_filtered_samples = 0; |
Namhyung Kim | 9283ba9 | 2014-04-24 16:37:26 +0900 | [diff] [blame] | 1289 | |
| 1290 | hists__reset_filter_stats(hists); |
Namhyung Kim | e94d53e | 2012-03-16 17:50:51 +0900 | [diff] [blame] | 1291 | hists__reset_col_len(hists); |
| 1292 | |
| 1293 | for (nd = rb_first(&hists->entries); nd; nd = rb_next(nd)) { |
| 1294 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1295 | |
| 1296 | if (hists__filter_entry_by_symbol(hists, h)) |
| 1297 | continue; |
| 1298 | |
| 1299 | hists__remove_entry_filter(hists, h, HIST_FILTER__SYMBOL); |
| 1300 | } |
| 1301 | } |
| 1302 | |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1303 | static bool hists__filter_entry_by_socket(struct hists *hists, |
| 1304 | struct hist_entry *he) |
| 1305 | { |
| 1306 | if ((hists->socket_filter > -1) && |
| 1307 | (he->socket != hists->socket_filter)) { |
| 1308 | he->filtered |= (1 << HIST_FILTER__SOCKET); |
| 1309 | return true; |
| 1310 | } |
| 1311 | |
| 1312 | return false; |
| 1313 | } |
| 1314 | |
Kan Liang | 84734b0 | 2015-09-04 10:45:45 -0400 | [diff] [blame] | 1315 | void hists__filter_by_socket(struct hists *hists) |
| 1316 | { |
| 1317 | struct rb_node *nd; |
| 1318 | |
| 1319 | hists->stats.nr_non_filtered_samples = 0; |
| 1320 | |
| 1321 | hists__reset_filter_stats(hists); |
| 1322 | hists__reset_col_len(hists); |
| 1323 | |
| 1324 | for (nd = rb_first(&hists->entries); nd; nd = rb_next(nd)) { |
| 1325 | struct hist_entry *h = rb_entry(nd, struct hist_entry, rb_node); |
| 1326 | |
| 1327 | if (hists__filter_entry_by_socket(hists, h)) |
| 1328 | continue; |
| 1329 | |
| 1330 | hists__remove_entry_filter(hists, h, HIST_FILTER__SOCKET); |
| 1331 | } |
| 1332 | } |
| 1333 | |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 1334 | void events_stats__inc(struct events_stats *stats, u32 type) |
| 1335 | { |
| 1336 | ++stats->nr_events[0]; |
| 1337 | ++stats->nr_events[type]; |
| 1338 | } |
| 1339 | |
Arnaldo Carvalho de Melo | 42b28ac | 2011-09-26 12:33:28 -0300 | [diff] [blame] | 1340 | void hists__inc_nr_events(struct hists *hists, u32 type) |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 1341 | { |
Arnaldo Carvalho de Melo | 28a6b6a | 2012-12-18 16:24:46 -0300 | [diff] [blame] | 1342 | events_stats__inc(&hists->stats, type); |
Arnaldo Carvalho de Melo | c8446b9 | 2010-05-14 10:36:42 -0300 | [diff] [blame] | 1343 | } |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1344 | |
Namhyung Kim | 1844dbc | 2014-05-28 14:12:18 +0900 | [diff] [blame] | 1345 | void hists__inc_nr_samples(struct hists *hists, bool filtered) |
| 1346 | { |
| 1347 | events_stats__inc(&hists->stats, PERF_RECORD_SAMPLE); |
| 1348 | if (!filtered) |
| 1349 | hists->stats.nr_non_filtered_samples++; |
| 1350 | } |
| 1351 | |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1352 | static struct hist_entry *hists__add_dummy_entry(struct hists *hists, |
| 1353 | struct hist_entry *pair) |
| 1354 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1355 | struct rb_root *root; |
| 1356 | struct rb_node **p; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1357 | struct rb_node *parent = NULL; |
| 1358 | struct hist_entry *he; |
Andi Kleen | 354cc40 | 2013-10-01 07:22:15 -0700 | [diff] [blame] | 1359 | int64_t cmp; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1360 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1361 | if (sort__need_collapse) |
| 1362 | root = &hists->entries_collapsed; |
| 1363 | else |
| 1364 | root = hists->entries_in; |
| 1365 | |
| 1366 | p = &root->rb_node; |
| 1367 | |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1368 | while (*p != NULL) { |
| 1369 | parent = *p; |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1370 | he = rb_entry(parent, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1371 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1372 | cmp = hist_entry__collapse(he, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1373 | |
| 1374 | if (!cmp) |
| 1375 | goto out; |
| 1376 | |
| 1377 | if (cmp < 0) |
| 1378 | p = &(*p)->rb_left; |
| 1379 | else |
| 1380 | p = &(*p)->rb_right; |
| 1381 | } |
| 1382 | |
Namhyung Kim | a0b51af | 2012-09-11 13:34:27 +0900 | [diff] [blame] | 1383 | he = hist_entry__new(pair, true); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1384 | if (he) { |
Arnaldo Carvalho de Melo | 30193d7 | 2012-11-12 13:20:03 -0300 | [diff] [blame] | 1385 | memset(&he->stat, 0, sizeof(he->stat)); |
| 1386 | he->hists = hists; |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1387 | rb_link_node(&he->rb_node_in, parent, p); |
| 1388 | rb_insert_color(&he->rb_node_in, root); |
Namhyung Kim | 6263835 | 2014-04-24 16:21:46 +0900 | [diff] [blame] | 1389 | hists__inc_stats(hists, he); |
Jiri Olsa | e0af43d | 2012-12-01 21:18:20 +0100 | [diff] [blame] | 1390 | he->dummy = true; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1391 | } |
| 1392 | out: |
| 1393 | return he; |
| 1394 | } |
| 1395 | |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1396 | static struct hist_entry *hists__find_entry(struct hists *hists, |
| 1397 | struct hist_entry *he) |
| 1398 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1399 | struct rb_node *n; |
| 1400 | |
| 1401 | if (sort__need_collapse) |
| 1402 | n = hists->entries_collapsed.rb_node; |
| 1403 | else |
| 1404 | n = hists->entries_in->rb_node; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1405 | |
| 1406 | while (n) { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1407 | struct hist_entry *iter = rb_entry(n, struct hist_entry, rb_node_in); |
| 1408 | int64_t cmp = hist_entry__collapse(iter, he); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1409 | |
| 1410 | if (cmp < 0) |
| 1411 | n = n->rb_left; |
| 1412 | else if (cmp > 0) |
| 1413 | n = n->rb_right; |
| 1414 | else |
| 1415 | return iter; |
| 1416 | } |
| 1417 | |
| 1418 | return NULL; |
| 1419 | } |
| 1420 | |
| 1421 | /* |
| 1422 | * Look for pairs to link to the leader buckets (hist_entries): |
| 1423 | */ |
| 1424 | void hists__match(struct hists *leader, struct hists *other) |
| 1425 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1426 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1427 | struct rb_node *nd; |
| 1428 | struct hist_entry *pos, *pair; |
| 1429 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1430 | if (sort__need_collapse) |
| 1431 | root = &leader->entries_collapsed; |
| 1432 | else |
| 1433 | root = leader->entries_in; |
| 1434 | |
| 1435 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 1436 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1437 | pair = hists__find_entry(other, pos); |
| 1438 | |
| 1439 | if (pair) |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 1440 | hist_entry__add_pair(pair, pos); |
Arnaldo Carvalho de Melo | 95529be | 2012-11-08 17:54:33 -0300 | [diff] [blame] | 1441 | } |
| 1442 | } |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1443 | |
| 1444 | /* |
| 1445 |  * Look for entries in the other hists that are not present in the leader.
| 1446 |  * If we find any, add a dummy entry on the leader hists, with period=0 and
| 1447 |  * nr_events=0, to serve as the list header.
| 1448 | */ |
| 1449 | int hists__link(struct hists *leader, struct hists *other) |
| 1450 | { |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1451 | struct rb_root *root; |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1452 | struct rb_node *nd; |
| 1453 | struct hist_entry *pos, *pair; |
| 1454 | |
Namhyung Kim | ce74f60 | 2012-12-10 17:29:55 +0900 | [diff] [blame] | 1455 | if (sort__need_collapse) |
| 1456 | root = &other->entries_collapsed; |
| 1457 | else |
| 1458 | root = other->entries_in; |
| 1459 | |
| 1460 | for (nd = rb_first(root); nd; nd = rb_next(nd)) { |
| 1461 | pos = rb_entry(nd, struct hist_entry, rb_node_in); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1462 | |
| 1463 | if (!hist_entry__has_pairs(pos)) { |
| 1464 | pair = hists__add_dummy_entry(leader, pos); |
| 1465 | if (pair == NULL) |
| 1466 | return -1; |
Namhyung Kim | 5fa9041 | 2012-11-29 15:38:34 +0900 | [diff] [blame] | 1467 | hist_entry__add_pair(pos, pair); |
Arnaldo Carvalho de Melo | 494d70a | 2012-11-08 18:03:09 -0300 | [diff] [blame] | 1468 | } |
| 1469 | } |
| 1470 | |
| 1471 | return 0; |
| 1472 | } |
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 1473 | |
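/*
 * A minimal sketch of how a consumer (a diff-style view comparing a baseline
 * hists with another one) might use the two helpers above; it assumes both
 * hists have already been collapsed, and pair_up_for_diff() itself is
 * hypothetical:
 */
static int pair_up_for_diff(struct hists *baseline, struct hists *other)
{
	/* Attach counterparts found in 'other' to the baseline entries. */
	hists__match(baseline, other);

	/* Add zero-period dummies for entries that exist only in 'other'. */
	return hists__link(baseline, other);
}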
Andi Kleen | 57849998 | 2015-07-18 08:24:49 -0700 | [diff] [blame] | 1474 | void hist__account_cycles(struct branch_stack *bs, struct addr_location *al, |
| 1475 | struct perf_sample *sample, bool nonany_branch_mode) |
| 1476 | { |
| 1477 | struct branch_info *bi; |
| 1478 | |
| 1479 | /* If we have branch cycles, always annotate them. */
| 1480 | if (bs && bs->nr && bs->entries[0].flags.cycles) { |
| 1481 | int i; |
| 1482 | |
| 1483 | bi = sample__resolve_bstack(sample, al); |
| 1484 | if (bi) { |
| 1485 | struct addr_map_symbol *prev = NULL; |
| 1486 | |
| 1487 | /* |
| 1488 |  * Ignore errors; we still want to process the
| 1489 |  * other entries.
| 1490 |  *
| 1491 |  * For non-standard branch modes, always
| 1492 |  * force no IPC (prev == NULL).
| 1493 |  *
| 1494 |  * Note that perf stores branches reversed from
| 1495 |  * program order!
| 1496 | */ |
| 1497 | for (i = bs->nr - 1; i >= 0; i--) { |
| 1498 | addr_map_symbol__account_cycles(&bi[i].from, |
| 1499 | nonany_branch_mode ? NULL : prev, |
| 1500 | bi[i].flags.cycles); |
| 1501 | prev = &bi[i].to; |
| 1502 | } |
| 1503 | free(bi); |
| 1504 | } |
| 1505 | } |
| 1506 | } |
Arnaldo Carvalho de Melo | 2a1731f | 2014-10-10 15:49:21 -0300 | [diff] [blame] | 1507 | |
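/*
 * hist__account_cycles() walks the branch stack back to front because the
 * records arrive newest-first; walking them in reverse recovers program
 * order, so the block that consumed ent[i]'s cycles runs from the previous
 * branch's target up to ent[i].from.  A self-contained sketch of that
 * reversed walk (struct br and walk_in_program_order() are hypothetical):
 */
#include <stdint.h>
#include <stdio.h>

struct br {
	uint64_t from, to;
	uint16_t cycles;
};

static void walk_in_program_order(const struct br *ent, int nr)
{
	uint64_t block_start = 0;	/* unknown before the first branch seen */
	int i;

	for (i = nr - 1; i >= 0; i--) {
		if (block_start)
			printf("block %#llx-%#llx: %u cycles\n",
			       (unsigned long long)block_start,
			       (unsigned long long)ent[i].from, ent[i].cycles);
		block_start = ent[i].to;	/* the next block starts at this target */
	}
}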
| 1508 | size_t perf_evlist__fprintf_nr_events(struct perf_evlist *evlist, FILE *fp) |
| 1509 | { |
| 1510 | struct perf_evsel *pos; |
| 1511 | size_t ret = 0; |
| 1512 | |
| 1513 | evlist__for_each(evlist, pos) { |
| 1514 | ret += fprintf(fp, "%s stats:\n", perf_evsel__name(pos)); |
| 1515 | ret += events_stats__fprintf(&evsel__hists(pos)->stats, fp); |
| 1516 | } |
| 1517 | |
| 1518 | return ret; |
| 1519 | } |
| 1520 | |
| 1521 | |
Namhyung Kim | f214833 | 2014-01-14 11:52:48 +0900 | [diff] [blame] | 1522 | u64 hists__total_period(struct hists *hists) |
| 1523 | { |
| 1524 | return symbol_conf.filter_relative ? hists->stats.total_non_filtered_period : |
| 1525 | hists->stats.total_period; |
| 1526 | } |
Namhyung Kim | 33db456 | 2014-02-07 12:06:07 +0900 | [diff] [blame] | 1527 | |
| 1528 | int parse_filter_percentage(const struct option *opt __maybe_unused, |
| 1529 | const char *arg, int unset __maybe_unused) |
| 1530 | { |
| 1531 | if (!strcmp(arg, "relative")) |
| 1532 | symbol_conf.filter_relative = true; |
| 1533 | else if (!strcmp(arg, "absolute")) |
| 1534 | symbol_conf.filter_relative = false; |
| 1535 | else |
| 1536 | return -1; |
| 1537 | |
| 1538 | return 0; |
| 1539 | } |
Namhyung Kim | 0b93da1 | 2014-01-14 12:02:15 +0900 | [diff] [blame] | 1540 | |
| 1541 | int perf_hist_config(const char *var, const char *value) |
| 1542 | { |
| 1543 | if (!strcmp(var, "hist.percentage")) |
| 1544 | return parse_filter_percentage(NULL, value, 0); |
| 1545 | |
| 1546 | return 0; |
| 1547 | } |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 1548 | |
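/*
 * parse_filter_percentage()/perf_hist_config() only flip
 * symbol_conf.filter_relative; hists__total_period() above is where that
 * choice matters: "relative" computes percentages over just the entries that
 * survive the active filters, "absolute" keeps the full total as the
 * denominator.  A small sketch of that selection (total_for_display() is a
 * hypothetical helper; the values fed to it would come from hists->stats):
 */
#include <stdbool.h>
#include <stdint.h>

static uint64_t total_for_display(uint64_t total_period,
				  uint64_t total_non_filtered_period,
				  bool filter_relative)
{
	return filter_relative ? total_non_filtered_period : total_period;
}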
| 1549 | static int hists_evsel__init(struct perf_evsel *evsel) |
| 1550 | { |
| 1551 | struct hists *hists = evsel__hists(evsel); |
| 1552 | |
| 1553 | memset(hists, 0, sizeof(*hists)); |
| 1554 | hists->entries_in_array[0] = hists->entries_in_array[1] = RB_ROOT; |
| 1555 | hists->entries_in = &hists->entries_in_array[0]; |
| 1556 | hists->entries_collapsed = RB_ROOT; |
| 1557 | hists->entries = RB_ROOT; |
| 1558 | pthread_mutex_init(&hists->lock, NULL); |
Kan Liang | 21394d9 | 2015-09-04 10:45:44 -0400 | [diff] [blame] | 1559 | hists->socket_filter = -1; |
Arnaldo Carvalho de Melo | a635fc5 | 2014-10-09 16:16:00 -0300 | [diff] [blame] | 1560 | return 0; |
| 1561 | } |
| 1562 | |
| 1563 | /* |
| 1564 | * XXX We probably need a hists_evsel__exit() to free the hist_entries |
| 1565 | * stored in the rbtree... |
| 1566 | */ |
| 1567 | |
| 1568 | int hists__init(void) |
| 1569 | { |
| 1570 | int err = perf_evsel__object_config(sizeof(struct hists_evsel), |
| 1571 | hists_evsel__init, NULL); |
| 1572 | if (err) |
| 1573 | fputs("FATAL ERROR: Couldn't setup hists class\n", stderr); |
| 1574 | |
| 1575 | return err; |
| 1576 | } |
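/*
 * One possible shape for the exit hook the XXX comment above asks for: drain
 * hists->entries and free each node with hist_entry__delete() from earlier in
 * this file.  Purely illustrative (a complete version would also have to
 * drain entries_in_array[] and entries_collapsed), not the in-tree
 * implementation:
 */
static void hists_evsel__exit_sketch(struct hists *hists)
{
	struct rb_node *nd;

	while ((nd = rb_first(&hists->entries)) != NULL) {
		struct hist_entry *he = rb_entry(nd, struct hist_entry, rb_node);

		rb_erase(&he->rb_node, &hists->entries);
		hist_entry__delete(he);
	}
}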