/*--------------------------------------------------------------------*/
/*--- Callgrind                                                    ---*/
/*---                                             ct_callstack.c ---*/
/*--------------------------------------------------------------------*/

/*
   This file is part of Callgrind, a Valgrind tool for call tracing.

   Copyright (C) 2002-2008, Josef Weidendorfer (Josef.Weidendorfer@gmx.de)

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307, USA.

   The GNU General Public License is contained in the file COPYING.
*/
#include "global.h"

/*------------------------------------------------------------*/
/*--- Call stack, operations                               ---*/
/*------------------------------------------------------------*/

/* Call stack of the current thread. It gets initialized when switching
 * to the first thread.
 *
 * The artificial call stack is an array of call_entry's, each entry
 * representing one stack frame of the executing program. The array
 * grows on demand. Each entry also records the stack pointer (SP) of
 * its frame, which is used to match returns to their calls.
 */

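/* For orientation only: judging from the fields accessed in this file,
 * a call_entry roughly contains the members sketched below. The
 * authoritative definition lives in global.h; types, names and order
 * may differ there.
 *
 *   typedef struct _call_entry {
 *     jCC*     jcc;         // cost center of this call (0 if skipped)
 *     FullCost enter_cost;  // global cost counters at time of the call
 *     Addr     sp;          // stack pointer of the frame
 *     Addr     ret_addr;    // expected return address (0 if no real call)
 *     BBCC*    nonskipped;  // last non-skipped BBCC, for skipped regions
 *     Context* cxt;         // context saved by push_cxt()
 *     Int      fn_sp;       // function stack offset, restored on pop
 *   } call_entry;
 */
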
#define N_CALL_STACK_INITIAL_ENTRIES 500

call_stack CLG_(current_call_stack);

void CLG_(init_call_stack)(call_stack* s)
{
  Int i;

  CLG_ASSERT(s != 0);

  s->size = N_CALL_STACK_INITIAL_ENTRIES;
  s->entry = (call_entry*) CLG_MALLOC("cl.callstack.ics.1",
                                      s->size * sizeof(call_entry));
  s->sp = 0;
  s->entry[0].cxt = 0; /* for assertion in push_cxt() */

  for(i=0; i<s->size; i++) s->entry[i].enter_cost = 0;
}

call_entry* CLG_(get_call_entry)(Int sp)
{
  CLG_ASSERT(sp <= CLG_(current_call_stack).sp);
  return &(CLG_(current_call_stack).entry[sp]);
}

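/* The two functions below only copy the stack descriptor (entry pointer,
 * size and sp), not the entries themselves. This is presumably how the
 * per-thread call stacks get saved and restored on a thread switch; see
 * the thread handling code for the actual callers.
 */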
void CLG_(copy_current_call_stack)(call_stack* dst)
{
  CLG_ASSERT(dst != 0);

  dst->size = CLG_(current_call_stack).size;
  dst->entry = CLG_(current_call_stack).entry;
  dst->sp = CLG_(current_call_stack).sp;
}

void CLG_(set_current_call_stack)(call_stack* s)
{
  CLG_ASSERT(s != 0);

  CLG_(current_call_stack).size = s->size;
  CLG_(current_call_stack).entry = s->entry;
  CLG_(current_call_stack).sp = s->sp;
}


static __inline__
void ensure_stack_size(Int i)
{
  Int oldsize;
  call_stack *cs = &CLG_(current_call_stack);

  if (i < cs->size) return;

  oldsize = cs->size;
  cs->size *= 2;
  while (i > cs->size) cs->size *= 2;

  cs->entry = (call_entry*) VG_(realloc)("cl.callstack.ess.1",
                                         cs->entry,
                                         cs->size * sizeof(call_entry));

  for(i=oldsize; i<cs->size; i++)
    cs->entry[i].enter_cost = 0;

  CLG_(stat).call_stack_resizes++;

  CLG_DEBUGIF(2)
    VG_(printf)("  call stack enlarged to %d entries\n",
                CLG_(current_call_stack).size);
}
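
/* Growth policy, illustrated with the numbers used here: with the initial
 * size of 500, the first index that triggers growth is 500, which doubles
 * the size to 1000; index 1000 later doubles it to 2000, and so on. The
 * while loop covers the general case of an index beyond twice the old
 * size. Existing entries are preserved by VG_(realloc); only the newly
 * added ones get their enter_cost cleared.
 */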


/* Called when a function is entered non-recursively, i.e. its
 * recursion depth just became 1 */
static void function_entered(fn_node* fn, BBCC* to)
{
  CLG_ASSERT(fn != 0);

#if CLG_ENABLE_DEBUG
  if (fn->verbosity >=0) {
    Int old = CLG_(clo).verbose;
    CLG_(clo).verbose = fn->verbosity;
    fn->verbosity = old;
    VG_(message)(Vg_DebugMsg,
                 "Entering %s: Verbosity set to %d",
                 fn->name, CLG_(clo).verbose);
  }
#endif

  if (fn->dump_before) {
    Char trigger[FN_NAME_LEN];
    VG_(sprintf)(trigger, "--dump-before=%s", fn->name);
    CLG_(dump_profile)(trigger, True);
  }
  else if (fn->zero_before) {
    CLG_(zero_all_cost)(True);
  }

  if (fn->toggle_collect) {
    CLG_(current_state).collect = !CLG_(current_state).collect;
    CLG_DEBUG(2,"  entering %s: toggled collection state to %s\n",
              fn->name,
              CLG_(current_state).collect ? "ON" : "OFF");
  }
}
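
/* Note: dump_before, zero_before, dump_after and toggle_collect above are
 * driven by the corresponding command line options (--dump-before=<fn>,
 * --zero-before=<fn>, --dump-after=<fn>, --toggle-collect=<fn>); see the
 * option parsing code and the Callgrind manual for the exact syntax.
 * A hypothetical invocation using them:
 *
 *   valgrind --tool=callgrind --dump-before=main --toggle-collect=main ./prog
 */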

/* Called when the last (non-recursive) activation of a function is left */
static void function_left(fn_node* fn, BBCC* from)
{
  CLG_ASSERT(fn != 0);

  if (fn->dump_after) {
    Char trigger[FN_NAME_LEN];
    VG_(sprintf)(trigger, "--dump-after=%s", fn->name);
    CLG_(dump_profile)(trigger, True);
  }
  if (fn->toggle_collect) {
    CLG_(current_state).collect = !CLG_(current_state).collect;
    CLG_DEBUG(2,"  leaving %s: toggled collection state to %s\n",
              fn->name,
              CLG_(current_state).collect ? "ON" : "OFF");
  }

#if CLG_ENABLE_DEBUG
  if (fn->verbosity >=0) {
    Int old = CLG_(clo).verbose;
    CLG_(clo).verbose = fn->verbosity;
    fn->verbosity = old;
    VG_(message)(Vg_DebugMsg,
                 "Leaving %s: Verbosity set back to %d",
                 fn->name, CLG_(clo).verbose);
  }
#endif
}


/* Push a call onto the call stack.
 *
 * Increments the usage count of the called function and records the
 * jump from <from> to <to>, together with the stack pointer <sp>.
 * If <skip> is true, the target is a function to be skipped;
 * in that case we store jcc = 0.
 */
void CLG_(push_call_stack)(BBCC* from, UInt jmp, BBCC* to, Addr sp, Bool skip)
{
  jCC* jcc;
  UInt* pdepth;
  call_entry* current_entry;
  Addr ret_addr;

  /* Ensure a call stack of size <current_sp>+1.
   * The +1 is needed as push_cxt() will store the
   * context at [current_sp]
   */
  ensure_stack_size(CLG_(current_call_stack).sp +1);
  current_entry = &(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp]);

  if (skip) {
    jcc = 0;
  }
  else {
    fn_node* to_fn = to->cxt->fn[0];

    if (CLG_(current_state).nonskipped) {
      /* this is a jump from a skipped to a non-skipped function */
      CLG_ASSERT(CLG_(current_state).nonskipped == from);
    }

    /* As push_cxt() has to be called before push_call_stack() if not
     * skipping, the old context should already be saved on the stack */
    CLG_ASSERT(current_entry->cxt != 0);
    CLG_(copy_cost_lz)( CLG_(sets).full, &(current_entry->enter_cost),
                        CLG_(current_state).cost );

    jcc = CLG_(get_jcc)(from, jmp, to);
    CLG_ASSERT(jcc != 0);

    pdepth = CLG_(get_fn_entry)(to_fn->number);
    if (CLG_(clo).skip_direct_recursion) {
      /* only increment the depth if another function is called */
      if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)++;
    }
    else (*pdepth)++;

    if (*pdepth>1)
      CLG_(stat).rec_call_counter++;

    jcc->call_counter++;
    CLG_(stat).call_counter++;

    if (*pdepth == 1) function_entered(to_fn, to);
  }

  /* The return address is only useful for a real call;
   * it is used to detect a RET without a matching CALL */
  ret_addr = (from->bb->jmpkind == Ijk_Call) ?
    bb_addr(from->bb) + from->bb->instr_len : 0;
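
  /* Illustration with made-up numbers: for a basic block starting at
   * 0x8048400 that is 0x23 bytes long and ends in the CALL, the expected
   * return address is 0x8048400 + 0x23 = 0x8048423, the first address
   * after the block. */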

  /* put the jcc on the call stack */
  current_entry->jcc = jcc;
  current_entry->sp = sp;
  current_entry->ret_addr = ret_addr;
  current_entry->nonskipped = CLG_(current_state).nonskipped;

  CLG_(current_call_stack).sp++;

  /* To allow for the above assertion, we set the context of the next frame to 0 */
  CLG_ASSERT(CLG_(current_call_stack).sp < CLG_(current_call_stack).size);
  current_entry++;
  current_entry->cxt = 0;

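  /* Bookkeeping for skipped functions: while executing inside a skipped
   * region, CLG_(current_state).nonskipped points at the last non-skipped
   * BBCC, i.e. the caller into the skipped code; its lazily allocated
   * "skipped" cost array is presumably where costs collected in the
   * skipped region get accounted (the accounting itself happens
   * elsewhere). A value of 0 means we are in regular, non-skipped code. */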
  if (!skip)
    CLG_(current_state).nonskipped = 0;
  else if (!CLG_(current_state).nonskipped) {
    /* a call from nonskipped to skipped */
    CLG_(current_state).nonskipped = from;
    if (!CLG_(current_state).nonskipped->skipped) {
      CLG_(init_cost_lz)( CLG_(sets).full,
                          &CLG_(current_state).nonskipped->skipped);
      CLG_(stat).distinct_skips++;
    }
  }

#if CLG_ENABLE_DEBUG
  CLG_DEBUGIF(0) {
    if (CLG_(clo).verbose<2) {
      if (jcc && jcc->to && jcc->to->bb) {
        char spaces[][41] = { "   .   .   .   .   .   .   .   .   .   .",
                              "  .   .   .   .   .   .   .   .   .   . ",
                              " .   .   .   .   .   .   .   .   .   .  ",
                              ".   .   .   .   .   .   .   .   .   .   " };

        int s = CLG_(current_call_stack).sp;
        Int* pars = (Int*) sp;

        BB* bb = jcc->to->bb;
        if (s>40) s=40;
        VG_(printf)("%s> %s(0x%x, 0x%x, ...) [%s / %#lx]\n",
                    spaces[s%4]+40-s, bb->fn->name,
                    pars ? pars[1]:0,
                    pars ? pars[2]:0,
                    bb->obj->name + bb->obj->last_slash_pos,
                    bb->offset);
      }
    }
    else if (CLG_(clo).verbose<4) {
      VG_(printf)("+ %2d ", CLG_(current_call_stack).sp);
      CLG_(print_short_jcc)(jcc);
      VG_(printf)(", SP %#lx, RA %#lx\n", sp, ret_addr);
    }
    else {
      VG_(printf)("  Pushed ");
      CLG_(print_stackentry)(3, CLG_(current_call_stack).sp-1);
    }
  }
#endif

}


/* Pop a call from the call stack and update inclusive cost sums:
 * the cost accumulated since the corresponding push is added to the
 * inclusive cost of the popped jCC.
 */
void CLG_(pop_call_stack)()
{
  jCC* jcc;
  Int depth = 0;
  call_entry* lower_entry;

  if (CLG_(current_state).sig >0) {
    /* Check if we are leaving a signal handler; this can happen when
     * longjmp() is called inside the handler */
    CLG_(run_post_signal_on_call_stack_bottom)();
  }

  lower_entry =
    &(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp-1]);

  CLG_DEBUG(4,"+ pop_call_stack: frame %d, jcc %p\n",
            CLG_(current_call_stack).sp, lower_entry->jcc);

  /* the jCC item is no longer on the real stack: pop it */
  jcc = lower_entry->jcc;
  CLG_(current_state).nonskipped = lower_entry->nonskipped;

  if (jcc) {
    fn_node* to_fn = jcc->to->cxt->fn[0];
    UInt* pdepth = CLG_(get_fn_entry)(to_fn->number);
    if (CLG_(clo).skip_direct_recursion) {
      /* only decrement the depth if another function was called */
      if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)--;
    }
    else (*pdepth)--;
    depth = *pdepth;

    /* add the cost difference to the inclusive sum */
    if ( CLG_(add_diff_cost_lz)( CLG_(sets).full, &(jcc->cost),
                                 lower_entry->enter_cost,
                                 CLG_(current_state).cost) ) {

      /* Only count this call if it attributed some cost;
       * the ret_counter is used to check if a BBCC dump is needed. */
      jcc->from->ret_counter++;
    }
    CLG_(stat).ret_counter++;

    /* restore context */
    CLG_(current_state).cxt = lower_entry->cxt;
    CLG_(current_fn_stack).top =
      CLG_(current_fn_stack).bottom + lower_entry->fn_sp;
    CLG_ASSERT(CLG_(current_state).cxt != 0);

    if (depth == 0) function_left(to_fn, jcc->from);
  }

  /* To allow for an assertion in push_call_stack() */
  lower_entry->cxt = 0;

  CLG_(current_call_stack).sp--;

#if CLG_ENABLE_DEBUG
  CLG_DEBUGIF(1) {
    if (CLG_(clo).verbose<4) {
      if (jcc) {
        /* popped JCC target first */
        VG_(printf)("- %2d %#lx => ",
                    CLG_(current_call_stack).sp,
                    bb_addr(jcc->to->bb));
        CLG_(print_addr)(bb_jmpaddr(jcc->from->bb));
        VG_(printf)(", SP %#lx\n",
                    CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
        CLG_(print_cost)(10, CLG_(sets).full, jcc->cost);
      }
      else
        VG_(printf)("- %2d [Skipped JCC], SP %#lx\n",
                    CLG_(current_call_stack).sp,
                    CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
    }
    else {
      VG_(printf)("  Popped ");
      CLG_(print_stackentry)(7, CLG_(current_call_stack).sp);
      if (jcc) {
        VG_(printf)(" returned to ");
        CLG_(print_addr_ln)(bb_jmpaddr(jcc->from->bb));
      }
    }
  }
#endif

}
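
/* Worked example with made-up numbers for the inclusive cost update above:
 * if the global instruction counter stood at Ir=1000 when the call was
 * pushed (saved in enter_cost) and stands at Ir=1500 at the pop, then 500
 * Ir are added to jcc->cost, i.e. the call is charged with everything
 * executed between push and pop, including nested calls. */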


/* Remove call stack entries to synchronize with the current stack pointer
 */
void CLG_(unwind_call_stack)(Addr sp, Int minpops)
{
  Int csp;
  CLG_DEBUG(4,"+ unwind_call_stack(sp %#lx, minpops %d): frame %d\n",
            sp, minpops, CLG_(current_call_stack).sp);

  /* We pop old stack frames.
   * For a call, let p be the stack address holding the return address.
   * - the SP recorded for the frame is the SP after the CALL: p-4
   * - the current SP after a RET is >= p
   */

  while( (csp=CLG_(current_call_stack).sp) >0) {
    call_entry* top_ce = &(CLG_(current_call_stack).entry[csp-1]);

    if ((top_ce->sp < sp) ||
        ((top_ce->sp == sp) && minpops>0)) {

      minpops--;
      CLG_(pop_call_stack)();
      csp=CLG_(current_call_stack).sp;
      continue;
    }
    break;
  }

  CLG_DEBUG(4,"- unwind_call_stack\n");
}
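
/* A minimal model of the unwind rule above, for illustration only (not
 * part of Callgrind; plain C types stand in for the CLG_ ones). Frames
 * whose recorded SP is below the current SP have been abandoned by the
 * guest, so they are popped; a frame with an equal SP is only popped when
 * the caller insists on a minimum number of pops, e.g. after a RET.
 *
 *   static int frames_to_pop(unsigned long* frame_sp, int nframes,
 *                            unsigned long cur_sp, int minpops)
 *   {
 *     int pops = 0;
 *     while (nframes - pops > 0) {
 *       unsigned long top = frame_sp[nframes - pops - 1];
 *       if (top < cur_sp || (top == cur_sp && minpops > 0)) {
 *         minpops--;
 *         pops++;
 *         continue;
 *       }
 *       break;
 *     }
 *     return pops;
 *   }
 */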