/*--------------------------------------------------------------------*/
/*--- Callgrind                                                    ---*/
/*---                                               ct_callstack.c ---*/
/*--------------------------------------------------------------------*/

/*
   This file is part of Callgrind, a Valgrind tool for call tracing.

   Copyright (C) 2002-2013, Josef Weidendorfer (Josef.Weidendorfer@gmx.de)

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307, USA.

   The GNU General Public License is contained in the file COPYING.
*/

#include "global.h"

/*------------------------------------------------------------*/
/*--- Call stack, operations                                ---*/
/*------------------------------------------------------------*/

/* Stack of current thread. Gets initialized when switching to 1st thread.
 *
 * The artificial call stack is an array of call_entry's, representing
 * stack frames of the executing program.
 * Arrays call_stack and call_stack_esp have the same size and grow on demand.
 * Array call_stack_esp holds the SPs of corresponding stack frames.
 */
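/* A call_entry (full definition in global.h) carries the per-frame state
 * used throughout this file. Roughly, for orientation only:
 *
 *   jcc         - the jCC (call arc) of this frame, 0 for skipped calls
 *   enter_cost  - snapshot of the event counters when the frame was pushed
 *   sp          - guest stack pointer directly after the call
 *   ret_addr    - expected return address, 0 if the "call" was not a CALL
 *   nonskipped  - active non-skipped caller while inside skipped code
 *   cxt, fn_sp  - context and function stack index to restore on pop
 */
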
#define N_CALL_STACK_INITIAL_ENTRIES 500

call_stack CLG_(current_call_stack);

void CLG_(init_call_stack)(call_stack* s)
{
  Int i;

  s->size = N_CALL_STACK_INITIAL_ENTRIES;
  s->entry = (call_entry*) CLG_MALLOC("cl.callstack.ics.1",
                                      s->size * sizeof(call_entry));
  s->sp = 0;
  s->entry[0].cxt = 0; /* for assertion in push_cxt() */

  for(i=0; i<s->size; i++) s->entry[i].enter_cost = 0;
}

call_entry* CLG_(get_call_entry)(Int sp)
{
  CLG_ASSERT(sp <= CLG_(current_call_stack).sp);
  return &(CLG_(current_call_stack).entry[sp]);
}

void CLG_(copy_current_call_stack)(call_stack* dst)
{
  dst->size  = CLG_(current_call_stack).size;
  dst->entry = CLG_(current_call_stack).entry;
  dst->sp    = CLG_(current_call_stack).sp;
}

void CLG_(set_current_call_stack)(call_stack* s)
{
  CLG_(current_call_stack).size  = s->size;
  CLG_(current_call_stack).entry = s->entry;
  CLG_(current_call_stack).sp    = s->sp;
}

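/* Note: the two functions above move a call stack wholesale; the entry array
 * is shared by pointer, not duplicated. They are presumably used when the
 * active thread changes (see the comment on CLG_(current_call_stack) above),
 * so that the global stack always reflects the currently executing thread. */
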
/* Enlarge the current call stack so that index <i> becomes usable */
static void ensure_stack_size(Int i)
{
  Int oldsize;
  call_stack *cs = &CLG_(current_call_stack);

  if (i < cs->size) return;

  oldsize = cs->size;
  while (i > cs->size) cs->size *= 2;

  cs->entry = (call_entry*) VG_(realloc)("cl.callstack.ess.1",
                                         cs->entry,
                                         cs->size * sizeof(call_entry));
  for(i=oldsize; i<cs->size; i++)
    cs->entry[i].enter_cost = 0;

  CLG_(stat).call_stack_resizes++;

  CLG_DEBUGIF(2)
    VG_(printf)("        call stack enlarged to %d entries\n",
                CLG_(current_call_stack).size);
}

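/* Growth is by doubling: starting from N_CALL_STACK_INITIAL_ENTRIES (500),
 * a request for index 1500 enlarges the array 500 -> 1000 -> 2000 entries.
 * Only the newly added entries get enter_cost cleared; existing frames keep
 * their snapshots across the realloc. */
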
/* Called when a function is entered non-recursively */
static void function_entered(fn_node* fn)
{
  if (fn->verbosity >= 0) {
    Int old = CLG_(clo).verbose;
    CLG_(clo).verbose = fn->verbosity;
    fn->verbosity = old;
    VG_(message)(Vg_DebugMsg,
                 "Entering %s: Verbosity set to %d\n",
                 fn->name, CLG_(clo).verbose);
  }

  if (fn->dump_before) {
    HChar trigger[VG_(strlen)(fn->name) + 20];
    VG_(sprintf)(trigger, "--dump-before=%s", fn->name);
    CLG_(dump_profile)(trigger, True);
  }
  else if (fn->zero_before) {
    CLG_(zero_all_cost)(True);
  }

  if (fn->toggle_collect) {
    CLG_(current_state).collect = !CLG_(current_state).collect;
    CLG_DEBUG(2,"   entering %s: toggled collection state to %s\n",
              fn->name,
              CLG_(current_state).collect ? "ON" : "OFF");
  }
}

/* Called when a function is left (no recursive level active) */
static void function_left(fn_node* fn)
{
  if (fn->dump_after) {
    HChar trigger[VG_(strlen)(fn->name) + 20];
    VG_(sprintf)(trigger, "--dump-after=%s", fn->name);
    CLG_(dump_profile)(trigger, True);
  }

  if (fn->toggle_collect) {
    CLG_(current_state).collect = !CLG_(current_state).collect;
    CLG_DEBUG(2,"   leaving %s: toggled collection state to %s\n",
              fn->name,
              CLG_(current_state).collect ? "ON" : "OFF");
  }

  if (fn->verbosity >= 0) {
    Int old = CLG_(clo).verbose;
    CLG_(clo).verbose = fn->verbosity;
    fn->verbosity = old;
    VG_(message)(Vg_DebugMsg,
                 "Leaving %s: Verbosity set back to %d\n",
                 fn->name, CLG_(clo).verbose);
  }
}

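/* The dump_before/dump_after, zero_before and toggle_collect flags checked
 * above are per-function attributes set from the Callgrind options of the
 * same names (the generated trigger strings "--dump-before=<fn>" and
 * "--dump-after=<fn>" reuse the option spelling). Both hooks fire only when
 * the recursion depth of the function crosses between 0 and 1, so a
 * recursive function triggers them once per outermost activation. */
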
/* Push call on call stack.
 *
 * Increment the usage count for the function called.
 * A jump from <from> to <to>, with <sp>.
 * If <skip> is true, this is a call to a function to be skipped;
 * for this, we set jcc = 0.
 */
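/* A simplified sketch of how the stack operations are driven (not the exact
 * caller code, which lives in the instrumentation): on a call, the context
 * is pushed first, then the frame; on a return, the stack is unwound until
 * it matches the guest SP again.
 *
 *   if (!skip) CLG_(push_cxt)(to->cxt);              // context first ...
 *   CLG_(push_call_stack)(from, jmp, to, sp, skip);  // ... then the frame
 *   ...
 *   CLG_(unwind_call_stack)(sp_after_ret, minpops);  // pops matching frames
 *
 * push_call_stack() below asserts this ordering: for a non-skipped call the
 * context must already be stored at the current stack index. */
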
void CLG_(push_call_stack)(BBCC* from, UInt jmp, BBCC* to, Addr sp, Bool skip)
{
    jCC* jcc;
    UInt* pdepth;
    call_entry* current_entry;
    Addr ret_addr;

    /* Ensure a call stack of size <current_sp>+1.
     * The +1 is needed as push_cxt will store the
     * context at [current_sp]
     */
    ensure_stack_size(CLG_(current_call_stack).sp + 1);
    current_entry = &(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp]);

    if (skip) {
        jcc = 0;
    }
    else {
        fn_node* to_fn = to->cxt->fn[0];

        if (CLG_(current_state).nonskipped) {
            /* this is a jmp from skipped to nonskipped */
            CLG_ASSERT(CLG_(current_state).nonskipped == from);
        }

        /* As push_cxt() has to be called before push_call_stack if not
         * skipping, the old context should already be saved on the stack */
        CLG_ASSERT(current_entry->cxt != 0);
        CLG_(copy_cost_lz)( CLG_(sets).full, &(current_entry->enter_cost),
                            CLG_(current_state).cost );

        jcc = CLG_(get_jcc)(from, jmp, to);
        CLG_ASSERT(jcc != 0);

        pdepth = CLG_(get_fn_entry)(to_fn->number);
        if (CLG_(clo).skip_direct_recursion) {
            /* only increment depth if another function is called */
            if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)++;
        }
        else (*pdepth)++;

        if (*pdepth > 1)
            CLG_(stat).rec_call_counter++;

        jcc->call_counter++;
        CLG_(stat).call_counter++;

        if (*pdepth == 1) function_entered(to_fn);
    }

    /* return address is only useful with a real call;
     * used to detect RET w/o CALL */
    if (from->bb->jmp[jmp].jmpkind == jk_Call) {
        UInt instr = from->bb->jmp[jmp].instr;
        ret_addr = bb_addr(from->bb) +
            from->bb->instr[instr].instr_offset +
            from->bb->instr[instr].instr_size;
    }
    else
        ret_addr = 0;

    /* put jcc on call stack */
    current_entry->jcc = jcc;
    current_entry->sp = sp;
    current_entry->ret_addr = ret_addr;
    current_entry->nonskipped = CLG_(current_state).nonskipped;

    CLG_(current_call_stack).sp++;

    /* To allow for above assertion we set context of next frame to 0 */
    CLG_ASSERT(CLG_(current_call_stack).sp < CLG_(current_call_stack).size);
    current_entry++;
    current_entry->cxt = 0;

    if (!skip)
        CLG_(current_state).nonskipped = 0;
    else if (!CLG_(current_state).nonskipped) {
        /* a call from nonskipped to skipped */
        CLG_(current_state).nonskipped = from;
        if (!CLG_(current_state).nonskipped->skipped) {
            CLG_(init_cost_lz)( CLG_(sets).full,
                                &CLG_(current_state).nonskipped->skipped);
            CLG_(stat).distinct_skips++;
        }
    }

    CLG_DEBUGIF(0) {
        if (CLG_(clo).verbose < 2) {
            if (jcc && jcc->to && jcc->to->bb) {
                const HChar spaces[][41] = {
                    "   .   .   .   .   .   .   .   .   .   .",
                    "  .   .   .   .   .   .   .   .   .   . ",
                    " .   .   .   .   .   .   .   .   .   .  ",
                    ".   .   .   .   .   .   .   .   .   .   " };

                int s = CLG_(current_call_stack).sp;
                Int* pars = (Int*) sp;
                BB* bb = jcc->to->bb;
                if (s > 40) s = 40;

                VG_(printf)("%s> %s(0x%x, 0x%x, ...) [%s / %#lx]\n",
                            spaces[s%4] + 40 - s, bb->fn->name,
                            pars ? pars[1] : 0,
                            pars ? pars[2] : 0,
                            bb->obj->name + bb->obj->last_slash_pos,
                            (UWord)bb->offset);
            }
        }
        else if (CLG_(clo).verbose < 4) {
            VG_(printf)("+ %2d ", CLG_(current_call_stack).sp);
            CLG_(print_short_jcc)(jcc);
            VG_(printf)(", SP %#lx, RA %#lx\n", sp, ret_addr);
        }
        else {
            VG_(printf)("  Pushed ");
            CLG_(print_stackentry)(3, CLG_(current_call_stack).sp - 1);
        }
    }
}

/* Pop call stack and update inclusive sums.
 * The jcc of the popped frame gets the inclusive cost difference added.
 *
 * If the JCC becomes inactive, call entries are freed if possible
 */
void CLG_(pop_call_stack)()
{
    jCC* jcc;
    Int depth = 0;
    call_entry* lower_entry;

    if (CLG_(current_state).sig > 0) {
        /* Check if we leave a signal handler; this can happen when
         * calling longjmp() in the handler */
        CLG_(run_post_signal_on_call_stack_bottom)();
    }

    lower_entry =
        &(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp - 1]);

    CLG_DEBUG(4,"+ pop_call_stack: frame %d, jcc %p\n",
              CLG_(current_call_stack).sp, lower_entry->jcc);

    /* jCC item not any more on real stack: pop */
    jcc = lower_entry->jcc;
    CLG_(current_state).nonskipped = lower_entry->nonskipped;

    if (jcc) {
        fn_node* to_fn = jcc->to->cxt->fn[0];
        UInt* pdepth = CLG_(get_fn_entry)(to_fn->number);
        if (CLG_(clo).skip_direct_recursion) {
            /* only decrement depth if another function was called */
            if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)--;
        }
        else (*pdepth)--;
        depth = *pdepth;

        /* add cost difference to sum */
        if ( CLG_(add_diff_cost_lz)( CLG_(sets).full, &(jcc->cost),
                                     lower_entry->enter_cost,
                                     CLG_(current_state).cost) ) {

            /* only count this call if it attributed some cost.
             * the ret_counter is used to check if a BBCC dump is needed.
             */
            jcc->from->ret_counter++;
        }
        CLG_(stat).ret_counter++;

        /* restore context */
        CLG_(current_state).cxt = lower_entry->cxt;
        CLG_(current_fn_stack).top =
            CLG_(current_fn_stack).bottom + lower_entry->fn_sp;
        CLG_ASSERT(CLG_(current_state).cxt != 0);

        if (depth == 0) function_left(to_fn);
    }

    /* To allow for an assertion in push_call_stack() */
    lower_entry->cxt = 0;

    CLG_(current_call_stack).sp--;

    CLG_DEBUGIF(1) {
        if (CLG_(clo).verbose < 4) {
            if (jcc) {
                /* popped JCC target first */
                VG_(printf)("- %2d %#lx => ",
                            CLG_(current_call_stack).sp,
                            bb_addr(jcc->to->bb));
                CLG_(print_addr)(bb_jmpaddr(jcc->from->bb));
                VG_(printf)(", SP %#lx\n",
                            CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
                CLG_(print_cost)(10, CLG_(sets).full, jcc->cost);
            }
            else
                VG_(printf)("- %2d [Skipped JCC], SP %#lx\n",
                            CLG_(current_call_stack).sp,
                            CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
        }
        else {
            VG_(printf)("  Popped ");
            CLG_(print_stackentry)(7, CLG_(current_call_stack).sp);
            if (jcc) {
                VG_(printf)("   returned to ");
                CLG_(print_addr_ln)(bb_jmpaddr(jcc->from->bb));
            }
        }
    }
}

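/* Inclusive cost bookkeeping in short: push_call_stack() snapshots the global
 * event counters into enter_cost; pop_call_stack() adds the difference
 * "current counters - enter_cost" to the cost of the call's jcc. With a
 * single counter: if it reads 100 when foo() is entered and 350 when the
 * frame is popped, 250 is attributed to the caller->foo arc, covering foo()
 * and everything it called in between. */
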
/* Unwind enough CallStack items to sync with current stack pointer.
 * Returns the number of stack frames unwound.
 */
Int CLG_(unwind_call_stack)(Addr sp, Int minpops)
{
    Int csp;
    Int unwind_count = 0;
    CLG_DEBUG(4,"+ unwind_call_stack(sp %#lx, minpops %d): frame %d\n",
              sp, minpops, CLG_(current_call_stack).sp);

    /* We pop old stack frames.
     * For a call, let p be the stack address holding the return address.
     *  - call_stack_esp[] has SP after the CALL: p-4
     *  - current sp is after a RET: >= p
     */
    while( (csp = CLG_(current_call_stack).sp) > 0) {
        call_entry* top_ce = &(CLG_(current_call_stack).entry[csp - 1]);

        if ((top_ce->sp < sp) ||
            ((top_ce->sp == sp) && minpops > 0)) {

            minpops--;
            unwind_count++;
            CLG_(pop_call_stack)();
            csp = CLG_(current_call_stack).sp;
            continue;
        }
        break;
    }

    CLG_DEBUG(4,"- unwind_call_stack\n");

    return unwind_count;
}

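/* Worked example for the unwind condition above: with frames whose recorded
 * SPs are 0xffe0, 0xffc0 and 0xffa0 (guest stack growing downwards), a RET
 * that leaves the guest SP at 0xffd0 pops the two innermost frames (their
 * recorded SPs are below 0xffd0) and keeps the 0xffe0 frame. <minpops>
 * forces extra pops when the SP alone is ambiguous, i.e. when the recorded
 * SP equals the current one. */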