/*--------------------------------------------------------------------*/
/*--- Callgrind                                                    ---*/
/*---                                               ct_callstack.c ---*/
/*--------------------------------------------------------------------*/

/*
   This file is part of Callgrind, a Valgrind tool for call tracing.

   Copyright (C) 2002-2017, Josef Weidendorfer (Josef.Weidendorfer@gmx.de)

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, see <http://www.gnu.org/licenses/>.

   The GNU General Public License is contained in the file COPYING.
*/

#include "global.h"

/*------------------------------------------------------------*/
/*--- Call stack, operations                              ---*/
/*------------------------------------------------------------*/

/* Stack of current thread. Gets initialized when switching to 1st thread.
 *
 * The artificial call stack is an array of call_entry's, representing
 * stack frames of the executing program.
 * Array call_stack and call_stack_esp have same size and grow on demand.
 * Array call_stack_esp holds SPs of corresponding stack frames.
 */

#define N_CALL_STACK_INITIAL_ENTRIES 500

call_stack CLG_(current_call_stack);

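/* A call_entry describes one artificial stack frame (the struct itself is
 * declared in global.h). The fields this file relies on are:
 *   jcc         - call arc (jCC) taken to enter the frame; 0 for a call
 *                 into a skipped function
 *   sp          - guest stack pointer directly after the call
 *   ret_addr    - expected return address, 0 if not entered via a real call
 *   nonskipped  - non-skipped caller BBCC while executing skipped code
 *   cxt, fn_sp  - context and function-stack position saved by push_cxt()
 *                 before the call, restored again in pop_call_stack()
 *   enter_cost  - cost snapshot at entry, used to compute the inclusive
 *                 cost of the call on pop
 */
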
void CLG_(init_call_stack)(call_stack* s)
{
  Int i;

  s->size  = N_CALL_STACK_INITIAL_ENTRIES;
  s->entry = (call_entry*) CLG_MALLOC("cl.callstack.ics.1",
                                      s->size * sizeof(call_entry));
  s->sp = 0;
  s->entry[0].cxt = 0; /* for assertion in push_cxt() */

  for(i=0; i<s->size; i++) s->entry[i].enter_cost = 0;
}

call_entry* CLG_(get_call_entry)(Int sp)
{
  CLG_ASSERT(sp <= CLG_(current_call_stack).sp);
  return &(CLG_(current_call_stack).entry[sp]);
}

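/* The two helpers below copy only the stack descriptor (entry pointer, size
 * and stack pointer), not the entries themselves, so source and destination
 * share the same entry array. They are presumably used to save and restore
 * the per-thread call stack when the active thread changes (see the comment
 * at the top of this file). */
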
void CLG_(copy_current_call_stack)(call_stack* dst)
{
  dst->size  = CLG_(current_call_stack).size;
  dst->entry = CLG_(current_call_stack).entry;
  dst->sp    = CLG_(current_call_stack).sp;
}

void CLG_(set_current_call_stack)(call_stack* s)
{
  CLG_(current_call_stack).size  = s->size;
  CLG_(current_call_stack).entry = s->entry;
  CLG_(current_call_stack).sp    = s->sp;
}

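/* Grow the call stack so that index <i> is valid. The size is doubled until
 * it fits, so even deeply nested call chains trigger only O(log depth)
 * reallocations; the enter_cost of all newly added entries is zeroed, just
 * as in CLG_(init_call_stack)(). */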
static void ensure_stack_size(Int i)
{
  Int oldsize;
  call_stack *cs = &CLG_(current_call_stack);

  if (i < cs->size) return;

  oldsize = cs->size;
  cs->size *= 2;
  while (i > cs->size) cs->size *= 2;

  cs->entry = (call_entry*) VG_(realloc)("cl.callstack.ess.1",
                                         cs->entry,
                                         cs->size * sizeof(call_entry));

  for(i=oldsize; i<cs->size; i++)
    cs->entry[i].enter_cost = 0;

  CLG_(stat).call_stack_resizes++;

  CLG_DEBUGIF(2)
    VG_(printf)("        call stack enlarged to %u entries\n",
                CLG_(current_call_stack).size);
}

/* Called when function entered nonrecursive */
static void function_entered(fn_node* fn)
{
  if (fn->verbosity >= 0) {
    Int old = CLG_(clo).verbose;
    CLG_(clo).verbose = fn->verbosity;
    fn->verbosity = old;   /* saved level is restored in function_left() */
    VG_(message)(Vg_DebugMsg,
                 "Entering %s: Verbosity set to %d\n",
                 fn->name, CLG_(clo).verbose);
  }

  if (fn->dump_before) {
    HChar trigger[VG_(strlen)(fn->name) + 20];
    VG_(sprintf)(trigger, "--dump-before=%s", fn->name);
    CLG_(dump_profile)(trigger, True);
  }
  else if (fn->zero_before) {
    CLG_(zero_all_cost)(True);
  }

  if (fn->toggle_collect) {
    CLG_(current_state).collect = !CLG_(current_state).collect;
    CLG_DEBUG(2,"   entering %s: toggled collection state to %s\n",
              fn->name,
              CLG_(current_state).collect ? "ON" : "OFF");
  }
}

/* Called when function left (no recursive level active) */
static void function_left(fn_node* fn)
{
  if (fn->dump_after) {
    HChar trigger[VG_(strlen)(fn->name) + 20];
    VG_(sprintf)(trigger, "--dump-after=%s", fn->name);
    CLG_(dump_profile)(trigger, True);
  }
  if (fn->toggle_collect) {
    CLG_(current_state).collect = !CLG_(current_state).collect;
    CLG_DEBUG(2,"   leaving %s: toggled collection state to %s\n",
              fn->name,
              CLG_(current_state).collect ? "ON" : "OFF");
  }

  if (fn->verbosity >= 0) {
    Int old = CLG_(clo).verbose;
    CLG_(clo).verbose = fn->verbosity;   /* restore the level saved at entry */
    fn->verbosity = old;
    VG_(message)(Vg_DebugMsg,
                 "Leaving %s: Verbosity set back to %d\n",
                 fn->name, CLG_(clo).verbose);
  }
}

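/* How the operations below fit together: push_call_stack() runs at every
 * call-like jump and records the jCC, the guest SP and the expected return
 * address of the new frame; unwind_call_stack() runs when the guest SP is
 * seen above a recorded frame SP (e.g. after a RET) and pops the
 * corresponding frames via pop_call_stack(), which charges the cost
 * accumulated since the call to the popped jCC. */
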
/* Push call on call stack.
 *
 * Increment the usage count for the function called.
 * A jump from <from> to <to>, with <sp>.
 * If <skip> is true, this is a call to a function to be skipped;
 * for this, we set jcc = 0.
 */
void CLG_(push_call_stack)(BBCC* from, UInt jmp, BBCC* to, Addr sp, Bool skip)
{
    jCC* jcc;
    UInt* pdepth;
    call_entry* current_entry;
    Addr ret_addr;

    /* Ensure a call stack of size <current_sp>+1.
     * The +1 is needed as push_cxt will store the
     * context at [current_sp]
     */
    ensure_stack_size(CLG_(current_call_stack).sp +1);
    current_entry =
        &(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp]);

    if (skip) {
        jcc = 0;
    }
    else {
        fn_node* to_fn = to->cxt->fn[0];

        if (CLG_(current_state).nonskipped) {
            /* this is a jmp from skipped to nonskipped */
            CLG_ASSERT(CLG_(current_state).nonskipped == from);
        }

        /* As push_cxt() has to be called before push_call_stack if not
         * skipping, the old context should already be saved on the stack */
        CLG_ASSERT(current_entry->cxt != 0);
        CLG_(copy_cost_lz)( CLG_(sets).full, &(current_entry->enter_cost),
                            CLG_(current_state).cost );

        jcc = CLG_(get_jcc)(from, jmp, to);
        CLG_ASSERT(jcc != 0);

        pdepth = CLG_(get_fn_entry)(to_fn->number);
        if (CLG_(clo).skip_direct_recursion) {
            /* only increment depth if another function is called */
            if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)++;
        }
        else (*pdepth)++;

        if (*pdepth > 1)
            CLG_(stat).rec_call_counter++;

        CLG_(stat).call_counter++;

        if (*pdepth == 1) function_entered(to_fn);
    }

    /* return address is only useful with a real call;
     * used to detect RET w/o CALL */
    if (from->bb->jmp[jmp].jmpkind == jk_Call) {
        UInt instr = from->bb->jmp[jmp].instr;
        ret_addr = bb_addr(from->bb) +
            from->bb->instr[instr].instr_offset +
            from->bb->instr[instr].instr_size;
    }
    else
        ret_addr = 0;

    /* put jcc on call stack */
    current_entry->jcc = jcc;
    current_entry->sp = sp;
    current_entry->ret_addr = ret_addr;
    current_entry->nonskipped = CLG_(current_state).nonskipped;

    CLG_(current_call_stack).sp++;

    /* To allow for above assertion we set context of next frame to 0 */
    CLG_ASSERT(CLG_(current_call_stack).sp < CLG_(current_call_stack).size);
    current_entry++;
    current_entry->cxt = 0;

    if (!skip)
        CLG_(current_state).nonskipped = 0;
    else if (!CLG_(current_state).nonskipped) {
        /* a call from nonskipped to skipped */
        CLG_(current_state).nonskipped = from;
        if (!CLG_(current_state).nonskipped->skipped) {
            CLG_(init_cost_lz)( CLG_(sets).full,
                                &CLG_(current_state).nonskipped->skipped );
            CLG_(stat).distinct_skips++;
        }
    }

    CLG_DEBUGIF(0) {
        if (CLG_(clo).verbose<2) {
            if (jcc && jcc->to && jcc->to->bb) {
                const HChar spaces[][41] = {
                    "   .   .   .   .   .   .   .   .   .   .",
                    "  .   .   .   .   .   .   .   .   .   . ",
                    " .   .   .   .   .   .   .   .   .   .  ",
                    ".   .   .   .   .   .   .   .   .   .   " };

                int s = CLG_(current_call_stack).sp;
                UInt* pars = (UInt*) sp;
                BB* bb = jcc->to->bb;
                if (s > 40) s = 40;

                VG_(printf)("%s> %s(0x%x, 0x%x, ...) [%s / %#lx]\n",
                            spaces[s%4]+40-s, bb->fn->name,
                            pars ? pars[1] : 0,
                            pars ? pars[2] : 0,
                            bb->obj->name + bb->obj->last_slash_pos,
                            (UWord)bb->offset);
            }
        }
        else if (CLG_(clo).verbose<4) {
            VG_(printf)("+ %2d ", CLG_(current_call_stack).sp);
            CLG_(print_short_jcc)(jcc);
            VG_(printf)(", SP %#lx, RA %#lx\n", sp, ret_addr);
        }
        else {
            VG_(printf)("  Pushed ");
            CLG_(print_stackentry)(3, CLG_(current_call_stack).sp-1);
        }
    }
}

/* Pop call stack and update inclusive sums.
 * Updates the cost of the jCC that is popped.
 *
 * If the JCC becomes inactive, call entries are freed if possible
 */
void CLG_(pop_call_stack)(void)
{
    jCC* jcc;
    Int depth = 0;
    call_entry* lower_entry;

    if (CLG_(current_state).sig > 0) {
        /* Check if we leave a signal handler; this can happen when
         * calling longjmp() in the handler */
        CLG_(run_post_signal_on_call_stack_bottom)();
    }

    lower_entry =
        &(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp-1]);

    CLG_DEBUG(4,"+ pop_call_stack: frame %d, jcc %p\n",
              CLG_(current_call_stack).sp, lower_entry->jcc);

    /* jCC item not any more on real stack: pop */
    jcc = lower_entry->jcc;
    CLG_(current_state).nonskipped = lower_entry->nonskipped;

    if (jcc) {
        fn_node* to_fn = jcc->to->cxt->fn[0];
        UInt* pdepth = CLG_(get_fn_entry)(to_fn->number);
        if (CLG_(clo).skip_direct_recursion) {
            /* only decrement depth if another function was called */
            if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)--;
        }
        else (*pdepth)--;
        depth = *pdepth;

        /* add cost difference to sum */
        if ( CLG_(add_diff_cost_lz)( CLG_(sets).full, &(jcc->cost),
                                     lower_entry->enter_cost,
                                     CLG_(current_state).cost) ) {

            /* only count this call if it attributed some cost.
             * the ret_counter is used to check if a BBCC dump is needed.
             */
            jcc->from->ret_counter++;
        }
        CLG_(stat).ret_counter++;

        /* restore context */
        CLG_(current_state).cxt = lower_entry->cxt;
        CLG_(current_fn_stack).top =
            CLG_(current_fn_stack).bottom + lower_entry->fn_sp;
        CLG_ASSERT(CLG_(current_state).cxt != 0);

        if (depth == 0) function_left(to_fn);
    }

    /* To allow for an assertion in push_call_stack() */
    lower_entry->cxt = 0;

    CLG_(current_call_stack).sp--;

    CLG_DEBUGIF(1) {
        if (CLG_(clo).verbose<4) {
            if (jcc) {
                /* popped JCC target first */
                VG_(printf)("- %2d %#lx => ",
                            CLG_(current_call_stack).sp,
                            bb_addr(jcc->to->bb));
                CLG_(print_addr)(bb_jmpaddr(jcc->from->bb));
                VG_(printf)(", SP %#lx\n",
                            CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
                CLG_(print_cost)(10, CLG_(sets).full, jcc->cost);
            }
            else
                VG_(printf)("- %2d [Skipped JCC], SP %#lx\n",
                            CLG_(current_call_stack).sp,
                            CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
        }
        else {
            VG_(printf)("   Popped ");
            CLG_(print_stackentry)(7, CLG_(current_call_stack).sp);
            if (jcc) {
                VG_(printf)("   returned to ");
                CLG_(print_addr_ln)(bb_jmpaddr(jcc->from->bb));
            }
        }
    }
}

/* Unwind enough CallStack items to sync with current stack pointer.
 * Returns the number of stack frames unwound.
 */
Int CLG_(unwind_call_stack)(Addr sp, Int minpops)
{
    Int csp;
    Int unwind_count = 0;
    CLG_DEBUG(4,"+ unwind_call_stack(sp %#lx, minpops %d): frame %d\n",
              sp, minpops, CLG_(current_call_stack).sp);

    /* We pop old stack frames.
     * For a call, let p be the stack address holding the return address.
     *  - call_stack_esp[] has SP after the CALL: p-4
     *  - current sp is after a RET: >= p
     */
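    /* Example with 4-byte return addresses (as assumed above): a CALL
     * pushes the return address at address p and leaves SP = p-4, which is
     * what the call entry records; after the matching RET the guest SP is
     * >= p, so top_ce->sp < sp holds below and the frame gets popped. */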
    while( (csp=CLG_(current_call_stack).sp) > 0) {
        call_entry* top_ce = &(CLG_(current_call_stack).entry[csp-1]);

        if ((top_ce->sp < sp) ||
            ((top_ce->sp == sp) && minpops > 0)) {

            minpops--;
            unwind_count++;
            CLG_(pop_call_stack)();
            csp = CLG_(current_call_stack).sp;
            continue;
        }
        break;
    }

    CLG_DEBUG(4,"- unwind_call_stack\n");

    return unwind_count;
}