4 * Copyright (C) 2008, 2009 Paul Mundt
5 * Copyright (C) 2008, 2009 Matt Fleming
7 * This file is subject to the terms and conditions of the GNU General Public
8 * License. See the file "COPYING" in the main directory of this archive
11 #include <asm/ftrace.h>
12 #include <asm/thread_info.h>
13 #include <asm/asm-offsets.h>
15 #define MCOUNT_ENTER() \
25 #define MCOUNT_LEAVE() \
33 #ifdef CONFIG_STACK_DEBUG
35 * Perform diagnostic checks on the state of the kernel stack.
37 * Check for stack overflow. If there is less than 1KB free
38 * then it has overflowed.
40 * Make sure the stack pointer contains a valid address. Valid
41 * addresses for kernel stacks are anywhere after the bss
42 * (after __bss_stop) and anywhere in init_thread_union (init_stack).
44 #define STACK_CHECK() \
45 mov #(THREAD_SIZE >> 10), r0; \
49 /* r1 = sp & (THREAD_SIZE - 1) */ \
55 mov #(STACK_WARN >> 8), r2; \
59 /* Is the stack overflowing? */ \
63 /* If sp > __bss_stop then we're OK. */ \
68 /* If sp < init_stack, we're not OK. */ \
69 mov.l .L_init_thread_union, r1; \
73 /* If sp > init_stack && sp < __bss_stop, not OK. */ \
80 #endif /* CONFIG_STACK_DEBUG */
84 .type _mcount,@function
86 .type mcount,@function
91 #ifndef CONFIG_FUNCTION_TRACER
95 #ifndef CONFIG_DYNAMIC_FTRACE
96 mov.l .Lfunction_trace_stop, r0
104 #ifdef CONFIG_DYNAMIC_FTRACE
107 mov.l .Lftrace_stub, r6
109 mov.l .Lftrace_trace_function, r6
110 mov.l ftrace_stub, r7
119 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
120 mov.l .Lftrace_graph_return, r6
121 mov.l .Lftrace_stub, r7
125 mov.l .Lftrace_graph_caller, r0
130 mov.l .Lftrace_graph_entry, r6
131 mov.l .Lftrace_graph_entry_stub, r7
135 mov.l .Lftrace_graph_caller, r0
/*
 * Literal pool for the function-graph tracer: each entry is a 32-bit
 * symbol address fetched PC-relative by a mov.l instruction in the
 * code above (SH mov.l @(disp,PC) addressing).
 * NOTE(review): a comment elsewhere in this file warns that moving
 * ftrace_graph_call/ftrace_caller affects the GRAPH_INSN_OFFSET
 * calculation -- do not reorder or insert data into this pool.
 */
140 .Lftrace_graph_return:
141 .long ftrace_graph_return
142 .Lftrace_graph_entry:
143 .long ftrace_graph_entry
144 .Lftrace_graph_entry_stub:
145 .long ftrace_graph_entry_stub
146 .Lftrace_graph_caller:
147 .long ftrace_graph_caller
148 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
/*
 * 32-bit address of the kernel's ftrace_trace_function pointer,
 * loaded PC-relative (mov.l) into r6 above and compared against
 * ftrace_stub -- presumably to detect whether a tracer is
 * installed; confirm against the full mcount body.
 */
155 .Lftrace_trace_function:
156 .long ftrace_trace_function
158 #ifdef CONFIG_DYNAMIC_FTRACE
159 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
161 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
162 * as this will affect the calculation of GRAPH_INSN_OFFSET.
164 .globl ftrace_graph_call
166 mov.l .Lskip_trace, r0
173 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
177 mov.l .Lfunction_trace_stop, r0
186 mov.l .Lftrace_stub, r6
190 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
191 bra ftrace_graph_call
195 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
196 #endif /* CONFIG_DYNAMIC_FTRACE */
/*
 * 32-bit address of function_trace_stop, fetched PC-relative by the
 * mov.l loads above (both the !DYNAMIC_FTRACE mcount path and
 * ftrace_caller reference it).
 * NOTE(review): the comment that follows in the original file warns
 * that label placement from here on is displacement-sensitive --
 * keep this pool exactly where it is.
 */
199 .Lfunction_trace_stop:
200 .long function_trace_stop
203 * NOTE: From here on the locations of the .Lftrace_stub label and
204 * ftrace_stub itself are fixed. Adding additional data here will skew
205 * the displacement for the memory table and break the block replacement.
206 * Place new labels either after the ftrace_stub body, or before
207 * ftrace_caller. You have been warned.
217 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
218 .globl ftrace_graph_caller
230 * MCOUNT_ENTER() pushed 5 registers onto the stack, so
231 * the stack address containing our return address is
238 mov.l .Lprepare_ftrace_return, r0
245 2: .long function_trace_stop
/*
 * 32-bit address of prepare_ftrace_return for the PC-relative
 * mov.l load in ftrace_graph_caller above (used to hook the
 * caller's return address for the graph tracer -- inferred from
 * the surrounding CONFIG_FUNCTION_GRAPH_TRACER block; confirm
 * against the full function body).
 */
247 .Lprepare_ftrace_return:
248 .long prepare_ftrace_return
250 .globl return_to_handler
253 * Save the return values.
260 mov.l .Lftrace_return_to_handler, r0
265 * The return value from ftrace_return_handler has the real
266 * address that we should return to.
/*
 * 32-bit address of ftrace_return_to_handler, loaded PC-relative in
 * return_to_handler above; per the comment at the call site, its
 * return value is the real address to return to after graph tracing.
 */
275 .Lftrace_return_to_handler:
276 .long ftrace_return_to_handler
277 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
278 #endif /* CONFIG_FUNCTION_TRACER */
280 #ifdef CONFIG_STACK_DEBUG
283 mov.l .Ldump_stack, r0
/*
 * 32-bit address of init_thread_union (init_stack), loaded into r1
 * by STACK_CHECK() to form the lower bound of the valid kernel-stack
 * range ("If sp < init_stack, we're not OK" -- see the macro above).
 */
295 .L_init_thread_union:
296 .long init_thread_union
309 .string "Stack error"
310 #endif /* CONFIG_STACK_DEBUG */