[zen-stable.git] / arch/powerpc/include/asm/ftrace.h
blob 169d039ed402080720aef97c33e20952d28dad12
#ifndef _ASM_POWERPC_FTRACE
#define _ASM_POWERPC_FTRACE

#ifdef CONFIG_FUNCTION_TRACER
#define MCOUNT_ADDR		((long)(_mcount))
#define MCOUNT_INSN_SIZE	4 /* sizeof mcount call */
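
/*
 * Background, assuming the usual -pg scheme on powerpc: the compiler emits
 * a single 4-byte "bl _mcount" per instrumented function, which is the
 * instruction MCOUNT_INSN_SIZE measures and the one dynamic ftrace patches;
 * MCOUNT_ADDR is the _mcount address that the generic ftrace code passes
 * to the arch patching routines.
 */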

#ifdef __ASSEMBLY__

/* Based off of objdump output from glibc */
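
/*
 * A rough guide to MCOUNT_SAVE_FRAME, assuming the standard ppc32 SVR4
 * stack layout: it opens a 48-byte frame, spills the volatile argument
 * registers r3-r10, and captures CR plus two addresses: LR, which points
 * back into the traced function, and the word at 52(r1), which is the
 * return address the traced function saved for its own caller (the
 * parent).  The trampoline built on top of this macro hands those two
 * addresses to the trace callback as (ip, parent_ip).
 */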

#define MCOUNT_SAVE_FRAME			\
	stwu	r1,-48(r1);			\
	stw	r3, 12(r1);			\
	stw	r4, 16(r1);			\
	stw	r5, 20(r1);			\
	stw	r6, 24(r1);			\
	mflr	r3;				\
	lwz	r4, 52(r1);			\
	mfcr	r5;				\
	stw	r7, 28(r1);			\
	stw	r8, 32(r1);			\
	stw	r9, 36(r1);			\
	stw	r10,40(r1);			\
	stw	r3, 44(r1);			\
	stw	r5, 8(r1)
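
/*
 * MCOUNT_RESTORE_FRAME below is the mirror image: it reloads CR and
 * r3-r10, moves the saved "return into the traced function" address into
 * CTR, restores LR to the parent's address, and pops the 48-byte frame,
 * which lets the code using the macro return with a bctr while LR looks
 * untouched to the traced function.
 */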

#define MCOUNT_RESTORE_FRAME			\
	lwz	r6, 8(r1);			\
	lwz	r0, 44(r1);			\
	lwz	r3, 12(r1);			\
	mtctr	r0;				\
	lwz	r4, 16(r1);			\
	mtcr	r6;				\
	lwz	r5, 20(r1);			\
	lwz	r6, 24(r1);			\
	lwz	r0, 52(r1);			\
	lwz	r7, 28(r1);			\
	lwz	r8, 32(r1);			\
	mtlr	r0;				\
	lwz	r9, 36(r1);			\
	lwz	r10,40(r1);			\
	addi	r1, r1, 48

#else /* !__ASSEMBLY__ */
extern void _mcount(void);

#ifdef CONFIG_DYNAMIC_FTRACE
static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	/* relocation of mcount call site is the same as the address */
	return addr;
}

struct dyn_arch_ftrace {
	struct module *mod;	/* module that owns this mcount call site */
};
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* __ASSEMBLY__ */

#endif /* CONFIG_FUNCTION_TRACER */

#if defined(CONFIG_FTRACE_SYSCALLS) && defined(CONFIG_PPC64) && !defined(__ASSEMBLY__)
#define ARCH_HAS_SYSCALL_MATCH_SYM_NAME
static inline bool arch_syscall_match_sym_name(const char *sym, const char *name)
{
	/*
	 * Compare the symbol name with the system call name. Skip the .sys or
	 * .SyS prefix from the symbol name and the sys prefix from the system
	 * call name and just match the rest. This is only needed on ppc64,
	 * since symbol names on 32-bit do not start with a period, so the
	 * generic function works there. (A worked example follows this block.)
	 */
	return !strcmp(sym + 4, name + 3);
}
#endif /* CONFIG_FTRACE_SYSCALLS && CONFIG_PPC64 && !__ASSEMBLY__ */
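
/*
 * Worked example for arch_syscall_match_sym_name(), using illustrative
 * names: on ppc64 the symbol for the read syscall is ".sys_read" (or
 * ".SyS_read"), while the syscall name is "sys_read".  sym + 4 skips the
 * four-character ".sys"/".SyS" prefix and name + 3 skips "sys", so both
 * sides reduce to "_read" and the strcmp() match succeeds.
 */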

#endif /* _ASM_POWERPC_FTRACE */