treewide: remove redundant IS_ERR() before error code check
[linux/fpc-iii.git] / arch / sparc / kernel / kstack.h
blobb3c5e8f2443a65d9aa89852b09a2cc3e366d8b96
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _KSTACK_H
3 #define _KSTACK_H
5 #include <linux/thread_info.h>
6 #include <linux/sched.h>
7 #include <asm/ptrace.h>
8 #include <asm/irq.h>
10 /* SP must be STACK_BIAS adjusted already. */
11 static inline bool kstack_valid(struct thread_info *tp, unsigned long sp)
13 unsigned long base = (unsigned long) tp;
15 /* Stack pointer must be 16-byte aligned. */
16 if (sp & (16UL - 1))
17 return false;
19 if (sp >= (base + sizeof(struct thread_info)) &&
20 sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
21 return true;
23 if (hardirq_stack[tp->cpu]) {
24 base = (unsigned long) hardirq_stack[tp->cpu];
25 if (sp >= base &&
26 sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
27 return true;
28 base = (unsigned long) softirq_stack[tp->cpu];
29 if (sp >= base &&
30 sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
31 return true;
33 return false;
36 /* Does "regs" point to a valid pt_regs trap frame? */
37 static inline bool kstack_is_trap_frame(struct thread_info *tp, struct pt_regs *regs)
39 unsigned long base = (unsigned long) tp;
40 unsigned long addr = (unsigned long) regs;
42 if (addr >= base &&
43 addr <= (base + THREAD_SIZE - sizeof(*regs)))
44 goto check_magic;
46 if (hardirq_stack[tp->cpu]) {
47 base = (unsigned long) hardirq_stack[tp->cpu];
48 if (addr >= base &&
49 addr <= (base + THREAD_SIZE - sizeof(*regs)))
50 goto check_magic;
51 base = (unsigned long) softirq_stack[tp->cpu];
52 if (addr >= base &&
53 addr <= (base + THREAD_SIZE - sizeof(*regs)))
54 goto check_magic;
56 return false;
58 check_magic:
59 if ((regs->magic & ~0x1ff) == PT_REGS_MAGIC)
60 return true;
61 return false;
65 static inline __attribute__((always_inline)) void *set_hardirq_stack(void)
67 void *orig_sp, *sp = hardirq_stack[smp_processor_id()];
69 __asm__ __volatile__("mov %%sp, %0" : "=r" (orig_sp));
70 if (orig_sp < sp ||
71 orig_sp > (sp + THREAD_SIZE)) {
72 sp += THREAD_SIZE - 192 - STACK_BIAS;
73 __asm__ __volatile__("mov %0, %%sp" : : "r" (sp));
76 return orig_sp;
/* Restore the stack pointer previously saved by set_hardirq_stack().
 * Must be always_inline for the same reason as set_hardirq_stack(): a
 * real call frame on the stack being abandoned would be unsafe.
 */
static inline __attribute__((always_inline)) void restore_hardirq_stack(void *orig_sp)
{
	__asm__ __volatile__("mov %0, %%sp" : : "r" (orig_sp));
}
84 #endif /* _KSTACK_H */