/* include/asm-generic/percpu.h */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
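
/*
 * Illustrative sketch, not part of the original header: with the
 * generic definition above, the offset for a given CPU is a plain
 * array lookup, with __per_cpu_offset[] filled in during early boot:
 *
 *	unsigned long off = per_cpu_offset(2);	// == __per_cpu_offset[2]
 */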

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Add an offset to a pointer but keep the pointer as is.
 *
 * Only S390 provides its own means of moving the pointer.
 */
#ifndef SHIFT_PERCPU_PTR
/* Weird cast keeps both GCC and sparse happy. */
#define SHIFT_PERCPU_PTR(__p, __offset)	({				\
	__verify_pcpu_ptr((__p));					\
	RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset)); \
})
#endif
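
/*
 * Conceptually (an illustrative sketch, not the kernel's definition),
 * SHIFT_PERCPU_PTR() is byte-granular pointer arithmetic:
 *
 *	(typeof(__p))((unsigned long)(__p) + (__offset))
 *
 * RELOC_HIDE() performs that addition while hiding it from GCC, so the
 * compiler cannot assume the result still points into the original
 * (possibly discarded) percpu section, and __verify_pcpu_ptr() gives
 * sparse a chance to check that a genuine percpu pointer was passed.
 */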

/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(&(var), per_cpu_offset(cpu)))
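
/*
 * Usage sketch (illustrative only; 'hits' is a hypothetical variable,
 * DEFINE_PER_CPU() comes from <linux/percpu-defs.h>, included above):
 *
 *	DEFINE_PER_CPU(int, hits);
 *
 *	void count_hit_on(int cpu)
 *	{
 *		per_cpu(hits, cpu)++;	// instance belonging to 'cpu'
 *	}
 */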

#ifndef __this_cpu_ptr
#define __this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define this_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, my_cpu_offset)
#else
#define this_cpu_ptr(ptr) __this_cpu_ptr(ptr)
#endif
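
/*
 * Usage sketch (illustrative only): a this_cpu_ptr() result is stable
 * only while preemption is disabled; preempt_disable()/preempt_enable()
 * come from <linux/preempt.h>, and 'counters' is hypothetical:
 *
 *	DEFINE_PER_CPU(unsigned long, counters);
 *
 *	void bump_local_counter(void)
 *	{
 *		preempt_disable();
 *		(*this_cpu_ptr(&counters))++;	// this CPU's instance
 *		preempt_enable();
 *	}
 */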

#define __get_cpu_var(var) (*this_cpu_ptr(&(var)))
#define __raw_get_cpu_var(var) (*__this_cpu_ptr(&(var)))

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#else /* ! SMP */

#define VERIFY_PERCPU_PTR(__p) ({			\
	__verify_pcpu_ptr((__p));			\
	(typeof(*(__p)) __kernel __force *)(__p);	\
})

#define per_cpu(var, cpu)	(*((void)(cpu), VERIFY_PERCPU_PTR(&(var))))
#define __get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define __raw_get_cpu_var(var)	(*VERIFY_PERCPU_PTR(&(var)))
#define this_cpu_ptr(ptr)	per_cpu_ptr(ptr, 0)
#define __this_cpu_ptr(ptr)	this_cpu_ptr(ptr)
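
/*
 * Illustrative note: on UP there is exactly one instance per variable,
 * so the cpu argument is evaluated (the "(void)(cpu)" keeps any side
 * effects and silences unused-value warnings) but otherwise ignored:
 *
 *	per_cpu(hits, 3)++;	// same storage as per_cpu(hits, 0)
 */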

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION "..first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */