1 #ifndef _ASM_GENERIC_PERCPU_H_
2 #define _ASM_GENERIC_PERCPU_H_
4 #include <linux/compiler.h>
5 #include <linux/threads.h>
6 #include <linux/percpu-defs.h>
#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* Debug build: smp_processor_id() checks that preemption is disabled. */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */
/* Linker section that per-cpu variables are placed in (SMP gets its own). */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif
/* Optional arch hooks for extra attributes on per-cpu declarations/definitions. */
#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
/* Read this cpu's instance of @pcp; no preemption/irq protection. */
#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})
/* Apply read-modify-write operator @op (e.g. +=) to this cpu's @pcp. */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
/* Add @val to this cpu's @pcp and return the new value. */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
									\
	*__p += val;							\
	*__p;								\
})
/* Store @nval into this cpu's @pcp and return the previous value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})
/* If this cpu's @pcp equals @oval, store @nval; return the old value either way. */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(&(pcp)) __p = raw_cpu_ptr(&(pcp));			\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})
/*
 * Compare both per-cpu words against (@oval1, @oval2); if both match,
 * store (@nval1, @nval2). Returns 1 on success, 0 on failure.
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(&(pcp1)) __p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(&(pcp2)) __p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})
/* Preemption-safe read: disable preemption so the cpu cannot change mid-read. */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	preempt_disable_notrace();					\
	__ret = raw_cpu_generic_read(pcp);				\
	preempt_enable_notrace();					\
	__ret;								\
})
/* Interrupt-safe RMW: irqs off makes the operation atomic w.r.t. this cpu. */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)
/* Interrupt-safe add-and-return on this cpu's @pcp. */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* Interrupt-safe exchange on this cpu's @pcp; returns the old value. */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* Interrupt-safe compare-and-exchange on this cpu's @pcp; returns the old value. */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* Interrupt-safe double compare-and-exchange; returns 1 on success, 0 on failure. */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)	\
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* Per-size raw reads; an arch may pre-define any of these with a faster version. */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif
/* Per-size raw writes (arch-overridable). */
#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
/* Per-size raw adds (arch-overridable). */
#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
/* Per-size raw bitwise AND (arch-overridable). */
#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
/* Per-size raw bitwise OR (arch-overridable). */
#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
/* Per-size raw add-and-return (arch-overridable). */
#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
/* Per-size raw exchange (arch-overridable). */
#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
/* Per-size raw compare-and-exchange (arch-overridable). */
#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
/* Per-size raw double compare-and-exchange (arch-overridable). */
#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
/* Per-size preemption-safe reads (arch-overridable). */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif
/* Per-size interrupt-safe writes (arch-overridable). */
#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
/* Per-size interrupt-safe adds (arch-overridable). */
#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
/* Per-size interrupt-safe bitwise AND (arch-overridable). */
#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
/* Per-size interrupt-safe bitwise OR (arch-overridable). */
#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
/* Per-size interrupt-safe add-and-return (arch-overridable). */
#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
/* Per-size interrupt-safe exchange (arch-overridable). */
#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
/* Per-size interrupt-safe compare-and-exchange (arch-overridable). */
#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
/* Per-size interrupt-safe double compare-and-exchange (arch-overridable). */
#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
427 #endif /* _ASM_GENERIC_PERCPU_H_ */