1 #ifndef _ASM_GENERIC_PERCPU_H_
2 #define _ASM_GENERIC_PERCPU_H_
4 #include <linux/compiler.h>
5 #include <linux/threads.h>
6 #include <linux/percpu-defs.h>
11 * per_cpu_offset() is the offset that has to be added to a
12 * percpu variable to get to the instance for a certain processor.
14 * Most arches use the __per_cpu_offset array for those offsets but
15 * some arches have their own ways of determining the offset (x86_64, s390).
17 #ifndef __per_cpu_offset
18 extern unsigned long __per_cpu_offset
[NR_CPUS
];
20 #define per_cpu_offset(x) (__per_cpu_offset[x])
/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
/* Debug build: smp_processor_id() checks that preemption is disabled. */
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
/* Arch hook: allocate and initialize the per-cpu areas at boot. */
extern void setup_per_cpu_areas(void);
#endif
/* Linker section and attributes used for per-cpu variables; arches may override. */
#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif
/*
 * Apply @op (e.g. =, +=, &=, |=) with @val to this CPU's instance of @pcp.
 * "raw" variant: no protection against preemption or interrupts.
 */
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)
/* Add @val to this CPU's @pcp and return the new value (not preempt-safe). */
#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	raw_cpu_add(pcp, val);						\
	raw_cpu_read(pcp);						\
})
/* Store @nval into this CPU's @pcp and return the previous value. */
#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) __ret;						\
	__ret = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	__ret;								\
})
/*
 * If this CPU's @pcp equals @oval, store @nval; always return the value
 * read ("old" value), as the cmpxchg contract requires.
 */
#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	__ret = raw_cpu_read(pcp);					\
	if (__ret == (oval))						\
		raw_cpu_write(pcp, nval);				\
	__ret;								\
})
/*
 * Double-word cmpxchg: if BOTH per-cpu words match their old values,
 * store both new values. Returns 1 on success, 0 on failure.
 */
#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret = 0;							\
	if (raw_cpu_read(pcp1) == (oval1) &&				\
	    raw_cpu_read(pcp2) == (oval2)) {				\
		raw_cpu_write(pcp1, nval1);				\
		raw_cpu_write(pcp2, nval2);				\
		__ret = 1;						\
	}								\
	(__ret);							\
})
/*
 * Read this CPU's instance of @pcp with preemption disabled, so the
 * CPU whose pointer is computed is the CPU that is read.
 */
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	preempt_disable();						\
	__ret = *this_cpu_ptr(&(pcp));					\
	preempt_enable();						\
	__ret;								\
})
/*
 * Interrupt-safe variant of raw_cpu_generic_to_op(): the update runs
 * with local interrupts disabled so it cannot be torn by an IRQ.
 */
#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	*raw_cpu_ptr(&(pcp)) op val;					\
	raw_local_irq_restore(__flags);					\
} while (0)
/* Interrupt-safe add-and-return on this CPU's @pcp. */
#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_add(pcp, val);						\
	__ret = raw_cpu_read(pcp);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* Interrupt-safe exchange: store @nval, return the previous value. */
#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/* Interrupt-safe cmpxchg: conditional store, returns the old value. */
#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	if (__ret == (oval))						\
		raw_cpu_write(pcp, nval);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/*
 * Interrupt-safe double-word cmpxchg: delegates to the raw variant
 * with local interrupts disabled. Returns 1 on success, 0 on failure.
 */
#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
/*
 * Size-specific raw_cpu_* fallbacks (1/2/4/8 bytes). Each is defined
 * only if the arch did not already provide an optimized version.
 */
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp) (*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp) (*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp) (*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp) (*raw_cpu_ptr(&(pcp)))
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val) raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val) raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val) raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val) raw_cpu_generic_to_op(pcp, val, |=)
#endif
/*
 * Size-specific raw_cpu_* fallbacks that return a value
 * (add_return / xchg / cmpxchg / cmpxchg_double).
 */
#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val) raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval) raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
/*
 * Size-specific this_cpu_* fallbacks (1/2/4/8 bytes); these delegate to
 * the irq-safe this_cpu_generic_* helpers unless the arch overrides them.
 */
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp) this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp) this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val) this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val) this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val) this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val) this_cpu_generic_to_op(pcp, val, |=)
#endif
360 #ifndef this_cpu_add_return_1
361 #define this_cpu_add_return_1(pcp, val) this_cpu_generic_add_return(pcp, val)
363 #ifndef this_cpu_add_return_2
364 #define this_cpu_add_return_2(pcp, val) this_cpu_generic_add_return(pcp, val)
366 #ifndef this_cpu_add_return_4
367 #define this_cpu_add_return_4(pcp, val) this_cpu_generic_add_return(pcp, val)
369 #ifndef this_cpu_add_return_8
370 #define this_cpu_add_return_8(pcp, val) this_cpu_generic_add_return(pcp, val)
373 #ifndef this_cpu_xchg_1
374 #define this_cpu_xchg_1(pcp, nval) this_cpu_generic_xchg(pcp, nval)
376 #ifndef this_cpu_xchg_2
377 #define this_cpu_xchg_2(pcp, nval) this_cpu_generic_xchg(pcp, nval)
379 #ifndef this_cpu_xchg_4
380 #define this_cpu_xchg_4(pcp, nval) this_cpu_generic_xchg(pcp, nval)
382 #ifndef this_cpu_xchg_8
383 #define this_cpu_xchg_8(pcp, nval) this_cpu_generic_xchg(pcp, nval)
386 #ifndef this_cpu_cmpxchg_1
387 #define this_cpu_cmpxchg_1(pcp, oval, nval) \
388 this_cpu_generic_cmpxchg(pcp, oval, nval)
390 #ifndef this_cpu_cmpxchg_2
391 #define this_cpu_cmpxchg_2(pcp, oval, nval) \
392 this_cpu_generic_cmpxchg(pcp, oval, nval)
394 #ifndef this_cpu_cmpxchg_4
395 #define this_cpu_cmpxchg_4(pcp, oval, nval) \
396 this_cpu_generic_cmpxchg(pcp, oval, nval)
398 #ifndef this_cpu_cmpxchg_8
399 #define this_cpu_cmpxchg_8(pcp, oval, nval) \
400 this_cpu_generic_cmpxchg(pcp, oval, nval)
403 #ifndef this_cpu_cmpxchg_double_1
404 #define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
405 this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
407 #ifndef this_cpu_cmpxchg_double_2
408 #define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
409 this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
411 #ifndef this_cpu_cmpxchg_double_4
412 #define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
413 this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
415 #ifndef this_cpu_cmpxchg_double_8
416 #define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
417 this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
420 #endif /* _ASM_GENERIC_PERCPU_H_ */