include/asm-s390/percpu.h

#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/compiler.h>
#include <asm/lowcore.h>

/*
 * s390 uses its own implementation for per cpu data, the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 * For 64 bit module code s390 forces the use of a GOT slot for the
 * address of the per cpu variable. This is needed because the module
 * may be more than 4G above the per cpu area.
 */
#if defined(__s390x__) && defined(MODULE)

#define __reloc_hide(var,offset) (*({				\
	extern int simple_identifier_##var(void);		\
	unsigned long *__ptr;					\
	asm ( "larl %0,per_cpu__"#var"@GOTENT"			\
	    : "=a" (__ptr) : "X" (per_cpu__##var) );		\
	(typeof(&per_cpu__##var))((*__ptr) + (offset));	}))

#else

#define __reloc_hide(var, offset) (*({				\
	extern int simple_identifier_##var(void);		\
	unsigned long __ptr;					\
	asm ( "" : "=a" (__ptr) : "0" (&per_cpu__##var) );	\
	(typeof(&per_cpu__##var)) (__ptr + (offset)); }))

#endif
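
/*
 * Illustrative sketch (not part of the original header): a minimal,
 * userspace-only model of the address arithmetic that __reloc_hide() hides
 * from the compiler behind an asm statement.  Every "cpu" owns a copy of
 * the per cpu data at master address + offset; the macro simply adds the
 * cpu's offset to the address of the master copy.  All names below
 * (my_reloc_hide, cpu_copy, my_per_cpu_offset, my_counter) are hypothetical
 * and example-only.
 */
#if 0	/* never compiled; kept only as documentation */
#include <stdio.h>

static int per_cpu__my_counter;			/* master copy of the variable */
static int cpu_copy[2];				/* one replica per "cpu" */
static unsigned long my_per_cpu_offset[2];	/* like __per_cpu_offset[] */

#define my_reloc_hide(var, offset) \
	(*(typeof(&per_cpu__##var))((unsigned long)(&per_cpu__##var) + (offset)))

int main(void)
{
	int cpu;

	/* offset of each cpu's replica relative to the master copy */
	for (cpu = 0; cpu < 2; cpu++)
		my_per_cpu_offset[cpu] = (unsigned long)&cpu_copy[cpu] -
					 (unsigned long)&per_cpu__my_counter;

	/* roughly what per_cpu(my_counter, cpu) = cpu expands to */
	for (cpu = 0; cpu < 2; cpu++)
		my_reloc_hide(my_counter, my_per_cpu_offset[cpu]) = cpu;

	printf("%d %d\n", cpu_copy[0], cpu_copy[1]);	/* prints "0 1" */
	return 0;
}
#endif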

#ifdef CONFIG_SMP

extern unsigned long __per_cpu_offset[NR_CPUS];

#define __get_cpu_var(var) __reloc_hide(var,S390_lowcore.percpu_offset)
#define __raw_get_cpu_var(var) __reloc_hide(var,S390_lowcore.percpu_offset)
#define per_cpu(var,cpu) __reloc_hide(var,__per_cpu_offset[cpu])
#define per_cpu_offset(x) (__per_cpu_offset[x])

/* A macro to avoid #include hell... */
#define percpu_modcopy(pcpudst, src, size)		\
do {							\
	unsigned int __i;				\
	for_each_possible_cpu(__i)			\
		memcpy((pcpudst)+__per_cpu_offset[__i],	\
		       (src), (size));			\
} while (0)
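
/*
 * Usage note (illustrative, not from this file): the module loader uses
 * percpu_modcopy() to replicate a freshly loaded module's per cpu init data
 * into every possible cpu's per cpu area, along the lines of
 *
 *	percpu_modcopy(mod->percpu, percpu_init_data, percpu_size);
 *
 * where percpu_init_data and percpu_size are hypothetical names standing in
 * for the loader's own variables.
 */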

#else /* ! SMP */

#define __get_cpu_var(var) __reloc_hide(var,0)
#define __raw_get_cpu_var(var) __reloc_hide(var,0)
#define per_cpu(var,cpu) __reloc_hide(var,0)

#endif /* SMP */

#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name
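
/*
 * Usage sketch (illustrative, not part of this header): a declaration made
 * with DECLARE_PER_CPU() in a header is paired with a DEFINE_PER_CPU() in
 * one .c file, and each cpu's copy is then reached through the macros
 * above, roughly:
 *
 *	DECLARE_PER_CPU(long, my_stat);		in some header
 *	DEFINE_PER_CPU(long, my_stat);		in one .c file
 *	__get_cpu_var(my_stat)++;		current cpu's copy
 *	total += per_cpu(my_stat, cpu);		a given cpu's copy
 *
 * "my_stat" and "total" are hypothetical names used only for this example.
 */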

#endif /* __ARCH_S390_PERCPU__ */