/* include/asm-s390/percpu.h */
#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/compiler.h>
#include <asm/lowcore.h>

#define __GENERIC_PER_CPU

/*
 * s390 uses its own implementation for per cpu data: the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 * For 64 bit module code s390 forces the use of a GOT slot for the
 * address of the per cpu variable. This is needed because the module
 * may be more than 4G above the per cpu area.
 */
#if defined(__s390x__) && defined(MODULE)

#define __reloc_hide(var,offset) (*({				\
	extern int simple_identifier_##var(void);		\
	unsigned long *__ptr;					\
	asm ( "larl %0,per_cpu__"#var"@GOTENT"			\
	    : "=a" (__ptr) : "X" (per_cpu__##var) );		\
	(typeof(&per_cpu__##var))((*__ptr) + (offset)); }))

#else

#define __reloc_hide(var, offset) (*({				\
	extern int simple_identifier_##var(void);		\
	unsigned long __ptr;					\
	asm ( "" : "=a" (__ptr) : "0" (&per_cpu__##var) );	\
	(typeof(&per_cpu__##var)) (__ptr + (offset)); }))

#endif
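/*
 * Illustrative sketch (not part of the original header): for a
 * hypothetical variable "example_var",
 *
 *	__get_cpu_var(example_var)
 *
 * expands to __reloc_hide(example_var, S390_lowcore.percpu_offset),
 * which in the non-module case is roughly
 *
 *	(*({ unsigned long __ptr;
 *	     asm ( "" : "=a" (__ptr) : "0" (&per_cpu__example_var) );
 *	     (typeof(&per_cpu__example_var)) (__ptr + S390_lowcore.percpu_offset); }))
 *
 * The empty asm obfuscates the address arithmetic so the compiler cannot
 * recognize the original symbol and make assumptions about it.  The
 * 64 bit module variant instead loads the address through a GOT slot,
 * because the module may be more than 4G away from the per cpu area.
 */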
#ifdef CONFIG_SMP

extern unsigned long __per_cpu_offset[NR_CPUS];

/* Separate out the type, so (int[3], foo) works. */
#define DEFINE_PER_CPU(type, name) \
    __attribute__((__section__(".data.percpu"))) \
    __typeof__(type) per_cpu__##name

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
    __attribute__((__section__(".data.percpu.shared_aligned"))) \
    __typeof__(type) per_cpu__##name \
    ____cacheline_aligned_in_smp

#define __get_cpu_var(var) __reloc_hide(var,S390_lowcore.percpu_offset)
#define __raw_get_cpu_var(var) __reloc_hide(var,S390_lowcore.percpu_offset)
#define per_cpu(var,cpu) __reloc_hide(var,__per_cpu_offset[cpu])
#define per_cpu_offset(x) (__per_cpu_offset[x])
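/*
 * Usage sketch (illustrative, hypothetical variable name), assuming a
 * counter defined elsewhere with DEFINE_PER_CPU(int, example_count):
 *
 *	__get_cpu_var(example_count)++;		(this cpu's copy)
 *	per_cpu(example_count, cpu) = 0;	(a given cpu's copy)
 *
 * Callers normally use get_cpu_var()/put_cpu_var() from <linux/percpu.h>,
 * which wrap __get_cpu_var() with preemption disable/enable.
 */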
/* A macro to avoid #include hell... */
#define percpu_modcopy(pcpudst, src, size)			\
do {								\
	unsigned int __i;					\
	for_each_possible_cpu(__i)				\
		memcpy((pcpudst)+__per_cpu_offset[__i],		\
		       (src), (size));				\
} while (0)
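/*
 * Sketch of what percpu_modcopy() is for (illustrative, hypothetical
 * argument names): the module loader copies a module's initial per cpu
 * data into every possible cpu's per cpu area, conceptually
 *
 *	percpu_modcopy(mod_percpu_area, initdata, initdata_size);
 *
 * The "#include hell" remark presumably refers to for_each_possible_cpu(),
 * which would otherwise require pulling the cpumask headers in here.
 */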
#else /* ! SMP */

#define DEFINE_PER_CPU(type, name) \
    __typeof__(type) per_cpu__##name
#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
    DEFINE_PER_CPU(type, name)

#define __get_cpu_var(var) __reloc_hide(var,0)
#define __raw_get_cpu_var(var) __reloc_hide(var,0)
#define per_cpu(var,cpu) __reloc_hide(var,0)

#endif /* SMP */

#define DECLARE_PER_CPU(type, name) extern __typeof__(type) per_cpu__##name

#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(per_cpu__##var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(per_cpu__##var)
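/*
 * Illustrative sketch (hypothetical names): define and export a per cpu
 * variable in one translation unit, then declare it for users in a header:
 *
 *	DEFINE_PER_CPU(struct example_stat, example_stats);
 *	EXPORT_PER_CPU_SYMBOL(example_stats);
 *
 *	DECLARE_PER_CPU(struct example_stat, example_stats);
 */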
#endif /* __ARCH_S390_PERCPU__ */