x86/mm/pat: Don't report PAT on CPUs that don't support it
[linux/fpc-iii.git] / arch / tile / include / asm / cmpxchg.h
blob25d5899497be193bdbaba16a46107e7030de7fe8
/*
 * cmpxchg.h -- forked from asm/atomic.h with this copyright:
 *
 * Copyright 2010 Tilera Corporation. All Rights Reserved.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, version 2.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE, GOOD TITLE or
 * NON INFRINGEMENT.  See the GNU General Public License for
 * more details.
 */
18 #ifndef _ASM_TILE_CMPXCHG_H
19 #define _ASM_TILE_CMPXCHG_H
21 #ifndef __ASSEMBLY__
23 #include <asm/barrier.h>
25 /* Nonexistent functions intended to cause compile errors. */
26 extern void __xchg_called_with_bad_pointer(void)
27 __compiletime_error("Bad argument size for xchg");
28 extern void __cmpxchg_called_with_bad_pointer(void)
29 __compiletime_error("Bad argument size for cmpxchg");
31 #ifndef __tilegx__
33 /* Note the _atomic_xxx() routines include a final mb(). */
34 int _atomic_xchg(int *ptr, int n);
35 int _atomic_xchg_add(int *v, int i);
36 int _atomic_xchg_add_unless(int *v, int a, int u);
37 int _atomic_cmpxchg(int *ptr, int o, int n);
38 long long _atomic64_xchg(long long *v, long long n);
39 long long _atomic64_xchg_add(long long *v, long long i);
40 long long _atomic64_xchg_add_unless(long long *v, long long a, long long u);
41 long long _atomic64_cmpxchg(long long *v, long long o, long long n);
/*
 * xchg() on non-tilegx: only 4-byte objects are supported; any other
 * size is rejected at build time via __xchg_called_with_bad_pointer().
 * smp_mb() before the operation, plus the final mb() inside
 * _atomic_xchg() (see above), provide full barrier semantics.
 */
#define xchg(ptr, n)						\
	({							\
		if (sizeof(*(ptr)) != 4)			\
			__xchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic_xchg((int *)(ptr), (int)(n)); \
	})
/*
 * cmpxchg() on non-tilegx: 4-byte objects only.
 * All macro arguments are parenthesized in the expansion to avoid
 * operator-precedence surprises when callers pass expressions.
 */
#define cmpxchg(ptr, o, n)					\
	({							\
		if (sizeof(*(ptr)) != 4)			\
			__cmpxchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic_cmpxchg((int *)(ptr), (int)(o), \
						(int)(n));	\
	})
/* 64-bit exchange on non-tilegx: 8-byte objects only. */
#define xchg64(ptr, n)						\
	({							\
		if (sizeof(*(ptr)) != 8)			\
			__xchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic64_xchg((long long *)(ptr), \
					       (long long)(n));	\
	})
/*
 * 64-bit compare-and-exchange on non-tilegx: 8-byte objects only.
 * Arguments are parenthesized in the expansion for macro hygiene.
 */
#define cmpxchg64(ptr, o, n)					\
	({							\
		if (sizeof(*(ptr)) != 8)			\
			__cmpxchg_called_with_bad_pointer();	\
		smp_mb();					\
		(typeof(*(ptr)))_atomic64_cmpxchg((long long *)(ptr), \
					(long long)(o), (long long)(n)); \
	})
78 #else
80 #define xchg(ptr, n) \
81 ({ \
82 typeof(*(ptr)) __x; \
83 smp_mb(); \
84 switch (sizeof(*(ptr))) { \
85 case 4: \
86 __x = (typeof(__x))(unsigned long) \
87 __insn_exch4((ptr), \
88 (u32)(unsigned long)(n)); \
89 break; \
90 case 8: \
91 __x = (typeof(__x)) \
92 __insn_exch((ptr), (unsigned long)(n)); \
93 break; \
94 default: \
95 __xchg_called_with_bad_pointer(); \
96 break; \
97 } \
98 smp_mb(); \
99 __x; \
/*
 * tilegx cmpxchg(): the comparison value is first written to the
 * SPR_CMPEXCH_VALUE special-purpose register, then the
 * compare-exchange instruction is issued; the old memory value is
 * returned in __x.
 */
#define cmpxchg(ptr, o, n)					\
	({							\
		typeof(*(ptr)) __x;				\
		__insn_mtspr(SPR_CMPEXCH_VALUE, (unsigned long)(o)); \
		smp_mb();					\
		switch (sizeof(*(ptr))) {			\
		case 4:						\
			__x = (typeof(__x))(unsigned long)	\
				__insn_cmpexch4((ptr),		\
					(u32)(unsigned long)(n)); \
			break;					\
		case 8:						\
			__x = (typeof(__x))__insn_cmpexch((ptr), \
					(long long)(n));	\
			break;					\
		default:					\
			__cmpxchg_called_with_bad_pointer();	\
			break;					\
		}						\
		smp_mb();					\
		__x;						\
	})
125 #define xchg64 xchg
126 #define cmpxchg64 cmpxchg
128 #endif
130 #endif /* __ASSEMBLY__ */
132 #endif /* _ASM_TILE_CMPXCHG_H */