arch/m68k/include/asm/cmpxchg.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>

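/*
 * __xg() casts its argument to a pointer to an oversized, volatile dummy
 * structure.  The inline asm below uses it for its "m" operands so that
 * the compiler treats the access as a genuine memory reference of
 * sufficient width instead of assuming only a single word is touched.
 */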
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

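/*
 * __invalid_xchg_size() is declared but never defined, so an xchg() on an
 * operand size other than 1, 2 or 4 bytes shows up as a link-time error
 * instead of silently generating wrong code.
 */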
extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

#ifndef CONFIG_RMW_INSNS
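/*
 * No read-modify-write instructions available: perform the exchange with
 * plain loads and stores while local interrupts are disabled, which makes
 * it atomic with respect to everything else running on this CPU.
 */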
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	unsigned long flags, tmp;

	local_irq_save(flags);

	switch (size) {
	case 1:
		tmp = *(u8 *)ptr;
		*(u8 *)ptr = x;
		x = tmp;
		break;
	case 2:
		tmp = *(u16 *)ptr;
		*(u16 *)ptr = x;
		x = tmp;
		break;
	case 4:
		tmp = *(u32 *)ptr;
		*(u32 *)ptr = x;
		x = tmp;
		break;
	default:
		tmp = __invalid_xchg_size(x, ptr, size);
		break;
	}

	local_irq_restore(flags);
	return x;
}
#else
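/*
 * CAS-based variant: read the current value, then retry the CAS until the
 * memory location still holds that value at the moment the new value is
 * stored.  On exit %0 holds what the location contained just before the
 * successful store.
 */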
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	default:
		x = __invalid_xchg_size(x, ptr, size);
		break;
	}
	return x;
}
#endif

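/*
 * xchg() converts the new value through unsigned long, dispatches on
 * sizeof(*(ptr)) to pick the 1-, 2- or 4-byte variant above, and casts
 * the returned old value back to the pointed-to type.
 */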
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

#include <asm-generic/cmpxchg-local.h>

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
					    unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 */
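/*
 * Illustrative only (not part of this header): callers typically wrap
 * cmpxchg() in a retry loop so that a racing update simply causes another
 * pass.  The names "val", "old", "new" and "prev" below are placeholders:
 *
 *	old = READ_ONCE(val);
 *	for (;;) {
 *		new = old + 1;
 *		prev = cmpxchg(&val, old, new);
 *		if (prev == old)
 *			break;		// our update was stored
 *		old = prev;		// lost a race, retry with fresh value
 *	}
 */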
#ifdef CONFIG_RMW_INSNS

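/*
 * Each case below is a single CAS instruction: the memory operand is
 * compared with "old"; if they match, "new" is stored, otherwise "old" is
 * loaded with the current memory contents.  Either way the value returned
 * through "old" is what the location held before the operation, so the
 * caller can compare it against the expected value.
 */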
static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__ ("casb %0,%2,%1"
				      : "=d" (old), "=m" (*(char *)p)
				      : "d" (new), "0" (old), "m" (*(char *)p));
		break;
	case 2:
		__asm__ __volatile__ ("casw %0,%2,%1"
				      : "=d" (old), "=m" (*(short *)p)
				      : "d" (new), "0" (old), "m" (*(short *)p));
		break;
	case 4:
		__asm__ __volatile__ ("casl %0,%2,%1"
				      : "=d" (old), "=m" (*(int *)p)
				      : "d" (new), "0" (old), "m" (*(int *)p));
		break;
	default:
		old = __invalid_cmpxchg_size(p, old, new, size);
		break;
	}
	return old;
}

#define cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n))

#else

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */