#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code,
 * except that the local versions do not have the expensive memory
 * barrier.  So this file is included twice from asm/cmpxchg.h.
 */

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */
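
/*
 * All of the variants below use Alpha's load-locked/store-conditional
 * pattern: ldx_l loads the location and sets a lock flag, the new
 * value is built in registers, and stx_c stores it only if the
 * location was not written in the meantime.  On failure stx_c leaves
 * 0 in its source register and we branch to an out-of-line retry stub
 * in .subsection 2.  __ASM__MB is supplied by asm/cmpxchg.h: a "mb"
 * barrier in the SMP build, empty for the _local versions.
 */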

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg().  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}
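
/*
 * How this is reached (sketch; the real wrapper lives in
 * asm/cmpxchg.h and may differ in detail): ____xchg's first argument
 * is pasted onto a name by a macro along the lines of
 *
 *	#define ____xchg(type, args...)	__xchg ## type(args)
 *
 * so the empty first argument above makes this the dispatcher
 * __xchg() (or __xchg_local() on the first inclusion), and the
 * _u8 etc. calls resolve to the sized helpers.  The user-visible
 * xchg() macro passes sizeof(*(ptr)) as the size argument, which
 * the compiler folds to a single direct call.
 */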

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 *
 * The memory barrier should be placed in SMP only when we actually
 * make the change.  If we don't change anything (so if the returned
 * prev is equal to old) then we aren't acquiring anything new and
 * we don't need any memory barrier as far as I can tell.
 */
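
/*
 * In other words, each variant below behaves as if it did the
 * following in one atomic step:
 *
 *	prev = *m;
 *	if (prev == old)
 *		*m = new;
 *	return prev;
 */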

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	      int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#endif