#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <asm/system.h>
#include <linux/kernel.h>
#include <asm/current.h>

/*
 * Simple spin lock operations.  There are two variants, one clears IRQ's
 * on the local processor, one does not.
 *
 * We make no fairness assumptions.  They have a cost.
 */

#define arch_spin_lock_flags(lock, flags)	arch_spin_lock(lock)
#define arch_spin_is_locked(x)	((x)->lock != 0)
#define arch_spin_unlock_wait(x) \
		do { cpu_relax(); } while ((x)->lock)
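
/*
 * Unlocking is a plain store of zero; the mb() ahead of it orders the
 * critical section's accesses before the lock word is seen as free.
 */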
static inline void arch_spin_unlock(arch_spinlock_t * lock)
{
	mb();
	lock->lock = 0;
}
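
/*
 * Acquire with an LL/SC (ldl_l/stl_c) retry loop: spin in an out-of-line
 * .subsection while the lock word is non-zero, then try to store 1
 * conditionally; the trailing mb gives acquire ordering.
 */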
static inline void arch_spin_lock(arch_spinlock_t * lock)
{
	long tmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"
	"	bne	%0,2f\n"
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"
	"	bne	%0,2b\n"
	"	br	1b\n"
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}

/***********************************************************/
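
/*
 * rwlock word encoding, as used below: bit 0 is the writer bit and each
 * reader subtracts 2, so 0 means unlocked, an odd value means a writer
 * holds the lock, and any other even value means readers are present.
 */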
static inline int arch_read_can_lock(arch_rwlock_t *lock)
{
	return (lock->lock & 1) == 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *lock)
{
	return lock->lock == 0;
}
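
/*
 * Take a reader reference: spin (out of line) while the writer bit is
 * set, otherwise subtract 2 under ldl_l/stl_c and retry if the
 * conditional store fails.
 */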
static inline void arch_read_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"
	"	subl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
static inline void arch_write_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
static inline int arch_read_trylock(arch_rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	blbs	%1,2f\n"
	"	subl	%1,2,%2\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
static inline int arch_write_trylock(arch_rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	bne	%1,2f\n"
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
static inline void arch_read_unlock(arch_rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"	mb\n"
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
static inline void arch_write_unlock(arch_rwlock_t * lock)
{
	mb();
	lock->lock = 0;
}
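
/*
 * These locks never disable interrupts themselves, so the *_flags
 * variants simply ignore the saved flags, and the relax operations all
 * degrade to cpu_relax().
 */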
#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#define arch_spin_relax(lock)	cpu_relax()
#define arch_read_relax(lock)	cpu_relax()
#define arch_write_relax(lock)	cpu_relax()

#endif /* _ALPHA_SPINLOCK_H */