#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/config.h>
#include <asm/system.h>
#include <linux/kernel.h>
#include <asm/current.h>

/*
 * Simple spin lock operations.  There are two variants, one clears IRQ's
 * on the local processor, one does not.
 *
 * We make no fairness assumptions. They have a cost.
 */

#define __raw_spin_lock_flags(lock, flags)  __raw_spin_lock(lock)
#define __raw_spin_is_locked(x)	((x)->lock != 0)
#define __raw_spin_unlock_wait(x) \
		do { cpu_relax(); } while ((x)->lock)

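/*
 * Usage sketch (hypothetical caller; the real wrappers live in the
 * generic <linux/spinlock.h> layer):
 *
 *	raw_spinlock_t lock = { 0 };
 *
 *	__raw_spin_lock(&lock);		spin until the word is owned
 *	...critical section...
 *	__raw_spin_unlock(&lock);	mb(), then clear the word
 *
 * __raw_spin_lock_flags() simply discards the saved flags, since the
 * plain lock path is sufficient here.
 */
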
static inline void __raw_spin_unlock(raw_spinlock_t * lock)
{
	/* Order the critical section before the release store. */
	mb();
	lock->lock = 0;
}

static inline void __raw_spin_lock(raw_spinlock_t * lock)
{
	long tmp;

	/* LL/SC acquire loop: spin out of line while the word is non-zero,
	   then try to store 1; retry if the store-conditional fails. */
	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"
	"	bne	%0,2f\n"
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"
	"	bne	%0,2b\n"
	"	br	1b\n"
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}

static inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}

/***********************************************************/

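/*
 * Read/write lock word layout, as implied by the operations below:
 * bit 0 is set while a writer holds the lock, and each reader
 * subtracts 2 from the word (adding it back on unlock), so the word
 * is 0 only when the lock is completely free.
 */
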
static inline int __raw_read_can_lock(raw_rwlock_t *lock)
{
	return (lock->lock & 1) == 0;
}

static inline int __raw_write_can_lock(raw_rwlock_t *lock)
{
	return lock->lock == 0;
}

static inline void __raw_read_lock(raw_rwlock_t *lock)
{
	long regx;

	/* Wait for any writer (low bit) to go away, then add a reader by
	   subtracting 2; retry on a failed store-conditional. */
	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"
	"	subl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline void __raw_write_lock(raw_rwlock_t *lock)
{
	long regx;

	/* Wait until the word is 0 (no writer, no readers), then claim it
	   by storing 1; retry on a failed store-conditional. */
	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline int __raw_read_trylock(raw_rwlock_t * lock)
{
	long regx;
	int success;

	/* Single attempt: fail immediately if a writer holds the lock,
	   otherwise try to add a reader; redo only if stl_c itself fails. */
	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	blbs	%1,2f\n"
	"	subl	%1,2,%2\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

static inline int __raw_write_trylock(raw_rwlock_t * lock)
{
	long regx;
	int success;

	/* Single attempt: fail if the word is non-zero, otherwise try to
	   store 1; redo only if stl_c itself fails. */
	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	bne	%1,2f\n"
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}

static inline void __raw_read_unlock(raw_rwlock_t * lock)
{
	long regx;
	/* Order the critical section, then drop this reader by adding 2
	   back to the lock word; retry on a failed store-conditional. */
	__asm__ __volatile__(
	"	mb\n"
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

static inline void __raw_write_unlock(raw_rwlock_t * lock)
{
	mb();
	lock->lock = 0;
}

#endif /* _ALPHA_SPINLOCK_H */