KVM: SVM: clean up nested vmload/vmsave paths
[linux/fpc-iii.git] / arch / blackfin / include / asm / spinlock.h
blob d6ff4b59fcb195e03925a773373c2fdfd9e9035f
1 #ifndef __BFIN_SPINLOCK_H
2 #define __BFIN_SPINLOCK_H
4 #ifndef CONFIG_SMP
5 # include <asm-generic/spinlock.h>
6 #else
8 #include <asm/atomic.h>
10 asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
11 asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
12 asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
13 asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
14 asmlinkage void __raw_read_lock_asm(volatile int *ptr);
15 asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
16 asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
17 asmlinkage void __raw_write_lock_asm(volatile int *ptr);
18 asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
19 asmlinkage void __raw_write_unlock_asm(volatile int *ptr);
21 static inline int __raw_spin_is_locked(raw_spinlock_t *lock)
23 return __raw_spin_is_locked_asm(&lock->lock);
26 static inline void __raw_spin_lock(raw_spinlock_t *lock)
28 __raw_spin_lock_asm(&lock->lock);
31 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
33 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
35 return __raw_spin_trylock_asm(&lock->lock);
38 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
40 __raw_spin_unlock_asm(&lock->lock);
43 static inline void __raw_spin_unlock_wait(raw_spinlock_t *lock)
45 while (__raw_spin_is_locked(lock))
46 cpu_relax();
49 static inline int __raw_read_can_lock(raw_rwlock_t *rw)
51 return __raw_uncached_fetch_asm(&rw->lock) > 0;
54 static inline int __raw_write_can_lock(raw_rwlock_t *rw)
56 return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
59 static inline void __raw_read_lock(raw_rwlock_t *rw)
61 __raw_read_lock_asm(&rw->lock);
64 static inline int __raw_read_trylock(raw_rwlock_t *rw)
66 return __raw_read_trylock_asm(&rw->lock);
69 static inline void __raw_read_unlock(raw_rwlock_t *rw)
71 __raw_read_unlock_asm(&rw->lock);
74 static inline void __raw_write_lock(raw_rwlock_t *rw)
76 __raw_write_lock_asm(&rw->lock);
79 static inline int __raw_write_trylock(raw_rwlock_t *rw)
81 return __raw_write_trylock_asm(&rw->lock);
84 static inline void __raw_write_unlock(raw_rwlock_t *rw)
86 __raw_write_unlock_asm(&rw->lock);
/* Hooks invoked while spinning on a contended lock; plain cpu_relax() here. */
#define _raw_spin_relax(lock)	cpu_relax()
#define _raw_read_relax(lock)	cpu_relax()
#define _raw_write_relax(lock)	cpu_relax()
93 #endif
95 #endif /* !__BFIN_SPINLOCK_H */