[linux-ginger.git] / arch/blackfin/include/asm/spinlock.h
#ifndef __BFIN_SPINLOCK_H
#define __BFIN_SPINLOCK_H

#include <asm/atomic.h>

/* Lock primitives implemented in Blackfin assembly; the inlines below just wrap them. */
asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void __raw_read_lock_asm(volatile int *ptr);
asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
asmlinkage void __raw_write_lock_asm(volatile int *ptr);
asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
asmlinkage void __raw_write_unlock_asm(volatile int *ptr);

static inline int __raw_spin_is_locked(raw_spinlock_t *lock)
{
	return __raw_spin_is_locked_asm(&lock->lock);
}

static inline void __raw_spin_lock(raw_spinlock_t *lock)
{
	__raw_spin_lock_asm(&lock->lock);
}

#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)

static inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
	return __raw_spin_trylock_asm(&lock->lock);
}

static inline void __raw_spin_unlock(raw_spinlock_t *lock)
{
	__raw_spin_unlock_asm(&lock->lock);
}

static inline void __raw_spin_unlock_wait(raw_spinlock_t *lock)
{
	while (__raw_spin_is_locked(lock))
		cpu_relax();
}

static inline int __raw_read_can_lock(raw_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) > 0;
}

static inline int __raw_write_can_lock(raw_rwlock_t *rw)
{
	return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
}

static inline void __raw_read_lock(raw_rwlock_t *rw)
{
	__raw_read_lock_asm(&rw->lock);
}

static inline int __raw_read_trylock(raw_rwlock_t *rw)
{
	return __raw_read_trylock_asm(&rw->lock);
}

static inline void __raw_read_unlock(raw_rwlock_t *rw)
{
	__raw_read_unlock_asm(&rw->lock);
}

static inline void __raw_write_lock(raw_rwlock_t *rw)
{
	__raw_write_lock_asm(&rw->lock);
}

static inline int __raw_write_trylock(raw_rwlock_t *rw)
{
	return __raw_write_trylock_asm(&rw->lock);
}

static inline void __raw_write_unlock(raw_rwlock_t *rw)
{
	__raw_write_unlock_asm(&rw->lock);
}

#define _raw_spin_relax(lock)	cpu_relax()
#define _raw_read_relax(lock)	cpu_relax()
#define _raw_write_relax(lock)	cpu_relax()

#endif /* !__BFIN_SPINLOCK_H */
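
The header only forwards to assembly helpers, so the intended semantics are easier to see in isolation. Below is a minimal user-space sketch of the same spinlock calling convention (is_locked / lock / trylock / unlock) built on C11 atomics; the demo_* names are hypothetical, and the atomic-exchange loop merely stands in for the Blackfin __raw_*_asm routines, so this illustrates the interface rather than the kernel implementation.

#include <stdatomic.h>
#include <stdio.h>

/* Hypothetical user-space analogue of raw_spinlock_t: 0 = unlocked, 1 = held. */
typedef struct {
	atomic_int lock;
} demo_spinlock_t;

static inline int demo_spin_is_locked(demo_spinlock_t *l)
{
	return atomic_load_explicit(&l->lock, memory_order_relaxed) != 0;
}

static inline void demo_spin_lock(demo_spinlock_t *l)
{
	/* Spin until the exchange observes the unlocked (0) state. */
	while (atomic_exchange_explicit(&l->lock, 1, memory_order_acquire))
		;	/* a real port would call cpu_relax() here */
}

static inline int demo_spin_trylock(demo_spinlock_t *l)
{
	/* Non-zero on success, mirroring __raw_spin_trylock(). */
	return !atomic_exchange_explicit(&l->lock, 1, memory_order_acquire);
}

static inline void demo_spin_unlock(demo_spinlock_t *l)
{
	atomic_store_explicit(&l->lock, 0, memory_order_release);
}

int main(void)
{
	demo_spinlock_t l = { .lock = 0 };

	demo_spin_lock(&l);
	printf("locked: %d, trylock while held: %d\n",
	       demo_spin_is_locked(&l), demo_spin_trylock(&l));
	demo_spin_unlock(&l);
	printf("locked after unlock: %d\n", demo_spin_is_locked(&l));
	return 0;
}

Built with a C11 compiler this prints "locked: 1, trylock while held: 0" and then "locked after unlock: 0". The reader/writer side of the header follows the usual biased-counter scheme, which the checks above reflect: rw->lock starts at RW_LOCK_BIAS, each reader decrements it (so __raw_read_can_lock() tests for a positive value), and a writer claims the entire bias (so __raw_write_can_lock() tests for exactly RW_LOCK_BIAS).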