/*
 * include/asm-sh/spinlock.h
 *
 * Copyright (C) 2002, 2003 Paul Mundt
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 */
10 #ifndef __ASM_SH_SPINLOCK_H
11 #define __ASM_SH_SPINLOCK_H
13 #include <asm/atomic.h>
/*
 * Your basic SMP spinlocks, allowing only a single CPU anywhere.
 *
 * NOTE(review): the typedef wrapper was truncated in this copy; it is
 * reconstructed here from its uses below (SPIN_LOCK_UNLOCKED initializes it
 * with "{ 0 }" and the accessors read "(x)->lock").
 */
typedef struct {
	/* 0 == free; any non-zero value means held (set via test-and-set). */
	volatile unsigned long lock;
} spinlock_t;
/* Static initializer: a lock word of 0 is the unlocked state. */
#define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 }
/* Runtime (re)initialization of a spinlock to the unlocked state. */
#define spin_lock_init(x) do { *(x) = SPIN_LOCK_UNLOCKED; } while(0)
#define spin_is_locked(x) ((x)->lock != 0)
/* Spin (without acquiring) until the lock is observed free. */
#define spin_unlock_wait(x) do { barrier(); } while (spin_is_locked(x))
/* The saved IRQ flags are ignored on this architecture. */
#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)
/*
 * Simple spin lock operations. There are two variants, one clears IRQs
 * on the local processor, one does not.
 *
 * We make no fairness assumptions. They have a cost.
 */
/*
 * Acquire @lock, spinning until it becomes free.
 *
 * NOTE(review): the inline-asm spin body is missing from this copy —
 * presumably a test-and-set busy-wait loop (cf. _raw_spin_trylock below,
 * which uses test_and_set_bit).  Restore it from the original file; do not
 * guess the asm.
 */
static inline void _raw_spin_lock(spinlock_t
*lock
)
__asm__
__volatile__ (
/*
 * Release @lock.
 *
 * NOTE(review): the store that actually clears lock->lock, along with the
 * closing #endif and brace, appears truncated in this copy — verify against
 * the original file.
 */
static inline void _raw_spin_unlock(spinlock_t
*lock
)
#ifdef CONFIG_DEBUG_SPINLOCK
/* Releasing a lock nobody holds is a bug worth trapping early. */
BUG_ON(!spin_is_locked(lock
));
/* One-shot acquire attempt; non-zero result means the lock was taken. */
#define _raw_spin_trylock(x) (!test_and_set_bit(0, &(x)->lock))
/*
 * Read-write spinlocks, allowing multiple readers but only one writer.
 *
 * NOTE! it is quite common to have readers in interrupts but no interrupt
 * writers. For those circumstances we can "mix" irq-safe locks - any writer
 * needs to get an irq-safe write-lock, but readers can get non-irqsafe
 * read-locks.
 */
#define RW_LOCK_BIAS 0x01000000
/* Unlocked state: internal spinlock free, counter sitting at the bias. */
#define RW_LOCK_UNLOCKED (rwlock_t) { { 0 }, { RW_LOCK_BIAS } }
#define rwlock_init(x) do { *(x) = RW_LOCK_UNLOCKED; } while (0)
/* Held (by reader or writer) whenever the counter differs from the bias. */
#define rwlock_is_locked(x) (atomic_read(&(x)->counter) != RW_LOCK_BIAS)
78 static inline void _raw_read_lock(rwlock_t
*rw
)
80 _raw_spin_lock(&rw
->lock
);
82 atomic_inc(&rw
->counter
);
84 _raw_spin_unlock(&rw
->lock
);
87 static inline void _raw_read_unlock(rwlock_t
*rw
)
89 _raw_spin_lock(&rw
->lock
);
91 atomic_dec(&rw
->counter
);
93 _raw_spin_unlock(&rw
->lock
);
96 static inline void _raw_write_lock(rwlock_t
*rw
)
98 _raw_spin_lock(&rw
->lock
);
99 atomic_set(&rw
->counter
, -1);
102 static inline void _raw_write_unlock(rwlock_t
*rw
)
104 atomic_set(&rw
->counter
, 0);
105 _raw_spin_unlock(&rw
->lock
);
108 static inline int _raw_write_trylock(rwlock_t
*rw
)
110 if (atomic_sub_and_test(RW_LOCK_BIAS
, &rw
->counter
))
113 atomic_add(RW_LOCK_BIAS
, &rw
->counter
);
118 #endif /* __ASM_SH_SPINLOCK_H */