1 #ifndef _ALPHA_SPINLOCK_H
2 #define _ALPHA_SPINLOCK_H
4 #include <asm/system.h>
5 #include <linux/kernel.h>
6 #include <asm/current.h>
8 #define DEBUG_SPINLOCK 1
/*
 * Simple spin lock operations.  There are two variants, one clears IRQ's
 * on the local processor, one does not.
 *
 * We make no fairness assumptions.  They have a cost.
 */
/*
 * The lock word proper is 'lock' (nonzero == held).  The remaining
 * fields exist only for the spinlock debugging code: they are the
 * fields initialized by SPIN_LOCK_UNLOCKED / spin_lock_init below and
 * reported by spin_lock_own.
 */
typedef struct {
	volatile unsigned int lock /*__attribute__((aligned(32))) */;
#if DEBUG_SPINLOCK
	int on_cpu;			/* CPU that last took the lock (-1 = none) */
	int line_no;			/* NOTE(review): presumably the __LINE__ passed to
					   debug_spin_lock — confirm against lock.c */
	void *previous;			/* return address of the last acquirer */
	struct task_struct * task;	/* task that last took the lock */
	const char *base_file;		/* __BASE_FILE__ of the last acquirer */
#endif
} spinlock_t;
/*
 * Initializers.  The debug variant fills in the extra bookkeeping
 * fields (on_cpu = -1 meaning "not held by any CPU"); the non-debug
 * variant only needs to clear the lock word.  Without the #if/#else
 * the second pair of definitions would silently redefine the first.
 */
#if DEBUG_SPINLOCK
#define SPIN_LOCK_UNLOCKED (spinlock_t) {0, -1, 0, 0, 0, 0}
#define spin_lock_init(x) \
	((x)->lock = 0, (x)->on_cpu = -1, (x)->previous = 0, (x)->task = 0)
#else
#define SPIN_LOCK_UNLOCKED (spinlock_t) { 0 }
#define spin_lock_init(x) ((x)->lock = 0)
#endif
/* Nonzero iff the lock is currently held.  */
#define spin_is_locked(x) ((x)->lock != 0)

/*
 * Spin until the lock is released, without trying to take it.
 * barrier() — presumably a compiler barrier — keeps (x)->lock from
 * being cached in a register across iterations.
 */
#define spin_unlock_wait(x) ({ do { barrier(); } while ((x)->lock); })
/*
 * Oversized dummy type used in the asm memory constraints below
 * ("=m" (__dummy_lock(lock))): casting the lock pointer to this type
 * tells gcc the asm may read/write the whole lock object, not just
 * its first word, so it cannot cache the lock contents around the asm.
 */
typedef struct { unsigned long a[100]; } __dummy_lock_t;

#define __dummy_lock(lock) (*(__dummy_lock_t *)(lock))
45 extern void spin_unlock(spinlock_t
* lock
);
46 extern void debug_spin_lock(spinlock_t
* lock
, const char *, int);
47 extern int debug_spin_trylock(spinlock_t
* lock
, const char *, int);
49 #define spin_lock(LOCK) debug_spin_lock(LOCK, __BASE_FILE__, __LINE__)
50 #define spin_trylock(LOCK) debug_spin_trylock(LOCK, __BASE_FILE__, __LINE__)
52 #define spin_lock_own(LOCK, LOCATION) \
54 if (!((LOCK)->lock && (LOCK)->on_cpu == smp_processor_id())) \
55 printk("%s: called on %d from %p but lock %s on %d\n", \
56 LOCATION, smp_processor_id(), \
57 __builtin_return_address(0), \
58 (LOCK)->lock ? "taken" : "freed", (LOCK)->on_cpu); \
61 static inline void spin_unlock(spinlock_t
* lock
)
67 static inline void spin_lock(spinlock_t
* lock
)
71 /* Use sub-sections to put the actual loop at the end
72 of this object file's text section so as to perfect
81 ".section .text2,\"ax\"\n"
86 : "=r" (tmp
), "=m" (__dummy_lock(lock
))
87 : "m"(__dummy_lock(lock
)));
90 #define spin_trylock(lock) (!test_and_set_bit(0,(lock)))
91 #define spin_lock_own(LOCK, LOCATION) ((void)0)
92 #endif /* DEBUG_SPINLOCK */
94 /***********************************************************/
/*
 * Reader-writer lock: bit 0 is the exclusive write lock, the upper
 * 31 bits hold the reader count (manipulated by the asm below).
 */
typedef struct {
	volatile int write_lock:1, read_counter:31;
} /*__attribute__((aligned(32)))*/ rwlock_t;

#define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0 }
103 extern void write_lock(rwlock_t
* lock
);
104 extern void read_lock(rwlock_t
* lock
);
106 static inline void write_lock(rwlock_t
* lock
)
110 __asm__
__volatile__(
117 ".section .text2,\"ax\"\n"
122 : "=m" (__dummy_lock(lock
)), "=&r" (regx
)
123 : "0" (__dummy_lock(lock
))
127 static inline void read_lock(rwlock_t
* lock
)
131 __asm__
__volatile__(
138 ".section .text2,\"ax\"\n"
143 : "=m" (__dummy_lock(lock
)), "=&r" (regx
)
144 : "m" (__dummy_lock(lock
))
147 #endif /* DEBUG_RWLOCK */
149 static inline void write_unlock(rwlock_t
* lock
)
152 *(volatile int *)lock
= 0;
155 static inline void read_unlock(rwlock_t
* lock
)
158 __asm__
__volatile__(
163 ".section .text2,\"ax\"\n"
166 : "=m" (__dummy_lock(lock
)), "=&r" (regx
)
167 : "m" (__dummy_lock(lock
)));
170 #endif /* _ALPHA_SPINLOCK_H */