include/asm-sparc/semaphore.h
#ifndef _SPARC_SEMAPHORE_H
#define _SPARC_SEMAPHORE_H

/* Dinky, good for nothing, just barely irq safe, Sparc semaphores. */

#ifdef __KERNEL__

#include <asm/atomic.h>
#include <linux/wait.h>
#include <linux/rwsem.h>

struct semaphore {
	atomic24_t count;		/* 24-bit atomic count (sparc32 atomic24_t) */
	int sleepers;			/* bookkeeping for the contention slow path */
	wait_queue_head_t wait;		/* tasks sleeping in __down*() */
};
#define __SEMAPHORE_INITIALIZER(name, n)				\
{									\
	.count		= ATOMIC24_INIT(n),				\
	.sleepers	= 0,						\
	.wait		= __WAIT_QUEUE_HEAD_INITIALIZER((name).wait)	\
}

#define __DECLARE_SEMAPHORE_GENERIC(name,count) \
	struct semaphore name = __SEMAPHORE_INITIALIZER(name,count)

#define DECLARE_MUTEX(name) __DECLARE_SEMAPHORE_GENERIC(name,1)
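
/*
 * Usage sketch (a minimal, hypothetical example; the foo_* names are
 * not part of this header): a caller typically declares a mutex-style
 * semaphore at file scope and brackets the critical section with
 * down()/up():
 *
 *	static DECLARE_MUTEX(foo_sem);
 *
 *	static void foo_do_work(void)
 *	{
 *		down(&foo_sem);		// may sleep, see might_sleep()
 *		// ... touch state shared with other foo users ...
 *		up(&foo_sem);
 *	}
 *
 * A semaphore embedded in a dynamically allocated object is set up at
 * run time with sema_init(), or with init_MUTEX()/init_MUTEX_LOCKED()
 * for the common initial counts of 1 and 0.
 */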
static inline void sema_init (struct semaphore *sem, int val)
{
	atomic24_set(&sem->count, val);
	sem->sleepers = 0;
	init_waitqueue_head(&sem->wait);
}

static inline void init_MUTEX (struct semaphore *sem)
{
	sema_init(sem, 1);
}

static inline void init_MUTEX_LOCKED (struct semaphore *sem)
{
	sema_init(sem, 0);
}
/* Out-of-line contention slow paths; the inline fast paths below call these. */
extern void __down(struct semaphore * sem);
extern int __down_interruptible(struct semaphore * sem);
extern int __down_trylock(struct semaphore * sem);
extern void __up(struct semaphore * sem);
static inline void down(struct semaphore * sem)
{
	register volatile int *ptr asm("g1");
	register int increment asm("g2");

	might_sleep();

	ptr = &(sem->count.counter);
	increment = 1;

	/*
	 * Fast path: ___atomic24_sub atomically decrements the count.
	 * If the result went negative, the out-of-line code in
	 * ".subsection 2" saves the scratch globals and calls __down()
	 * to put the caller to sleep.
	 */
	__asm__ __volatile__(
	"mov	%%o7, %%g4\n\t"
	"call	___atomic24_sub\n\t"
	" add	%%o7, 8, %%o7\n\t"
	"tst	%%g2\n\t"
	"bl	2f\n\t"
	" nop\n"
	"1:\n\t"
	".subsection 2\n"
	"2:\n\t"
	"save	%%sp, -64, %%sp\n\t"
	"mov	%%g1, %%l1\n\t"
	"mov	%%g5, %%l5\n\t"
	"call	%3\n\t"
	" mov	%%g1, %%o0\n\t"
	"mov	%%l1, %%g1\n\t"
	"ba	1b\n\t"
	" restore %%l5, %%g0, %%g5\n\t"
	".previous\n"
	: "=&r" (increment)
	: "0" (increment), "r" (ptr), "i" (__down)
	: "g3", "g4", "g7", "memory", "cc");
}
/* Like down(), but returns 0 on success and -EINTR if interrupted by a signal. */
static inline int down_interruptible(struct semaphore * sem)
{
	register volatile int *ptr asm("g1");
	register int increment asm("g2");

	might_sleep();

	ptr = &(sem->count.counter);
	increment = 1;

	__asm__ __volatile__(
	"mov	%%o7, %%g4\n\t"
	"call	___atomic24_sub\n\t"
	" add	%%o7, 8, %%o7\n\t"
	"tst	%%g2\n\t"
	"bl	2f\n\t"
	" clr	%%g2\n"
	"1:\n\t"
	".subsection 2\n"
	"2:\n\t"
	"save	%%sp, -64, %%sp\n\t"
	"mov	%%g1, %%l1\n\t"
	"mov	%%g5, %%l5\n\t"
	"call	%3\n\t"
	" mov	%%g1, %%o0\n\t"
	"mov	%%l1, %%g1\n\t"
	"mov	%%l5, %%g5\n\t"
	"ba	1b\n\t"
	" restore %%o0, %%g0, %%g2\n\t"
	".previous\n"
	: "=&r" (increment)
	: "0" (increment), "r" (ptr), "i" (__down_interruptible)
	: "g3", "g4", "g7", "memory", "cc");

	return increment;
}
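
/*
 * Usage sketch (illustrative, hypothetical foo_sem): callers that can
 * be interrupted by signals check the return value and back out,
 * commonly with -ERESTARTSYS:
 *
 *	if (down_interruptible(&foo_sem))
 *		return -ERESTARTSYS;
 *	// ... critical section ...
 *	up(&foo_sem);
 */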
/* Non-blocking: returns 0 if the semaphore was acquired, non-zero if not. */
static inline int down_trylock(struct semaphore * sem)
{
	register volatile int *ptr asm("g1");
	register int increment asm("g2");

	ptr = &(sem->count.counter);
	increment = 1;

	__asm__ __volatile__(
	"mov	%%o7, %%g4\n\t"
	"call	___atomic24_sub\n\t"
	" add	%%o7, 8, %%o7\n\t"
	"tst	%%g2\n\t"
	"bl	2f\n\t"
	" clr	%%g2\n"
	"1:\n\t"
	".subsection 2\n"
	"2:\n\t"
	"save	%%sp, -64, %%sp\n\t"
	"mov	%%g1, %%l1\n\t"
	"mov	%%g5, %%l5\n\t"
	"call	%3\n\t"
	" mov	%%g1, %%o0\n\t"
	"mov	%%l1, %%g1\n\t"
	"mov	%%l5, %%g5\n\t"
	"ba	1b\n\t"
	" restore %%o0, %%g0, %%g2\n\t"
	".previous\n"
	: "=&r" (increment)
	: "0" (increment), "r" (ptr), "i" (__down_trylock)
	: "g3", "g4", "g7", "memory", "cc");

	return increment;
}
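
/*
 * Usage sketch (illustrative, hypothetical foo_sem): down_trylock()
 * never sleeps, so it can be used where blocking is not allowed; a
 * non-zero return means the semaphore was NOT acquired:
 *
 *	if (down_trylock(&foo_sem))
 *		return -EBUSY;		// someone else holds it
 *	// ... critical section ...
 *	up(&foo_sem);
 */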
static inline void up(struct semaphore * sem)
{
	register volatile int *ptr asm("g1");
	register int increment asm("g2");

	ptr = &(sem->count.counter);
	increment = 1;

	/*
	 * Fast path: ___atomic24_add atomically increments the count.
	 * If the result is still <= 0 there are sleepers, so the
	 * out-of-line code calls __up() to wake one of them.
	 */
	__asm__ __volatile__(
	"mov	%%o7, %%g4\n\t"
	"call	___atomic24_add\n\t"
	" add	%%o7, 8, %%o7\n\t"
	"tst	%%g2\n\t"
	"ble	2f\n\t"
	" nop\n"
	"1:\n\t"
	".subsection 2\n"
	"2:\n\t"
	"save	%%sp, -64, %%sp\n\t"
	"mov	%%g1, %%l1\n\t"
	"mov	%%g5, %%l5\n\t"
	"call	%3\n\t"
	" mov	%%g1, %%o0\n\t"
	"mov	%%l1, %%g1\n\t"
	"ba	1b\n\t"
	" restore %%l5, %%g0, %%g5\n\t"
	".previous\n"
	: "=&r" (increment)
	: "0" (increment), "r" (ptr), "i" (__up)
	: "g3", "g4", "g7", "memory", "cc");
}
#endif /* __KERNEL__ */

#endif /* !(_SPARC_SEMAPHORE_H) */