#ifndef __M32R_LOCAL_H
#define __M32R_LOCAL_H

/*
 *  linux/include/asm-m32r/local.h
 *
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 *    Copyright (C) 2007  Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
 */

#include <linux/percpu.h>
#include <asm/assembler.h>
#include <asm/local.h>
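
/*
 * Note on the asm below: DCACHE_CLEAR() comes from <asm/assembler.h>.  It
 * appears to expand to nothing on most chips and only emits a cache-purge
 * sequence (using the named scratch register) when CONFIG_CHIP_M32700_TS1
 * is set, which is why "r4"/"r5" show up as extra clobbers under that
 * config.
 */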

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } local_t;

#define LOCAL_INIT(i)	{ (i) }

/**
 * local_read - read local variable
 * @l: pointer of type local_t
 *
 * Atomically reads the value of @l.
 */
#define local_read(l)	((l)->counter)

/**
 * local_set - set local variable
 * @l: pointer of type local_t
 * @i: required value
 *
 * Atomically sets the value of @l to @i.
 */
#define local_set(l, i)	(((l)->counter) = (i))
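
/*
 * Usage sketch (illustrative, not part of the original header): local_t is
 * intended for per-cpu counters, along the lines of the pattern described
 * in Documentation/local_ops.txt:
 *
 *	static DEFINE_PER_CPU(local_t, nr_events) = LOCAL_INIT(0);
 *
 *	local_inc(&get_cpu_var(nr_events));
 *	put_cpu_var(nr_events);
 *
 * DEFINE_PER_CPU/get_cpu_var/put_cpu_var come from <linux/percpu.h>;
 * nr_events is an invented name used only for the example.
 */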

/**
 * local_add_return - add long to local variable and return it
 * @i: long value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l and returns (@i + @l).
 */
static inline long local_add_return(long i, local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_add_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"add %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
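
/*
 * Example (illustrative, invented names): because local_add_return()
 * hands back the freshly stored value, a caller can update and test in
 * one step:
 *
 *	if (local_add_return(nbytes, &cpu_buf_used) > BUF_LIMIT)
 *		flush_this_cpu_buffer();
 */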

/**
 * local_sub_return - subtract long from local variable and return it
 * @i: long value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns (@l - @i).
 */
static inline long local_sub_return(long i, local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_sub_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"sub %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}

/**
 * local_add - add long to local variable
 * @i: long value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l.
 */
#define local_add(i, l)	((void) local_add_return((i), (l)))

/**
 * local_sub - subtract long from local variable
 * @i: long value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l.
 */
#define local_sub(i, l)	((void) local_sub_return((i), (l)))

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)
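
/*
 * Illustrative use (invented names): the *_and_test helpers suit
 * reference-count style teardown, where the last decrement triggers the
 * cleanup:
 *
 *	if (local_sub_and_test(1, &frag->refs))
 *		free_frag(frag);
 */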

/**
 * local_inc_return - increment local variable and return it
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1 and returns the result.
 */
static inline long local_inc_return(local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_inc_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"addi %0, #1;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}

/**
 * local_dec_return - decrement local variable and return it
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and returns the result.
 */
static inline long local_dec_return(local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_dec_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"addi %0, #-1;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}

/**
 * local_inc - increment local variable
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1.
 */
#define local_inc(l)	((void)local_inc_return(l))

/**
 * local_dec - decrement local variable
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1.
 */
#define local_dec(l)	((void)local_dec_return(l))

/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/**
 * local_dec_and_test - decrement and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define local_dec_and_test(l) (local_dec_return(l) == 0)

/**
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return((i), (l)) < 0)

#define local_cmpxchg(l, o, n)	(cmpxchg_local(&((l)->counter), (o), (n)))
#define local_xchg(l, new)	(xchg_local(&((l)->counter), (new)))
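
/*
 * cmpxchg_local()/xchg_local() are the CPU-local (not SMP-ordered)
 * variants of cmpxchg()/xchg(); local_add_unless() below is the usual
 * compare-and-swap retry loop built on top of local_cmpxchg().
 */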

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
static inline int local_add_unless(local_t *l, long a, long u)
{
	long c, old;
	c = local_read(l);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = local_cmpxchg((l), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
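
/*
 * Illustrative use (invented names): local_inc_not_zero() takes a
 * reference only while the count is still live, i.e. it refuses to
 * resurrect a counter that has already dropped to zero:
 *
 *	if (!local_inc_not_zero(&entry->refs))
 *		return -ENOENT;	/* entry already being torn down */
 */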

static inline void local_clear_mask(unsigned long mask, local_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_clear_mask	\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		"ld %0, @%1;		\n\t"
		"and %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}

static inline void local_set_mask(unsigned long mask, local_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_set_mask	\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		"ld %0, @%1;		\n\t"
		"or %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
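
/*
 * Illustrative use (invented names): the mask helpers treat the counter
 * as a bit field; note the argument order, mask first and the local_t
 * pointer second, and that neither helper returns a value:
 *
 *	local_set_mask(1UL << TX_PENDING_BIT, &cpu_state);
 *	local_clear_mask(1UL << TX_PENDING_BIT, &cpu_state);
 */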

/* Atomic operations are already serializing on m32r */
#define smp_mb__before_local_dec()	barrier()
#define smp_mb__after_local_dec()	barrier()
#define smp_mb__before_local_inc()	barrier()
#define smp_mb__after_local_inc()	barrier()

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */
#define __local_inc(l)		((l)->counter++)
#define __local_dec(l)		((l)->counter--)
#define __local_add(i, l)	((l)->counter += (i))
#define __local_sub(i, l)	((l)->counter -= (i))
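
/*
 * The __local_* forms above skip the irq save/restore entirely, so they
 * are presumably only safe when the caller already excludes any other
 * updater of the same counter (e.g. inside a local_irq_save() section or
 * an interrupt handler).  Sketch with invented names:
 *
 *	local_irq_save(flags);
 *	__local_add(len, &__get_cpu_var(tx_bytes));
 *	local_irq_restore(flags);
 */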

#endif /* __M32R_LOCAL_H */