#ifndef _ASM_X86_LOCAL_H
#define _ASM_X86_LOCAL_H

#include <linux/percpu.h>

#include <linux/atomic.h>
#include <asm/asm.h>

/* A local_t is a cpu-local counter: an atomic_long_t updated without a lock prefix. */
typedef struct {
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))
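
/*
 * Usage sketch (illustrative only, not part of this header): a per-cpu
 * event counter.  A local_t must only be modified by its owning cpu,
 * e.g. via get_cpu_var()/put_cpu_var() from linux/percpu.h:
 *
 *	static DEFINE_PER_CPU(local_t, nr_events) = LOCAL_INIT(0);
 *
 *	local_inc(&get_cpu_var(nr_events));
 *	put_cpu_var(nr_events);
 *	// summing local_read() over all cpus gives an approximate total
 */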

static inline void local_inc(local_t *l)
{
	asm volatile(_ASM_INC "%0"
		     : "+m" (l->a.counter));
}

static inline void local_dec(local_t *l)
{
	asm volatile(_ASM_DEC "%0"
		     : "+m" (l->a.counter));
}

static inline void local_add(long i, local_t *l)
{
	asm volatile(_ASM_ADD "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}

static inline void local_sub(long i, local_t *l)
{
	asm volatile(_ASM_SUB "%1,%0"
		     : "+m" (l->a.counter)
		     : "ir" (i));
}

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int local_sub_and_test(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_SUB "%2,%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * local_dec_and_test - decrement and test
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int local_dec_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_DEC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
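
/*
 * Typical pattern (illustrative; ctx and its fields are hypothetical):
 * the last decrement of a cpu-local pending count triggers completion:
 *
 *	if (local_dec_and_test(&ctx->pending))
 *		complete(&ctx->done);
 */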

/**
 * local_inc_and_test - increment and test
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int local_inc_and_test(local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_INC "%0; sete %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}
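
/*
 * Illustrative use (names hypothetical): bias the counter to -n so the
 * n-th increment is the one that returns true, i.e. the last expected
 * event has arrived:
 *
 *	local_set(&seen, -n);
 *	...
 *	if (local_inc_and_test(&seen))
 *		all_events_seen();
 */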

/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int local_add_negative(long i, local_t *l)
{
	unsigned char c;

	asm volatile(_ASM_ADD "%2,%0; sets %1"
		     : "+m" (l->a.counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
	long __i = i;
	/* xadd leaves the previous value in i; add back __i for the new value */
	asm volatile(_ASM_XADD "%0, %1;"
		     : "+r" (i), "+m" (l->a.counter)
		     : : "memory");
	return i + __i;
}

static inline long local_sub_return(long i, local_t *l)
{
	return local_add_return(-i, l);
}

#define local_inc_return(l)  (local_add_return(1, l))
#define local_dec_return(l)  (local_sub_return(1, l))
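
/*
 * Sketch (hypothetical names): the returned post-add value can reserve a
 * contiguous region in a cpu-local buffer without a lock:
 *
 *	long end = local_add_return(len, &buf->head);
 *	// bytes [end - len, end) now belong to this caller
 */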

#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
/* xchg always carries an implicit lock prefix, so local_xchg is fully atomic */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
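
/*
 * Sketch (hypothetical names): local_cmpxchg() supports lock-free
 * read-modify-write loops that stay safe against interrupts on this cpu,
 * e.g. clamping a counter to a limit:
 *
 *	long old, new;
 *	do {
 *		old = local_read(&v);
 *		new = min(old + delta, limit);
 *	} while (local_cmpxchg(&v, old, new) != old);
 */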

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read((l));					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
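
/*
 * Illustrative caller (entry and refs are hypothetical): the usual
 * "take a reference only if the object is still live" idiom:
 *
 *	if (!local_inc_not_zero(&entry->refs))
 *		return NULL;	// already dead, don't touch it
 */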

/* On x86_32, these are no better than the atomic variants.
 * On x86-64 these are better than the atomic variants on SMP kernels
 * because they don't use a lock prefix.
 */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i, l)	local_add((i), (l))
#define __local_sub(i, l)	local_sub((i), (l))

#endif /* _ASM_X86_LOCAL_H */