include/asm-generic/local64.h

#ifndef _ASM_GENERIC_LOCAL64_H
#define _ASM_GENERIC_LOCAL64_H

#include <linux/percpu.h>
#include <asm/types.h>

/*
 * A signed long type for operations which are atomic for a single CPU.
 * Usually used in combination with per-cpu variables.
 *
 * This is the default implementation, which uses atomic64_t. Which is
 * rather pointless. The whole point behind local64_t is that some processors
 * can perform atomic adds and subtracts in a manner which is atomic wrt IRQs
 * running on this CPU. local64_t allows exploitation of such capabilities.
 */
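
/*
 * Usage sketch (illustrative only, not part of this header): a per-cpu
 * event counter, the kind of thing perf keeps in local64_t.  The variable
 * and function names below are hypothetical.
 *
 *	#include <linux/percpu.h>
 *	#include <asm/local64.h>
 *
 *	static DEFINE_PER_CPU(local64_t, hyp_events) = LOCAL64_INIT(0);
 *
 *	static void hyp_count_event(void)
 *	{
 *		// Atomic wrt IRQs on this CPU only; cheap on architectures
 *		// with a native local64 implementation.
 *		local64_inc(this_cpu_ptr(&hyp_events));
 *	}
 *
 *	static s64 hyp_read_events(int cpu)
 *	{
 *		return local64_read(per_cpu_ptr(&hyp_events, cpu));
 *	}
 */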

/* Implement in terms of atomics. */

#if BITS_PER_LONG == 64

#include <asm/local.h>

typedef struct {
	local_t a;
} local64_t;

#define LOCAL64_INIT(i)	{ LOCAL_INIT(i) }

#define local64_read(l)		local_read(&(l)->a)
#define local64_set(l, i)	local_set((&(l)->a), (i))
#define local64_inc(l)		local_inc(&(l)->a)
#define local64_dec(l)		local_dec(&(l)->a)
#define local64_add(i, l)	local_add((i), (&(l)->a))
#define local64_sub(i, l)	local_sub((i), (&(l)->a))

#define local64_sub_and_test(i, l) local_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l) local_dec_and_test(&(l)->a)
#define local64_inc_and_test(l) local_inc_and_test(&(l)->a)
#define local64_add_negative(i, l) local_add_negative((i), (&(l)->a))
#define local64_add_return(i, l) local_add_return((i), (&(l)->a))
#define local64_sub_return(i, l) local_sub_return((i), (&(l)->a))
#define local64_inc_return(l)	local_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n) local_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)	local_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u) local_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l)	local_inc_not_zero(&(l)->a)
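
/*
 * Sketch of a local64_cmpxchg() retry loop (illustrative only; the
 * helper name is hypothetical): bump a counter but clamp it at a
 * ceiling.  On failure the cmpxchg returns the value actually seen,
 * which seeds the next attempt.
 *
 *	static bool hyp_inc_below(local64_t *l, s64 ceiling)
 *	{
 *		s64 old = local64_read(l);
 *
 *		for (;;) {
 *			s64 seen;
 *
 *			if (old >= ceiling)
 *				return false;	// already at the limit
 *			seen = local64_cmpxchg(l, old, old + 1);
 *			if (seen == old)
 *				return true;	// our update won
 *			old = seen;		// lost a race; retry
 *		}
 *	}
 */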

/* Non-atomic variants, i.e. preemption disabled and won't be touched
 * in interrupt, etc.  Some archs can optimize this case well; see the
 * usage sketch after these definitions. */
#define __local64_inc(l)	local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)	local64_set((l), local64_read(l) - 1)
#define __local64_add(i, l)	local64_set((l), local64_read(l) + (i))
#define __local64_sub(i, l)	local64_set((l), local64_read(l) - (i))
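
/*
 * Usage sketch for the non-atomic variants (illustrative only; names
 * are hypothetical): __local64_add() is safe here because preemption
 * is disabled and the counter is never touched from interrupt context.
 *
 *	static DEFINE_PER_CPU(local64_t, hyp_stat);
 *
 *	static void hyp_bump_stat(s64 delta)
 *	{
 *		preempt_disable();
 *		__local64_add(delta, this_cpu_ptr(&hyp_stat));
 *		preempt_enable();
 *	}
 */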

#else /* BITS_PER_LONG != 64 */

#include <linux/atomic.h>

/* Don't use typedef: don't want them to be mixed with atomic_t's. */
typedef struct {
	atomic64_t a;
} local64_t;

#define LOCAL64_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local64_read(l)		atomic64_read(&(l)->a)
#define local64_set(l, i)	atomic64_set((&(l)->a), (i))
#define local64_inc(l)		atomic64_inc(&(l)->a)
#define local64_dec(l)		atomic64_dec(&(l)->a)
#define local64_add(i, l)	atomic64_add((i), (&(l)->a))
#define local64_sub(i, l)	atomic64_sub((i), (&(l)->a))

#define local64_sub_and_test(i, l) atomic64_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l) atomic64_dec_and_test(&(l)->a)
#define local64_inc_and_test(l) atomic64_inc_and_test(&(l)->a)
#define local64_add_negative(i, l) atomic64_add_negative((i), (&(l)->a))
#define local64_add_return(i, l) atomic64_add_return((i), (&(l)->a))
#define local64_sub_return(i, l) atomic64_sub_return((i), (&(l)->a))
#define local64_inc_return(l)	atomic64_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n) atomic64_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)	atomic64_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u) atomic64_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l)	atomic64_inc_not_zero(&(l)->a)

/* Non-atomic variants, i.e. preemption disabled and won't be touched
 * in interrupt, etc.  Some archs can optimize this case well. */
#define __local64_inc(l)	local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)	local64_set((l), local64_read(l) - 1)
#define __local64_add(i, l)	local64_set((l), local64_read(l) + (i))
#define __local64_sub(i, l)	local64_set((l), local64_read(l) - (i))

#endif /* BITS_PER_LONG != 64 */

#endif /* _ASM_GENERIC_LOCAL64_H */