/*
 * include/asm-v850/bitops.h -- Bit operations
 *
 * Copyright (C) 2001,02,03,04,05  NEC Electronics Corporation
 * Copyright (C) 2001,02,03,04,05  Miles Bader <miles@gnu.org>
 * Copyright (C) 1992  Linus Torvalds.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file COPYING in the main directory of this
 * archive for more details.
 */
13 #ifndef __V850_BITOPS_H__
14 #define __V850_BITOPS_H__
16 #ifndef _LINUX_BITOPS_H
17 #error only <linux/bitops.h> can be included directly
18 #endif
20 #include <linux/compiler.h> /* unlikely */
21 #include <asm/byteorder.h> /* swab32 */
22 #include <asm/system.h> /* interrupt enable/disable */
25 #ifdef __KERNEL__
27 #include <asm-generic/bitops/ffz.h>
/*
 * The __ functions are not atomic
 */
33 /* In the following constant-bit-op macros, a "g" constraint is used when
34 we really need an integer ("i" constraint). This is to avoid
35 warnings/errors from the compiler in the case where the associated
36 operand _isn't_ an integer, and shouldn't produce bogus assembly because
37 use of that form is protected by a guard statement that checks for
38 constants, and should otherwise be removed by the optimizer. This
39 _usually_ works -- however, __builtin_constant_p returns true for a
40 variable with a known constant value too, and unfortunately gcc will
41 happily put the variable in a register and use the register for the "g"
42 constraint'd asm operand. To avoid the latter problem, we add a
43 constant offset to the operand and subtract it back in the asm code;
44 forcing gcc to do arithmetic on the value is usually enough to get it
45 to use a real constant value. This is horrible, and ultimately
46 unreliable too, but it seems to work for now (hopefully gcc will offer
47 us more control in the future, so we can do a better job). */
/* Constant-bit-number form of a single-bit op: the bit offset within the
   byte becomes an immediate operand (via the "g"-constraint + 0x123 trick
   explained above), and the target byte is addressed as a memory operand.  */
#define __const_bit_op(op, nr, addr)					      \
  ({ __asm__ (op " (%0 - 0x123), %1"					      \
	      :: "g" (((nr) & 0x7) + 0x123),				      \
		 "m" (*((char *)(addr) + ((nr) >> 3)))			      \
	      : "memory"); })
/* Variable-bit-number form: the bit offset within the byte and the byte
   address are both passed in registers.  */
#define __var_bit_op(op, nr, addr)					      \
  ({ int __nr = (nr);							      \
     __asm__ (op " %0, [%1]"						      \
	      :: "r" (__nr & 0x7),					      \
		 "r" ((char *)(addr) + (__nr >> 3))			      \
	      : "memory"); })
/* Perform single-bit instruction OP on bit NR of ADDR, using the
   immediate-operand form when NR is a compile-time constant small enough
   (<= 0x7FFFF) for that encoding, and the register form otherwise.  */
#define __bit_op(op, nr, addr)						      \
  ((__builtin_constant_p (nr) && (unsigned)(nr) <= 0x7FFFF)		      \
   ? __const_bit_op (op, nr, addr)					      \
   : __var_bit_op (op, nr, addr))
/* Non-atomic single-bit set/clear/invert, mapped directly onto the v850
   set1/clr1/not1 instructions.  */
#define __set_bit(nr, addr)		__bit_op ("set1", nr, addr)
#define __clear_bit(nr, addr)		__bit_op ("clr1", nr, addr)
#define __change_bit(nr, addr)		__bit_op ("not1", nr, addr)
/* The bit instructions used by the `non-atomic' variants are actually
   atomic on this CPU, so the atomic entry points can simply alias them.  */
#define set_bit __set_bit
#define clear_bit __clear_bit
#define change_bit __change_bit
/* Constant-bit-number test-and-op: tests the bit (tst1 + setf nz captures
   its previous value in the result) and then applies OP to it.  Returns
   non-zero if the bit was previously set.
   [Fix: restore the statement-expression closer `})` lost in this copy.]  */
#define __const_tns_bit_op(op, nr, addr)				      \
  ({ int __tns_res;							      \
     __asm__ __volatile__ (						      \
	"tst1 (%1 - 0x123), %2; setf nz, %0; " op " (%1 - 0x123), %2"	      \
	: "=&r" (__tns_res)						      \
	: "g" (((nr) & 0x7) + 0x123),					      \
	  "m" (*((char *)(addr) + ((nr) >> 3)))				      \
	: "memory");							      \
     __tns_res;								      \
  })
/* Variable-bit-number test-and-op: bit offset and byte address in
   registers; tests the bit, applies OP, and returns the bit's previous
   value (non-zero if it was set).
   [Fix: restore the statement-expression closer `})` lost in this copy.]  */
#define __var_tns_bit_op(op, nr, addr)					      \
  ({ int __nr = (nr);							      \
     int __tns_res;							      \
     __asm__ __volatile__ (						      \
	"tst1 %1, [%2]; setf nz, %0; " op " %1, [%2]"			      \
	: "=&r" (__tns_res)						      \
	: "r" (__nr & 0x7),						      \
	  "r" ((char *)(addr) + (__nr >> 3))				      \
	: "memory");							      \
     __tns_res;								      \
  })
/* Dispatch between the constant- and variable-bit-number test-and-op
   forms, using the same constant/range test as __bit_op.  */
#define __tns_bit_op(op, nr, addr)					      \
  ((__builtin_constant_p (nr) && (unsigned)(nr) <= 0x7FFFF)		      \
   ? __const_tns_bit_op (op, nr, addr)					      \
   : __var_tns_bit_op (op, nr, addr))
/* Like __tns_bit_op, but made atomic with respect to interrupts by
   disabling them around the multi-instruction test-and-op sequence.
   [Fix: restore the statement-expression closer `})` lost in this copy.]  */
#define __tns_atomic_bit_op(op, nr, addr)				      \
  ({ int __tns_atomic_res, __tns_atomic_flags;				      \
     local_irq_save (__tns_atomic_flags);				      \
     __tns_atomic_res = __tns_bit_op (op, nr, addr);			      \
     local_irq_restore (__tns_atomic_flags);				      \
     __tns_atomic_res;							      \
  })
/* Test-and-modify operations.  The __ (non-atomic) variants use the raw
   tst1/setf/op sequence; the atomic variants additionally disable
   interrupts around that sequence (see __tns_atomic_bit_op).  */
#define __test_and_set_bit(nr, addr)	__tns_bit_op ("set1", nr, addr)
#define test_and_set_bit(nr, addr)	__tns_atomic_bit_op ("set1", nr, addr)

#define __test_and_clear_bit(nr, addr)	__tns_bit_op ("clr1", nr, addr)
#define test_and_clear_bit(nr, addr)	__tns_atomic_bit_op ("clr1", nr, addr)

#define __test_and_change_bit(nr, addr)	__tns_bit_op ("not1", nr, addr)
#define test_and_change_bit(nr, addr)	__tns_atomic_bit_op ("not1", nr, addr)
/* Constant-bit-number bit test: yields non-zero iff the bit is set.
   Read-only, hence no "memory" clobber.
   [Fix: restore the statement-expression closer `})` lost in this copy.]  */
#define __const_test_bit(nr, addr)					      \
  ({ int __test_bit_res;						      \
     __asm__ __volatile__ ("tst1 (%1 - 0x123), %2; setf nz, %0"		      \
			   : "=r" (__test_bit_res)			      \
			   : "g" (((nr) & 0x7) + 0x123),		      \
			     "m" (*((const char *)(addr) + ((nr) >> 3))));    \
     __test_bit_res;							      \
  })
/* Variable-bit-number bit test: returns non-zero iff bit NR at ADDR is
   set.  Read-only, hence no "memory" clobber.  (Note: `addr + (nr >> 3)'
   is void-pointer arithmetic, a GCC extension used throughout the kernel.)
   [Fix: restore the function's opening and closing braces lost in this
   copy.]  */
static inline int __test_bit (int nr, const void *addr)
{
	int res;
	__asm__ __volatile__ ("tst1 %1, [%2]; setf nz, %0"
			      : "=r" (res)
			      : "r" (nr & 0x7), "r" (addr + (nr >> 3)));
	return res;
}
/* Return non-zero iff bit NR at ADDR is set, dispatching to the
   immediate-operand form when NR is a suitably small compile-time
   constant.  */
#define test_bit(nr,addr)						      \
  ((__builtin_constant_p (nr) && (unsigned)(nr) <= 0x7FFFF)		      \
   ? __const_test_bit ((nr), (addr))					      \
   : __test_bit ((nr), (addr)))
/* clear_bit doesn't provide any barrier for the compiler, so the
   before/after hooks must be explicit compiler barriers.  */
#define smp_mb__before_clear_bit() barrier ()
#define smp_mb__after_clear_bit() barrier ()
144 #include <asm-generic/bitops/ffs.h>
145 #include <asm-generic/bitops/fls.h>
146 #include <asm-generic/bitops/fls64.h>
147 #include <asm-generic/bitops/__ffs.h>
148 #include <asm-generic/bitops/find.h>
149 #include <asm-generic/bitops/sched.h>
150 #include <asm-generic/bitops/hweight.h>
151 #include <asm-generic/bitops/lock.h>
153 #include <asm-generic/bitops/ext2-non-atomic.h>
/* The atomic test-and-ops above already suffice, so these ext2 "atomic"
   variants ignore the lock argument `l'.  */
#define ext2_set_bit_atomic(l,n,a) test_and_set_bit(n,a)
#define ext2_clear_bit_atomic(l,n,a) test_and_clear_bit(n,a)
157 #include <asm-generic/bitops/minix.h>
159 #endif /* __KERNEL__ */
161 #endif /* __V850_BITOPS_H__ */