/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_OP32_H
#define __ASM_SH_BITOPS_OP32_H

/*
 * The bit modifying instructions on SH-2A are only capable of working
 * with a 3-bit immediate, which signifies the shift position for the bit
 * being worked on.
 */
#if defined(__BIG_ENDIAN)
#define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
#define BYTE_NUMBER(nr)		(((nr) ^ BITOP_LE_SWIZZLE) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		(((nr) ^ BITOP_LE_SWIZZLE) % BITS_PER_BYTE)
#else
#define BYTE_NUMBER(nr)		((nr) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr) % BITS_PER_BYTE)
#endif
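
/*
 * Editor's note (illustrative, not part of the original header): assuming
 * BITS_PER_LONG == 32 and BITS_PER_BYTE == 8, a little-endian kernel maps
 * bit 13 of a long as
 *
 *	BYTE_NUMBER(13) == 13 / 8 == 1	(second byte of the long)
 *	BYTE_OFFSET(13) == 13 % 8 == 5	(bit 5 within that byte)
 *
 * so a constant-nr __set_bit(13, addr) below can be encoded as a single
 * "bset.b #5, @(1,Rn)", with the 3-bit immediate carrying the bit offset.
 * On big-endian, nr is first XORed with BITOP_LE_SWIZZLE ((32-1) & ~0x7
 * == 24), so BYTE_NUMBER(13 ^ 24) == 21 / 8 == 2 and BYTE_OFFSET == 5,
 * selecting the byte that holds the same bit of the native-endian long.
 */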

static inline void __set_bit(int nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		__asm__ __volatile__ (
			"bset.b %1, @(%O2,%0)	! __set_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)), "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p |= mask;
	}
}

static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		__asm__ __volatile__ (
			"bclr.b %1, @(%O2,%0)	! __clear_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p &= ~mask;
	}
}

/**
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to change
 * @addr: the address to start counting from
 *
 * Unlike change_bit(), this function is non-atomic and may be reordered.
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
static inline void __change_bit(int nr, volatile unsigned long *addr)
{
	if (__builtin_constant_p(nr)) {
		__asm__ __volatile__ (
			"bxor.b %1, @(%O2,%0)	! __change_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p ^= mask;
	}
}
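
/*
 * Editor's note (illustrative sketch, not part of the original header):
 * __set_bit()/__clear_bit()/__change_bit() are plain read-modify-write
 * sequences in the non-constant case, so concurrent callers must provide
 * their own serialization, e.g. with a lock (names below are made up):
 *
 *	static DEFINE_SPINLOCK(flags_lock);
 *	static unsigned long flags[BITS_TO_LONGS(64)];
 *
 *	spin_lock(&flags_lock);
 *	__change_bit(nr, flags);	// writers serialized by flags_lock
 *	spin_unlock(&flags_lock);
 *
 * Without such locking, two racing __change_bit() calls can lose one of
 * the toggles, as the kernel-doc above warns.
 */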

/**
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old | mask;
	return (old & mask) != 0;
}
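
/*
 * Editor's note (illustrative sketch, not part of the original header):
 * the return value is the bit's previous state, so __test_and_set_bit()
 * works as a "claim once" primitive when the caller already holds the
 * lock protecting the bitmap:
 *
 *	if (!__test_and_set_bit(slot, bitmap))
 *		claim_slot(slot);	// bit was 0: this caller owns it
 *	else
 *		slot_busy(slot);	// bit was already set
 *
 * (slot, bitmap, claim_slot() and slot_busy() are made-up names.)
 */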

/**
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is non-atomic and can be reordered.
 * If two instances of this operation race, one can appear to succeed
 * but actually fail.  You must protect multiple accesses with a lock.
 */
static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old & ~mask;
	return (old & mask) != 0;
}
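
/*
 * Editor's note (illustrative sketch, not part of the original header):
 * __test_and_clear_bit() is commonly used to consume a pending flag,
 * acting only if the flag was actually set:
 *
 *	if (__test_and_clear_bit(PENDING_BIT, &pending))
 *		handle_pending();
 *
 * (PENDING_BIT, pending and handle_pending() are made-up names.)
 */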

/* WARNING: non-atomic and it can be reordered! */
static inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old ^ mask;
	return (old & mask) != 0;
}

/**
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}
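
/*
 * Editor's note (illustrative, not part of the original header): with
 * BITS_PER_LONG == 32, test_bit(37, addr) reads addr[BIT_WORD(37)] ==
 * addr[1] and tests bit (37 & 31) == 5 of that word, i.e. it evaluates
 *
 *	1UL & (addr[1] >> 5);
 *
 * test_bit() indexes bits within native-endian longs; the byte-address
 * swizzle above is only needed by the constant-nr assembly paths, which
 * address individual bytes.
 */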

#endif /* __ASM_SH_BITOPS_OP32_H */