/* arch/sh/include/asm/bitops-llsc.h */
#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H
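/*
 * These primitives use the SH-4A LL/SC instruction pair: movli.l loads a
 * word and opens a link to it, movco.l stores the updated word back only
 * if the link is still intact and sets T accordingly, and "bf 1b" loops
 * back to retry whenever the conditional store fails.
 */

/* Atomically set bit @nr in the bitmap at @addr. */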
static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* step to the 32-bit word holding bit nr */
	mask = 1 << (nr & 0x1f);	/* bit's position within that word */

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l @%1, %0 ! set_bit		\n\t"
		"or	%2, %0				\n\t"
		"movco.l %0, @%1			\n\t"
		"bf	1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}
static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l @%1, %0 ! clear_bit		\n\t"
		"and	%2, %0				\n\t"
		"movco.l %0, @%1			\n\t"
		"bf	1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (~mask)
		: "t", "memory"
	);
}
static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l @%1, %0 ! change_bit		\n\t"
		"xor	%2, %0				\n\t"
		"movco.l %0, @%1			\n\t"
		"bf	1b				\n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l @%2, %0 ! test_and_set_bit	\n\t"
		"mov	%0, %1				\n\t"
		"or	%3, %0				\n\t"
		"movco.l %0, @%2			\n\t"
		"bf	1b				\n\t"
		"and	%3, %1				\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}
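/* Atomically clear bit @nr; return nonzero if it was previously set. */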
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l @%2, %0 ! test_and_clear_bit	\n\t"
		"mov	%0, %1				\n\t"
		"and	%4, %0				\n\t"
		"movco.l %0, @%2			\n\t"
		"bf	1b				\n\t"
		"and	%3, %1				\n\t"
		"synco					\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask), "r" (~mask)
		: "t", "memory"
	);

	return retval != 0;
}
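/* Atomically flip bit @nr; return nonzero if it was previously set. */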
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l @%2, %0 ! test_and_change_bit	\n\t"
		"mov	%0, %1				\n\t"
		"xor	%3, %0				\n\t"
		"movco.l %0, @%2			\n\t"
		"bf	1b				\n\t"
		"and	%3, %1				\n\t"
		"synco					\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}
/* The non-atomic __set_bit(), __test_and_set_bit(), etc. come from here. */
#include <asm-generic/bitops/non-atomic.h>
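/*
 * Usage sketch (illustrative only, not part of this header): callers
 * treat memory as an array of 32-bit words and address bits by index.
 * "my_flags" below is a hypothetical name used only for this example.
 *
 *	static unsigned int my_flags[2];	// 64 bits, zero-initialized
 *
 *	set_bit(37, my_flags);			// word 1, bit 5
 *	if (test_and_clear_bit(37, my_flags))
 *		;	// bit was set before we atomically cleared it
 */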
#endif /* __ASM_SH_BITOPS_LLSC_H */