1 #include "ace/Atomic_Op.h"
2 #include "ace/OS_NS_unistd.h"
4 #if !defined (__ACE_INLINE__)
5 #include "ace/Atomic_Op.inl"
6 #endif /* __ACE_INLINE__ */
8 #if defined (ACE_HAS_BUILTIN_ATOMIC_OP)
10 #if defined (ACE_INCLUDE_ATOMIC_OP_SPARC)
11 # include "ace/Atomic_Op_Sparc.h"
12 #endif /* ACE_INCLUDE_ATOMIC_OP_SPARC */
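
// The helpers below provide two implementations of each primitive:
// single_cpu_* versions without the x86 LOCK prefix and multi_cpu_*
// versions with it.  init_functions () selects between them at run
// time based on ACE_OS::num_processors ().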
namespace {

#if defined (_MSC_VER)
// Disable the "no return value" warning (C4035), as we will be
// putting the return values directly into the EAX register.
#pragma warning (push)
#pragma warning (disable: 4035)
#endif /* _MSC_VER */
long
single_cpu_increment (volatile long *value)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  long tmp = 1;
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
  // XADD leaves the previous value in tmp, so the new value is tmp + 1.
  return tmp + 1;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_add_long (
           reinterpret_cast<volatile unsigned long*> (value), 1);
#elif defined (__GNUC__) && defined (__PPC__)
  long tmp;
  asm("lwz %0,%1" : "=r" (tmp) : "m" (*value) );
  asm("addi %0,%0,1" : "+r" (tmp) );
  asm("stw %0,%1" : "+r" (tmp), "=m" (*value) );
  return tmp;
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
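
// Note for the single_cpu_* helpers: on a uniprocessor, a single
// read-modify-write instruction such as XADD cannot be interrupted
// mid-instruction, so it is atomic with respect to other threads on
// that CPU even without the LOCK prefix.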
long
single_cpu_decrement (volatile long *value)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  long tmp = -1;
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
  // XADD leaves the previous value in tmp, so the new value is tmp - 1.
  return tmp - 1;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_add_long (
           reinterpret_cast<volatile unsigned long*> (value), -1);
#elif defined (__GNUC__) && defined (__PPC__)
  long tmp;
  asm("lwz %0,%1" : "=r" (tmp) : "m" (*value) );
  asm("addi %0,%0,-1" : "+r" (tmp) );
  asm("stw %0,%1" : "+r" (tmp), "=m" (*value) );
  return tmp;
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
long
single_cpu_exchange (volatile long *value, long rhs)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
  // XCHG swaps in place, so rhs now holds the previous value.
  return rhs;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_swap_long (
           reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (__GNUC__) && defined (__PPC__)
  long tmp;
  asm("lwz %0,%1" : "=r" (tmp) : "m" (rhs) );
  asm("stw %0,%1" : "+r" (tmp), "=m" (*value) );
  return tmp;
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_UNUSED_ARG (rhs);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
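
// On x86 the XCHG instruction asserts LOCK implicitly whenever it
// references memory, so the single- and multi-CPU exchange helpers
// end up executing the same locked instruction.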
long
single_cpu_exchange_add (volatile long *value, long rhs)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  asm( "xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
  // XADD leaves the previous value in rhs (fetch-and-add semantics).
  return rhs;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_swap_add_long (
           reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (__GNUC__) && defined (__PPC__)
  long tmp;
  asm("add %0,%1,%2" : "=r" (tmp) : "r" (*value), "r" (rhs) );
  asm("stw %0,%1" : "+r" (tmp), "=m" (*value) );
  return tmp;
#elif defined (WIN32) && !defined (ACE_HAS_INTERLOCKED_EXCHANGEADD)
# if defined (_MSC_VER)
  __asm
    {
      mov eax, rhs
      mov edx, value
      xadd [edx], eax
    }
  // Return value is already in EAX register.
# elif defined (__BORLANDC__)
  _EAX = rhs;
  _EDX = reinterpret_cast<unsigned long> (value);
  __emit__(0x0F, 0xC1, 0x02); // xadd [edx], eax
  // Return value is already in EAX register.
# else /* _MSC_VER */
  ACE_UNUSED_ARG (value);
  ACE_UNUSED_ARG (rhs);
  ACE_NOTSUP_RETURN (-1);
# endif /* _MSC_VER */
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_UNUSED_ARG (rhs);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
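
// Note the return-value convention: increment/decrement return the
// *new* value (tmp + 1 / tmp - 1), while exchange and exchange_add
// return the *previous* value, matching fetch-and-swap and
// fetch-and-add semantics.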
long
multi_cpu_increment (volatile long *value)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  long tmp = 1;
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
  return tmp + 1;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_add_long (
           reinterpret_cast<volatile unsigned long*> (value), 1);
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
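
// The LOCK prefix on the multi_cpu_* variants locks the bus (or, on
// modern CPUs, the cache line) for the duration of the XADD, making
// the read-modify-write atomic across processors.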
long
multi_cpu_decrement (volatile long *value)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  long tmp = -1;
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
  return tmp - 1;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_add_long (
           reinterpret_cast<volatile unsigned long*> (value), -1);
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
long
multi_cpu_exchange (volatile long *value, long rhs)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  // The XCHG instruction automatically follows LOCK semantics.
  asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
  return rhs;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_swap_long (
           reinterpret_cast<volatile unsigned long*> (value), rhs);
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_UNUSED_ARG (rhs);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
long
multi_cpu_exchange_add (volatile long *value, long rhs)
{
#if defined (ACE_HAS_INTEL_ASSEMBLY)
  unsigned long addr = reinterpret_cast<unsigned long> (value);
  asm( "lock ; xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
  return rhs;
#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
      (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
  return ace_atomic_swap_add_long (
           reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (WIN32) && !defined (ACE_HAS_INTERLOCKED_EXCHANGEADD)
# if defined (_MSC_VER)
  __asm
    {
      mov eax, rhs
      mov edx, value
      lock xadd [edx], eax
    }
  // Return value is already in EAX register.
# elif defined (__BORLANDC__)
  _EAX = rhs;
  _EDX = reinterpret_cast<unsigned long> (value);
  __emit__(0xF0, 0x0F, 0xC1, 0x02); // lock xadd [edx], eax (0xF0 is the LOCK prefix)
  // Return value is already in EAX register.
# else /* _MSC_VER */
  ACE_UNUSED_ARG (value);
  ACE_UNUSED_ARG (rhs);
  ACE_NOTSUP_RETURN (-1);
# endif /* _MSC_VER */
#else /* ACE_HAS_INTEL_ASSEMBLY */
  ACE_UNUSED_ARG (value);
  ACE_UNUSED_ARG (rhs);
  ACE_NOTSUP_RETURN (-1);
#endif /* ACE_HAS_INTEL_ASSEMBLY */
}
#if defined (_MSC_VER)
#pragma warning (pop)
#endif /* _MSC_VER */

} // unnamed namespace
ACE_BEGIN_VERSIONED_NAMESPACE_DECL

long (*ACE_Atomic_Op<ACE_Thread_Mutex, long>::increment_fn_) (volatile long *) = multi_cpu_increment;
long (*ACE_Atomic_Op<ACE_Thread_Mutex, long>::decrement_fn_) (volatile long *) = multi_cpu_decrement;
long (*ACE_Atomic_Op<ACE_Thread_Mutex, long>::exchange_fn_) (volatile long *, long) = multi_cpu_exchange;
long (*ACE_Atomic_Op<ACE_Thread_Mutex, long>::exchange_add_fn_) (volatile long *, long) = multi_cpu_exchange_add;
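
// The function pointers are statically initialized to the multi-CPU
// variants, so the operations are safe even before (or without) a
// call to init_functions ().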
void
ACE_Atomic_Op<ACE_Thread_Mutex, long>::init_functions (void)
{
  if (ACE_OS::num_processors () == 1)
    {
      increment_fn_ = single_cpu_increment;
      decrement_fn_ = single_cpu_decrement;
      exchange_fn_ = single_cpu_exchange;
      exchange_add_fn_ = single_cpu_exchange_add;
    }
  else
    {
      increment_fn_ = multi_cpu_increment;
      decrement_fn_ = multi_cpu_decrement;
      exchange_fn_ = multi_cpu_exchange;
      exchange_add_fn_ = multi_cpu_exchange_add;
    }
}
void
ACE_Atomic_Op<ACE_Thread_Mutex, long>::dump (void) const
{
#if defined (ACE_HAS_DUMP)
  ACELIB_DEBUG ((LM_DEBUG, ACE_BEGIN_DUMP, this));
  ACELIB_DEBUG ((LM_DEBUG, ACE_END_DUMP));
#endif /* ACE_HAS_DUMP */
}
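
// A minimal usage sketch (assuming the standard ACE_Atomic_Op
// interface with operator++, operator-- and value ()):
//
//   ACE_Atomic_Op<ACE_Thread_Mutex, long> counter (0);
//   ++counter;                       // dispatches through increment_fn_
//   --counter;                       // dispatches through decrement_fn_
//   long current = counter.value ();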
long (*ACE_Atomic_Op<ACE_Thread_Mutex, unsigned long>::increment_fn_) (volatile long *) = multi_cpu_increment;
long (*ACE_Atomic_Op<ACE_Thread_Mutex, unsigned long>::decrement_fn_) (volatile long *) = multi_cpu_decrement;
long (*ACE_Atomic_Op<ACE_Thread_Mutex, unsigned long>::exchange_fn_) (volatile long *, long) = multi_cpu_exchange;
long (*ACE_Atomic_Op<ACE_Thread_Mutex, unsigned long>::exchange_add_fn_) (volatile long *, long) = multi_cpu_exchange_add;
void
ACE_Atomic_Op<ACE_Thread_Mutex, unsigned long>::init_functions (void)
{
  if (ACE_OS::num_processors () == 1)
    {
      increment_fn_ = single_cpu_increment;
      decrement_fn_ = single_cpu_decrement;
      exchange_fn_ = single_cpu_exchange;
      exchange_add_fn_ = single_cpu_exchange_add;
    }
  else
    {
      increment_fn_ = multi_cpu_increment;
      decrement_fn_ = multi_cpu_decrement;
      exchange_fn_ = multi_cpu_exchange;
      exchange_add_fn_ = multi_cpu_exchange_add;
    }
}
void
ACE_Atomic_Op<ACE_Thread_Mutex, unsigned long>::dump (void) const
{
#if defined (ACE_HAS_DUMP)
  ACELIB_DEBUG ((LM_DEBUG, ACE_BEGIN_DUMP, this));
  ACELIB_DEBUG ((LM_DEBUG, ACE_END_DUMP));
#endif /* ACE_HAS_DUMP */
}
ACE_END_VERSIONED_NAMESPACE_DECL

#endif /* ACE_HAS_BUILTIN_ATOMIC_OP */