/*
 * x86 version of "atomic_dec_and_lock()" using
 * the atomic "cmpxchg" instruction.
 *
 * (For CPUs lacking cmpxchg, we use the slow
 * generic version, and this one never even gets
 * compiled).
 */

#include <linux/spinlock.h>
#include <asm/atomic.h>

int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
{
	int counter;
	int newcount;

repeat:
	counter = atomic_read(atomic);
	newcount = counter - 1;

	/* Decrementing to zero needs the lock: take the slow path. */
	if (!newcount)
		goto slow_path;

	/* Try to swap in "newcount" if the counter still holds "counter";
	 * on failure cmpxchg leaves the current value in eax. */
	asm volatile("lock; cmpxchgl %1,%2"
		:"=a" (newcount)
		:"r" (newcount), "m" (atomic->counter), "0" (counter));

	/* If the above failed, "eax" will have changed */
	if (newcount != counter)
		goto repeat;

	/* We successfully decremented the counter (away from zero) */
	return 0;

slow_path:
	/* The counter may reach zero: do the final decrement under the lock. */
	spin_lock(lock);
	if (atomic_dec_and_test(atomic))
		return 1;
	spin_unlock(lock);
	return 0;
}
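
/*
 * Usage sketch (an assumption for illustration, not part of this file):
 * callers reach this code through atomic_dec_and_lock(), typically to drop
 * a reference and, only when it falls to zero, take a lock protecting some
 * list before freeing the object.  The names "obj", "obj->refcount",
 * "obj->list" and "obj_list_lock" below are purely hypothetical.
 *
 *	if (atomic_dec_and_lock(&obj->refcount, &obj_list_lock)) {
 *		list_del(&obj->list);
 *		spin_unlock(&obj_list_lock);
 *		kfree(obj);
 *	}
 */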