#ifndef __ASM_X86_REFCOUNT_H
#define __ASM_X86_REFCOUNT_H
/*
 * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
 * PaX/grsecurity.
 */
#include <linux/refcount.h>
#include <asm/bug.h>
/*
 * This is the first portion of the refcount error handling, which lives in
 * .text..refcount, and is jumped to from the CPU flag check (in the
 * following macros). This saves the refcount value location into CX for
 * the exception handler to use (in mm/extable.c), and then triggers the
 * central refcount exception. The fixup address for the exception points
 * back to the regular execution flow in .text.
 */
#define _REFCOUNT_EXCEPTION				\
	".pushsection .text..refcount\n"		\
	"111:\tlea %[counter], %%" _ASM_CX "\n"		\
	"112:\t" ASM_UD2 "\n"				\
	ASM_UNREACHABLE					\
	".popsection\n"					\
	"113:\n"					\
	_ASM_EXTABLE_REFCOUNT(112b, 113b)
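
/*
 * Illustrative sketch only: on x86-64, with ASM_UD2 and _ASM_CX expanded,
 * the macro above emits roughly the following out-of-line code:
 *
 *	.pushsection .text..refcount
 *	111:	lea	<counter>, %rcx
 *	112:	ud2
 *	.popsection
 *	113:	(back in .text; regular flow resumes here)
 *
 * plus an exception-table entry mapping the fault at 112 to the refcount
 * fixup handler, which resumes execution at 113.
 */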
/* Trigger refcount exception if refcount result is negative. */
#define REFCOUNT_CHECK_LT_ZERO				\
	"js 111f\n\t"					\
	_REFCOUNT_EXCEPTION
/* Trigger refcount exception if refcount result is zero or negative. */
#define REFCOUNT_CHECK_LE_ZERO				\
	"jz 111f\n\t"					\
	REFCOUNT_CHECK_LT_ZERO
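
/*
 * Note the chaining above: the "jz" catches a zero result, and the "js"
 * pulled in via REFCOUNT_CHECK_LT_ZERO catches a negative one; either
 * condition jumps to the shared 111 exception label.
 */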
/* Trigger refcount exception unconditionally. */
#define REFCOUNT_ERROR					\
	"jmp 111f\n\t"					\
	_REFCOUNT_EXCEPTION
static __always_inline void refcount_add(unsigned int i, refcount_t *r)
{
	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [counter] "+m" (r->refs.counter)
		: "ir" (i)
		: "cc", "cx");
}
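
/*
 * Example usage (illustrative only; 'struct obj' is an assumption, not part
 * of this header). Taking two references at once:
 *
 *	struct obj { refcount_t refs; };
 *
 *	refcount_add(2, &obj->refs);
 */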
static __always_inline void refcount_inc(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "incl %0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [counter] "+m" (r->refs.counter)
		: : "cc", "cx");
}
static __always_inline void refcount_dec(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "decl %0\n\t"
		REFCOUNT_CHECK_LE_ZERO
		: [counter] "+m" (r->refs.counter)
		: : "cc", "cx");
}
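
/*
 * Note: refcount_dec() uses the LE_ZERO check because a plain decrement
 * must never be the one that drops the count to zero; a final reference
 * drop should go through refcount_dec_and_test() below so the caller can
 * free the object. Illustrative pairing (names assumed):
 *
 *	refcount_inc(&obj->refs);	   get
 *	refcount_dec(&obj->refs);	   put, count stays above zero
 */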
static __always_inline __must_check
bool refcount_sub_and_test(unsigned int i, refcount_t *r)
{
	GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl", REFCOUNT_CHECK_LT_ZERO,
				  r->refs.counter, "er", i, "%0", e, "cx");
}
static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
{
	GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl", REFCOUNT_CHECK_LT_ZERO,
				 r->refs.counter, "%0", e, "cx");
}
static __always_inline __must_check
bool refcount_add_not_zero(unsigned int i, refcount_t *r)
{
	int c, result;

	c = atomic_read(&(r->refs));
	do {
		if (unlikely(c == 0))
			return false;

		result = c + i;

		/* Did we try to increment from/to an undesirable state? */
		if (unlikely(c < 0 || c == INT_MAX || result < c)) {
			asm volatile(REFCOUNT_ERROR
				     : : [counter] "m" (r->refs.counter)
				     : "cc", "cx");
			break;
		}

	} while (!atomic_try_cmpxchg(&(r->refs), &c, result));

	return c != 0;
}
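
/*
 * Note on the loop above: atomic_try_cmpxchg() updates 'c' with the current
 * counter value when the exchange fails, so each retry re-runs the checks
 * against a fresh snapshot rather than re-reading the counter by hand.
 */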
static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
{
	return refcount_add_not_zero(1, r);
}
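
/*
 * Illustrative lookup pattern (the idr table and 'obj' are assumptions):
 * take a reference only if the object is still live, e.g. under RCU:
 *
 *	rcu_read_lock();
 *	obj = idr_find(&table, id);
 *	if (obj && !refcount_inc_not_zero(&obj->refs))
 *		obj = NULL;
 *	rcu_read_unlock();
 */

#endif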