#ifndef __ASM_X86_REFCOUNT_H
#define __ASM_X86_REFCOUNT_H
/*
 * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
 * PaX/grsecurity.
 */
#include <linux/refcount.h>

/*
 * This is the first portion of the refcount error handling, which lives in
 * .text.unlikely, and is jumped to from the CPU flag check (in the
 * following macros). This saves the refcount value location into CX for
 * the exception handler to use (in mm/extable.c), and then triggers the
 * central refcount exception. The fixup address for the exception points
 * back to the regular execution flow in .text.
 */
#define _REFCOUNT_EXCEPTION				\
	".pushsection .text.unlikely\n"			\
	"111:\tlea %[counter], %%" _ASM_CX "\n"		\
	"112:\t" ASM_UD0 "\n"				\
	ASM_UNREACHABLE					\
	".popsection\n"					\
	"113:\n"					\
	_ASM_EXTABLE_REFCOUNT(112b, 113b)

/* Trigger refcount exception if refcount result is negative. */
#define REFCOUNT_CHECK_LT_ZERO				\
	"js 111f\n\t"					\
	_REFCOUNT_EXCEPTION

/* Trigger refcount exception if refcount result is zero or negative. */
#define REFCOUNT_CHECK_LE_ZERO				\
	"jz 111f\n\t"					\
	REFCOUNT_CHECK_LT_ZERO

/* Trigger refcount exception unconditionally. */
#define REFCOUNT_ERROR					\
	"jmp 111f\n\t"					\
	_REFCOUNT_EXCEPTION

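/*
 * Illustrative expansion (not from the original header): roughly what
 * REFCOUNT_CHECK_LT_ZERO pastes after a LOCK-prefixed instruction, shown
 * for a 64-bit build where _ASM_CX is %rcx. Exact operands depend on how
 * the compiler allocates %[counter].
 *
 *		lock incl %[counter]		// update, still in .text
 *		js   111f			// negative result: slow path
 *	.pushsection .text.unlikely
 *	111:	lea  %[counter], %rcx		// counter address for mm/extable.c
 *	112:	ud0				// trap into the refcount handler
 *	.popsection
 *	113:					// fixup target: resume normal flow
 *
 * _ASM_EXTABLE_REFCOUNT(112b, 113b) records the 112 -> 113 mapping, so the
 * #UD handler can land execution back at 113 after reporting the error.
 */
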
static __always_inline void refcount_add(unsigned int i, refcount_t *r)
{
	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [counter] "+m" (r->refs.counter)
		: "ir" (i)
		: "cc", "cx");
}

static __always_inline void refcount_inc(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "incl %0\n\t"
		REFCOUNT_CHECK_LT_ZERO
		: [counter] "+m" (r->refs.counter)
		: : "cc", "cx");
}

static __always_inline void refcount_dec(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "decl %0\n\t"
		REFCOUNT_CHECK_LE_ZERO
		: [counter] "+m" (r->refs.counter)
		: : "cc", "cx");
}

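/*
 * Usage sketch (illustrative, not part of this header): an object that
 * embeds refcount_t, with the acquiring side built on the checked inc.
 * "struct cache_entry" and the helper are hypothetical names.
 *
 *	struct cache_entry {
 *		refcount_t refs;	// set up with REFCOUNT_INIT(1)
 *	};
 *
 *	static void cache_entry_get(struct cache_entry *e)
 *	{
 *		refcount_inc(&e->refs);	// traps instead of wrapping
 *	}
 */
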
static __always_inline __must_check
bool refcount_sub_and_test(unsigned int i, refcount_t *r)
{
	GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl", REFCOUNT_CHECK_LT_ZERO,
				  r->refs.counter, "er", i, "%0", e);
}

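/*
 * Usage sketch (illustrative): returning several references in one call,
 * e.g. after a batch of gets. Same hypothetical names as above.
 *
 *	if (refcount_sub_and_test(nr_taken, &e->refs))
 *		kfree(e);
 */
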
static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
{
	GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl", REFCOUNT_CHECK_LT_ZERO,
				 r->refs.counter, "%0", e);
}

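/*
 * Usage sketch (illustrative): the common put side, freeing the object on
 * the final reference. Hypothetical names as above.
 *
 *	static void cache_entry_put(struct cache_entry *e)
 *	{
 *		if (refcount_dec_and_test(&e->refs))
 *			kfree(e);
 *	}
 */
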
static __always_inline __must_check
bool refcount_add_not_zero(unsigned int i, refcount_t *r)
{
	int c, result;

	c = atomic_read(&(r->refs));
	do {
		if (unlikely(c == 0))
			return false;

		result = c + i;

		/* Did we try to increment from/to an undesirable state? */
		if (unlikely(c < 0 || c == INT_MAX || result < c)) {
			asm volatile(REFCOUNT_ERROR
				     : : [counter] "m" (r->refs.counter)
				     : "cc", "cx");
			break;
		}

	} while (!atomic_try_cmpxchg(&(r->refs), &c, result));

	return c != 0;
}

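/*
 * Worked example of the check above (illustrative): for c = INT_MAX - 1
 * and i = 2, result wraps negative, so "result < c" catches the overflow;
 * for c == INT_MAX the counter is already saturated and any further add
 * must trap; c < 0 means a prior overflow is being reused. In all three
 * cases REFCOUNT_ERROR fires before the bad value can be published by the
 * cmpxchg. A failed atomic_try_cmpxchg() refreshes c with the current
 * counter value and the loop retries.
 */
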
static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
{
	return refcount_add_not_zero(1, r);
}

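/*
 * Usage sketch (illustrative): the lookup pattern these "not_zero"
 * variants exist for. Under RCU, an object found in a shared structure
 * may be racing with its final put; the reference is taken only if the
 * count is still non-zero. All names are hypothetical.
 *
 *	rcu_read_lock();
 *	e = radix_tree_lookup(&cache_tree, key);
 *	if (e && !refcount_inc_not_zero(&e->refs))
 *		e = NULL;		// lost the race with the last put
 *	rcu_read_unlock();
 */

#endif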