/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	return READ_ONCE((v)->counter);
}
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}
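
/*
 * Usage sketch (illustrative addition, not part of the original header):
 * trivial wrappers around a 64-bit statistics counter, e.g. one declared
 * as "static atomic64_t example_stat = ATOMIC64_INIT(0);". The example_*
 * names below are hypothetical.
 */
static inline void example_stat_reset(atomic64_t *stat)
{
	atomic64_set(stat, 0);		/* plain WRITE_ONCE() store */
}

static inline long example_stat_snapshot(const atomic64_t *stat)
{
	return atomic64_read(stat);	/* plain load, no memory barrier */
}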
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}
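
/*
 * Usage sketch (illustrative addition): a hypothetical put operation
 * that frees an object when the last reference is dropped. example_obj
 * and example_free() are made-up names, not kernel APIs.
 */
struct example_obj {
	atomic64_t refcnt;
};

extern void example_free(struct example_obj *obj);	/* hypothetical */

static inline void example_put(struct example_obj *obj)
{
	/* one locked SUBQ; ZF tells us whether we dropped the last ref */
	if (atomic64_sub_and_test(1, &obj->refcnt))
		example_free(obj);
}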
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}
static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}
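
/*
 * Illustrative contrast (added sketch, hypothetical example function):
 * both helpers compile to the same locked XADD, but atomic64_add_return()
 * yields the updated value while atomic64_fetch_add() yields the value
 * that was there before the add.
 */
static inline void example_return_vs_fetch(atomic64_t *v)
{
	long new_val, old_val;

	atomic64_set(v, 10);
	new_val = atomic64_add_return(5, v);	/* 15: the updated value */
	old_val = atomic64_fetch_add(5, v);	/* 15: pre-add value; *v is now 20 */
	(void)new_val;
	(void)old_val;
}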
#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))
static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, long new)
{
	return try_cmpxchg(&v->counter, old, new);
}
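
/*
 * Usage sketch (illustrative addition): the canonical try_cmpxchg() loop.
 * On failure, *old is updated to the current counter value, so the loop
 * body never has to re-read @v itself. example_clamp_add() is a made-up
 * name; it adds @a only while the result stays at or below @max.
 */
static inline void example_clamp_add(atomic64_t *v, long a, long max)
{
	s64 c = atomic64_read(v);

	do {
		if (c + a > max)	/* would exceed the cap: give up */
			return;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));
}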
static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was done, or false if @v was @u.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return true;
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
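
/*
 * Usage sketch (illustrative addition): taking a reference during a
 * lookup only while the object is still live. This reuses the
 * hypothetical struct example_obj from the atomic64_sub_and_test()
 * sketch above; example_tryget() is likewise a made-up name.
 */
static inline bool example_tryget(struct example_obj *obj)
{
	/* fails (returns false) once the refcount has already hit zero */
	return atomic64_inc_not_zero(&obj->refcnt);
}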
/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
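
/*
 * Usage sketch (illustrative addition): consuming one unit of a
 * hypothetical quota. A negative return from atomic64_dec_if_positive()
 * means the counter was already zero and was left untouched.
 * example_take_token() is a made-up name.
 */
static inline bool example_take_token(atomic64_t *tokens)
{
	return atomic64_dec_if_positive(tokens) >= 0;
}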
static inline void atomic64_and(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}
static inline long atomic64_fetch_and(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val & i));

	return val;
}
static inline void atomic64_or(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}
static inline long atomic64_fetch_or(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val | i));

	return val;
}
static inline void atomic64_xor(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
			: "+m" (v->counter)
			: "er" (i)
			: "memory");
}
static inline long atomic64_fetch_xor(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val ^ i));

	return val;
}
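
/*
 * Usage sketch (illustrative addition): atomically setting a flag bit
 * and learning whether it was already set, via the returned pre-OR
 * value. EXAMPLE_FLAG and example_set_flag() are made-up names.
 */
#define EXAMPLE_FLAG	(1L << 0)

static inline bool example_set_flag(atomic64_t *flags)
{
	/* fetch_or returns the value before the OR was applied */
	return atomic64_fetch_or(EXAMPLE_FLAG, flags) & EXAMPLE_FLAG;
}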
#endif /* _ASM_X86_ATOMIC64_64_H */