/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#define ATOMIC_INIT(i)		{ (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
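/*
 * Usage sketch (illustrative only, not part of this header; nr_users and
 * reset_users() are hypothetical): plain reads and writes of an atomic_t
 * need no ll/sc loop, so atomic_read() and atomic_set() compile to
 * ordinary loads and stores.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	static void reset_users(void)
 *	{
 *		atomic_set(&nr_users, 0);
 *		WARN_ON(atomic_read(&nr_users) != 0);
 *	}
 */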
#define ATOMIC_OP(op, c_op, asm_op)					\
static __inline__ void atomic_##op(int i, atomic_t * v)		\
{									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%0, %1		# atomic_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	sc	%0, %1				\n"	\
		"\t" __SC_BEQZ "%0, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}
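/*
 * A note on the loop generated above: ll (load-linked) reads v->counter
 * and opens a link to its cache line; sc (store-conditional) performs the
 * store only if the line was not written to in the meantime, and leaves 0
 * in its source register on failure, in which case __SC_BEQZ branches
 * back to label 1 to retry.
 */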
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
{									\
	int result;							\
									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"\t" __SC_BEQZ "%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	\
{									\
	int result;							\
									\
	if (kernel_uses_llsc) {						\
		int temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	ll	%1, %2	# atomic_fetch_" #op "	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	sc	%0, %2				\n"	\
		"\t" __SC_BEQZ "%0, 1b				\n"	\
		"	.set	pop				\n"	\
		"	move	%0, %1				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
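/*
 * The two value-returning flavours differ in what they hand back:
 * atomic_<op>_return_relaxed() replays #asm_op after a successful sc to
 * recompute and return the new value, while atomic_fetch_<op>_relaxed()
 * moves the ll'ed old value into the result instead.
 */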
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
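/*
 * Only the _relaxed forms are defined here; the generic atomic layer in
 * <linux/atomic.h> derives the fully ordered atomic_add_return() etc.
 * from them by wrapping the relaxed operation in barriers.
 *
 * Usage sketch (illustrative only; "pending" is a hypothetical counter):
 *
 *	static atomic_t pending = ATOMIC_INIT(0);
 *
 *	static int queue_one(void)
 *	{
 *		return atomic_add_return_relaxed(1, &pending);
 *	}
 */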
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)
#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed
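/*
 * Usage sketch (illustrative only; F_SHUTDOWN and set_shutdown() are
 * hypothetical): the fetch variants return the old value, so a caller
 * can tell whether it was the one that set a bit.
 *
 *	#define F_SHUTDOWN 0x1
 *
 *	static bool set_shutdown(atomic_t *flags)
 *	{
 *		int old = atomic_fetch_or_relaxed(F_SHUTDOWN, flags);
 *
 *		return !(old & F_SHUTDOWN);
 *	}
 */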
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		int temp;

		loongson_llsc_mb();
		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	ll	%1, %2	# atomic_sub_if_positive\n"
		"	.set	pop				\n"
		"	subu	%0, %1, %3			\n"
		"	move	%1, %0				\n"
		"	bltz	%0, 2f				\n"
		"	.set	push				\n"
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"	sc	%1, %2				\n"
		"\t" __SC_BEQZ "%1, 1b				\n"
		"	.set	pop				\n"
		"2:						\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i) : __LLSC_CLOBBER);
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
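/*
 * Usage sketch (illustrative only; add_capped() is hypothetical): the
 * classic compare-and-swap loop, here capping a counter at a ceiling.
 *
 *	static int add_capped(atomic_t *v, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			if (old >= max)
 *				return old;
 *			new = old + 1;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *
 *		return new;
 *	}
 */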
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
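/*
 * Usage sketch (illustrative only; take_token() is hypothetical): the
 * return value is the decremented count, so a negative result means the
 * counter was already zero and nothing was taken.
 *
 *	static bool take_token(atomic_t *tokens)
 *	{
 *		return atomic_dec_if_positive(tokens) >= 0;
 *	}
 */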
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)
/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
#define ATOMIC64_OP(op, c_op, asm_op)					\
static __inline__ void atomic64_##op(s64 i, atomic64_t * v)		\
{									\
	if (kernel_uses_llsc) {						\
		s64 temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%0, %1		# atomic64_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		"	scd	%0, %1				\n"	\
		"\t" __SC_BEQZ "%0, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
}
#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v) \
{									\
	s64 result;							\
									\
	if (kernel_uses_llsc) {						\
		s64 temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%1, %2	# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"\t" __SC_BEQZ "%0, 1b				\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	.set	pop				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		result c_op i;						\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
#define ATOMIC64_FETCH_OP(op, c_op, asm_op)				\
static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v) \
{									\
	s64 result;							\
									\
	if (kernel_uses_llsc) {						\
		s64 temp;						\
									\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"1:	lld	%1, %2	# atomic64_fetch_" #op "\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	scd	%0, %2				\n"	\
		"\t" __SC_BEQZ "%0, 1b				\n"	\
		"	.set	pop				\n"	\
		"	move	%0, %1				\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i) : __LLSC_CLOBBER);				\
	} else {							\
		unsigned long flags;					\
									\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter c_op i;					\
		raw_local_irq_restore(flags);				\
	}								\
									\
	return result;							\
}
#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed
#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)
#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed
#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 * variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ s64 atomic64_sub_if_positive(s64 i, atomic64_t * v)
{
	s64 result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		s64 temp;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	lld	%1, %2	# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	move	%1, %0				\n"
		"	bltz	%0, 2f				\n"
		"	scd	%1, %2				\n"
		"\t" __SC_BEQZ "%1, 1b				\n"
		"	.set	pop				\n"
		"2:						\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i) : __LLSC_CLOBBER);
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
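/*
 * Usage sketch (illustrative only; charge_bytes() and the budget are
 * hypothetical): 64-bit quota accounting that never drives the budget
 * below zero.
 *
 *	static bool charge_bytes(atomic64_t *budget, s64 nbytes)
 *	{
 *		return atomic64_sub_if_positive(nbytes, budget) >= 0;
 *	}
 */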
#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */