// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

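/*
 * The fallbacks below synthesize the full set of ordering variants for each
 * operation from whatever the architecture provides. When an arch defines
 * only the _relaxed form, the _acquire, _release and fully-ordered forms are
 * built around it with explicit fences via the __atomic_op_acquire(),
 * __atomic_op_release() and __atomic_op_fence() wrappers from
 * <linux/atomic.h>. When an arch defines only the fully-ordered form, the
 * weaker variants simply alias it, which is correct, if stronger than
 * strictly required.
 */
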
#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */

#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */

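/*
 * As a rough sketch of what the wrappers produce: on an architecture that
 * provides only arch_cmpxchg_relaxed(), arch_cmpxchg_acquire(ptr, old, new)
 * expands to something like:
 *
 *	({
 *		typeof(arch_cmpxchg_relaxed(ptr, old, new)) __ret =
 *			arch_cmpxchg_relaxed(ptr, old, new);
 *		__atomic_acquire_fence();
 *		__ret;
 *	})
 */
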
#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_acquire */

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_release */

#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_relaxed */

#else /* arch_try_cmpxchg_relaxed */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg_relaxed */

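/*
 * try_cmpxchg() is intended for compare-and-swap loops: on failure it
 * stores the value actually found back into *_oldp, so callers need not
 * re-read the variable. An illustrative sketch, using a hypothetical
 * lock_word variable:
 *
 *	unsigned long old = 0;
 *
 *	do {
 *		if (old != 0)
 *			return false;	// lock already held
 *	} while (!arch_try_cmpxchg(&lock_word, &old, 1));
 *	return true;
 */
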
#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	int ret;

	if (__native_word(atomic_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
	if (__native_word(atomic_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic_set(v, i);
	}
}
#define arch_atomic_set_release arch_atomic_set_release
#endif

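/*
 * smp_load_acquire() and smp_store_release() require a native word sized
 * access, hence the __native_word() checks above: where atomic_t cannot be
 * accessed that way, acquire/release semantics are provided by a plain
 * atomic access combined with an explicit fence instead.
 */
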
#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */

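/*
 * The same three constructions recur for every RMW fallback below:
 * _acquire is the _relaxed op followed by an acquire fence, _release is a
 * release fence followed by the _relaxed op, and the fully-ordered form
 * brackets the _relaxed op with __atomic_pre_full_fence() and
 * __atomic_post_full_fence().
 */
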
#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
	arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif /* arch_atomic_inc_return */

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
	return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif

#else /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#endif /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif /* arch_atomic_fetch_inc */

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif

#else /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#endif /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
	arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif /* arch_atomic_dec_return */

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
	return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif

#else /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#endif /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif /* arch_atomic_fetch_dec */

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif

#else /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#endif /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
	arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif /* arch_atomic_fetch_andnot */

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif

#else /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#endif /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif /* arch_atomic_try_cmpxchg */

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif

#else /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#endif /* arch_atomic_try_cmpxchg_relaxed */

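/*
 * As with the cmpxchg()-based macros earlier in this file, a failed
 * arch_atomic_try_cmpxchg() updates *old with the value actually found,
 * which is what lets the loops below (arch_atomic_fetch_add_unless() and
 * friends) retry without re-reading the counter themselves.
 */
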
#ifndef arch_atomic_sub_and_test
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif

#ifndef arch_atomic_dec_and_test
/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative
/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_fetch_add_unless
/**
 * arch_atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif

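/*
 * For example, arch_atomic_fetch_add_unless(v, 1, 0) increments only while
 * the counter is non-zero, i.e. a refcount-style "get if still live";
 * arch_atomic_inc_not_zero() below is exactly that, with the returned old
 * value compared against @u to learn whether the addition happened.
 */
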
#ifndef arch_atomic_add_unless
/**
 * arch_atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
/**
 * arch_atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif

#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = arch_atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif

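/*
 * Note that arch_atomic_dec_if_positive() returns the new value, which is
 * negative when no decrement took place. An illustrative caller pattern,
 * with a hypothetical sem_count counter:
 *
 *	if (arch_atomic_dec_if_positive(&sem_count) < 0)
 *		return -EAGAIN;
 */
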
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

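/*
 * On architectures without native 64-bit atomics (typically 32-bit ones),
 * CONFIG_GENERIC_ATOMIC64 pulls in the lock-based atomic64_t implementation
 * from lib/atomic64.c via the header above; the fallbacks below then layer
 * the ordering variants on top of it exactly as they do for atomic_t.
 */
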
#ifndef arch_atomic64_read_acquire
static __always_inline s64
arch_atomic64_read_acquire(const atomic64_t *v)
{
	s64 ret;

	if (__native_word(atomic64_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic64_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic64_read_acquire arch_atomic64_read_acquire
#endif

#ifndef arch_atomic64_set_release
static __always_inline void
arch_atomic64_set_release(atomic64_t *v, s64 i)
{
	if (__native_word(atomic64_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic64_set(v, i);
	}
}
#define arch_atomic64_set_release arch_atomic64_set_release
#endif

#ifndef arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return
#else /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_add_return_acquire
static __always_inline s64
arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#endif

#ifndef arch_atomic64_add_return_release
static __always_inline s64
arch_atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_return_relaxed(i, v);
}
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#endif

#ifndef arch_atomic64_add_return
static __always_inline s64
arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_return arch_atomic64_add_return
#endif

#endif /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_inc
static __always_inline void
arch_atomic64_inc(atomic64_t *v)
{
	arch_atomic64_add(1, v);
}
#define arch_atomic64_inc arch_atomic64_inc
#endif

#ifndef arch_atomic64_inc_return_relaxed
#ifdef arch_atomic64_inc_return
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
#define arch_atomic64_inc_return_release arch_atomic64_inc_return
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
#endif /* arch_atomic64_inc_return */

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	return arch_atomic64_add_return(1, v);
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(1, v);
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	return arch_atomic64_add_return_release(1, v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return_relaxed
static __always_inline s64
arch_atomic64_inc_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(1, v);
}
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
#endif

#else /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_inc_return_relaxed(v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#endif /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_fetch_inc_relaxed
#ifdef arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
#endif /* arch_atomic64_fetch_inc */

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	return arch_atomic64_fetch_add(1, v);
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_add_acquire(1, v);
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	return arch_atomic64_fetch_add_release(1, v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc_relaxed
static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_add_relaxed(1, v);
}
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
#endif

#else /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#endif /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_dec
static __always_inline void
arch_atomic64_dec(atomic64_t *v)
{
	arch_atomic64_sub(1, v);
}
#define arch_atomic64_dec arch_atomic64_dec
#endif

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	return arch_atomic64_sub_return_acquire(1, v);
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	return arch_atomic64_sub_return_release(1, v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return_relaxed
static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_sub_return_relaxed(1, v);
}
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
#endif

#else /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_dec_return_relaxed(v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#endif /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_fetch_dec_relaxed
#ifdef arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
#endif /* arch_atomic64_fetch_dec */

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	return arch_atomic64_fetch_sub(1, v);
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_acquire(1, v);
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_release(1, v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec_relaxed
static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(1, v);
}
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
#endif

#else /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#endif /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_andnot
static __always_inline void
arch_atomic64_andnot(s64 i, atomic64_t *v)
{
	arch_atomic64_and(~i, v);
}
#define arch_atomic64_andnot arch_atomic64_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif /* arch_atomic64_fetch_andnot */

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */


#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */
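
/*
 * Hypothetical illustration (example_set_max() is not part of this API): a
 * plain cmpxchg() retry loop must feed the observed value back in by hand,
 * since cmpxchg() only reports what it found:
 */
static __always_inline void example_set_max(atomic64_t *v, s64 new)
{
	s64 c = arch_atomic64_read(v);

	while (c < new) {
		s64 old = arch_atomic64_cmpxchg(v, c, new);
		if (old == c)
			break;	/* v was updated to new */
		c = old;	/* lost a race: retry against the observed value */
	}
}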

#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif /* arch_atomic64_try_cmpxchg */

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif /* arch_atomic64_try_cmpxchg_relaxed */
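
/*
 * Contrast with the cmpxchg() loop sketched earlier: try_cmpxchg() writes
 * the observed value back through @old on failure (see the fallbacks above),
 * so the retry loop needs no explicit reload. example_bounded_inc() is a
 * hypothetical helper, shown for illustration only:
 */
static __always_inline bool example_bounded_inc(atomic64_t *v, s64 max)
{
	s64 c = arch_atomic64_read(v);

	/* on failure, try_cmpxchg() refreshes c with the current value */
	do {
		if (c >= max)
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}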

#ifndef arch_atomic64_sub_and_test
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif
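
/*
 * Hypothetical usage sketch (example_put_ref() is illustration only):
 * sub_and_test() and its dec_/inc_ siblings below suit reference-count
 * style code, where the caller that drops the count to zero does cleanup:
 */
static __always_inline bool example_put_ref(atomic64_t *refcount)
{
	/* true if this caller released the last reference */
	return arch_atomic64_sub_and_test(1, refcount);
}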

#ifndef arch_atomic64_dec_and_test
/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif

#ifndef arch_atomic64_inc_and_test
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif

#ifndef arch_atomic64_add_negative
/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif
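
/*
 * Hypothetical sketch (example_charge() is illustration only): the boolean
 * result of add_negative() reports the sign of the updated total in the
 * same atomic step as the addition:
 */
static __always_inline bool example_charge(atomic64_t *balance, s64 delta)
{
	/* true if the balance went negative as a result of this update */
	return arch_atomic64_add_negative(delta, balance);
}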

#ifndef arch_atomic64_fetch_add_unless
/**
 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif
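
/*
 * Hypothetical sketch (example_count_event() is illustration only):
 * because fetch_add_unless() returns the old value, a caller can both
 * skip a sentinel and learn whether the add happened:
 */
static __always_inline bool example_count_event(atomic64_t *counter)
{
	/* -1 is an assumed "counting disabled" sentinel for this example */
	return arch_atomic64_fetch_add_unless(counter, 1, -1) != -1;
}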

#ifndef arch_atomic64_add_unless
/**
 * arch_atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif

#ifndef arch_atomic64_inc_not_zero
/**
 * arch_atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif
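
/*
 * Hypothetical sketch (example_tryget() is illustration only): inc_not_zero()
 * is the usual "take a reference during lookup" shape; it fails once the
 * count has already dropped to zero:
 */
static __always_inline bool example_tryget(atomic64_t *refcount)
{
	/* only succeeds while at least one reference is still held */
	return arch_atomic64_inc_not_zero(refcount);
}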

#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif

#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif

#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif
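
/*
 * Hypothetical sketch (example_trydown() is illustration only):
 * dec_if_positive() returns the decremented value, and returns a negative
 * result without touching @v when the count was already zero or below,
 * which is the shape of a semaphore-style "trydown":
 */
static __always_inline bool example_trydown(atomic64_t *sem)
{
	/* consume one unit only if the count was strictly positive */
	return arch_atomic64_dec_if_positive(sem) >= 0;
}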

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// 8e2cc06bc0d2c0967d2f8424762bd48555ee40ae