1 // SPDX-License-Identifier: GPL-2.0
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
9 #include <linux/compiler.h>
/*
 * Ordering fallbacks for xchg(): when the architecture provides only a
 * fully-ordered xchg(), alias the _acquire/_release/_relaxed forms to it;
 * otherwise build them from the _relaxed primitive via __atomic_op_*().
 *
 * NOTE(review): this listing is a numbered extract and is truncated -- the
 * embedded original line numbers jump (15 -> 18, 24 -> 29) and the opening
 * "#ifndef xchg_relaxed" / "#ifndef xchg" guards are not visible here.
 * The file is generated by scripts/atomic/gen-atomic-fallback.sh; regenerate
 * it rather than hand-editing.
 */
12 #define xchg_acquire xchg
13 #define xchg_release xchg
14 #define xchg_relaxed xchg
15 #else /* xchg_relaxed */
18 #define xchg_acquire(...) \
19 __atomic_op_acquire(xchg, __VA_ARGS__)
23 #define xchg_release(...) \
24 __atomic_op_release(xchg, __VA_ARGS__)
29 __atomic_op_fence(xchg, __VA_ARGS__)
32 #endif /* xchg_relaxed */
/*
 * Ordering fallbacks for cmpxchg(): same pattern as xchg() above -- alias
 * all orderings to the fully-ordered op, or synthesize them from _relaxed.
 *
 * NOTE(review): truncated generated listing -- embedded line numbers jump
 * (42 -> 45, 47 -> 51) so some "#endif"/"#ifndef" lines are missing here;
 * regenerate via scripts/atomic/gen-atomic-fallback.sh, do not hand-edit.
 */
34 #ifndef cmpxchg_relaxed
35 #define cmpxchg_acquire cmpxchg
36 #define cmpxchg_release cmpxchg
37 #define cmpxchg_relaxed cmpxchg
38 #else /* cmpxchg_relaxed */
40 #ifndef cmpxchg_acquire
41 #define cmpxchg_acquire(...) \
42 __atomic_op_acquire(cmpxchg, __VA_ARGS__)
45 #ifndef cmpxchg_release
46 #define cmpxchg_release(...) \
47 __atomic_op_release(cmpxchg, __VA_ARGS__)
51 #define cmpxchg(...) \
52 __atomic_op_fence(cmpxchg, __VA_ARGS__)
55 #endif /* cmpxchg_relaxed */
/*
 * Ordering fallbacks for the 64-bit cmpxchg64(): identical structure to the
 * cmpxchg() section above.
 *
 * NOTE(review): truncated generated listing -- embedded line numbers jump
 * (65 -> 68, 70 -> 74), so closing "#endif" lines are missing from view;
 * regenerate via scripts/atomic/gen-atomic-fallback.sh, do not hand-edit.
 */
57 #ifndef cmpxchg64_relaxed
58 #define cmpxchg64_acquire cmpxchg64
59 #define cmpxchg64_release cmpxchg64
60 #define cmpxchg64_relaxed cmpxchg64
61 #else /* cmpxchg64_relaxed */
63 #ifndef cmpxchg64_acquire
64 #define cmpxchg64_acquire(...) \
65 __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
68 #ifndef cmpxchg64_release
69 #define cmpxchg64_release(...) \
70 __atomic_op_release(cmpxchg64, __VA_ARGS__)
74 #define cmpxchg64(...) \
75 __atomic_op_fence(cmpxchg64, __VA_ARGS__)
78 #endif /* cmpxchg64_relaxed */
/*
 * try_cmpxchg() fallbacks: when the architecture has no native try_cmpxchg,
 * each ordering variant is built on top of the matching cmpxchg*() -- it
 * compares against *_oldp, and (in the full generated code) writes the
 * observed value back through _oldp on failure, returning true on success.
 *
 * NOTE(review): truncated generated listing -- the embedded line numbers jump
 * (85 -> 88, 92 -> 94, ...), so the statement-expression braces "({"/"})" and
 * the "*___op = ___r;" failure write-back lines are missing from this view.
 * Regenerate via scripts/atomic/gen-atomic-fallback.sh; do not hand-edit.
 */
80 #ifndef try_cmpxchg_relaxed
82 #define try_cmpxchg_acquire try_cmpxchg
83 #define try_cmpxchg_release try_cmpxchg
84 #define try_cmpxchg_relaxed try_cmpxchg
85 #endif /* try_cmpxchg */
88 #define try_cmpxchg(_ptr, _oldp, _new) \
90 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
91 ___r = cmpxchg((_ptr), ___o, (_new)); \
92 if (unlikely(___r != ___o)) \
94 likely(___r == ___o); \
96 #endif /* try_cmpxchg */
98 #ifndef try_cmpxchg_acquire
99 #define try_cmpxchg_acquire(_ptr, _oldp, _new) \
101 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
102 ___r = cmpxchg_acquire((_ptr), ___o, (_new)); \
103 if (unlikely(___r != ___o)) \
105 likely(___r == ___o); \
107 #endif /* try_cmpxchg_acquire */
109 #ifndef try_cmpxchg_release
110 #define try_cmpxchg_release(_ptr, _oldp, _new) \
112 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
113 ___r = cmpxchg_release((_ptr), ___o, (_new)); \
114 if (unlikely(___r != ___o)) \
116 likely(___r == ___o); \
118 #endif /* try_cmpxchg_release */
120 #ifndef try_cmpxchg_relaxed
121 #define try_cmpxchg_relaxed(_ptr, _oldp, _new) \
123 typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
124 ___r = cmpxchg_relaxed((_ptr), ___o, (_new)); \
125 if (unlikely(___r != ___o)) \
127 likely(___r == ___o); \
129 #endif /* try_cmpxchg_relaxed */
131 #else /* try_cmpxchg_relaxed */
133 #ifndef try_cmpxchg_acquire
134 #define try_cmpxchg_acquire(...) \
135 __atomic_op_acquire(try_cmpxchg, __VA_ARGS__)
138 #ifndef try_cmpxchg_release
139 #define try_cmpxchg_release(...) \
140 __atomic_op_release(try_cmpxchg, __VA_ARGS__)
144 #define try_cmpxchg(...) \
145 __atomic_op_fence(try_cmpxchg, __VA_ARGS__)
148 #endif /* try_cmpxchg_relaxed */
/*
 * atomic_read_acquire() / atomic_set_release() fallbacks: implemented with
 * smp_load_acquire()/smp_store_release() on the atomic_t counter field when
 * the architecture does not supply them natively.  The arch_* aliases expose
 * the same ops under the arch_atomic_* namespace.
 *
 * NOTE(review): truncated generated listing -- the function braces and
 * closing "#endif" lines are missing from this view (line numbers jump
 * 155 -> 157, 157 -> 159).  Regenerate; do not hand-edit.
 */
150 #define arch_atomic_read atomic_read
151 #define arch_atomic_read_acquire atomic_read_acquire
153 #ifndef atomic_read_acquire
154 static __always_inline int
155 atomic_read_acquire(const atomic_t *v)
157 return smp_load_acquire(&(v)->counter);
159 #define atomic_read_acquire atomic_read_acquire
162 #define arch_atomic_set atomic_set
163 #define arch_atomic_set_release atomic_set_release
165 #ifndef atomic_set_release
166 static __always_inline void
167 atomic_set_release(atomic_t *v, int i)
169 smp_store_release(&(v)->counter, i);
171 #define atomic_set_release atomic_set_release
/*
 * atomic_add_return() and atomic_fetch_add() ordering fallbacks.
 * Acquire = relaxed op then __atomic_acquire_fence(); release =
 * __atomic_release_fence() then relaxed op; fully-ordered = relaxed op
 * bracketed by __atomic_pre/post_full_fence().
 *
 * NOTE(review): truncated generated listing -- function braces, "return ret;"
 * lines and "#endif" closers are missing (embedded line numbers jump, e.g.
 * 192 -> 195).  Regenerate via scripts/atomic/gen-atomic-fallback.sh.
 */
174 #define arch_atomic_add atomic_add
176 #define arch_atomic_add_return atomic_add_return
177 #define arch_atomic_add_return_acquire atomic_add_return_acquire
178 #define arch_atomic_add_return_release atomic_add_return_release
179 #define arch_atomic_add_return_relaxed atomic_add_return_relaxed
181 #ifndef atomic_add_return_relaxed
182 #define atomic_add_return_acquire atomic_add_return
183 #define atomic_add_return_release atomic_add_return
184 #define atomic_add_return_relaxed atomic_add_return
185 #else /* atomic_add_return_relaxed */
187 #ifndef atomic_add_return_acquire
188 static __always_inline int
189 atomic_add_return_acquire(int i, atomic_t *v)
191 int ret = atomic_add_return_relaxed(i, v);
192 __atomic_acquire_fence();
195 #define atomic_add_return_acquire atomic_add_return_acquire
198 #ifndef atomic_add_return_release
199 static __always_inline int
200 atomic_add_return_release(int i, atomic_t *v)
202 __atomic_release_fence();
203 return atomic_add_return_relaxed(i, v);
205 #define atomic_add_return_release atomic_add_return_release
208 #ifndef atomic_add_return
209 static __always_inline int
210 atomic_add_return(int i, atomic_t *v)
213 __atomic_pre_full_fence();
214 ret = atomic_add_return_relaxed(i, v);
215 __atomic_post_full_fence();
218 #define atomic_add_return atomic_add_return
221 #endif /* atomic_add_return_relaxed */
/* Same ordering-composition pattern for the value-returning fetch_add. */
223 #define arch_atomic_fetch_add atomic_fetch_add
224 #define arch_atomic_fetch_add_acquire atomic_fetch_add_acquire
225 #define arch_atomic_fetch_add_release atomic_fetch_add_release
226 #define arch_atomic_fetch_add_relaxed atomic_fetch_add_relaxed
228 #ifndef atomic_fetch_add_relaxed
229 #define atomic_fetch_add_acquire atomic_fetch_add
230 #define atomic_fetch_add_release atomic_fetch_add
231 #define atomic_fetch_add_relaxed atomic_fetch_add
232 #else /* atomic_fetch_add_relaxed */
234 #ifndef atomic_fetch_add_acquire
235 static __always_inline int
236 atomic_fetch_add_acquire(int i, atomic_t *v)
238 int ret = atomic_fetch_add_relaxed(i, v);
239 __atomic_acquire_fence();
242 #define atomic_fetch_add_acquire atomic_fetch_add_acquire
245 #ifndef atomic_fetch_add_release
246 static __always_inline int
247 atomic_fetch_add_release(int i, atomic_t *v)
249 __atomic_release_fence();
250 return atomic_fetch_add_relaxed(i, v);
252 #define atomic_fetch_add_release atomic_fetch_add_release
255 #ifndef atomic_fetch_add
256 static __always_inline int
257 atomic_fetch_add(int i, atomic_t *v)
260 __atomic_pre_full_fence();
261 ret = atomic_fetch_add_relaxed(i, v);
262 __atomic_post_full_fence();
265 #define atomic_fetch_add atomic_fetch_add
268 #endif /* atomic_fetch_add_relaxed */
/*
 * atomic_sub_return() and atomic_fetch_sub() ordering fallbacks -- mirror of
 * the add family above (acquire/release/full-fence composed from _relaxed).
 *
 * NOTE(review): truncated generated listing -- braces, "return ret;" and
 * "#endif" lines are missing from this view; regenerate, do not hand-edit.
 */
270 #define arch_atomic_sub atomic_sub
272 #define arch_atomic_sub_return atomic_sub_return
273 #define arch_atomic_sub_return_acquire atomic_sub_return_acquire
274 #define arch_atomic_sub_return_release atomic_sub_return_release
275 #define arch_atomic_sub_return_relaxed atomic_sub_return_relaxed
277 #ifndef atomic_sub_return_relaxed
278 #define atomic_sub_return_acquire atomic_sub_return
279 #define atomic_sub_return_release atomic_sub_return
280 #define atomic_sub_return_relaxed atomic_sub_return
281 #else /* atomic_sub_return_relaxed */
283 #ifndef atomic_sub_return_acquire
284 static __always_inline int
285 atomic_sub_return_acquire(int i, atomic_t *v)
287 int ret = atomic_sub_return_relaxed(i, v);
288 __atomic_acquire_fence();
291 #define atomic_sub_return_acquire atomic_sub_return_acquire
294 #ifndef atomic_sub_return_release
295 static __always_inline int
296 atomic_sub_return_release(int i, atomic_t *v)
298 __atomic_release_fence();
299 return atomic_sub_return_relaxed(i, v);
301 #define atomic_sub_return_release atomic_sub_return_release
304 #ifndef atomic_sub_return
305 static __always_inline int
306 atomic_sub_return(int i, atomic_t *v)
309 __atomic_pre_full_fence();
310 ret = atomic_sub_return_relaxed(i, v);
311 __atomic_post_full_fence();
314 #define atomic_sub_return atomic_sub_return
317 #endif /* atomic_sub_return_relaxed */
/* fetch_sub: returns the pre-subtraction value; same ordering pattern. */
319 #define arch_atomic_fetch_sub atomic_fetch_sub
320 #define arch_atomic_fetch_sub_acquire atomic_fetch_sub_acquire
321 #define arch_atomic_fetch_sub_release atomic_fetch_sub_release
322 #define arch_atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
324 #ifndef atomic_fetch_sub_relaxed
325 #define atomic_fetch_sub_acquire atomic_fetch_sub
326 #define atomic_fetch_sub_release atomic_fetch_sub
327 #define atomic_fetch_sub_relaxed atomic_fetch_sub
328 #else /* atomic_fetch_sub_relaxed */
330 #ifndef atomic_fetch_sub_acquire
331 static __always_inline int
332 atomic_fetch_sub_acquire(int i, atomic_t *v)
334 int ret = atomic_fetch_sub_relaxed(i, v);
335 __atomic_acquire_fence();
338 #define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
341 #ifndef atomic_fetch_sub_release
342 static __always_inline int
343 atomic_fetch_sub_release(int i, atomic_t *v)
345 __atomic_release_fence();
346 return atomic_fetch_sub_relaxed(i, v);
348 #define atomic_fetch_sub_release atomic_fetch_sub_release
351 #ifndef atomic_fetch_sub
352 static __always_inline int
353 atomic_fetch_sub(int i, atomic_t *v)
356 __atomic_pre_full_fence();
357 ret = atomic_fetch_sub_relaxed(i, v);
358 __atomic_post_full_fence();
361 #define atomic_fetch_sub atomic_fetch_sub
364 #endif /* atomic_fetch_sub_relaxed */
/*
 * atomic_inc(), atomic_inc_return() and atomic_fetch_inc() fallbacks.
 * The inc ops are defined in terms of the add ops with an operand of 1
 * (e.g. atomic_inc_return(v) == atomic_add_return(1, v)); ordering variants
 * are then composed exactly as in the add/sub sections above.
 *
 * NOTE(review): truncated generated listing -- the atomic_inc() body
 * (presumably atomic_add(1, v) -- confirm against the generator), braces,
 * "return ret;" and "#endif" lines are missing from this view.
 * Regenerate via scripts/atomic/gen-atomic-fallback.sh; do not hand-edit.
 */
366 #define arch_atomic_inc atomic_inc
369 static __always_inline void
370 atomic_inc(atomic_t *v)
374 #define atomic_inc atomic_inc
377 #define arch_atomic_inc_return atomic_inc_return
378 #define arch_atomic_inc_return_acquire atomic_inc_return_acquire
379 #define arch_atomic_inc_return_release atomic_inc_return_release
380 #define arch_atomic_inc_return_relaxed atomic_inc_return_relaxed
382 #ifndef atomic_inc_return_relaxed
383 #ifdef atomic_inc_return
384 #define atomic_inc_return_acquire atomic_inc_return
385 #define atomic_inc_return_release atomic_inc_return
386 #define atomic_inc_return_relaxed atomic_inc_return
387 #endif /* atomic_inc_return */
389 #ifndef atomic_inc_return
390 static __always_inline int
391 atomic_inc_return(atomic_t *v)
393 return atomic_add_return(1, v);
395 #define atomic_inc_return atomic_inc_return
398 #ifndef atomic_inc_return_acquire
399 static __always_inline int
400 atomic_inc_return_acquire(atomic_t *v)
402 return atomic_add_return_acquire(1, v);
404 #define atomic_inc_return_acquire atomic_inc_return_acquire
407 #ifndef atomic_inc_return_release
408 static __always_inline int
409 atomic_inc_return_release(atomic_t *v)
411 return atomic_add_return_release(1, v);
413 #define atomic_inc_return_release atomic_inc_return_release
416 #ifndef atomic_inc_return_relaxed
417 static __always_inline int
418 atomic_inc_return_relaxed(atomic_t *v)
420 return atomic_add_return_relaxed(1, v);
422 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
425 #else /* atomic_inc_return_relaxed */
427 #ifndef atomic_inc_return_acquire
428 static __always_inline int
429 atomic_inc_return_acquire(atomic_t *v)
431 int ret = atomic_inc_return_relaxed(v);
432 __atomic_acquire_fence();
435 #define atomic_inc_return_acquire atomic_inc_return_acquire
438 #ifndef atomic_inc_return_release
439 static __always_inline int
440 atomic_inc_return_release(atomic_t *v)
442 __atomic_release_fence();
443 return atomic_inc_return_relaxed(v);
445 #define atomic_inc_return_release atomic_inc_return_release
448 #ifndef atomic_inc_return
449 static __always_inline int
450 atomic_inc_return(atomic_t *v)
453 __atomic_pre_full_fence();
454 ret = atomic_inc_return_relaxed(v);
455 __atomic_post_full_fence();
458 #define atomic_inc_return atomic_inc_return
461 #endif /* atomic_inc_return_relaxed */
/* fetch_inc: returns the pre-increment value; built on fetch_add(1, v). */
463 #define arch_atomic_fetch_inc atomic_fetch_inc
464 #define arch_atomic_fetch_inc_acquire atomic_fetch_inc_acquire
465 #define arch_atomic_fetch_inc_release atomic_fetch_inc_release
466 #define arch_atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
468 #ifndef atomic_fetch_inc_relaxed
469 #ifdef atomic_fetch_inc
470 #define atomic_fetch_inc_acquire atomic_fetch_inc
471 #define atomic_fetch_inc_release atomic_fetch_inc
472 #define atomic_fetch_inc_relaxed atomic_fetch_inc
473 #endif /* atomic_fetch_inc */
475 #ifndef atomic_fetch_inc
476 static __always_inline int
477 atomic_fetch_inc(atomic_t *v)
479 return atomic_fetch_add(1, v);
481 #define atomic_fetch_inc atomic_fetch_inc
484 #ifndef atomic_fetch_inc_acquire
485 static __always_inline int
486 atomic_fetch_inc_acquire(atomic_t *v)
488 return atomic_fetch_add_acquire(1, v);
490 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
493 #ifndef atomic_fetch_inc_release
494 static __always_inline int
495 atomic_fetch_inc_release(atomic_t *v)
497 return atomic_fetch_add_release(1, v);
499 #define atomic_fetch_inc_release atomic_fetch_inc_release
502 #ifndef atomic_fetch_inc_relaxed
503 static __always_inline int
504 atomic_fetch_inc_relaxed(atomic_t *v)
506 return atomic_fetch_add_relaxed(1, v);
508 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
511 #else /* atomic_fetch_inc_relaxed */
513 #ifndef atomic_fetch_inc_acquire
514 static __always_inline int
515 atomic_fetch_inc_acquire(atomic_t *v)
517 int ret = atomic_fetch_inc_relaxed(v);
518 __atomic_acquire_fence();
521 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
524 #ifndef atomic_fetch_inc_release
525 static __always_inline int
526 atomic_fetch_inc_release(atomic_t *v)
528 __atomic_release_fence();
529 return atomic_fetch_inc_relaxed(v);
531 #define atomic_fetch_inc_release atomic_fetch_inc_release
534 #ifndef atomic_fetch_inc
535 static __always_inline int
536 atomic_fetch_inc(atomic_t *v)
539 __atomic_pre_full_fence();
540 ret = atomic_fetch_inc_relaxed(v);
541 __atomic_post_full_fence();
544 #define atomic_fetch_inc atomic_fetch_inc
547 #endif /* atomic_fetch_inc_relaxed */
/*
 * atomic_dec(), atomic_dec_return() and atomic_fetch_dec() fallbacks --
 * mirror of the inc family, defined in terms of the sub ops with operand 1.
 *
 * NOTE(review): truncated generated listing -- the atomic_dec() body
 * (presumably atomic_sub(1, v) -- confirm against the generator), braces,
 * "return ret;" and "#endif" lines are missing from this view.
 * Regenerate via scripts/atomic/gen-atomic-fallback.sh; do not hand-edit.
 */
549 #define arch_atomic_dec atomic_dec
552 static __always_inline void
553 atomic_dec(atomic_t *v)
557 #define atomic_dec atomic_dec
560 #define arch_atomic_dec_return atomic_dec_return
561 #define arch_atomic_dec_return_acquire atomic_dec_return_acquire
562 #define arch_atomic_dec_return_release atomic_dec_return_release
563 #define arch_atomic_dec_return_relaxed atomic_dec_return_relaxed
565 #ifndef atomic_dec_return_relaxed
566 #ifdef atomic_dec_return
567 #define atomic_dec_return_acquire atomic_dec_return
568 #define atomic_dec_return_release atomic_dec_return
569 #define atomic_dec_return_relaxed atomic_dec_return
570 #endif /* atomic_dec_return */
572 #ifndef atomic_dec_return
573 static __always_inline int
574 atomic_dec_return(atomic_t *v)
576 return atomic_sub_return(1, v);
578 #define atomic_dec_return atomic_dec_return
581 #ifndef atomic_dec_return_acquire
582 static __always_inline int
583 atomic_dec_return_acquire(atomic_t *v)
585 return atomic_sub_return_acquire(1, v);
587 #define atomic_dec_return_acquire atomic_dec_return_acquire
590 #ifndef atomic_dec_return_release
591 static __always_inline int
592 atomic_dec_return_release(atomic_t *v)
594 return atomic_sub_return_release(1, v);
596 #define atomic_dec_return_release atomic_dec_return_release
599 #ifndef atomic_dec_return_relaxed
600 static __always_inline int
601 atomic_dec_return_relaxed(atomic_t *v)
603 return atomic_sub_return_relaxed(1, v);
605 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
608 #else /* atomic_dec_return_relaxed */
610 #ifndef atomic_dec_return_acquire
611 static __always_inline int
612 atomic_dec_return_acquire(atomic_t *v)
614 int ret = atomic_dec_return_relaxed(v);
615 __atomic_acquire_fence();
618 #define atomic_dec_return_acquire atomic_dec_return_acquire
621 #ifndef atomic_dec_return_release
622 static __always_inline int
623 atomic_dec_return_release(atomic_t *v)
625 __atomic_release_fence();
626 return atomic_dec_return_relaxed(v);
628 #define atomic_dec_return_release atomic_dec_return_release
631 #ifndef atomic_dec_return
632 static __always_inline int
633 atomic_dec_return(atomic_t *v)
636 __atomic_pre_full_fence();
637 ret = atomic_dec_return_relaxed(v);
638 __atomic_post_full_fence();
641 #define atomic_dec_return atomic_dec_return
644 #endif /* atomic_dec_return_relaxed */
/* fetch_dec: returns the pre-decrement value; built on fetch_sub(1, v). */
646 #define arch_atomic_fetch_dec atomic_fetch_dec
647 #define arch_atomic_fetch_dec_acquire atomic_fetch_dec_acquire
648 #define arch_atomic_fetch_dec_release atomic_fetch_dec_release
649 #define arch_atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
651 #ifndef atomic_fetch_dec_relaxed
652 #ifdef atomic_fetch_dec
653 #define atomic_fetch_dec_acquire atomic_fetch_dec
654 #define atomic_fetch_dec_release atomic_fetch_dec
655 #define atomic_fetch_dec_relaxed atomic_fetch_dec
656 #endif /* atomic_fetch_dec */
658 #ifndef atomic_fetch_dec
659 static __always_inline int
660 atomic_fetch_dec(atomic_t *v)
662 return atomic_fetch_sub(1, v);
664 #define atomic_fetch_dec atomic_fetch_dec
667 #ifndef atomic_fetch_dec_acquire
668 static __always_inline int
669 atomic_fetch_dec_acquire(atomic_t *v)
671 return atomic_fetch_sub_acquire(1, v);
673 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
676 #ifndef atomic_fetch_dec_release
677 static __always_inline int
678 atomic_fetch_dec_release(atomic_t *v)
680 return atomic_fetch_sub_release(1, v);
682 #define atomic_fetch_dec_release atomic_fetch_dec_release
685 #ifndef atomic_fetch_dec_relaxed
686 static __always_inline int
687 atomic_fetch_dec_relaxed(atomic_t *v)
689 return atomic_fetch_sub_relaxed(1, v);
691 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
694 #else /* atomic_fetch_dec_relaxed */
696 #ifndef atomic_fetch_dec_acquire
697 static __always_inline int
698 atomic_fetch_dec_acquire(atomic_t *v)
700 int ret = atomic_fetch_dec_relaxed(v);
701 __atomic_acquire_fence();
704 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
707 #ifndef atomic_fetch_dec_release
708 static __always_inline int
709 atomic_fetch_dec_release(atomic_t *v)
711 __atomic_release_fence();
712 return atomic_fetch_dec_relaxed(v);
714 #define atomic_fetch_dec_release atomic_fetch_dec_release
717 #ifndef atomic_fetch_dec
718 static __always_inline int
719 atomic_fetch_dec(atomic_t *v)
722 __atomic_pre_full_fence();
723 ret = atomic_fetch_dec_relaxed(v);
724 __atomic_post_full_fence();
727 #define atomic_fetch_dec atomic_fetch_dec
730 #endif /* atomic_fetch_dec_relaxed */
/*
 * atomic_fetch_and() ordering fallbacks, plus atomic_andnot()/
 * atomic_fetch_andnot() which are defined as the and ops applied to ~i.
 *
 * NOTE(review): truncated generated listing -- the atomic_andnot() body
 * (presumably atomic_and(~i, v) -- confirm against the generator), braces,
 * "return ret;" and "#endif" lines are missing from this view.
 * Regenerate via scripts/atomic/gen-atomic-fallback.sh; do not hand-edit.
 */
732 #define arch_atomic_and atomic_and
734 #define arch_atomic_fetch_and atomic_fetch_and
735 #define arch_atomic_fetch_and_acquire atomic_fetch_and_acquire
736 #define arch_atomic_fetch_and_release atomic_fetch_and_release
737 #define arch_atomic_fetch_and_relaxed atomic_fetch_and_relaxed
739 #ifndef atomic_fetch_and_relaxed
740 #define atomic_fetch_and_acquire atomic_fetch_and
741 #define atomic_fetch_and_release atomic_fetch_and
742 #define atomic_fetch_and_relaxed atomic_fetch_and
743 #else /* atomic_fetch_and_relaxed */
745 #ifndef atomic_fetch_and_acquire
746 static __always_inline int
747 atomic_fetch_and_acquire(int i, atomic_t *v)
749 int ret = atomic_fetch_and_relaxed(i, v);
750 __atomic_acquire_fence();
753 #define atomic_fetch_and_acquire atomic_fetch_and_acquire
756 #ifndef atomic_fetch_and_release
757 static __always_inline int
758 atomic_fetch_and_release(int i, atomic_t *v)
760 __atomic_release_fence();
761 return atomic_fetch_and_relaxed(i, v);
763 #define atomic_fetch_and_release atomic_fetch_and_release
766 #ifndef atomic_fetch_and
767 static __always_inline int
768 atomic_fetch_and(int i, atomic_t *v)
771 __atomic_pre_full_fence();
772 ret = atomic_fetch_and_relaxed(i, v);
773 __atomic_post_full_fence();
776 #define atomic_fetch_and atomic_fetch_and
779 #endif /* atomic_fetch_and_relaxed */
/* andnot: clear the bits in @i, i.e. and with the complement of @i. */
781 #define arch_atomic_andnot atomic_andnot
783 #ifndef atomic_andnot
784 static __always_inline void
785 atomic_andnot(int i, atomic_t *v)
789 #define atomic_andnot atomic_andnot
792 #define arch_atomic_fetch_andnot atomic_fetch_andnot
793 #define arch_atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
794 #define arch_atomic_fetch_andnot_release atomic_fetch_andnot_release
795 #define arch_atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
797 #ifndef atomic_fetch_andnot_relaxed
798 #ifdef atomic_fetch_andnot
799 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
800 #define atomic_fetch_andnot_release atomic_fetch_andnot
801 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
802 #endif /* atomic_fetch_andnot */
804 #ifndef atomic_fetch_andnot
805 static __always_inline int
806 atomic_fetch_andnot(int i, atomic_t *v)
808 return atomic_fetch_and(~i, v);
810 #define atomic_fetch_andnot atomic_fetch_andnot
813 #ifndef atomic_fetch_andnot_acquire
814 static __always_inline int
815 atomic_fetch_andnot_acquire(int i, atomic_t *v)
817 return atomic_fetch_and_acquire(~i, v);
819 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
822 #ifndef atomic_fetch_andnot_release
823 static __always_inline int
824 atomic_fetch_andnot_release(int i, atomic_t *v)
826 return atomic_fetch_and_release(~i, v);
828 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
831 #ifndef atomic_fetch_andnot_relaxed
832 static __always_inline int
833 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
835 return atomic_fetch_and_relaxed(~i, v);
837 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
840 #else /* atomic_fetch_andnot_relaxed */
842 #ifndef atomic_fetch_andnot_acquire
843 static __always_inline int
844 atomic_fetch_andnot_acquire(int i, atomic_t *v)
846 int ret = atomic_fetch_andnot_relaxed(i, v);
847 __atomic_acquire_fence();
850 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
853 #ifndef atomic_fetch_andnot_release
854 static __always_inline int
855 atomic_fetch_andnot_release(int i, atomic_t *v)
857 __atomic_release_fence();
858 return atomic_fetch_andnot_relaxed(i, v);
860 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
863 #ifndef atomic_fetch_andnot
864 static __always_inline int
865 atomic_fetch_andnot(int i, atomic_t *v)
868 __atomic_pre_full_fence();
869 ret = atomic_fetch_andnot_relaxed(i, v);
870 __atomic_post_full_fence();
873 #define atomic_fetch_andnot atomic_fetch_andnot
876 #endif /* atomic_fetch_andnot_relaxed */
/*
 * atomic_fetch_or() and atomic_fetch_xor() ordering fallbacks -- same
 * acquire/release/full-fence composition from the _relaxed op as the other
 * fetch_* families above.
 *
 * NOTE(review): truncated generated listing -- braces, "return ret;" and
 * "#endif" lines are missing from this view; regenerate, do not hand-edit.
 */
878 #define arch_atomic_or atomic_or
880 #define arch_atomic_fetch_or atomic_fetch_or
881 #define arch_atomic_fetch_or_acquire atomic_fetch_or_acquire
882 #define arch_atomic_fetch_or_release atomic_fetch_or_release
883 #define arch_atomic_fetch_or_relaxed atomic_fetch_or_relaxed
885 #ifndef atomic_fetch_or_relaxed
886 #define atomic_fetch_or_acquire atomic_fetch_or
887 #define atomic_fetch_or_release atomic_fetch_or
888 #define atomic_fetch_or_relaxed atomic_fetch_or
889 #else /* atomic_fetch_or_relaxed */
891 #ifndef atomic_fetch_or_acquire
892 static __always_inline int
893 atomic_fetch_or_acquire(int i, atomic_t *v)
895 int ret = atomic_fetch_or_relaxed(i, v);
896 __atomic_acquire_fence();
899 #define atomic_fetch_or_acquire atomic_fetch_or_acquire
902 #ifndef atomic_fetch_or_release
903 static __always_inline int
904 atomic_fetch_or_release(int i, atomic_t *v)
906 __atomic_release_fence();
907 return atomic_fetch_or_relaxed(i, v);
909 #define atomic_fetch_or_release atomic_fetch_or_release
912 #ifndef atomic_fetch_or
913 static __always_inline int
914 atomic_fetch_or(int i, atomic_t *v)
917 __atomic_pre_full_fence();
918 ret = atomic_fetch_or_relaxed(i, v);
919 __atomic_post_full_fence();
922 #define atomic_fetch_or atomic_fetch_or
925 #endif /* atomic_fetch_or_relaxed */
927 #define arch_atomic_xor atomic_xor
929 #define arch_atomic_fetch_xor atomic_fetch_xor
930 #define arch_atomic_fetch_xor_acquire atomic_fetch_xor_acquire
931 #define arch_atomic_fetch_xor_release atomic_fetch_xor_release
932 #define arch_atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
934 #ifndef atomic_fetch_xor_relaxed
935 #define atomic_fetch_xor_acquire atomic_fetch_xor
936 #define atomic_fetch_xor_release atomic_fetch_xor
937 #define atomic_fetch_xor_relaxed atomic_fetch_xor
938 #else /* atomic_fetch_xor_relaxed */
940 #ifndef atomic_fetch_xor_acquire
941 static __always_inline int
942 atomic_fetch_xor_acquire(int i, atomic_t *v)
944 int ret = atomic_fetch_xor_relaxed(i, v);
945 __atomic_acquire_fence();
948 #define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
951 #ifndef atomic_fetch_xor_release
952 static __always_inline int
953 atomic_fetch_xor_release(int i, atomic_t *v)
955 __atomic_release_fence();
956 return atomic_fetch_xor_relaxed(i, v);
958 #define atomic_fetch_xor_release atomic_fetch_xor_release
961 #ifndef atomic_fetch_xor
962 static __always_inline int
963 atomic_fetch_xor(int i, atomic_t *v)
966 __atomic_pre_full_fence();
967 ret = atomic_fetch_xor_relaxed(i, v);
968 __atomic_post_full_fence();
971 #define atomic_fetch_xor atomic_fetch_xor
974 #endif /* atomic_fetch_xor_relaxed */
/*
 * atomic_t-typed xchg()/cmpxchg() ordering fallbacks (as opposed to the
 * generic pointer-based xchg/cmpxchg macros near the top of the file).
 *
 * NOTE(review): truncated generated listing -- the "#ifndef atomic_xchg"
 * guard, braces, "return ret;" and "#endif" lines are missing from this
 * view; regenerate via scripts/atomic/gen-atomic-fallback.sh.
 */
976 #define arch_atomic_xchg atomic_xchg
977 #define arch_atomic_xchg_acquire atomic_xchg_acquire
978 #define arch_atomic_xchg_release atomic_xchg_release
979 #define arch_atomic_xchg_relaxed atomic_xchg_relaxed
981 #ifndef atomic_xchg_relaxed
982 #define atomic_xchg_acquire atomic_xchg
983 #define atomic_xchg_release atomic_xchg
984 #define atomic_xchg_relaxed atomic_xchg
985 #else /* atomic_xchg_relaxed */
987 #ifndef atomic_xchg_acquire
988 static __always_inline int
989 atomic_xchg_acquire(atomic_t *v, int i)
991 int ret = atomic_xchg_relaxed(v, i);
992 __atomic_acquire_fence();
995 #define atomic_xchg_acquire atomic_xchg_acquire
998 #ifndef atomic_xchg_release
999 static __always_inline int
1000 atomic_xchg_release(atomic_t *v, int i)
1002 __atomic_release_fence();
1003 return atomic_xchg_relaxed(v, i);
1005 #define atomic_xchg_release atomic_xchg_release
1009 static __always_inline int
1010 atomic_xchg(atomic_t *v, int i)
1013 __atomic_pre_full_fence();
1014 ret = atomic_xchg_relaxed(v, i);
1015 __atomic_post_full_fence();
1018 #define atomic_xchg atomic_xchg
1021 #endif /* atomic_xchg_relaxed */
1023 #define arch_atomic_cmpxchg atomic_cmpxchg
1024 #define arch_atomic_cmpxchg_acquire atomic_cmpxchg_acquire
1025 #define arch_atomic_cmpxchg_release atomic_cmpxchg_release
1026 #define arch_atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
1028 #ifndef atomic_cmpxchg_relaxed
1029 #define atomic_cmpxchg_acquire atomic_cmpxchg
1030 #define atomic_cmpxchg_release atomic_cmpxchg
1031 #define atomic_cmpxchg_relaxed atomic_cmpxchg
1032 #else /* atomic_cmpxchg_relaxed */
1034 #ifndef atomic_cmpxchg_acquire
1035 static __always_inline int
1036 atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
1038 int ret = atomic_cmpxchg_relaxed(v, old, new);
1039 __atomic_acquire_fence();
1042 #define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
1045 #ifndef atomic_cmpxchg_release
1046 static __always_inline int
1047 atomic_cmpxchg_release(atomic_t *v, int old, int new)
1049 __atomic_release_fence();
1050 return atomic_cmpxchg_relaxed(v, old, new);
1052 #define atomic_cmpxchg_release atomic_cmpxchg_release
1055 #ifndef atomic_cmpxchg
1056 static __always_inline int
1057 atomic_cmpxchg(atomic_t *v, int old, int new)
1060 __atomic_pre_full_fence();
1061 ret = atomic_cmpxchg_relaxed(v, old, new);
1062 __atomic_post_full_fence();
1065 #define atomic_cmpxchg atomic_cmpxchg
1068 #endif /* atomic_cmpxchg_relaxed */
/*
 * atomic_try_cmpxchg() fallbacks: built on atomic_cmpxchg*().  In the full
 * generated code, on failure the observed value is written back through
 * @old before returning false; true is returned on success.
 *
 * NOTE(review): truncated generated listing -- the "int r, o = *old;"
 * declarations, the "*old = r;" write-back, braces and "#endif" lines are
 * missing from this view (embedded line numbers jump, e.g. 1084 -> 1087).
 * Regenerate via scripts/atomic/gen-atomic-fallback.sh; do not hand-edit.
 */
1070 #define arch_atomic_try_cmpxchg atomic_try_cmpxchg
1071 #define arch_atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1072 #define arch_atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1073 #define arch_atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
1075 #ifndef atomic_try_cmpxchg_relaxed
1076 #ifdef atomic_try_cmpxchg
1077 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
1078 #define atomic_try_cmpxchg_release atomic_try_cmpxchg
1079 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
1080 #endif /* atomic_try_cmpxchg */
1082 #ifndef atomic_try_cmpxchg
1083 static __always_inline bool
1084 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1087 r = atomic_cmpxchg(v, o, new);
1088 if (unlikely(r != o))
1090 return likely(r == o);
1092 #define atomic_try_cmpxchg atomic_try_cmpxchg
1095 #ifndef atomic_try_cmpxchg_acquire
1096 static __always_inline bool
1097 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1100 r = atomic_cmpxchg_acquire(v, o, new);
1101 if (unlikely(r != o))
1103 return likely(r == o);
1105 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1108 #ifndef atomic_try_cmpxchg_release
1109 static __always_inline bool
1110 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1113 r = atomic_cmpxchg_release(v, o, new);
1114 if (unlikely(r != o))
1116 return likely(r == o);
1118 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1121 #ifndef atomic_try_cmpxchg_relaxed
1122 static __always_inline bool
1123 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1126 r = atomic_cmpxchg_relaxed(v, o, new);
1127 if (unlikely(r != o))
1129 return likely(r == o);
1131 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
1134 #else /* atomic_try_cmpxchg_relaxed */
1136 #ifndef atomic_try_cmpxchg_acquire
1137 static __always_inline bool
1138 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1140 bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
1141 __atomic_acquire_fence();
1144 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1147 #ifndef atomic_try_cmpxchg_release
1148 static __always_inline bool
1149 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1151 __atomic_release_fence();
1152 return atomic_try_cmpxchg_relaxed(v, old, new);
1154 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1157 #ifndef atomic_try_cmpxchg
1158 static __always_inline bool
1159 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1162 __atomic_pre_full_fence();
1163 ret = atomic_try_cmpxchg_relaxed(v, old, new);
1164 __atomic_post_full_fence();
1167 #define atomic_try_cmpxchg atomic_try_cmpxchg
1170 #endif /* atomic_try_cmpxchg_relaxed */
/*
 * Conditional and test helpers built on the primitives above:
 * *_and_test() ops return true when the result is zero, add_negative()
 * returns true when the result is negative, and the *_unless/*_not_zero
 * helpers retry with atomic_try_cmpxchg() until the condition fails or the
 * update succeeds.
 *
 * NOTE(review): truncated generated listing -- the "/" comment openers,
 * function braces, "do {" loop headers and "return" lines are missing from
 * this view (embedded line numbers jump, e.g. 1265 -> 1268).
 * Regenerate via scripts/atomic/gen-atomic-fallback.sh; do not hand-edit.
 */
1172 #define arch_atomic_sub_and_test atomic_sub_and_test
1174 #ifndef atomic_sub_and_test
1176 * atomic_sub_and_test - subtract value from variable and test result
1177 * @i: integer value to subtract
1178 * @v: pointer of type atomic_t
1180 * Atomically subtracts @i from @v and returns
1181 * true if the result is zero, or false for all
1184 static __always_inline bool
1185 atomic_sub_and_test(int i, atomic_t *v)
1187 return atomic_sub_return(i, v) == 0;
1189 #define atomic_sub_and_test atomic_sub_and_test
1192 #define arch_atomic_dec_and_test atomic_dec_and_test
1194 #ifndef atomic_dec_and_test
1196 * atomic_dec_and_test - decrement and test
1197 * @v: pointer of type atomic_t
1199 * Atomically decrements @v by 1 and
1200 * returns true if the result is 0, or false for all other
1203 static __always_inline bool
1204 atomic_dec_and_test(atomic_t *v)
1206 return atomic_dec_return(v) == 0;
1208 #define atomic_dec_and_test atomic_dec_and_test
1211 #define arch_atomic_inc_and_test atomic_inc_and_test
1213 #ifndef atomic_inc_and_test
1215 * atomic_inc_and_test - increment and test
1216 * @v: pointer of type atomic_t
1218 * Atomically increments @v by 1
1219 * and returns true if the result is zero, or false for all
1222 static __always_inline bool
1223 atomic_inc_and_test(atomic_t *v)
1225 return atomic_inc_return(v) == 0;
1227 #define atomic_inc_and_test atomic_inc_and_test
1230 #define arch_atomic_add_negative atomic_add_negative
1232 #ifndef atomic_add_negative
1234 * atomic_add_negative - add and test if negative
1235 * @i: integer value to add
1236 * @v: pointer of type atomic_t
1238 * Atomically adds @i to @v and returns true
1239 * if the result is negative, or false when
1240 * result is greater than or equal to zero.
1242 static __always_inline bool
1243 atomic_add_negative(int i, atomic_t *v)
1245 return atomic_add_return(i, v) < 0;
1247 #define atomic_add_negative atomic_add_negative
1250 #define arch_atomic_fetch_add_unless atomic_fetch_add_unless
1252 #ifndef atomic_fetch_add_unless
1254 * atomic_fetch_add_unless - add unless the number is already a given value
1255 * @v: pointer of type atomic_t
1256 * @a: the amount to add to v...
1257 * @u: ...unless v is equal to u.
1259 * Atomically adds @a to @v, so long as @v was not already @u.
1260 * Returns original value of @v
1262 static __always_inline int
1263 atomic_fetch_add_unless(atomic_t *v, int a, int u)
1265 int c = atomic_read(v);
1268 if (unlikely(c == u))
1270 } while (!atomic_try_cmpxchg(v, &c, c + a));
1274 #define atomic_fetch_add_unless atomic_fetch_add_unless
1277 #define arch_atomic_add_unless atomic_add_unless
1279 #ifndef atomic_add_unless
1281 * atomic_add_unless - add unless the number is already a given value
1282 * @v: pointer of type atomic_t
1283 * @a: the amount to add to v...
1284 * @u: ...unless v is equal to u.
1286 * Atomically adds @a to @v, if @v was not already @u.
1287 * Returns true if the addition was done.
1289 static __always_inline bool
1290 atomic_add_unless(atomic_t *v, int a, int u)
1292 return atomic_fetch_add_unless(v, a, u) != u;
1294 #define atomic_add_unless atomic_add_unless
1297 #define arch_atomic_inc_not_zero atomic_inc_not_zero
1299 #ifndef atomic_inc_not_zero
1301 * atomic_inc_not_zero - increment unless the number is zero
1302 * @v: pointer of type atomic_t
1304 * Atomically increments @v by 1, if @v is non-zero.
1305 * Returns true if the increment was done.
1307 static __always_inline bool
1308 atomic_inc_not_zero(atomic_t *v)
1310 return atomic_add_unless(v, 1, 0);
1312 #define atomic_inc_not_zero atomic_inc_not_zero
1315 #define arch_atomic_inc_unless_negative atomic_inc_unless_negative
1317 #ifndef atomic_inc_unless_negative
1318 static __always_inline bool
1319 atomic_inc_unless_negative(atomic_t *v)
1321 int c = atomic_read(v);
1324 if (unlikely(c < 0))
1326 } while (!atomic_try_cmpxchg(v, &c, c + 1));
1330 #define atomic_inc_unless_negative atomic_inc_unless_negative
1333 #define arch_atomic_dec_unless_positive atomic_dec_unless_positive
1335 #ifndef atomic_dec_unless_positive
1336 static __always_inline bool
1337 atomic_dec_unless_positive(atomic_t *v)
1339 int c = atomic_read(v);
1342 if (unlikely(c > 0))
1344 } while (!atomic_try_cmpxchg(v, &c, c - 1));
1348 #define atomic_dec_unless_positive atomic_dec_unless_positive
1351 #define arch_atomic_dec_if_positive atomic_dec_if_positive
1353 #ifndef atomic_dec_if_positive
1354 static __always_inline int
1355 atomic_dec_if_positive(atomic_t *v)
1357 int dec, c = atomic_read(v);
1361 if (unlikely(dec < 0))
1363 } while (!atomic_try_cmpxchg(v, &c, dec));
1367 #define atomic_dec_if_positive atomic_dec_if_positive
1370 #ifdef CONFIG_GENERIC_ATOMIC64
1371 #include <asm-generic/atomic64.h>
1374 #define arch_atomic64_read atomic64_read
1375 #define arch_atomic64_read_acquire atomic64_read_acquire
1377 #ifndef atomic64_read_acquire
1378 static __always_inline s64
1379 atomic64_read_acquire(const atomic64_t *v)
1381 return smp_load_acquire(&(v)->counter);
1383 #define atomic64_read_acquire atomic64_read_acquire
1386 #define arch_atomic64_set atomic64_set
1387 #define arch_atomic64_set_release atomic64_set_release
1389 #ifndef atomic64_set_release
1390 static __always_inline void
1391 atomic64_set_release(atomic64_t *v, s64 i)
1393 smp_store_release(&(v)->counter, i);
1395 #define atomic64_set_release atomic64_set_release
#define arch_atomic64_add atomic64_add

#define arch_atomic64_add_return atomic64_add_return
#define arch_atomic64_add_return_acquire atomic64_add_return_acquire
#define arch_atomic64_add_return_release atomic64_add_return_release
#define arch_atomic64_add_return_relaxed atomic64_add_return_relaxed

/*
 * If the architecture only provides a fully-ordered atomic64_add_return(),
 * alias the ordering variants to it; otherwise build the missing variants
 * from the _relaxed form plus explicit fences.
 */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */

#define arch_atomic64_fetch_add atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release atomic64_fetch_add_release
#define arch_atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed

#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
#define arch_atomic64_sub atomic64_sub

#define arch_atomic64_sub_return atomic64_sub_return
#define arch_atomic64_sub_return_acquire atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release atomic64_sub_return_release
#define arch_atomic64_sub_return_relaxed atomic64_sub_return_relaxed

#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */

#define arch_atomic64_fetch_sub atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed

#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
1590 #define arch_atomic64_inc atomic64_inc
1592 #ifndef atomic64_inc
1593 static __always_inline void
1594 atomic64_inc(atomic64_t *v)
1598 #define atomic64_inc atomic64_inc
1601 #define arch_atomic64_inc_return atomic64_inc_return
1602 #define arch_atomic64_inc_return_acquire atomic64_inc_return_acquire
1603 #define arch_atomic64_inc_return_release atomic64_inc_return_release
1604 #define arch_atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1606 #ifndef atomic64_inc_return_relaxed
1607 #ifdef atomic64_inc_return
1608 #define atomic64_inc_return_acquire atomic64_inc_return
1609 #define atomic64_inc_return_release atomic64_inc_return
1610 #define atomic64_inc_return_relaxed atomic64_inc_return
1611 #endif /* atomic64_inc_return */
1613 #ifndef atomic64_inc_return
1614 static __always_inline s64
1615 atomic64_inc_return(atomic64_t *v)
1617 return atomic64_add_return(1, v);
1619 #define atomic64_inc_return atomic64_inc_return
1622 #ifndef atomic64_inc_return_acquire
1623 static __always_inline s64
1624 atomic64_inc_return_acquire(atomic64_t *v)
1626 return atomic64_add_return_acquire(1, v);
1628 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1631 #ifndef atomic64_inc_return_release
1632 static __always_inline s64
1633 atomic64_inc_return_release(atomic64_t *v)
1635 return atomic64_add_return_release(1, v);
1637 #define atomic64_inc_return_release atomic64_inc_return_release
1640 #ifndef atomic64_inc_return_relaxed
1641 static __always_inline s64
1642 atomic64_inc_return_relaxed(atomic64_t *v)
1644 return atomic64_add_return_relaxed(1, v);
1646 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1649 #else /* atomic64_inc_return_relaxed */
1651 #ifndef atomic64_inc_return_acquire
1652 static __always_inline s64
1653 atomic64_inc_return_acquire(atomic64_t *v)
1655 s64 ret = atomic64_inc_return_relaxed(v);
1656 __atomic_acquire_fence();
1659 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1662 #ifndef atomic64_inc_return_release
1663 static __always_inline s64
1664 atomic64_inc_return_release(atomic64_t *v)
1666 __atomic_release_fence();
1667 return atomic64_inc_return_relaxed(v);
1669 #define atomic64_inc_return_release atomic64_inc_return_release
1672 #ifndef atomic64_inc_return
1673 static __always_inline s64
1674 atomic64_inc_return(atomic64_t *v)
1677 __atomic_pre_full_fence();
1678 ret = atomic64_inc_return_relaxed(v);
1679 __atomic_post_full_fence();
1682 #define atomic64_inc_return atomic64_inc_return
1685 #endif /* atomic64_inc_return_relaxed */
1687 #define arch_atomic64_fetch_inc atomic64_fetch_inc
1688 #define arch_atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1689 #define arch_atomic64_fetch_inc_release atomic64_fetch_inc_release
1690 #define arch_atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1692 #ifndef atomic64_fetch_inc_relaxed
1693 #ifdef atomic64_fetch_inc
1694 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
1695 #define atomic64_fetch_inc_release atomic64_fetch_inc
1696 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
1697 #endif /* atomic64_fetch_inc */
1699 #ifndef atomic64_fetch_inc
1700 static __always_inline s64
1701 atomic64_fetch_inc(atomic64_t *v)
1703 return atomic64_fetch_add(1, v);
1705 #define atomic64_fetch_inc atomic64_fetch_inc
1708 #ifndef atomic64_fetch_inc_acquire
1709 static __always_inline s64
1710 atomic64_fetch_inc_acquire(atomic64_t *v)
1712 return atomic64_fetch_add_acquire(1, v);
1714 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1717 #ifndef atomic64_fetch_inc_release
1718 static __always_inline s64
1719 atomic64_fetch_inc_release(atomic64_t *v)
1721 return atomic64_fetch_add_release(1, v);
1723 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1726 #ifndef atomic64_fetch_inc_relaxed
1727 static __always_inline s64
1728 atomic64_fetch_inc_relaxed(atomic64_t *v)
1730 return atomic64_fetch_add_relaxed(1, v);
1732 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1735 #else /* atomic64_fetch_inc_relaxed */
1737 #ifndef atomic64_fetch_inc_acquire
1738 static __always_inline s64
1739 atomic64_fetch_inc_acquire(atomic64_t *v)
1741 s64 ret = atomic64_fetch_inc_relaxed(v);
1742 __atomic_acquire_fence();
1745 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1748 #ifndef atomic64_fetch_inc_release
1749 static __always_inline s64
1750 atomic64_fetch_inc_release(atomic64_t *v)
1752 __atomic_release_fence();
1753 return atomic64_fetch_inc_relaxed(v);
1755 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1758 #ifndef atomic64_fetch_inc
1759 static __always_inline s64
1760 atomic64_fetch_inc(atomic64_t *v)
1763 __atomic_pre_full_fence();
1764 ret = atomic64_fetch_inc_relaxed(v);
1765 __atomic_post_full_fence();
1768 #define atomic64_fetch_inc atomic64_fetch_inc
1771 #endif /* atomic64_fetch_inc_relaxed */
1773 #define arch_atomic64_dec atomic64_dec
1775 #ifndef atomic64_dec
1776 static __always_inline void
1777 atomic64_dec(atomic64_t *v)
1781 #define atomic64_dec atomic64_dec
1784 #define arch_atomic64_dec_return atomic64_dec_return
1785 #define arch_atomic64_dec_return_acquire atomic64_dec_return_acquire
1786 #define arch_atomic64_dec_return_release atomic64_dec_return_release
1787 #define arch_atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1789 #ifndef atomic64_dec_return_relaxed
1790 #ifdef atomic64_dec_return
1791 #define atomic64_dec_return_acquire atomic64_dec_return
1792 #define atomic64_dec_return_release atomic64_dec_return
1793 #define atomic64_dec_return_relaxed atomic64_dec_return
1794 #endif /* atomic64_dec_return */
1796 #ifndef atomic64_dec_return
1797 static __always_inline s64
1798 atomic64_dec_return(atomic64_t *v)
1800 return atomic64_sub_return(1, v);
1802 #define atomic64_dec_return atomic64_dec_return
1805 #ifndef atomic64_dec_return_acquire
1806 static __always_inline s64
1807 atomic64_dec_return_acquire(atomic64_t *v)
1809 return atomic64_sub_return_acquire(1, v);
1811 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1814 #ifndef atomic64_dec_return_release
1815 static __always_inline s64
1816 atomic64_dec_return_release(atomic64_t *v)
1818 return atomic64_sub_return_release(1, v);
1820 #define atomic64_dec_return_release atomic64_dec_return_release
1823 #ifndef atomic64_dec_return_relaxed
1824 static __always_inline s64
1825 atomic64_dec_return_relaxed(atomic64_t *v)
1827 return atomic64_sub_return_relaxed(1, v);
1829 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1832 #else /* atomic64_dec_return_relaxed */
1834 #ifndef atomic64_dec_return_acquire
1835 static __always_inline s64
1836 atomic64_dec_return_acquire(atomic64_t *v)
1838 s64 ret = atomic64_dec_return_relaxed(v);
1839 __atomic_acquire_fence();
1842 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1845 #ifndef atomic64_dec_return_release
1846 static __always_inline s64
1847 atomic64_dec_return_release(atomic64_t *v)
1849 __atomic_release_fence();
1850 return atomic64_dec_return_relaxed(v);
1852 #define atomic64_dec_return_release atomic64_dec_return_release
1855 #ifndef atomic64_dec_return
1856 static __always_inline s64
1857 atomic64_dec_return(atomic64_t *v)
1860 __atomic_pre_full_fence();
1861 ret = atomic64_dec_return_relaxed(v);
1862 __atomic_post_full_fence();
1865 #define atomic64_dec_return atomic64_dec_return
1868 #endif /* atomic64_dec_return_relaxed */
1870 #define arch_atomic64_fetch_dec atomic64_fetch_dec
1871 #define arch_atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1872 #define arch_atomic64_fetch_dec_release atomic64_fetch_dec_release
1873 #define arch_atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1875 #ifndef atomic64_fetch_dec_relaxed
1876 #ifdef atomic64_fetch_dec
1877 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
1878 #define atomic64_fetch_dec_release atomic64_fetch_dec
1879 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
1880 #endif /* atomic64_fetch_dec */
1882 #ifndef atomic64_fetch_dec
1883 static __always_inline s64
1884 atomic64_fetch_dec(atomic64_t *v)
1886 return atomic64_fetch_sub(1, v);
1888 #define atomic64_fetch_dec atomic64_fetch_dec
1891 #ifndef atomic64_fetch_dec_acquire
1892 static __always_inline s64
1893 atomic64_fetch_dec_acquire(atomic64_t *v)
1895 return atomic64_fetch_sub_acquire(1, v);
1897 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1900 #ifndef atomic64_fetch_dec_release
1901 static __always_inline s64
1902 atomic64_fetch_dec_release(atomic64_t *v)
1904 return atomic64_fetch_sub_release(1, v);
1906 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1909 #ifndef atomic64_fetch_dec_relaxed
1910 static __always_inline s64
1911 atomic64_fetch_dec_relaxed(atomic64_t *v)
1913 return atomic64_fetch_sub_relaxed(1, v);
1915 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1918 #else /* atomic64_fetch_dec_relaxed */
1920 #ifndef atomic64_fetch_dec_acquire
1921 static __always_inline s64
1922 atomic64_fetch_dec_acquire(atomic64_t *v)
1924 s64 ret = atomic64_fetch_dec_relaxed(v);
1925 __atomic_acquire_fence();
1928 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1931 #ifndef atomic64_fetch_dec_release
1932 static __always_inline s64
1933 atomic64_fetch_dec_release(atomic64_t *v)
1935 __atomic_release_fence();
1936 return atomic64_fetch_dec_relaxed(v);
1938 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1941 #ifndef atomic64_fetch_dec
1942 static __always_inline s64
1943 atomic64_fetch_dec(atomic64_t *v)
1946 __atomic_pre_full_fence();
1947 ret = atomic64_fetch_dec_relaxed(v);
1948 __atomic_post_full_fence();
1951 #define atomic64_fetch_dec atomic64_fetch_dec
1954 #endif /* atomic64_fetch_dec_relaxed */
1956 #define arch_atomic64_and atomic64_and
1958 #define arch_atomic64_fetch_and atomic64_fetch_and
1959 #define arch_atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1960 #define arch_atomic64_fetch_and_release atomic64_fetch_and_release
1961 #define arch_atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
1963 #ifndef atomic64_fetch_and_relaxed
1964 #define atomic64_fetch_and_acquire atomic64_fetch_and
1965 #define atomic64_fetch_and_release atomic64_fetch_and
1966 #define atomic64_fetch_and_relaxed atomic64_fetch_and
1967 #else /* atomic64_fetch_and_relaxed */
1969 #ifndef atomic64_fetch_and_acquire
1970 static __always_inline s64
1971 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1973 s64 ret = atomic64_fetch_and_relaxed(i, v);
1974 __atomic_acquire_fence();
1977 #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1980 #ifndef atomic64_fetch_and_release
1981 static __always_inline s64
1982 atomic64_fetch_and_release(s64 i, atomic64_t *v)
1984 __atomic_release_fence();
1985 return atomic64_fetch_and_relaxed(i, v);
1987 #define atomic64_fetch_and_release atomic64_fetch_and_release
1990 #ifndef atomic64_fetch_and
1991 static __always_inline s64
1992 atomic64_fetch_and(s64 i, atomic64_t *v)
1995 __atomic_pre_full_fence();
1996 ret = atomic64_fetch_and_relaxed(i, v);
1997 __atomic_post_full_fence();
2000 #define atomic64_fetch_and atomic64_fetch_and
2003 #endif /* atomic64_fetch_and_relaxed */
2005 #define arch_atomic64_andnot atomic64_andnot
2007 #ifndef atomic64_andnot
2008 static __always_inline void
2009 atomic64_andnot(s64 i, atomic64_t *v)
2011 atomic64_and(~i, v);
2013 #define atomic64_andnot atomic64_andnot
2016 #define arch_atomic64_fetch_andnot atomic64_fetch_andnot
2017 #define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
2018 #define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
2019 #define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
2021 #ifndef atomic64_fetch_andnot_relaxed
2022 #ifdef atomic64_fetch_andnot
2023 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
2024 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
2025 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
2026 #endif /* atomic64_fetch_andnot */
2028 #ifndef atomic64_fetch_andnot
2029 static __always_inline s64
2030 atomic64_fetch_andnot(s64 i, atomic64_t *v)
2032 return atomic64_fetch_and(~i, v);
2034 #define atomic64_fetch_andnot atomic64_fetch_andnot
2037 #ifndef atomic64_fetch_andnot_acquire
2038 static __always_inline s64
2039 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
2041 return atomic64_fetch_and_acquire(~i, v);
2043 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
2046 #ifndef atomic64_fetch_andnot_release
2047 static __always_inline s64
2048 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2050 return atomic64_fetch_and_release(~i, v);
2052 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
2055 #ifndef atomic64_fetch_andnot_relaxed
2056 static __always_inline s64
2057 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
2059 return atomic64_fetch_and_relaxed(~i, v);
2061 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
2064 #else /* atomic64_fetch_andnot_relaxed */
2066 #ifndef atomic64_fetch_andnot_acquire
2067 static __always_inline s64
2068 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
2070 s64 ret = atomic64_fetch_andnot_relaxed(i, v);
2071 __atomic_acquire_fence();
2074 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
2077 #ifndef atomic64_fetch_andnot_release
2078 static __always_inline s64
2079 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2081 __atomic_release_fence();
2082 return atomic64_fetch_andnot_relaxed(i, v);
2084 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
2087 #ifndef atomic64_fetch_andnot
2088 static __always_inline s64
2089 atomic64_fetch_andnot(s64 i, atomic64_t *v)
2092 __atomic_pre_full_fence();
2093 ret = atomic64_fetch_andnot_relaxed(i, v);
2094 __atomic_post_full_fence();
2097 #define atomic64_fetch_andnot atomic64_fetch_andnot
2100 #endif /* atomic64_fetch_andnot_relaxed */
#define arch_atomic64_or atomic64_or

#define arch_atomic64_fetch_or atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release atomic64_fetch_or_release
#define arch_atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed

#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */

#define arch_atomic64_xor atomic64_xor

#define arch_atomic64_fetch_xor atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed

#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
#define arch_atomic64_xchg atomic64_xchg
#define arch_atomic64_xchg_acquire atomic64_xchg_acquire
#define arch_atomic64_xchg_release atomic64_xchg_release
#define arch_atomic64_xchg_relaxed atomic64_xchg_relaxed

#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */

#define arch_atomic64_cmpxchg atomic64_cmpxchg
#define arch_atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release atomic64_cmpxchg_release
#define arch_atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed

#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
2294 #define arch_atomic64_try_cmpxchg atomic64_try_cmpxchg
2295 #define arch_atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2296 #define arch_atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2297 #define arch_atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2299 #ifndef atomic64_try_cmpxchg_relaxed
2300 #ifdef atomic64_try_cmpxchg
2301 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
2302 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
2303 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
2304 #endif /* atomic64_try_cmpxchg */
2306 #ifndef atomic64_try_cmpxchg
2307 static __always_inline bool
2308 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2311 r = atomic64_cmpxchg(v, o, new);
2312 if (unlikely(r != o))
2314 return likely(r == o);
2316 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2319 #ifndef atomic64_try_cmpxchg_acquire
2320 static __always_inline bool
2321 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2324 r = atomic64_cmpxchg_acquire(v, o, new);
2325 if (unlikely(r != o))
2327 return likely(r == o);
2329 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2332 #ifndef atomic64_try_cmpxchg_release
2333 static __always_inline bool
2334 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2337 r = atomic64_cmpxchg_release(v, o, new);
2338 if (unlikely(r != o))
2340 return likely(r == o);
2342 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2345 #ifndef atomic64_try_cmpxchg_relaxed
2346 static __always_inline bool
2347 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2350 r = atomic64_cmpxchg_relaxed(v, o, new);
2351 if (unlikely(r != o))
2353 return likely(r == o);
2355 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2358 #else /* atomic64_try_cmpxchg_relaxed */
2360 #ifndef atomic64_try_cmpxchg_acquire
2361 static __always_inline bool
2362 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2364 bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2365 __atomic_acquire_fence();
2368 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2371 #ifndef atomic64_try_cmpxchg_release
2372 static __always_inline bool
2373 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2375 __atomic_release_fence();
2376 return atomic64_try_cmpxchg_relaxed(v, old, new);
2378 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2381 #ifndef atomic64_try_cmpxchg
2382 static __always_inline bool
2383 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2386 __atomic_pre_full_fence();
2387 ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2388 __atomic_post_full_fence();
2391 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2394 #endif /* atomic64_try_cmpxchg_relaxed */
2396 #define arch_atomic64_sub_and_test atomic64_sub_and_test
2398 #ifndef atomic64_sub_and_test
2400 * atomic64_sub_and_test - subtract value from variable and test result
2401 * @i: integer value to subtract
2402 * @v: pointer of type atomic64_t
2404 * Atomically subtracts @i from @v and returns
2405 * true if the result is zero, or false for all
2408 static __always_inline bool
2409 atomic64_sub_and_test(s64 i, atomic64_t *v)
2411 return atomic64_sub_return(i, v) == 0;
2413 #define atomic64_sub_and_test atomic64_sub_and_test
2416 #define arch_atomic64_dec_and_test atomic64_dec_and_test
2418 #ifndef atomic64_dec_and_test
2420 * atomic64_dec_and_test - decrement and test
2421 * @v: pointer of type atomic64_t
2423 * Atomically decrements @v by 1 and
2424 * returns true if the result is 0, or false for all other
2427 static __always_inline bool
2428 atomic64_dec_and_test(atomic64_t *v)
2430 return atomic64_dec_return(v) == 0;
2432 #define atomic64_dec_and_test atomic64_dec_and_test
2435 #define arch_atomic64_inc_and_test atomic64_inc_and_test
2437 #ifndef atomic64_inc_and_test
2439 * atomic64_inc_and_test - increment and test
2440 * @v: pointer of type atomic64_t
2442 * Atomically increments @v by 1
2443 * and returns true if the result is zero, or false for all
2446 static __always_inline bool
2447 atomic64_inc_and_test(atomic64_t *v)
2449 return atomic64_inc_return(v) == 0;
2451 #define atomic64_inc_and_test atomic64_inc_and_test
2454 #define arch_atomic64_add_negative atomic64_add_negative
2456 #ifndef atomic64_add_negative
2458 * atomic64_add_negative - add and test if negative
2459 * @i: integer value to add
2460 * @v: pointer of type atomic64_t
2462 * Atomically adds @i to @v and returns true
2463 * if the result is negative, or false when
2464 * result is greater than or equal to zero.
2466 static __always_inline bool
2467 atomic64_add_negative(s64 i, atomic64_t *v)
2469 return atomic64_add_return(i, v) < 0;
2471 #define atomic64_add_negative atomic64_add_negative
2474 #define arch_atomic64_fetch_add_unless atomic64_fetch_add_unless
2476 #ifndef atomic64_fetch_add_unless
2478 * atomic64_fetch_add_unless - add unless the number is already a given value
2479 * @v: pointer of type atomic64_t
2480 * @a: the amount to add to v...
2481 * @u: ...unless v is equal to u.
2483 * Atomically adds @a to @v, so long as @v was not already @u.
2484 * Returns original value of @v
2486 static __always_inline s64
2487 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2489 s64 c = atomic64_read(v);
2492 if (unlikely(c == u))
2494 } while (!atomic64_try_cmpxchg(v, &c, c + a));
2498 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
2501 #define arch_atomic64_add_unless atomic64_add_unless
2503 #ifndef atomic64_add_unless
2505 * atomic64_add_unless - add unless the number is already a given value
2506 * @v: pointer of type atomic64_t
2507 * @a: the amount to add to v...
2508 * @u: ...unless v is equal to u.
2510 * Atomically adds @a to @v, if @v was not already @u.
2511 * Returns true if the addition was done.
2513 static __always_inline bool
2514 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2516 return atomic64_fetch_add_unless(v, a, u) != u;
2518 #define atomic64_add_unless atomic64_add_unless
2521 #define arch_atomic64_inc_not_zero atomic64_inc_not_zero
2523 #ifndef atomic64_inc_not_zero
2525 * atomic64_inc_not_zero - increment unless the number is zero
2526 * @v: pointer of type atomic64_t
2528 * Atomically increments @v by 1, if @v is non-zero.
2529 * Returns true if the increment was done.
2531 static __always_inline bool
2532 atomic64_inc_not_zero(atomic64_t *v)
2534 return atomic64_add_unless(v, 1, 0);
2536 #define atomic64_inc_not_zero atomic64_inc_not_zero
2539 #define arch_atomic64_inc_unless_negative atomic64_inc_unless_negative
2541 #ifndef atomic64_inc_unless_negative
2542 static __always_inline bool
2543 atomic64_inc_unless_negative(atomic64_t *v)
2545 s64 c = atomic64_read(v);
2548 if (unlikely(c < 0))
2550 } while (!atomic64_try_cmpxchg(v, &c, c + 1));
2554 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
2557 #define arch_atomic64_dec_unless_positive atomic64_dec_unless_positive
2559 #ifndef atomic64_dec_unless_positive
2560 static __always_inline bool
2561 atomic64_dec_unless_positive(atomic64_t *v)
2563 s64 c = atomic64_read(v);
2566 if (unlikely(c > 0))
2568 } while (!atomic64_try_cmpxchg(v, &c, c - 1));
2572 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
2575 #define arch_atomic64_dec_if_positive atomic64_dec_if_positive
2577 #ifndef atomic64_dec_if_positive
2578 static __always_inline s64
2579 atomic64_dec_if_positive(atomic64_t *v)
2581 s64 dec, c = atomic64_read(v);
2585 if (unlikely(dec < 0))
2587 } while (!atomic64_try_cmpxchg(v, &c, dec));
2591 #define atomic64_dec_if_positive atomic64_dec_if_positive
2594 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2595 // d78e6c293c661c15188f0ec05bce45188c8d5892