1 // SPDX-License-Identifier: GPL-2.0
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
9 #include <linux/compiler.h>
/*
 * Fallbacks for xchg: if the architecture only provides a full-fence xchg,
 * alias the ordering variants to it; otherwise build acquire/release/fence
 * forms from the relaxed primitive via __atomic_op_*().
 */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */
/* Fallbacks for cmpxchg ordering variants (same scheme as xchg above). */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */
/* Fallbacks for cmpxchg64 ordering variants (same scheme as xchg above). */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */
80 #ifndef atomic_read_acquire
81 static __always_inline int
82 atomic_read_acquire(const atomic_t *v)
84 return smp_load_acquire(&(v)->counter);
86 #define atomic_read_acquire atomic_read_acquire
89 #ifndef atomic_set_release
90 static __always_inline void
91 atomic_set_release(atomic_t *v, int i)
93 smp_store_release(&(v)->counter, i);
95 #define atomic_set_release atomic_set_release
/*
 * atomic_add_return ordering variants: either alias everything to the
 * full-fence op, or derive acquire/release/fence forms from _relaxed
 * by inserting explicit fences around it.
 */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */
/* atomic_fetch_add ordering variants, derived from _relaxed via fences. */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */
/* atomic_sub_return ordering variants, derived from _relaxed via fences. */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */
/* atomic_fetch_sub ordering variants, derived from _relaxed via fences. */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */
267 static __always_inline void
268 atomic_inc(atomic_t *v)
272 #define atomic_inc atomic_inc
275 #ifndef atomic_inc_return_relaxed
276 #ifdef atomic_inc_return
277 #define atomic_inc_return_acquire atomic_inc_return
278 #define atomic_inc_return_release atomic_inc_return
279 #define atomic_inc_return_relaxed atomic_inc_return
280 #endif /* atomic_inc_return */
282 #ifndef atomic_inc_return
283 static __always_inline int
284 atomic_inc_return(atomic_t *v)
286 return atomic_add_return(1, v);
288 #define atomic_inc_return atomic_inc_return
291 #ifndef atomic_inc_return_acquire
292 static __always_inline int
293 atomic_inc_return_acquire(atomic_t *v)
295 return atomic_add_return_acquire(1, v);
297 #define atomic_inc_return_acquire atomic_inc_return_acquire
300 #ifndef atomic_inc_return_release
301 static __always_inline int
302 atomic_inc_return_release(atomic_t *v)
304 return atomic_add_return_release(1, v);
306 #define atomic_inc_return_release atomic_inc_return_release
309 #ifndef atomic_inc_return_relaxed
310 static __always_inline int
311 atomic_inc_return_relaxed(atomic_t *v)
313 return atomic_add_return_relaxed(1, v);
315 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
318 #else /* atomic_inc_return_relaxed */
320 #ifndef atomic_inc_return_acquire
321 static __always_inline int
322 atomic_inc_return_acquire(atomic_t *v)
324 int ret = atomic_inc_return_relaxed(v);
325 __atomic_acquire_fence();
328 #define atomic_inc_return_acquire atomic_inc_return_acquire
331 #ifndef atomic_inc_return_release
332 static __always_inline int
333 atomic_inc_return_release(atomic_t *v)
335 __atomic_release_fence();
336 return atomic_inc_return_relaxed(v);
338 #define atomic_inc_return_release atomic_inc_return_release
341 #ifndef atomic_inc_return
342 static __always_inline int
343 atomic_inc_return(atomic_t *v)
346 __atomic_pre_full_fence();
347 ret = atomic_inc_return_relaxed(v);
348 __atomic_post_full_fence();
351 #define atomic_inc_return atomic_inc_return
354 #endif /* atomic_inc_return_relaxed */
356 #ifndef atomic_fetch_inc_relaxed
357 #ifdef atomic_fetch_inc
358 #define atomic_fetch_inc_acquire atomic_fetch_inc
359 #define atomic_fetch_inc_release atomic_fetch_inc
360 #define atomic_fetch_inc_relaxed atomic_fetch_inc
361 #endif /* atomic_fetch_inc */
363 #ifndef atomic_fetch_inc
364 static __always_inline int
365 atomic_fetch_inc(atomic_t *v)
367 return atomic_fetch_add(1, v);
369 #define atomic_fetch_inc atomic_fetch_inc
372 #ifndef atomic_fetch_inc_acquire
373 static __always_inline int
374 atomic_fetch_inc_acquire(atomic_t *v)
376 return atomic_fetch_add_acquire(1, v);
378 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
381 #ifndef atomic_fetch_inc_release
382 static __always_inline int
383 atomic_fetch_inc_release(atomic_t *v)
385 return atomic_fetch_add_release(1, v);
387 #define atomic_fetch_inc_release atomic_fetch_inc_release
390 #ifndef atomic_fetch_inc_relaxed
391 static __always_inline int
392 atomic_fetch_inc_relaxed(atomic_t *v)
394 return atomic_fetch_add_relaxed(1, v);
396 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
399 #else /* atomic_fetch_inc_relaxed */
401 #ifndef atomic_fetch_inc_acquire
402 static __always_inline int
403 atomic_fetch_inc_acquire(atomic_t *v)
405 int ret = atomic_fetch_inc_relaxed(v);
406 __atomic_acquire_fence();
409 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
412 #ifndef atomic_fetch_inc_release
413 static __always_inline int
414 atomic_fetch_inc_release(atomic_t *v)
416 __atomic_release_fence();
417 return atomic_fetch_inc_relaxed(v);
419 #define atomic_fetch_inc_release atomic_fetch_inc_release
422 #ifndef atomic_fetch_inc
423 static __always_inline int
424 atomic_fetch_inc(atomic_t *v)
427 __atomic_pre_full_fence();
428 ret = atomic_fetch_inc_relaxed(v);
429 __atomic_post_full_fence();
432 #define atomic_fetch_inc atomic_fetch_inc
435 #endif /* atomic_fetch_inc_relaxed */
438 static __always_inline void
439 atomic_dec(atomic_t *v)
443 #define atomic_dec atomic_dec
446 #ifndef atomic_dec_return_relaxed
447 #ifdef atomic_dec_return
448 #define atomic_dec_return_acquire atomic_dec_return
449 #define atomic_dec_return_release atomic_dec_return
450 #define atomic_dec_return_relaxed atomic_dec_return
451 #endif /* atomic_dec_return */
453 #ifndef atomic_dec_return
454 static __always_inline int
455 atomic_dec_return(atomic_t *v)
457 return atomic_sub_return(1, v);
459 #define atomic_dec_return atomic_dec_return
462 #ifndef atomic_dec_return_acquire
463 static __always_inline int
464 atomic_dec_return_acquire(atomic_t *v)
466 return atomic_sub_return_acquire(1, v);
468 #define atomic_dec_return_acquire atomic_dec_return_acquire
471 #ifndef atomic_dec_return_release
472 static __always_inline int
473 atomic_dec_return_release(atomic_t *v)
475 return atomic_sub_return_release(1, v);
477 #define atomic_dec_return_release atomic_dec_return_release
480 #ifndef atomic_dec_return_relaxed
481 static __always_inline int
482 atomic_dec_return_relaxed(atomic_t *v)
484 return atomic_sub_return_relaxed(1, v);
486 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
489 #else /* atomic_dec_return_relaxed */
491 #ifndef atomic_dec_return_acquire
492 static __always_inline int
493 atomic_dec_return_acquire(atomic_t *v)
495 int ret = atomic_dec_return_relaxed(v);
496 __atomic_acquire_fence();
499 #define atomic_dec_return_acquire atomic_dec_return_acquire
502 #ifndef atomic_dec_return_release
503 static __always_inline int
504 atomic_dec_return_release(atomic_t *v)
506 __atomic_release_fence();
507 return atomic_dec_return_relaxed(v);
509 #define atomic_dec_return_release atomic_dec_return_release
512 #ifndef atomic_dec_return
513 static __always_inline int
514 atomic_dec_return(atomic_t *v)
517 __atomic_pre_full_fence();
518 ret = atomic_dec_return_relaxed(v);
519 __atomic_post_full_fence();
522 #define atomic_dec_return atomic_dec_return
525 #endif /* atomic_dec_return_relaxed */
527 #ifndef atomic_fetch_dec_relaxed
528 #ifdef atomic_fetch_dec
529 #define atomic_fetch_dec_acquire atomic_fetch_dec
530 #define atomic_fetch_dec_release atomic_fetch_dec
531 #define atomic_fetch_dec_relaxed atomic_fetch_dec
532 #endif /* atomic_fetch_dec */
534 #ifndef atomic_fetch_dec
535 static __always_inline int
536 atomic_fetch_dec(atomic_t *v)
538 return atomic_fetch_sub(1, v);
540 #define atomic_fetch_dec atomic_fetch_dec
543 #ifndef atomic_fetch_dec_acquire
544 static __always_inline int
545 atomic_fetch_dec_acquire(atomic_t *v)
547 return atomic_fetch_sub_acquire(1, v);
549 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
552 #ifndef atomic_fetch_dec_release
553 static __always_inline int
554 atomic_fetch_dec_release(atomic_t *v)
556 return atomic_fetch_sub_release(1, v);
558 #define atomic_fetch_dec_release atomic_fetch_dec_release
561 #ifndef atomic_fetch_dec_relaxed
562 static __always_inline int
563 atomic_fetch_dec_relaxed(atomic_t *v)
565 return atomic_fetch_sub_relaxed(1, v);
567 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
570 #else /* atomic_fetch_dec_relaxed */
572 #ifndef atomic_fetch_dec_acquire
573 static __always_inline int
574 atomic_fetch_dec_acquire(atomic_t *v)
576 int ret = atomic_fetch_dec_relaxed(v);
577 __atomic_acquire_fence();
580 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
583 #ifndef atomic_fetch_dec_release
584 static __always_inline int
585 atomic_fetch_dec_release(atomic_t *v)
587 __atomic_release_fence();
588 return atomic_fetch_dec_relaxed(v);
590 #define atomic_fetch_dec_release atomic_fetch_dec_release
593 #ifndef atomic_fetch_dec
594 static __always_inline int
595 atomic_fetch_dec(atomic_t *v)
598 __atomic_pre_full_fence();
599 ret = atomic_fetch_dec_relaxed(v);
600 __atomic_post_full_fence();
603 #define atomic_fetch_dec atomic_fetch_dec
606 #endif /* atomic_fetch_dec_relaxed */
/* atomic_fetch_and ordering variants, derived from _relaxed via fences. */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */
650 #ifndef atomic_andnot
651 static __always_inline void
652 atomic_andnot(int i, atomic_t *v)
656 #define atomic_andnot atomic_andnot
659 #ifndef atomic_fetch_andnot_relaxed
660 #ifdef atomic_fetch_andnot
661 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
662 #define atomic_fetch_andnot_release atomic_fetch_andnot
663 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
664 #endif /* atomic_fetch_andnot */
666 #ifndef atomic_fetch_andnot
667 static __always_inline int
668 atomic_fetch_andnot(int i, atomic_t *v)
670 return atomic_fetch_and(~i, v);
672 #define atomic_fetch_andnot atomic_fetch_andnot
675 #ifndef atomic_fetch_andnot_acquire
676 static __always_inline int
677 atomic_fetch_andnot_acquire(int i, atomic_t *v)
679 return atomic_fetch_and_acquire(~i, v);
681 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
684 #ifndef atomic_fetch_andnot_release
685 static __always_inline int
686 atomic_fetch_andnot_release(int i, atomic_t *v)
688 return atomic_fetch_and_release(~i, v);
690 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
693 #ifndef atomic_fetch_andnot_relaxed
694 static __always_inline int
695 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
697 return atomic_fetch_and_relaxed(~i, v);
699 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
702 #else /* atomic_fetch_andnot_relaxed */
704 #ifndef atomic_fetch_andnot_acquire
705 static __always_inline int
706 atomic_fetch_andnot_acquire(int i, atomic_t *v)
708 int ret = atomic_fetch_andnot_relaxed(i, v);
709 __atomic_acquire_fence();
712 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
715 #ifndef atomic_fetch_andnot_release
716 static __always_inline int
717 atomic_fetch_andnot_release(int i, atomic_t *v)
719 __atomic_release_fence();
720 return atomic_fetch_andnot_relaxed(i, v);
722 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
725 #ifndef atomic_fetch_andnot
726 static __always_inline int
727 atomic_fetch_andnot(int i, atomic_t *v)
730 __atomic_pre_full_fence();
731 ret = atomic_fetch_andnot_relaxed(i, v);
732 __atomic_post_full_fence();
735 #define atomic_fetch_andnot atomic_fetch_andnot
738 #endif /* atomic_fetch_andnot_relaxed */
/* atomic_fetch_or ordering variants, derived from _relaxed via fences. */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */
/* atomic_fetch_xor ordering variants, derived from _relaxed via fences. */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg ordering variants, derived from _relaxed via fences. */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */
/* atomic_cmpxchg ordering variants, derived from _relaxed via fences. */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */
908 #ifndef atomic_try_cmpxchg_relaxed
909 #ifdef atomic_try_cmpxchg
910 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
911 #define atomic_try_cmpxchg_release atomic_try_cmpxchg
912 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
913 #endif /* atomic_try_cmpxchg */
915 #ifndef atomic_try_cmpxchg
916 static __always_inline bool
917 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
920 r = atomic_cmpxchg(v, o, new);
921 if (unlikely(r != o))
923 return likely(r == o);
925 #define atomic_try_cmpxchg atomic_try_cmpxchg
928 #ifndef atomic_try_cmpxchg_acquire
929 static __always_inline bool
930 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
933 r = atomic_cmpxchg_acquire(v, o, new);
934 if (unlikely(r != o))
936 return likely(r == o);
938 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
941 #ifndef atomic_try_cmpxchg_release
942 static __always_inline bool
943 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
946 r = atomic_cmpxchg_release(v, o, new);
947 if (unlikely(r != o))
949 return likely(r == o);
951 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
954 #ifndef atomic_try_cmpxchg_relaxed
955 static __always_inline bool
956 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
959 r = atomic_cmpxchg_relaxed(v, o, new);
960 if (unlikely(r != o))
962 return likely(r == o);
964 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
967 #else /* atomic_try_cmpxchg_relaxed */
969 #ifndef atomic_try_cmpxchg_acquire
970 static __always_inline bool
971 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
973 bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
974 __atomic_acquire_fence();
977 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
980 #ifndef atomic_try_cmpxchg_release
981 static __always_inline bool
982 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
984 __atomic_release_fence();
985 return atomic_try_cmpxchg_relaxed(v, old, new);
987 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
990 #ifndef atomic_try_cmpxchg
991 static __always_inline bool
992 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
995 __atomic_pre_full_fence();
996 ret = atomic_try_cmpxchg_relaxed(v, old, new);
997 __atomic_post_full_fence();
1000 #define atomic_try_cmpxchg atomic_try_cmpxchg
1003 #endif /* atomic_try_cmpxchg_relaxed */
1005 #ifndef atomic_sub_and_test
1007 * atomic_sub_and_test - subtract value from variable and test result
1008 * @i: integer value to subtract
1009 * @v: pointer of type atomic_t
1011 * Atomically subtracts @i from @v and returns
1012 * true if the result is zero, or false for all
1015 static __always_inline bool
1016 atomic_sub_and_test(int i, atomic_t *v)
1018 return atomic_sub_return(i, v) == 0;
1020 #define atomic_sub_and_test atomic_sub_and_test
1023 #ifndef atomic_dec_and_test
1025 * atomic_dec_and_test - decrement and test
1026 * @v: pointer of type atomic_t
1028 * Atomically decrements @v by 1 and
1029 * returns true if the result is 0, or false for all other
1032 static __always_inline bool
1033 atomic_dec_and_test(atomic_t *v)
1035 return atomic_dec_return(v) == 0;
1037 #define atomic_dec_and_test atomic_dec_and_test
1040 #ifndef atomic_inc_and_test
1042 * atomic_inc_and_test - increment and test
1043 * @v: pointer of type atomic_t
1045 * Atomically increments @v by 1
1046 * and returns true if the result is zero, or false for all
1049 static __always_inline bool
1050 atomic_inc_and_test(atomic_t *v)
1052 return atomic_inc_return(v) == 0;
1054 #define atomic_inc_and_test atomic_inc_and_test
1057 #ifndef atomic_add_negative
1059 * atomic_add_negative - add and test if negative
1060 * @i: integer value to add
1061 * @v: pointer of type atomic_t
1063 * Atomically adds @i to @v and returns true
1064 * if the result is negative, or false when
1065 * result is greater than or equal to zero.
1067 static __always_inline bool
1068 atomic_add_negative(int i, atomic_t *v)
1070 return atomic_add_return(i, v) < 0;
1072 #define atomic_add_negative atomic_add_negative
1075 #ifndef atomic_fetch_add_unless
1077 * atomic_fetch_add_unless - add unless the number is already a given value
1078 * @v: pointer of type atomic_t
1079 * @a: the amount to add to v...
1080 * @u: ...unless v is equal to u.
1082 * Atomically adds @a to @v, so long as @v was not already @u.
1083 * Returns original value of @v
1085 static __always_inline int
1086 atomic_fetch_add_unless(atomic_t *v, int a, int u)
1088 int c = atomic_read(v);
1091 if (unlikely(c == u))
1093 } while (!atomic_try_cmpxchg(v, &c, c + a));
1097 #define atomic_fetch_add_unless atomic_fetch_add_unless
1100 #ifndef atomic_add_unless
1102 * atomic_add_unless - add unless the number is already a given value
1103 * @v: pointer of type atomic_t
1104 * @a: the amount to add to v...
1105 * @u: ...unless v is equal to u.
1107 * Atomically adds @a to @v, if @v was not already @u.
1108 * Returns true if the addition was done.
1110 static __always_inline bool
1111 atomic_add_unless(atomic_t *v, int a, int u)
1113 return atomic_fetch_add_unless(v, a, u) != u;
1115 #define atomic_add_unless atomic_add_unless
1118 #ifndef atomic_inc_not_zero
1120 * atomic_inc_not_zero - increment unless the number is zero
1121 * @v: pointer of type atomic_t
1123 * Atomically increments @v by 1, if @v is non-zero.
1124 * Returns true if the increment was done.
1126 static __always_inline bool
1127 atomic_inc_not_zero(atomic_t *v)
1129 return atomic_add_unless(v, 1, 0);
1131 #define atomic_inc_not_zero atomic_inc_not_zero
1134 #ifndef atomic_inc_unless_negative
1135 static __always_inline bool
1136 atomic_inc_unless_negative(atomic_t *v)
1138 int c = atomic_read(v);
1141 if (unlikely(c < 0))
1143 } while (!atomic_try_cmpxchg(v, &c, c + 1));
1147 #define atomic_inc_unless_negative atomic_inc_unless_negative
1150 #ifndef atomic_dec_unless_positive
1151 static __always_inline bool
1152 atomic_dec_unless_positive(atomic_t *v)
1154 int c = atomic_read(v);
1157 if (unlikely(c > 0))
1159 } while (!atomic_try_cmpxchg(v, &c, c - 1));
1163 #define atomic_dec_unless_positive atomic_dec_unless_positive
1166 #ifndef atomic_dec_if_positive
1167 static __always_inline int
1168 atomic_dec_if_positive(atomic_t *v)
1170 int dec, c = atomic_read(v);
1174 if (unlikely(dec < 0))
1176 } while (!atomic_try_cmpxchg(v, &c, dec));
1180 #define atomic_dec_if_positive atomic_dec_if_positive
/* Pull in the generic spinlock-based atomic64 when the arch has no native one. */
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
1187 #ifndef atomic64_read_acquire
1188 static __always_inline s64
1189 atomic64_read_acquire(const atomic64_t *v)
1191 return smp_load_acquire(&(v)->counter);
1193 #define atomic64_read_acquire atomic64_read_acquire
1196 #ifndef atomic64_set_release
1197 static __always_inline void
1198 atomic64_set_release(atomic64_t *v, s64 i)
1200 smp_store_release(&(v)->counter, i);
1202 #define atomic64_set_release atomic64_set_release
/* atomic64_add_return ordering variants, derived from _relaxed via fences. */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */
/* atomic64_fetch_add ordering variants, derived from _relaxed via fences. */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
/* atomic64_sub_return ordering variants, derived from _relaxed via fences. */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */
/* atomic64_fetch_sub ordering variants, derived from _relaxed via fences. */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
1373 #ifndef atomic64_inc
1374 static __always_inline void
1375 atomic64_inc(atomic64_t *v)
1379 #define atomic64_inc atomic64_inc
1382 #ifndef atomic64_inc_return_relaxed
1383 #ifdef atomic64_inc_return
1384 #define atomic64_inc_return_acquire atomic64_inc_return
1385 #define atomic64_inc_return_release atomic64_inc_return
1386 #define atomic64_inc_return_relaxed atomic64_inc_return
1387 #endif /* atomic64_inc_return */
1389 #ifndef atomic64_inc_return
1390 static __always_inline s64
1391 atomic64_inc_return(atomic64_t *v)
1393 return atomic64_add_return(1, v);
1395 #define atomic64_inc_return atomic64_inc_return
1398 #ifndef atomic64_inc_return_acquire
1399 static __always_inline s64
1400 atomic64_inc_return_acquire(atomic64_t *v)
1402 return atomic64_add_return_acquire(1, v);
1404 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1407 #ifndef atomic64_inc_return_release
1408 static __always_inline s64
1409 atomic64_inc_return_release(atomic64_t *v)
1411 return atomic64_add_return_release(1, v);
1413 #define atomic64_inc_return_release atomic64_inc_return_release
1416 #ifndef atomic64_inc_return_relaxed
1417 static __always_inline s64
1418 atomic64_inc_return_relaxed(atomic64_t *v)
1420 return atomic64_add_return_relaxed(1, v);
1422 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1425 #else /* atomic64_inc_return_relaxed */
1427 #ifndef atomic64_inc_return_acquire
1428 static __always_inline s64
1429 atomic64_inc_return_acquire(atomic64_t *v)
1431 s64 ret = atomic64_inc_return_relaxed(v);
1432 __atomic_acquire_fence();
1435 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1438 #ifndef atomic64_inc_return_release
1439 static __always_inline s64
1440 atomic64_inc_return_release(atomic64_t *v)
1442 __atomic_release_fence();
1443 return atomic64_inc_return_relaxed(v);
1445 #define atomic64_inc_return_release atomic64_inc_return_release
1448 #ifndef atomic64_inc_return
1449 static __always_inline s64
1450 atomic64_inc_return(atomic64_t *v)
1453 __atomic_pre_full_fence();
1454 ret = atomic64_inc_return_relaxed(v);
1455 __atomic_post_full_fence();
1458 #define atomic64_inc_return atomic64_inc_return
1461 #endif /* atomic64_inc_return_relaxed */
1463 #ifndef atomic64_fetch_inc_relaxed
1464 #ifdef atomic64_fetch_inc
1465 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
1466 #define atomic64_fetch_inc_release atomic64_fetch_inc
1467 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
1468 #endif /* atomic64_fetch_inc */
1470 #ifndef atomic64_fetch_inc
1471 static __always_inline s64
1472 atomic64_fetch_inc(atomic64_t *v)
1474 return atomic64_fetch_add(1, v);
1476 #define atomic64_fetch_inc atomic64_fetch_inc
1479 #ifndef atomic64_fetch_inc_acquire
1480 static __always_inline s64
1481 atomic64_fetch_inc_acquire(atomic64_t *v)
1483 return atomic64_fetch_add_acquire(1, v);
1485 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1488 #ifndef atomic64_fetch_inc_release
1489 static __always_inline s64
1490 atomic64_fetch_inc_release(atomic64_t *v)
1492 return atomic64_fetch_add_release(1, v);
1494 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1497 #ifndef atomic64_fetch_inc_relaxed
1498 static __always_inline s64
1499 atomic64_fetch_inc_relaxed(atomic64_t *v)
1501 return atomic64_fetch_add_relaxed(1, v);
1503 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1506 #else /* atomic64_fetch_inc_relaxed */
1508 #ifndef atomic64_fetch_inc_acquire
1509 static __always_inline s64
1510 atomic64_fetch_inc_acquire(atomic64_t *v)
1512 s64 ret = atomic64_fetch_inc_relaxed(v);
1513 __atomic_acquire_fence();
1516 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1519 #ifndef atomic64_fetch_inc_release
1520 static __always_inline s64
1521 atomic64_fetch_inc_release(atomic64_t *v)
1523 __atomic_release_fence();
1524 return atomic64_fetch_inc_relaxed(v);
1526 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1529 #ifndef atomic64_fetch_inc
1530 static __always_inline s64
1531 atomic64_fetch_inc(atomic64_t *v)
1534 __atomic_pre_full_fence();
1535 ret = atomic64_fetch_inc_relaxed(v);
1536 __atomic_post_full_fence();
1539 #define atomic64_fetch_inc atomic64_fetch_inc
1542 #endif /* atomic64_fetch_inc_relaxed */
1544 #ifndef atomic64_dec
1545 static __always_inline void
1546 atomic64_dec(atomic64_t *v)
1550 #define atomic64_dec atomic64_dec
1553 #ifndef atomic64_dec_return_relaxed
1554 #ifdef atomic64_dec_return
1555 #define atomic64_dec_return_acquire atomic64_dec_return
1556 #define atomic64_dec_return_release atomic64_dec_return
1557 #define atomic64_dec_return_relaxed atomic64_dec_return
1558 #endif /* atomic64_dec_return */
1560 #ifndef atomic64_dec_return
1561 static __always_inline s64
1562 atomic64_dec_return(atomic64_t *v)
1564 return atomic64_sub_return(1, v);
1566 #define atomic64_dec_return atomic64_dec_return
1569 #ifndef atomic64_dec_return_acquire
1570 static __always_inline s64
1571 atomic64_dec_return_acquire(atomic64_t *v)
1573 return atomic64_sub_return_acquire(1, v);
1575 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1578 #ifndef atomic64_dec_return_release
1579 static __always_inline s64
1580 atomic64_dec_return_release(atomic64_t *v)
1582 return atomic64_sub_return_release(1, v);
1584 #define atomic64_dec_return_release atomic64_dec_return_release
1587 #ifndef atomic64_dec_return_relaxed
1588 static __always_inline s64
1589 atomic64_dec_return_relaxed(atomic64_t *v)
1591 return atomic64_sub_return_relaxed(1, v);
1593 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1596 #else /* atomic64_dec_return_relaxed */
1598 #ifndef atomic64_dec_return_acquire
1599 static __always_inline s64
1600 atomic64_dec_return_acquire(atomic64_t *v)
1602 s64 ret = atomic64_dec_return_relaxed(v);
1603 __atomic_acquire_fence();
1606 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1609 #ifndef atomic64_dec_return_release
1610 static __always_inline s64
1611 atomic64_dec_return_release(atomic64_t *v)
1613 __atomic_release_fence();
1614 return atomic64_dec_return_relaxed(v);
1616 #define atomic64_dec_return_release atomic64_dec_return_release
1619 #ifndef atomic64_dec_return
1620 static __always_inline s64
1621 atomic64_dec_return(atomic64_t *v)
1624 __atomic_pre_full_fence();
1625 ret = atomic64_dec_return_relaxed(v);
1626 __atomic_post_full_fence();
1629 #define atomic64_dec_return atomic64_dec_return
1632 #endif /* atomic64_dec_return_relaxed */
1634 #ifndef atomic64_fetch_dec_relaxed
1635 #ifdef atomic64_fetch_dec
1636 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
1637 #define atomic64_fetch_dec_release atomic64_fetch_dec
1638 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
1639 #endif /* atomic64_fetch_dec */
1641 #ifndef atomic64_fetch_dec
1642 static __always_inline s64
1643 atomic64_fetch_dec(atomic64_t *v)
1645 return atomic64_fetch_sub(1, v);
1647 #define atomic64_fetch_dec atomic64_fetch_dec
1650 #ifndef atomic64_fetch_dec_acquire
1651 static __always_inline s64
1652 atomic64_fetch_dec_acquire(atomic64_t *v)
1654 return atomic64_fetch_sub_acquire(1, v);
1656 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1659 #ifndef atomic64_fetch_dec_release
1660 static __always_inline s64
1661 atomic64_fetch_dec_release(atomic64_t *v)
1663 return atomic64_fetch_sub_release(1, v);
1665 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1668 #ifndef atomic64_fetch_dec_relaxed
1669 static __always_inline s64
1670 atomic64_fetch_dec_relaxed(atomic64_t *v)
1672 return atomic64_fetch_sub_relaxed(1, v);
1674 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1677 #else /* atomic64_fetch_dec_relaxed */
1679 #ifndef atomic64_fetch_dec_acquire
1680 static __always_inline s64
1681 atomic64_fetch_dec_acquire(atomic64_t *v)
1683 s64 ret = atomic64_fetch_dec_relaxed(v);
1684 __atomic_acquire_fence();
1687 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1690 #ifndef atomic64_fetch_dec_release
1691 static __always_inline s64
1692 atomic64_fetch_dec_release(atomic64_t *v)
1694 __atomic_release_fence();
1695 return atomic64_fetch_dec_relaxed(v);
1697 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1700 #ifndef atomic64_fetch_dec
1701 static __always_inline s64
1702 atomic64_fetch_dec(atomic64_t *v)
1705 __atomic_pre_full_fence();
1706 ret = atomic64_fetch_dec_relaxed(v);
1707 __atomic_post_full_fence();
1710 #define atomic64_fetch_dec atomic64_fetch_dec
1713 #endif /* atomic64_fetch_dec_relaxed */
/*
 * atomic64_fetch_and: build acquire/release/fully-ordered variants from the
 * relaxed form via the generic fence helpers.
 */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */
1757 #ifndef atomic64_andnot
1758 static __always_inline void
1759 atomic64_andnot(s64 i, atomic64_t *v)
1761 atomic64_and(~i, v);
1763 #define atomic64_andnot atomic64_andnot
1766 #ifndef atomic64_fetch_andnot_relaxed
1767 #ifdef atomic64_fetch_andnot
1768 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
1769 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
1770 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
1771 #endif /* atomic64_fetch_andnot */
1773 #ifndef atomic64_fetch_andnot
1774 static __always_inline s64
1775 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1777 return atomic64_fetch_and(~i, v);
1779 #define atomic64_fetch_andnot atomic64_fetch_andnot
1782 #ifndef atomic64_fetch_andnot_acquire
1783 static __always_inline s64
1784 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1786 return atomic64_fetch_and_acquire(~i, v);
1788 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1791 #ifndef atomic64_fetch_andnot_release
1792 static __always_inline s64
1793 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1795 return atomic64_fetch_and_release(~i, v);
1797 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1800 #ifndef atomic64_fetch_andnot_relaxed
1801 static __always_inline s64
1802 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1804 return atomic64_fetch_and_relaxed(~i, v);
1806 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1809 #else /* atomic64_fetch_andnot_relaxed */
1811 #ifndef atomic64_fetch_andnot_acquire
1812 static __always_inline s64
1813 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1815 s64 ret = atomic64_fetch_andnot_relaxed(i, v);
1816 __atomic_acquire_fence();
1819 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1822 #ifndef atomic64_fetch_andnot_release
1823 static __always_inline s64
1824 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1826 __atomic_release_fence();
1827 return atomic64_fetch_andnot_relaxed(i, v);
1829 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1832 #ifndef atomic64_fetch_andnot
1833 static __always_inline s64
1834 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1837 __atomic_pre_full_fence();
1838 ret = atomic64_fetch_andnot_relaxed(i, v);
1839 __atomic_post_full_fence();
1842 #define atomic64_fetch_andnot atomic64_fetch_andnot
1845 #endif /* atomic64_fetch_andnot_relaxed */
/*
 * atomic64_fetch_or: build acquire/release/fully-ordered variants from the
 * relaxed form via the generic fence helpers.
 */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */
/*
 * atomic64_fetch_xor: build acquire/release/fully-ordered variants from the
 * relaxed form via the generic fence helpers.
 */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
/*
 * atomic64_xchg: build acquire/release/fully-ordered variants from the
 * relaxed form via the generic fence helpers.
 */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */
/*
 * atomic64_cmpxchg: build acquire/release/fully-ordered variants from the
 * relaxed form via the generic fence helpers.
 */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
2015 #ifndef atomic64_try_cmpxchg_relaxed
2016 #ifdef atomic64_try_cmpxchg
2017 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
2018 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
2019 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
2020 #endif /* atomic64_try_cmpxchg */
2022 #ifndef atomic64_try_cmpxchg
2023 static __always_inline bool
2024 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2027 r = atomic64_cmpxchg(v, o, new);
2028 if (unlikely(r != o))
2030 return likely(r == o);
2032 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2035 #ifndef atomic64_try_cmpxchg_acquire
2036 static __always_inline bool
2037 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2040 r = atomic64_cmpxchg_acquire(v, o, new);
2041 if (unlikely(r != o))
2043 return likely(r == o);
2045 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2048 #ifndef atomic64_try_cmpxchg_release
2049 static __always_inline bool
2050 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2053 r = atomic64_cmpxchg_release(v, o, new);
2054 if (unlikely(r != o))
2056 return likely(r == o);
2058 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2061 #ifndef atomic64_try_cmpxchg_relaxed
2062 static __always_inline bool
2063 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2066 r = atomic64_cmpxchg_relaxed(v, o, new);
2067 if (unlikely(r != o))
2069 return likely(r == o);
2071 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2074 #else /* atomic64_try_cmpxchg_relaxed */
2076 #ifndef atomic64_try_cmpxchg_acquire
2077 static __always_inline bool
2078 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2080 bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2081 __atomic_acquire_fence();
2084 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2087 #ifndef atomic64_try_cmpxchg_release
2088 static __always_inline bool
2089 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2091 __atomic_release_fence();
2092 return atomic64_try_cmpxchg_relaxed(v, old, new);
2094 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2097 #ifndef atomic64_try_cmpxchg
2098 static __always_inline bool
2099 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2102 __atomic_pre_full_fence();
2103 ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2104 __atomic_post_full_fence();
2107 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2110 #endif /* atomic64_try_cmpxchg_relaxed */
2112 #ifndef atomic64_sub_and_test
2114 * atomic64_sub_and_test - subtract value from variable and test result
2115 * @i: integer value to subtract
2116 * @v: pointer of type atomic64_t
2118 * Atomically subtracts @i from @v and returns
2119 * true if the result is zero, or false for all
2122 static __always_inline bool
2123 atomic64_sub_and_test(s64 i, atomic64_t *v)
2125 return atomic64_sub_return(i, v) == 0;
2127 #define atomic64_sub_and_test atomic64_sub_and_test
2130 #ifndef atomic64_dec_and_test
2132 * atomic64_dec_and_test - decrement and test
2133 * @v: pointer of type atomic64_t
2135 * Atomically decrements @v by 1 and
2136 * returns true if the result is 0, or false for all other
2139 static __always_inline bool
2140 atomic64_dec_and_test(atomic64_t *v)
2142 return atomic64_dec_return(v) == 0;
2144 #define atomic64_dec_and_test atomic64_dec_and_test
2147 #ifndef atomic64_inc_and_test
2149 * atomic64_inc_and_test - increment and test
2150 * @v: pointer of type atomic64_t
2152 * Atomically increments @v by 1
2153 * and returns true if the result is zero, or false for all
2156 static __always_inline bool
2157 atomic64_inc_and_test(atomic64_t *v)
2159 return atomic64_inc_return(v) == 0;
2161 #define atomic64_inc_and_test atomic64_inc_and_test
2164 #ifndef atomic64_add_negative
2166 * atomic64_add_negative - add and test if negative
2167 * @i: integer value to add
2168 * @v: pointer of type atomic64_t
2170 * Atomically adds @i to @v and returns true
2171 * if the result is negative, or false when
2172 * result is greater than or equal to zero.
2174 static __always_inline bool
2175 atomic64_add_negative(s64 i, atomic64_t *v)
2177 return atomic64_add_return(i, v) < 0;
2179 #define atomic64_add_negative atomic64_add_negative
2182 #ifndef atomic64_fetch_add_unless
2184 * atomic64_fetch_add_unless - add unless the number is already a given value
2185 * @v: pointer of type atomic64_t
2186 * @a: the amount to add to v...
2187 * @u: ...unless v is equal to u.
2189 * Atomically adds @a to @v, so long as @v was not already @u.
2190 * Returns original value of @v
2192 static __always_inline s64
2193 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2195 s64 c = atomic64_read(v);
2198 if (unlikely(c == u))
2200 } while (!atomic64_try_cmpxchg(v, &c, c + a));
2204 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
2207 #ifndef atomic64_add_unless
2209 * atomic64_add_unless - add unless the number is already a given value
2210 * @v: pointer of type atomic64_t
2211 * @a: the amount to add to v...
2212 * @u: ...unless v is equal to u.
2214 * Atomically adds @a to @v, if @v was not already @u.
2215 * Returns true if the addition was done.
2217 static __always_inline bool
2218 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2220 return atomic64_fetch_add_unless(v, a, u) != u;
2222 #define atomic64_add_unless atomic64_add_unless
2225 #ifndef atomic64_inc_not_zero
2227 * atomic64_inc_not_zero - increment unless the number is zero
2228 * @v: pointer of type atomic64_t
2230 * Atomically increments @v by 1, if @v is non-zero.
2231 * Returns true if the increment was done.
2233 static __always_inline bool
2234 atomic64_inc_not_zero(atomic64_t *v)
2236 return atomic64_add_unless(v, 1, 0);
2238 #define atomic64_inc_not_zero atomic64_inc_not_zero
2241 #ifndef atomic64_inc_unless_negative
2242 static __always_inline bool
2243 atomic64_inc_unless_negative(atomic64_t *v)
2245 s64 c = atomic64_read(v);
2248 if (unlikely(c < 0))
2250 } while (!atomic64_try_cmpxchg(v, &c, c + 1));
2254 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
2257 #ifndef atomic64_dec_unless_positive
2258 static __always_inline bool
2259 atomic64_dec_unless_positive(atomic64_t *v)
2261 s64 c = atomic64_read(v);
2264 if (unlikely(c > 0))
2266 } while (!atomic64_try_cmpxchg(v, &c, c - 1));
2270 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
2273 #ifndef atomic64_dec_if_positive
2274 static __always_inline s64
2275 atomic64_dec_if_positive(atomic64_t *v)
2277 s64 dec, c = atomic64_read(v);
2281 if (unlikely(dec < 0))
2283 } while (!atomic64_try_cmpxchg(v, &c, dec));
2287 #define atomic64_dec_if_positive atomic64_dec_if_positive
2290 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2291 // 1fac0941c79bf0ae100723cc2ac9b94061f0b67a