// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>
/*
 * xchg(): if the arch provides only the fully-ordered form, alias the
 * ordering variants to it; otherwise build acquire/release/full forms
 * from xchg_relaxed via the __atomic_op_*() fence wrappers.
 */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */
/* cmpxchg(): same fallback scheme as xchg() above. */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */
/* cmpxchg64(): same fallback scheme as xchg() above, for 64-bit values. */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */
80 #define arch_atomic_read atomic_read
81 #define arch_atomic_read_acquire atomic_read_acquire
83 #ifndef atomic_read_acquire
84 static __always_inline int
85 atomic_read_acquire(const atomic_t *v)
87 return smp_load_acquire(&(v)->counter);
89 #define atomic_read_acquire atomic_read_acquire
92 #define arch_atomic_set atomic_set
93 #define arch_atomic_set_release atomic_set_release
95 #ifndef atomic_set_release
96 static __always_inline void
97 atomic_set_release(atomic_t *v, int i)
99 smp_store_release(&(v)->counter, i);
101 #define atomic_set_release atomic_set_release
#define arch_atomic_add atomic_add

#define arch_atomic_add_return atomic_add_return
#define arch_atomic_add_return_acquire atomic_add_return_acquire
#define arch_atomic_add_return_release atomic_add_return_release
#define arch_atomic_add_return_relaxed atomic_add_return_relaxed

/*
 * atomic_add_return*(): if the arch provides only the fully-ordered op,
 * alias the ordering variants to it; otherwise build them from the
 * _relaxed op plus explicit acquire/release/full fences.
 */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */
#define arch_atomic_fetch_add atomic_fetch_add
#define arch_atomic_fetch_add_acquire atomic_fetch_add_acquire
#define arch_atomic_fetch_add_release atomic_fetch_add_release
#define arch_atomic_fetch_add_relaxed atomic_fetch_add_relaxed

/* atomic_fetch_add*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */
#define arch_atomic_sub atomic_sub

#define arch_atomic_sub_return atomic_sub_return
#define arch_atomic_sub_return_acquire atomic_sub_return_acquire
#define arch_atomic_sub_return_release atomic_sub_return_release
#define arch_atomic_sub_return_relaxed atomic_sub_return_relaxed

/* atomic_sub_return*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */
#define arch_atomic_fetch_sub atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#define arch_atomic_fetch_sub_release atomic_fetch_sub_release
#define arch_atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed

/* atomic_fetch_sub*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */
296 #define arch_atomic_inc atomic_inc
299 static __always_inline void
300 atomic_inc(atomic_t *v)
304 #define atomic_inc atomic_inc
307 #define arch_atomic_inc_return atomic_inc_return
308 #define arch_atomic_inc_return_acquire atomic_inc_return_acquire
309 #define arch_atomic_inc_return_release atomic_inc_return_release
310 #define arch_atomic_inc_return_relaxed atomic_inc_return_relaxed
312 #ifndef atomic_inc_return_relaxed
313 #ifdef atomic_inc_return
314 #define atomic_inc_return_acquire atomic_inc_return
315 #define atomic_inc_return_release atomic_inc_return
316 #define atomic_inc_return_relaxed atomic_inc_return
317 #endif /* atomic_inc_return */
319 #ifndef atomic_inc_return
320 static __always_inline int
321 atomic_inc_return(atomic_t *v)
323 return atomic_add_return(1, v);
325 #define atomic_inc_return atomic_inc_return
328 #ifndef atomic_inc_return_acquire
329 static __always_inline int
330 atomic_inc_return_acquire(atomic_t *v)
332 return atomic_add_return_acquire(1, v);
334 #define atomic_inc_return_acquire atomic_inc_return_acquire
337 #ifndef atomic_inc_return_release
338 static __always_inline int
339 atomic_inc_return_release(atomic_t *v)
341 return atomic_add_return_release(1, v);
343 #define atomic_inc_return_release atomic_inc_return_release
346 #ifndef atomic_inc_return_relaxed
347 static __always_inline int
348 atomic_inc_return_relaxed(atomic_t *v)
350 return atomic_add_return_relaxed(1, v);
352 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
355 #else /* atomic_inc_return_relaxed */
357 #ifndef atomic_inc_return_acquire
358 static __always_inline int
359 atomic_inc_return_acquire(atomic_t *v)
361 int ret = atomic_inc_return_relaxed(v);
362 __atomic_acquire_fence();
365 #define atomic_inc_return_acquire atomic_inc_return_acquire
368 #ifndef atomic_inc_return_release
369 static __always_inline int
370 atomic_inc_return_release(atomic_t *v)
372 __atomic_release_fence();
373 return atomic_inc_return_relaxed(v);
375 #define atomic_inc_return_release atomic_inc_return_release
378 #ifndef atomic_inc_return
379 static __always_inline int
380 atomic_inc_return(atomic_t *v)
383 __atomic_pre_full_fence();
384 ret = atomic_inc_return_relaxed(v);
385 __atomic_post_full_fence();
388 #define atomic_inc_return atomic_inc_return
391 #endif /* atomic_inc_return_relaxed */
393 #define arch_atomic_fetch_inc atomic_fetch_inc
394 #define arch_atomic_fetch_inc_acquire atomic_fetch_inc_acquire
395 #define arch_atomic_fetch_inc_release atomic_fetch_inc_release
396 #define arch_atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
398 #ifndef atomic_fetch_inc_relaxed
399 #ifdef atomic_fetch_inc
400 #define atomic_fetch_inc_acquire atomic_fetch_inc
401 #define atomic_fetch_inc_release atomic_fetch_inc
402 #define atomic_fetch_inc_relaxed atomic_fetch_inc
403 #endif /* atomic_fetch_inc */
405 #ifndef atomic_fetch_inc
406 static __always_inline int
407 atomic_fetch_inc(atomic_t *v)
409 return atomic_fetch_add(1, v);
411 #define atomic_fetch_inc atomic_fetch_inc
414 #ifndef atomic_fetch_inc_acquire
415 static __always_inline int
416 atomic_fetch_inc_acquire(atomic_t *v)
418 return atomic_fetch_add_acquire(1, v);
420 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
423 #ifndef atomic_fetch_inc_release
424 static __always_inline int
425 atomic_fetch_inc_release(atomic_t *v)
427 return atomic_fetch_add_release(1, v);
429 #define atomic_fetch_inc_release atomic_fetch_inc_release
432 #ifndef atomic_fetch_inc_relaxed
433 static __always_inline int
434 atomic_fetch_inc_relaxed(atomic_t *v)
436 return atomic_fetch_add_relaxed(1, v);
438 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
441 #else /* atomic_fetch_inc_relaxed */
443 #ifndef atomic_fetch_inc_acquire
444 static __always_inline int
445 atomic_fetch_inc_acquire(atomic_t *v)
447 int ret = atomic_fetch_inc_relaxed(v);
448 __atomic_acquire_fence();
451 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
454 #ifndef atomic_fetch_inc_release
455 static __always_inline int
456 atomic_fetch_inc_release(atomic_t *v)
458 __atomic_release_fence();
459 return atomic_fetch_inc_relaxed(v);
461 #define atomic_fetch_inc_release atomic_fetch_inc_release
464 #ifndef atomic_fetch_inc
465 static __always_inline int
466 atomic_fetch_inc(atomic_t *v)
469 __atomic_pre_full_fence();
470 ret = atomic_fetch_inc_relaxed(v);
471 __atomic_post_full_fence();
474 #define atomic_fetch_inc atomic_fetch_inc
477 #endif /* atomic_fetch_inc_relaxed */
479 #define arch_atomic_dec atomic_dec
482 static __always_inline void
483 atomic_dec(atomic_t *v)
487 #define atomic_dec atomic_dec
490 #define arch_atomic_dec_return atomic_dec_return
491 #define arch_atomic_dec_return_acquire atomic_dec_return_acquire
492 #define arch_atomic_dec_return_release atomic_dec_return_release
493 #define arch_atomic_dec_return_relaxed atomic_dec_return_relaxed
495 #ifndef atomic_dec_return_relaxed
496 #ifdef atomic_dec_return
497 #define atomic_dec_return_acquire atomic_dec_return
498 #define atomic_dec_return_release atomic_dec_return
499 #define atomic_dec_return_relaxed atomic_dec_return
500 #endif /* atomic_dec_return */
502 #ifndef atomic_dec_return
503 static __always_inline int
504 atomic_dec_return(atomic_t *v)
506 return atomic_sub_return(1, v);
508 #define atomic_dec_return atomic_dec_return
511 #ifndef atomic_dec_return_acquire
512 static __always_inline int
513 atomic_dec_return_acquire(atomic_t *v)
515 return atomic_sub_return_acquire(1, v);
517 #define atomic_dec_return_acquire atomic_dec_return_acquire
520 #ifndef atomic_dec_return_release
521 static __always_inline int
522 atomic_dec_return_release(atomic_t *v)
524 return atomic_sub_return_release(1, v);
526 #define atomic_dec_return_release atomic_dec_return_release
529 #ifndef atomic_dec_return_relaxed
530 static __always_inline int
531 atomic_dec_return_relaxed(atomic_t *v)
533 return atomic_sub_return_relaxed(1, v);
535 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
538 #else /* atomic_dec_return_relaxed */
540 #ifndef atomic_dec_return_acquire
541 static __always_inline int
542 atomic_dec_return_acquire(atomic_t *v)
544 int ret = atomic_dec_return_relaxed(v);
545 __atomic_acquire_fence();
548 #define atomic_dec_return_acquire atomic_dec_return_acquire
551 #ifndef atomic_dec_return_release
552 static __always_inline int
553 atomic_dec_return_release(atomic_t *v)
555 __atomic_release_fence();
556 return atomic_dec_return_relaxed(v);
558 #define atomic_dec_return_release atomic_dec_return_release
561 #ifndef atomic_dec_return
562 static __always_inline int
563 atomic_dec_return(atomic_t *v)
566 __atomic_pre_full_fence();
567 ret = atomic_dec_return_relaxed(v);
568 __atomic_post_full_fence();
571 #define atomic_dec_return atomic_dec_return
574 #endif /* atomic_dec_return_relaxed */
576 #define arch_atomic_fetch_dec atomic_fetch_dec
577 #define arch_atomic_fetch_dec_acquire atomic_fetch_dec_acquire
578 #define arch_atomic_fetch_dec_release atomic_fetch_dec_release
579 #define arch_atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
581 #ifndef atomic_fetch_dec_relaxed
582 #ifdef atomic_fetch_dec
583 #define atomic_fetch_dec_acquire atomic_fetch_dec
584 #define atomic_fetch_dec_release atomic_fetch_dec
585 #define atomic_fetch_dec_relaxed atomic_fetch_dec
586 #endif /* atomic_fetch_dec */
588 #ifndef atomic_fetch_dec
589 static __always_inline int
590 atomic_fetch_dec(atomic_t *v)
592 return atomic_fetch_sub(1, v);
594 #define atomic_fetch_dec atomic_fetch_dec
597 #ifndef atomic_fetch_dec_acquire
598 static __always_inline int
599 atomic_fetch_dec_acquire(atomic_t *v)
601 return atomic_fetch_sub_acquire(1, v);
603 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
606 #ifndef atomic_fetch_dec_release
607 static __always_inline int
608 atomic_fetch_dec_release(atomic_t *v)
610 return atomic_fetch_sub_release(1, v);
612 #define atomic_fetch_dec_release atomic_fetch_dec_release
615 #ifndef atomic_fetch_dec_relaxed
616 static __always_inline int
617 atomic_fetch_dec_relaxed(atomic_t *v)
619 return atomic_fetch_sub_relaxed(1, v);
621 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
624 #else /* atomic_fetch_dec_relaxed */
626 #ifndef atomic_fetch_dec_acquire
627 static __always_inline int
628 atomic_fetch_dec_acquire(atomic_t *v)
630 int ret = atomic_fetch_dec_relaxed(v);
631 __atomic_acquire_fence();
634 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
637 #ifndef atomic_fetch_dec_release
638 static __always_inline int
639 atomic_fetch_dec_release(atomic_t *v)
641 __atomic_release_fence();
642 return atomic_fetch_dec_relaxed(v);
644 #define atomic_fetch_dec_release atomic_fetch_dec_release
647 #ifndef atomic_fetch_dec
648 static __always_inline int
649 atomic_fetch_dec(atomic_t *v)
652 __atomic_pre_full_fence();
653 ret = atomic_fetch_dec_relaxed(v);
654 __atomic_post_full_fence();
657 #define atomic_fetch_dec atomic_fetch_dec
660 #endif /* atomic_fetch_dec_relaxed */
#define arch_atomic_and atomic_and

#define arch_atomic_fetch_and atomic_fetch_and
#define arch_atomic_fetch_and_acquire atomic_fetch_and_acquire
#define arch_atomic_fetch_and_release atomic_fetch_and_release
#define arch_atomic_fetch_and_relaxed atomic_fetch_and_relaxed

/* atomic_fetch_and*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */
711 #define arch_atomic_andnot atomic_andnot
713 #ifndef atomic_andnot
714 static __always_inline void
715 atomic_andnot(int i, atomic_t *v)
719 #define atomic_andnot atomic_andnot
722 #define arch_atomic_fetch_andnot atomic_fetch_andnot
723 #define arch_atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
724 #define arch_atomic_fetch_andnot_release atomic_fetch_andnot_release
725 #define arch_atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
727 #ifndef atomic_fetch_andnot_relaxed
728 #ifdef atomic_fetch_andnot
729 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
730 #define atomic_fetch_andnot_release atomic_fetch_andnot
731 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
732 #endif /* atomic_fetch_andnot */
734 #ifndef atomic_fetch_andnot
735 static __always_inline int
736 atomic_fetch_andnot(int i, atomic_t *v)
738 return atomic_fetch_and(~i, v);
740 #define atomic_fetch_andnot atomic_fetch_andnot
743 #ifndef atomic_fetch_andnot_acquire
744 static __always_inline int
745 atomic_fetch_andnot_acquire(int i, atomic_t *v)
747 return atomic_fetch_and_acquire(~i, v);
749 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
752 #ifndef atomic_fetch_andnot_release
753 static __always_inline int
754 atomic_fetch_andnot_release(int i, atomic_t *v)
756 return atomic_fetch_and_release(~i, v);
758 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
761 #ifndef atomic_fetch_andnot_relaxed
762 static __always_inline int
763 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
765 return atomic_fetch_and_relaxed(~i, v);
767 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
770 #else /* atomic_fetch_andnot_relaxed */
772 #ifndef atomic_fetch_andnot_acquire
773 static __always_inline int
774 atomic_fetch_andnot_acquire(int i, atomic_t *v)
776 int ret = atomic_fetch_andnot_relaxed(i, v);
777 __atomic_acquire_fence();
780 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
783 #ifndef atomic_fetch_andnot_release
784 static __always_inline int
785 atomic_fetch_andnot_release(int i, atomic_t *v)
787 __atomic_release_fence();
788 return atomic_fetch_andnot_relaxed(i, v);
790 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
793 #ifndef atomic_fetch_andnot
794 static __always_inline int
795 atomic_fetch_andnot(int i, atomic_t *v)
798 __atomic_pre_full_fence();
799 ret = atomic_fetch_andnot_relaxed(i, v);
800 __atomic_post_full_fence();
803 #define atomic_fetch_andnot atomic_fetch_andnot
806 #endif /* atomic_fetch_andnot_relaxed */
#define arch_atomic_or atomic_or

#define arch_atomic_fetch_or atomic_fetch_or
#define arch_atomic_fetch_or_acquire atomic_fetch_or_acquire
#define arch_atomic_fetch_or_release atomic_fetch_or_release
#define arch_atomic_fetch_or_relaxed atomic_fetch_or_relaxed

/* atomic_fetch_or*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */
#define arch_atomic_xor atomic_xor

#define arch_atomic_fetch_xor atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#define arch_atomic_fetch_xor_release atomic_fetch_xor_release
#define arch_atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed

/* atomic_fetch_xor*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */
#define arch_atomic_xchg atomic_xchg
#define arch_atomic_xchg_acquire atomic_xchg_acquire
#define arch_atomic_xchg_release atomic_xchg_release
#define arch_atomic_xchg_relaxed atomic_xchg_relaxed

/* atomic_xchg*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */
#define arch_atomic_cmpxchg atomic_cmpxchg
#define arch_atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#define arch_atomic_cmpxchg_release atomic_cmpxchg_release
#define arch_atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed

/* atomic_cmpxchg*(): ordering variants built from _relaxed + fences. */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */
1000 #define arch_atomic_try_cmpxchg atomic_try_cmpxchg
1001 #define arch_atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1002 #define arch_atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1003 #define arch_atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
1005 #ifndef atomic_try_cmpxchg_relaxed
1006 #ifdef atomic_try_cmpxchg
1007 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
1008 #define atomic_try_cmpxchg_release atomic_try_cmpxchg
1009 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
1010 #endif /* atomic_try_cmpxchg */
1012 #ifndef atomic_try_cmpxchg
1013 static __always_inline bool
1014 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1017 r = atomic_cmpxchg(v, o, new);
1018 if (unlikely(r != o))
1020 return likely(r == o);
1022 #define atomic_try_cmpxchg atomic_try_cmpxchg
1025 #ifndef atomic_try_cmpxchg_acquire
1026 static __always_inline bool
1027 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1030 r = atomic_cmpxchg_acquire(v, o, new);
1031 if (unlikely(r != o))
1033 return likely(r == o);
1035 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1038 #ifndef atomic_try_cmpxchg_release
1039 static __always_inline bool
1040 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1043 r = atomic_cmpxchg_release(v, o, new);
1044 if (unlikely(r != o))
1046 return likely(r == o);
1048 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1051 #ifndef atomic_try_cmpxchg_relaxed
1052 static __always_inline bool
1053 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1056 r = atomic_cmpxchg_relaxed(v, o, new);
1057 if (unlikely(r != o))
1059 return likely(r == o);
1061 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
1064 #else /* atomic_try_cmpxchg_relaxed */
1066 #ifndef atomic_try_cmpxchg_acquire
1067 static __always_inline bool
1068 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1070 bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
1071 __atomic_acquire_fence();
1074 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1077 #ifndef atomic_try_cmpxchg_release
1078 static __always_inline bool
1079 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1081 __atomic_release_fence();
1082 return atomic_try_cmpxchg_relaxed(v, old, new);
1084 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1087 #ifndef atomic_try_cmpxchg
1088 static __always_inline bool
1089 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1092 __atomic_pre_full_fence();
1093 ret = atomic_try_cmpxchg_relaxed(v, old, new);
1094 __atomic_post_full_fence();
1097 #define atomic_try_cmpxchg atomic_try_cmpxchg
1100 #endif /* atomic_try_cmpxchg_relaxed */
1102 #define arch_atomic_sub_and_test atomic_sub_and_test
1104 #ifndef atomic_sub_and_test
1106 * atomic_sub_and_test - subtract value from variable and test result
1107 * @i: integer value to subtract
1108 * @v: pointer of type atomic_t
1110 * Atomically subtracts @i from @v and returns
1111 * true if the result is zero, or false for all
1114 static __always_inline bool
1115 atomic_sub_and_test(int i, atomic_t *v)
1117 return atomic_sub_return(i, v) == 0;
1119 #define atomic_sub_and_test atomic_sub_and_test
1122 #define arch_atomic_dec_and_test atomic_dec_and_test
1124 #ifndef atomic_dec_and_test
1126 * atomic_dec_and_test - decrement and test
1127 * @v: pointer of type atomic_t
1129 * Atomically decrements @v by 1 and
1130 * returns true if the result is 0, or false for all other
1133 static __always_inline bool
1134 atomic_dec_and_test(atomic_t *v)
1136 return atomic_dec_return(v) == 0;
1138 #define atomic_dec_and_test atomic_dec_and_test
1141 #define arch_atomic_inc_and_test atomic_inc_and_test
1143 #ifndef atomic_inc_and_test
1145 * atomic_inc_and_test - increment and test
1146 * @v: pointer of type atomic_t
1148 * Atomically increments @v by 1
1149 * and returns true if the result is zero, or false for all
1152 static __always_inline bool
1153 atomic_inc_and_test(atomic_t *v)
1155 return atomic_inc_return(v) == 0;
1157 #define atomic_inc_and_test atomic_inc_and_test
1160 #define arch_atomic_add_negative atomic_add_negative
1162 #ifndef atomic_add_negative
1164 * atomic_add_negative - add and test if negative
1165 * @i: integer value to add
1166 * @v: pointer of type atomic_t
1168 * Atomically adds @i to @v and returns true
1169 * if the result is negative, or false when
1170 * result is greater than or equal to zero.
1172 static __always_inline bool
1173 atomic_add_negative(int i, atomic_t *v)
1175 return atomic_add_return(i, v) < 0;
1177 #define atomic_add_negative atomic_add_negative
1180 #define arch_atomic_fetch_add_unless atomic_fetch_add_unless
1182 #ifndef atomic_fetch_add_unless
1184 * atomic_fetch_add_unless - add unless the number is already a given value
1185 * @v: pointer of type atomic_t
1186 * @a: the amount to add to v...
1187 * @u: ...unless v is equal to u.
1189 * Atomically adds @a to @v, so long as @v was not already @u.
1190 * Returns original value of @v
1192 static __always_inline int
1193 atomic_fetch_add_unless(atomic_t *v, int a, int u)
1195 int c = atomic_read(v);
1198 if (unlikely(c == u))
1200 } while (!atomic_try_cmpxchg(v, &c, c + a));
1204 #define atomic_fetch_add_unless atomic_fetch_add_unless
1207 #define arch_atomic_add_unless atomic_add_unless
1209 #ifndef atomic_add_unless
1211 * atomic_add_unless - add unless the number is already a given value
1212 * @v: pointer of type atomic_t
1213 * @a: the amount to add to v...
1214 * @u: ...unless v is equal to u.
1216 * Atomically adds @a to @v, if @v was not already @u.
1217 * Returns true if the addition was done.
1219 static __always_inline bool
1220 atomic_add_unless(atomic_t *v, int a, int u)
1222 return atomic_fetch_add_unless(v, a, u) != u;
1224 #define atomic_add_unless atomic_add_unless
1227 #define arch_atomic_inc_not_zero atomic_inc_not_zero
1229 #ifndef atomic_inc_not_zero
1231 * atomic_inc_not_zero - increment unless the number is zero
1232 * @v: pointer of type atomic_t
1234 * Atomically increments @v by 1, if @v is non-zero.
1235 * Returns true if the increment was done.
1237 static __always_inline bool
1238 atomic_inc_not_zero(atomic_t *v)
1240 return atomic_add_unless(v, 1, 0);
1242 #define atomic_inc_not_zero atomic_inc_not_zero
1245 #define arch_atomic_inc_unless_negative atomic_inc_unless_negative
1247 #ifndef atomic_inc_unless_negative
1248 static __always_inline bool
1249 atomic_inc_unless_negative(atomic_t *v)
1251 int c = atomic_read(v);
1254 if (unlikely(c < 0))
1256 } while (!atomic_try_cmpxchg(v, &c, c + 1));
1260 #define atomic_inc_unless_negative atomic_inc_unless_negative
1263 #define arch_atomic_dec_unless_positive atomic_dec_unless_positive
1265 #ifndef atomic_dec_unless_positive
1266 static __always_inline bool
1267 atomic_dec_unless_positive(atomic_t *v)
1269 int c = atomic_read(v);
1272 if (unlikely(c > 0))
1274 } while (!atomic_try_cmpxchg(v, &c, c - 1));
1278 #define atomic_dec_unless_positive atomic_dec_unless_positive
1281 #define arch_atomic_dec_if_positive atomic_dec_if_positive
1283 #ifndef atomic_dec_if_positive
1284 static __always_inline int
1285 atomic_dec_if_positive(atomic_t *v)
1287 int dec, c = atomic_read(v);
1291 if (unlikely(dec < 0))
1293 } while (!atomic_try_cmpxchg(v, &c, dec));
1297 #define atomic_dec_if_positive atomic_dec_if_positive
/* Pull in the generic atomic64 implementation when the arch has none. */
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
1304 #define arch_atomic64_read atomic64_read
1305 #define arch_atomic64_read_acquire atomic64_read_acquire
1307 #ifndef atomic64_read_acquire
1308 static __always_inline s64
1309 atomic64_read_acquire(const atomic64_t *v)
1311 return smp_load_acquire(&(v)->counter);
1313 #define atomic64_read_acquire atomic64_read_acquire
1316 #define arch_atomic64_set atomic64_set
1317 #define arch_atomic64_set_release atomic64_set_release
1319 #ifndef atomic64_set_release
1320 static __always_inline void
1321 atomic64_set_release(atomic64_t *v, s64 i)
1323 smp_store_release(&(v)->counter, i);
1325 #define atomic64_set_release atomic64_set_release
#define arch_atomic64_add atomic64_add

#define arch_atomic64_add_return atomic64_add_return
#define arch_atomic64_add_return_acquire atomic64_add_return_acquire
#define arch_atomic64_add_return_release atomic64_add_return_release
#define arch_atomic64_add_return_relaxed atomic64_add_return_relaxed

/*
 * atomic64_add_return ordering fallbacks: if the arch only provides the
 * fully-ordered op, alias all variants to it; otherwise build the
 * acquire/release/full-fence forms from the _relaxed primitive.
 */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */
#define arch_atomic64_fetch_add atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release atomic64_fetch_add_release
#define arch_atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed

/* atomic64_fetch_add ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
#define arch_atomic64_sub atomic64_sub

#define arch_atomic64_sub_return atomic64_sub_return
#define arch_atomic64_sub_return_acquire atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release atomic64_sub_return_release
#define arch_atomic64_sub_return_relaxed atomic64_sub_return_relaxed

/* atomic64_sub_return ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */
#define arch_atomic64_fetch_sub atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed

/* atomic64_fetch_sub ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
1520 #define arch_atomic64_inc atomic64_inc
1522 #ifndef atomic64_inc
1523 static __always_inline void
1524 atomic64_inc(atomic64_t *v)
1528 #define atomic64_inc atomic64_inc
1531 #define arch_atomic64_inc_return atomic64_inc_return
1532 #define arch_atomic64_inc_return_acquire atomic64_inc_return_acquire
1533 #define arch_atomic64_inc_return_release atomic64_inc_return_release
1534 #define arch_atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1536 #ifndef atomic64_inc_return_relaxed
1537 #ifdef atomic64_inc_return
1538 #define atomic64_inc_return_acquire atomic64_inc_return
1539 #define atomic64_inc_return_release atomic64_inc_return
1540 #define atomic64_inc_return_relaxed atomic64_inc_return
1541 #endif /* atomic64_inc_return */
1543 #ifndef atomic64_inc_return
1544 static __always_inline s64
1545 atomic64_inc_return(atomic64_t *v)
1547 return atomic64_add_return(1, v);
1549 #define atomic64_inc_return atomic64_inc_return
1552 #ifndef atomic64_inc_return_acquire
1553 static __always_inline s64
1554 atomic64_inc_return_acquire(atomic64_t *v)
1556 return atomic64_add_return_acquire(1, v);
1558 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1561 #ifndef atomic64_inc_return_release
1562 static __always_inline s64
1563 atomic64_inc_return_release(atomic64_t *v)
1565 return atomic64_add_return_release(1, v);
1567 #define atomic64_inc_return_release atomic64_inc_return_release
1570 #ifndef atomic64_inc_return_relaxed
1571 static __always_inline s64
1572 atomic64_inc_return_relaxed(atomic64_t *v)
1574 return atomic64_add_return_relaxed(1, v);
1576 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1579 #else /* atomic64_inc_return_relaxed */
1581 #ifndef atomic64_inc_return_acquire
1582 static __always_inline s64
1583 atomic64_inc_return_acquire(atomic64_t *v)
1585 s64 ret = atomic64_inc_return_relaxed(v);
1586 __atomic_acquire_fence();
1589 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1592 #ifndef atomic64_inc_return_release
1593 static __always_inline s64
1594 atomic64_inc_return_release(atomic64_t *v)
1596 __atomic_release_fence();
1597 return atomic64_inc_return_relaxed(v);
1599 #define atomic64_inc_return_release atomic64_inc_return_release
1602 #ifndef atomic64_inc_return
1603 static __always_inline s64
1604 atomic64_inc_return(atomic64_t *v)
1607 __atomic_pre_full_fence();
1608 ret = atomic64_inc_return_relaxed(v);
1609 __atomic_post_full_fence();
1612 #define atomic64_inc_return atomic64_inc_return
1615 #endif /* atomic64_inc_return_relaxed */
1617 #define arch_atomic64_fetch_inc atomic64_fetch_inc
1618 #define arch_atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1619 #define arch_atomic64_fetch_inc_release atomic64_fetch_inc_release
1620 #define arch_atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1622 #ifndef atomic64_fetch_inc_relaxed
1623 #ifdef atomic64_fetch_inc
1624 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
1625 #define atomic64_fetch_inc_release atomic64_fetch_inc
1626 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
1627 #endif /* atomic64_fetch_inc */
1629 #ifndef atomic64_fetch_inc
1630 static __always_inline s64
1631 atomic64_fetch_inc(atomic64_t *v)
1633 return atomic64_fetch_add(1, v);
1635 #define atomic64_fetch_inc atomic64_fetch_inc
1638 #ifndef atomic64_fetch_inc_acquire
1639 static __always_inline s64
1640 atomic64_fetch_inc_acquire(atomic64_t *v)
1642 return atomic64_fetch_add_acquire(1, v);
1644 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1647 #ifndef atomic64_fetch_inc_release
1648 static __always_inline s64
1649 atomic64_fetch_inc_release(atomic64_t *v)
1651 return atomic64_fetch_add_release(1, v);
1653 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1656 #ifndef atomic64_fetch_inc_relaxed
1657 static __always_inline s64
1658 atomic64_fetch_inc_relaxed(atomic64_t *v)
1660 return atomic64_fetch_add_relaxed(1, v);
1662 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1665 #else /* atomic64_fetch_inc_relaxed */
1667 #ifndef atomic64_fetch_inc_acquire
1668 static __always_inline s64
1669 atomic64_fetch_inc_acquire(atomic64_t *v)
1671 s64 ret = atomic64_fetch_inc_relaxed(v);
1672 __atomic_acquire_fence();
1675 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1678 #ifndef atomic64_fetch_inc_release
1679 static __always_inline s64
1680 atomic64_fetch_inc_release(atomic64_t *v)
1682 __atomic_release_fence();
1683 return atomic64_fetch_inc_relaxed(v);
1685 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1688 #ifndef atomic64_fetch_inc
1689 static __always_inline s64
1690 atomic64_fetch_inc(atomic64_t *v)
1693 __atomic_pre_full_fence();
1694 ret = atomic64_fetch_inc_relaxed(v);
1695 __atomic_post_full_fence();
1698 #define atomic64_fetch_inc atomic64_fetch_inc
1701 #endif /* atomic64_fetch_inc_relaxed */
1703 #define arch_atomic64_dec atomic64_dec
1705 #ifndef atomic64_dec
1706 static __always_inline void
1707 atomic64_dec(atomic64_t *v)
1711 #define atomic64_dec atomic64_dec
1714 #define arch_atomic64_dec_return atomic64_dec_return
1715 #define arch_atomic64_dec_return_acquire atomic64_dec_return_acquire
1716 #define arch_atomic64_dec_return_release atomic64_dec_return_release
1717 #define arch_atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1719 #ifndef atomic64_dec_return_relaxed
1720 #ifdef atomic64_dec_return
1721 #define atomic64_dec_return_acquire atomic64_dec_return
1722 #define atomic64_dec_return_release atomic64_dec_return
1723 #define atomic64_dec_return_relaxed atomic64_dec_return
1724 #endif /* atomic64_dec_return */
1726 #ifndef atomic64_dec_return
1727 static __always_inline s64
1728 atomic64_dec_return(atomic64_t *v)
1730 return atomic64_sub_return(1, v);
1732 #define atomic64_dec_return atomic64_dec_return
1735 #ifndef atomic64_dec_return_acquire
1736 static __always_inline s64
1737 atomic64_dec_return_acquire(atomic64_t *v)
1739 return atomic64_sub_return_acquire(1, v);
1741 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1744 #ifndef atomic64_dec_return_release
1745 static __always_inline s64
1746 atomic64_dec_return_release(atomic64_t *v)
1748 return atomic64_sub_return_release(1, v);
1750 #define atomic64_dec_return_release atomic64_dec_return_release
1753 #ifndef atomic64_dec_return_relaxed
1754 static __always_inline s64
1755 atomic64_dec_return_relaxed(atomic64_t *v)
1757 return atomic64_sub_return_relaxed(1, v);
1759 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1762 #else /* atomic64_dec_return_relaxed */
1764 #ifndef atomic64_dec_return_acquire
1765 static __always_inline s64
1766 atomic64_dec_return_acquire(atomic64_t *v)
1768 s64 ret = atomic64_dec_return_relaxed(v);
1769 __atomic_acquire_fence();
1772 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1775 #ifndef atomic64_dec_return_release
1776 static __always_inline s64
1777 atomic64_dec_return_release(atomic64_t *v)
1779 __atomic_release_fence();
1780 return atomic64_dec_return_relaxed(v);
1782 #define atomic64_dec_return_release atomic64_dec_return_release
1785 #ifndef atomic64_dec_return
1786 static __always_inline s64
1787 atomic64_dec_return(atomic64_t *v)
1790 __atomic_pre_full_fence();
1791 ret = atomic64_dec_return_relaxed(v);
1792 __atomic_post_full_fence();
1795 #define atomic64_dec_return atomic64_dec_return
1798 #endif /* atomic64_dec_return_relaxed */
1800 #define arch_atomic64_fetch_dec atomic64_fetch_dec
1801 #define arch_atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1802 #define arch_atomic64_fetch_dec_release atomic64_fetch_dec_release
1803 #define arch_atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1805 #ifndef atomic64_fetch_dec_relaxed
1806 #ifdef atomic64_fetch_dec
1807 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
1808 #define atomic64_fetch_dec_release atomic64_fetch_dec
1809 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
1810 #endif /* atomic64_fetch_dec */
1812 #ifndef atomic64_fetch_dec
1813 static __always_inline s64
1814 atomic64_fetch_dec(atomic64_t *v)
1816 return atomic64_fetch_sub(1, v);
1818 #define atomic64_fetch_dec atomic64_fetch_dec
1821 #ifndef atomic64_fetch_dec_acquire
1822 static __always_inline s64
1823 atomic64_fetch_dec_acquire(atomic64_t *v)
1825 return atomic64_fetch_sub_acquire(1, v);
1827 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1830 #ifndef atomic64_fetch_dec_release
1831 static __always_inline s64
1832 atomic64_fetch_dec_release(atomic64_t *v)
1834 return atomic64_fetch_sub_release(1, v);
1836 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1839 #ifndef atomic64_fetch_dec_relaxed
1840 static __always_inline s64
1841 atomic64_fetch_dec_relaxed(atomic64_t *v)
1843 return atomic64_fetch_sub_relaxed(1, v);
1845 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1848 #else /* atomic64_fetch_dec_relaxed */
1850 #ifndef atomic64_fetch_dec_acquire
1851 static __always_inline s64
1852 atomic64_fetch_dec_acquire(atomic64_t *v)
1854 s64 ret = atomic64_fetch_dec_relaxed(v);
1855 __atomic_acquire_fence();
1858 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1861 #ifndef atomic64_fetch_dec_release
1862 static __always_inline s64
1863 atomic64_fetch_dec_release(atomic64_t *v)
1865 __atomic_release_fence();
1866 return atomic64_fetch_dec_relaxed(v);
1868 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1871 #ifndef atomic64_fetch_dec
1872 static __always_inline s64
1873 atomic64_fetch_dec(atomic64_t *v)
1876 __atomic_pre_full_fence();
1877 ret = atomic64_fetch_dec_relaxed(v);
1878 __atomic_post_full_fence();
1881 #define atomic64_fetch_dec atomic64_fetch_dec
1884 #endif /* atomic64_fetch_dec_relaxed */
#define arch_atomic64_and atomic64_and

#define arch_atomic64_fetch_and atomic64_fetch_and
#define arch_atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#define arch_atomic64_fetch_and_release atomic64_fetch_and_release
#define arch_atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed

/* atomic64_fetch_and ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */
1935 #define arch_atomic64_andnot atomic64_andnot
1937 #ifndef atomic64_andnot
1938 static __always_inline void
1939 atomic64_andnot(s64 i, atomic64_t *v)
1941 atomic64_and(~i, v);
1943 #define atomic64_andnot atomic64_andnot
1946 #define arch_atomic64_fetch_andnot atomic64_fetch_andnot
1947 #define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1948 #define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1949 #define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1951 #ifndef atomic64_fetch_andnot_relaxed
1952 #ifdef atomic64_fetch_andnot
1953 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
1954 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
1955 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
1956 #endif /* atomic64_fetch_andnot */
1958 #ifndef atomic64_fetch_andnot
1959 static __always_inline s64
1960 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1962 return atomic64_fetch_and(~i, v);
1964 #define atomic64_fetch_andnot atomic64_fetch_andnot
1967 #ifndef atomic64_fetch_andnot_acquire
1968 static __always_inline s64
1969 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1971 return atomic64_fetch_and_acquire(~i, v);
1973 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1976 #ifndef atomic64_fetch_andnot_release
1977 static __always_inline s64
1978 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1980 return atomic64_fetch_and_release(~i, v);
1982 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1985 #ifndef atomic64_fetch_andnot_relaxed
1986 static __always_inline s64
1987 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1989 return atomic64_fetch_and_relaxed(~i, v);
1991 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1994 #else /* atomic64_fetch_andnot_relaxed */
1996 #ifndef atomic64_fetch_andnot_acquire
1997 static __always_inline s64
1998 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
2000 s64 ret = atomic64_fetch_andnot_relaxed(i, v);
2001 __atomic_acquire_fence();
2004 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
2007 #ifndef atomic64_fetch_andnot_release
2008 static __always_inline s64
2009 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2011 __atomic_release_fence();
2012 return atomic64_fetch_andnot_relaxed(i, v);
2014 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
2017 #ifndef atomic64_fetch_andnot
2018 static __always_inline s64
2019 atomic64_fetch_andnot(s64 i, atomic64_t *v)
2022 __atomic_pre_full_fence();
2023 ret = atomic64_fetch_andnot_relaxed(i, v);
2024 __atomic_post_full_fence();
2027 #define atomic64_fetch_andnot atomic64_fetch_andnot
2030 #endif /* atomic64_fetch_andnot_relaxed */
#define arch_atomic64_or atomic64_or

#define arch_atomic64_fetch_or atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release atomic64_fetch_or_release
#define arch_atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed

/* atomic64_fetch_or ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */
#define arch_atomic64_xor atomic64_xor

#define arch_atomic64_fetch_xor atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed

/* atomic64_fetch_xor ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
#define arch_atomic64_xchg atomic64_xchg
#define arch_atomic64_xchg_acquire atomic64_xchg_acquire
#define arch_atomic64_xchg_release atomic64_xchg_release
#define arch_atomic64_xchg_relaxed atomic64_xchg_relaxed

/* atomic64_xchg ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */
#define arch_atomic64_cmpxchg atomic64_cmpxchg
#define arch_atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release atomic64_cmpxchg_release
#define arch_atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed

/* atomic64_cmpxchg ordering fallbacks (see atomic64_add_return above). */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
2224 #define arch_atomic64_try_cmpxchg atomic64_try_cmpxchg
2225 #define arch_atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2226 #define arch_atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2227 #define arch_atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2229 #ifndef atomic64_try_cmpxchg_relaxed
2230 #ifdef atomic64_try_cmpxchg
2231 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
2232 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
2233 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
2234 #endif /* atomic64_try_cmpxchg */
2236 #ifndef atomic64_try_cmpxchg
2237 static __always_inline bool
2238 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2241 r = atomic64_cmpxchg(v, o, new);
2242 if (unlikely(r != o))
2244 return likely(r == o);
2246 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2249 #ifndef atomic64_try_cmpxchg_acquire
2250 static __always_inline bool
2251 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2254 r = atomic64_cmpxchg_acquire(v, o, new);
2255 if (unlikely(r != o))
2257 return likely(r == o);
2259 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2262 #ifndef atomic64_try_cmpxchg_release
2263 static __always_inline bool
2264 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2267 r = atomic64_cmpxchg_release(v, o, new);
2268 if (unlikely(r != o))
2270 return likely(r == o);
2272 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2275 #ifndef atomic64_try_cmpxchg_relaxed
2276 static __always_inline bool
2277 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2280 r = atomic64_cmpxchg_relaxed(v, o, new);
2281 if (unlikely(r != o))
2283 return likely(r == o);
2285 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2288 #else /* atomic64_try_cmpxchg_relaxed */
2290 #ifndef atomic64_try_cmpxchg_acquire
2291 static __always_inline bool
2292 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2294 bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2295 __atomic_acquire_fence();
2298 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2301 #ifndef atomic64_try_cmpxchg_release
2302 static __always_inline bool
2303 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2305 __atomic_release_fence();
2306 return atomic64_try_cmpxchg_relaxed(v, old, new);
2308 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2311 #ifndef atomic64_try_cmpxchg
2312 static __always_inline bool
2313 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2316 __atomic_pre_full_fence();
2317 ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2318 __atomic_post_full_fence();
2321 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2324 #endif /* atomic64_try_cmpxchg_relaxed */
2326 #define arch_atomic64_sub_and_test atomic64_sub_and_test
2328 #ifndef atomic64_sub_and_test
2330 * atomic64_sub_and_test - subtract value from variable and test result
2331 * @i: integer value to subtract
2332 * @v: pointer of type atomic64_t
2334 * Atomically subtracts @i from @v and returns
2335 * true if the result is zero, or false for all
2338 static __always_inline bool
2339 atomic64_sub_and_test(s64 i, atomic64_t *v)
2341 return atomic64_sub_return(i, v) == 0;
2343 #define atomic64_sub_and_test atomic64_sub_and_test
2346 #define arch_atomic64_dec_and_test atomic64_dec_and_test
2348 #ifndef atomic64_dec_and_test
2350 * atomic64_dec_and_test - decrement and test
2351 * @v: pointer of type atomic64_t
2353 * Atomically decrements @v by 1 and
2354 * returns true if the result is 0, or false for all other
2357 static __always_inline bool
2358 atomic64_dec_and_test(atomic64_t *v)
2360 return atomic64_dec_return(v) == 0;
2362 #define atomic64_dec_and_test atomic64_dec_and_test
2365 #define arch_atomic64_inc_and_test atomic64_inc_and_test
2367 #ifndef atomic64_inc_and_test
2369 * atomic64_inc_and_test - increment and test
2370 * @v: pointer of type atomic64_t
2372 * Atomically increments @v by 1
2373 * and returns true if the result is zero, or false for all
2376 static __always_inline bool
2377 atomic64_inc_and_test(atomic64_t *v)
2379 return atomic64_inc_return(v) == 0;
2381 #define atomic64_inc_and_test atomic64_inc_and_test
2384 #define arch_atomic64_add_negative atomic64_add_negative
2386 #ifndef atomic64_add_negative
2388 * atomic64_add_negative - add and test if negative
2389 * @i: integer value to add
2390 * @v: pointer of type atomic64_t
2392 * Atomically adds @i to @v and returns true
2393 * if the result is negative, or false when
2394 * result is greater than or equal to zero.
2396 static __always_inline bool
2397 atomic64_add_negative(s64 i, atomic64_t *v)
2399 return atomic64_add_return(i, v) < 0;
2401 #define atomic64_add_negative atomic64_add_negative
2404 #define arch_atomic64_fetch_add_unless atomic64_fetch_add_unless
2406 #ifndef atomic64_fetch_add_unless
2408 * atomic64_fetch_add_unless - add unless the number is already a given value
2409 * @v: pointer of type atomic64_t
2410 * @a: the amount to add to v...
2411 * @u: ...unless v is equal to u.
2413 * Atomically adds @a to @v, so long as @v was not already @u.
2414 * Returns original value of @v
2416 static __always_inline s64
2417 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2419 s64 c = atomic64_read(v);
2422 if (unlikely(c == u))
2424 } while (!atomic64_try_cmpxchg(v, &c, c + a));
2428 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
2431 #define arch_atomic64_add_unless atomic64_add_unless
2433 #ifndef atomic64_add_unless
2435 * atomic64_add_unless - add unless the number is already a given value
2436 * @v: pointer of type atomic64_t
2437 * @a: the amount to add to v...
2438 * @u: ...unless v is equal to u.
2440 * Atomically adds @a to @v, if @v was not already @u.
2441 * Returns true if the addition was done.
2443 static __always_inline bool
2444 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2446 return atomic64_fetch_add_unless(v, a, u) != u;
2448 #define atomic64_add_unless atomic64_add_unless
2451 #define arch_atomic64_inc_not_zero atomic64_inc_not_zero
2453 #ifndef atomic64_inc_not_zero
2455 * atomic64_inc_not_zero - increment unless the number is zero
2456 * @v: pointer of type atomic64_t
2458 * Atomically increments @v by 1, if @v is non-zero.
2459 * Returns true if the increment was done.
2461 static __always_inline bool
2462 atomic64_inc_not_zero(atomic64_t *v)
2464 return atomic64_add_unless(v, 1, 0);
2466 #define atomic64_inc_not_zero atomic64_inc_not_zero
2469 #define arch_atomic64_inc_unless_negative atomic64_inc_unless_negative
2471 #ifndef atomic64_inc_unless_negative
2472 static __always_inline bool
2473 atomic64_inc_unless_negative(atomic64_t *v)
2475 s64 c = atomic64_read(v);
2478 if (unlikely(c < 0))
2480 } while (!atomic64_try_cmpxchg(v, &c, c + 1));
2484 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
2487 #define arch_atomic64_dec_unless_positive atomic64_dec_unless_positive
2489 #ifndef atomic64_dec_unless_positive
2490 static __always_inline bool
2491 atomic64_dec_unless_positive(atomic64_t *v)
2493 s64 c = atomic64_read(v);
2496 if (unlikely(c > 0))
2498 } while (!atomic64_try_cmpxchg(v, &c, c - 1));
2502 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
2505 #define arch_atomic64_dec_if_positive atomic64_dec_if_positive
2507 #ifndef atomic64_dec_if_positive
2508 static __always_inline s64
2509 atomic64_dec_if_positive(atomic64_t *v)
2511 s64 dec, c = atomic64_read(v);
2515 if (unlikely(dec < 0))
2517 } while (!atomic64_try_cmpxchg(v, &c, dec));
2521 #define atomic64_dec_if_positive atomic64_dec_if_positive
2524 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2525 // 9d95b56f98d82a2a26c7b79ccdd0c47572d50a6f