1 // SPDX-License-Identifier: GPL-2.0
2
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
8
9 #include <linux/compiler.h>
10
11 #ifndef xchg_relaxed
12 #define xchg_relaxed            xchg
13 #define xchg_acquire            xchg
14 #define xchg_release            xchg
15 #else /* xchg_relaxed */
16
17 #ifndef xchg_acquire
18 #define xchg_acquire(...) \
19         __atomic_op_acquire(xchg, __VA_ARGS__)
20 #endif
21
22 #ifndef xchg_release
23 #define xchg_release(...) \
24         __atomic_op_release(xchg, __VA_ARGS__)
25 #endif
26
27 #ifndef xchg
28 #define xchg(...) \
29         __atomic_op_fence(xchg, __VA_ARGS__)
30 #endif
31
32 #endif /* xchg_relaxed */
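
/*
 * Note: the __atomic_op_acquire/release/fence() wrappers used above live
 * in <linux/atomic.h>.  Roughly (a sketch, not a verbatim copy),
 * __atomic_op_acquire() bolts an acquire barrier onto the architecture's
 * _relaxed primitive:
 *
 *	#define __atomic_op_acquire(op, args...)			\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);	\
 *		__atomic_acquire_fence();				\
 *		__ret;							\
 *	})
 *
 * __atomic_op_release() issues __atomic_release_fence() before the
 * _relaxed op, and __atomic_op_fence() brackets it with
 * __atomic_pre_full_fence() / __atomic_post_full_fence().
 */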
33
34 #ifndef cmpxchg_relaxed
35 #define cmpxchg_relaxed         cmpxchg
36 #define cmpxchg_acquire         cmpxchg
37 #define cmpxchg_release         cmpxchg
38 #else /* cmpxchg_relaxed */
39
40 #ifndef cmpxchg_acquire
41 #define cmpxchg_acquire(...) \
42         __atomic_op_acquire(cmpxchg, __VA_ARGS__)
43 #endif
44
45 #ifndef cmpxchg_release
46 #define cmpxchg_release(...) \
47         __atomic_op_release(cmpxchg, __VA_ARGS__)
48 #endif
49
50 #ifndef cmpxchg
51 #define cmpxchg(...) \
52         __atomic_op_fence(cmpxchg, __VA_ARGS__)
53 #endif
54
55 #endif /* cmpxchg_relaxed */
56
57 #ifndef cmpxchg64_relaxed
58 #define cmpxchg64_relaxed               cmpxchg64
59 #define cmpxchg64_acquire               cmpxchg64
60 #define cmpxchg64_release               cmpxchg64
61 #else /* cmpxchg64_relaxed */
62
63 #ifndef cmpxchg64_acquire
64 #define cmpxchg64_acquire(...) \
65         __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
66 #endif
67
68 #ifndef cmpxchg64_release
69 #define cmpxchg64_release(...) \
70         __atomic_op_release(cmpxchg64, __VA_ARGS__)
71 #endif
72
73 #ifndef cmpxchg64
74 #define cmpxchg64(...) \
75         __atomic_op_fence(cmpxchg64, __VA_ARGS__)
76 #endif
77
78 #endif /* cmpxchg64_relaxed */
79
80 #ifndef atomic_read_acquire
81 static __always_inline int
82 atomic_read_acquire(const atomic_t *v)
83 {
84         return smp_load_acquire(&(v)->counter);
85 }
86 #define atomic_read_acquire atomic_read_acquire
87 #endif
88
89 #ifndef atomic_set_release
90 static __always_inline void
91 atomic_set_release(atomic_t *v, int i)
92 {
93         smp_store_release(&(v)->counter, i);
94 }
95 #define atomic_set_release atomic_set_release
96 #endif
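
/*
 * A minimal illustration of how the two fallbacks above pair up (the
 * names 'data' and 'ready' are made up for this sketch):
 *
 *	// writer
 *	data = 42;
 *	atomic_set_release(&ready, 1);
 *
 *	// reader
 *	while (!atomic_read_acquire(&ready))
 *		cpu_relax();
 *	// the store to 'data' is guaranteed to be visible here
 *
 * The release on the writer side and the acquire on the reader side are
 * what order the plain accesses to 'data' against the flag.
 */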
97
98 #ifndef atomic_add_return_relaxed
99 #define atomic_add_return_acquire atomic_add_return
100 #define atomic_add_return_release atomic_add_return
101 #define atomic_add_return_relaxed atomic_add_return
102 #else /* atomic_add_return_relaxed */
103
104 #ifndef atomic_add_return_acquire
105 static __always_inline int
106 atomic_add_return_acquire(int i, atomic_t *v)
107 {
108         int ret = atomic_add_return_relaxed(i, v);
109         __atomic_acquire_fence();
110         return ret;
111 }
112 #define atomic_add_return_acquire atomic_add_return_acquire
113 #endif
114
115 #ifndef atomic_add_return_release
116 static __always_inline int
117 atomic_add_return_release(int i, atomic_t *v)
118 {
119         __atomic_release_fence();
120         return atomic_add_return_relaxed(i, v);
121 }
122 #define atomic_add_return_release atomic_add_return_release
123 #endif
124
125 #ifndef atomic_add_return
126 static __always_inline int
127 atomic_add_return(int i, atomic_t *v)
128 {
129         int ret;
130         __atomic_pre_full_fence();
131         ret = atomic_add_return_relaxed(i, v);
132         __atomic_post_full_fence();
133         return ret;
134 }
135 #define atomic_add_return atomic_add_return
136 #endif
137
138 #endif /* atomic_add_return_relaxed */
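
/*
 * The block above is the template repeated for every ordered operation in
 * this file: when an architecture supplies only the _relaxed form, the
 * _acquire variant is built as "relaxed op, then __atomic_acquire_fence()",
 * the _release variant as "__atomic_release_fence(), then relaxed op", and
 * the fully ordered variant as the relaxed op bracketed by
 * __atomic_pre_full_fence() / __atomic_post_full_fence().  When only the
 * fully ordered op is provided, the first branch of the #ifndef maps the
 * weaker orderings onto it, which is correct albeit stronger than required.
 */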
139
140 #ifndef atomic_fetch_add_relaxed
141 #define atomic_fetch_add_acquire atomic_fetch_add
142 #define atomic_fetch_add_release atomic_fetch_add
143 #define atomic_fetch_add_relaxed atomic_fetch_add
144 #else /* atomic_fetch_add_relaxed */
145
146 #ifndef atomic_fetch_add_acquire
147 static __always_inline int
148 atomic_fetch_add_acquire(int i, atomic_t *v)
149 {
150         int ret = atomic_fetch_add_relaxed(i, v);
151         __atomic_acquire_fence();
152         return ret;
153 }
154 #define atomic_fetch_add_acquire atomic_fetch_add_acquire
155 #endif
156
157 #ifndef atomic_fetch_add_release
158 static __always_inline int
159 atomic_fetch_add_release(int i, atomic_t *v)
160 {
161         __atomic_release_fence();
162         return atomic_fetch_add_relaxed(i, v);
163 }
164 #define atomic_fetch_add_release atomic_fetch_add_release
165 #endif
166
167 #ifndef atomic_fetch_add
168 static __always_inline int
169 atomic_fetch_add(int i, atomic_t *v)
170 {
171         int ret;
172         __atomic_pre_full_fence();
173         ret = atomic_fetch_add_relaxed(i, v);
174         __atomic_post_full_fence();
175         return ret;
176 }
177 #define atomic_fetch_add atomic_fetch_add
178 #endif
179
180 #endif /* atomic_fetch_add_relaxed */
181
182 #ifndef atomic_sub_return_relaxed
183 #define atomic_sub_return_acquire atomic_sub_return
184 #define atomic_sub_return_release atomic_sub_return
185 #define atomic_sub_return_relaxed atomic_sub_return
186 #else /* atomic_sub_return_relaxed */
187
188 #ifndef atomic_sub_return_acquire
189 static __always_inline int
190 atomic_sub_return_acquire(int i, atomic_t *v)
191 {
192         int ret = atomic_sub_return_relaxed(i, v);
193         __atomic_acquire_fence();
194         return ret;
195 }
196 #define atomic_sub_return_acquire atomic_sub_return_acquire
197 #endif
198
199 #ifndef atomic_sub_return_release
200 static __always_inline int
201 atomic_sub_return_release(int i, atomic_t *v)
202 {
203         __atomic_release_fence();
204         return atomic_sub_return_relaxed(i, v);
205 }
206 #define atomic_sub_return_release atomic_sub_return_release
207 #endif
208
209 #ifndef atomic_sub_return
210 static __always_inline int
211 atomic_sub_return(int i, atomic_t *v)
212 {
213         int ret;
214         __atomic_pre_full_fence();
215         ret = atomic_sub_return_relaxed(i, v);
216         __atomic_post_full_fence();
217         return ret;
218 }
219 #define atomic_sub_return atomic_sub_return
220 #endif
221
222 #endif /* atomic_sub_return_relaxed */
223
224 #ifndef atomic_fetch_sub_relaxed
225 #define atomic_fetch_sub_acquire atomic_fetch_sub
226 #define atomic_fetch_sub_release atomic_fetch_sub
227 #define atomic_fetch_sub_relaxed atomic_fetch_sub
228 #else /* atomic_fetch_sub_relaxed */
229
230 #ifndef atomic_fetch_sub_acquire
231 static __always_inline int
232 atomic_fetch_sub_acquire(int i, atomic_t *v)
233 {
234         int ret = atomic_fetch_sub_relaxed(i, v);
235         __atomic_acquire_fence();
236         return ret;
237 }
238 #define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
239 #endif
240
241 #ifndef atomic_fetch_sub_release
242 static __always_inline int
243 atomic_fetch_sub_release(int i, atomic_t *v)
244 {
245         __atomic_release_fence();
246         return atomic_fetch_sub_relaxed(i, v);
247 }
248 #define atomic_fetch_sub_release atomic_fetch_sub_release
249 #endif
250
251 #ifndef atomic_fetch_sub
252 static __always_inline int
253 atomic_fetch_sub(int i, atomic_t *v)
254 {
255         int ret;
256         __atomic_pre_full_fence();
257         ret = atomic_fetch_sub_relaxed(i, v);
258         __atomic_post_full_fence();
259         return ret;
260 }
261 #define atomic_fetch_sub atomic_fetch_sub
262 #endif
263
264 #endif /* atomic_fetch_sub_relaxed */
265
266 #ifndef atomic_inc
267 static __always_inline void
268 atomic_inc(atomic_t *v)
269 {
270         atomic_add(1, v);
271 }
272 #define atomic_inc atomic_inc
273 #endif
274
275 #ifndef atomic_inc_return_relaxed
276 #ifdef atomic_inc_return
277 #define atomic_inc_return_acquire atomic_inc_return
278 #define atomic_inc_return_release atomic_inc_return
279 #define atomic_inc_return_relaxed atomic_inc_return
280 #endif /* atomic_inc_return */
281
282 #ifndef atomic_inc_return
283 static __always_inline int
284 atomic_inc_return(atomic_t *v)
285 {
286         return atomic_add_return(1, v);
287 }
288 #define atomic_inc_return atomic_inc_return
289 #endif
290
291 #ifndef atomic_inc_return_acquire
292 static __always_inline int
293 atomic_inc_return_acquire(atomic_t *v)
294 {
295         return atomic_add_return_acquire(1, v);
296 }
297 #define atomic_inc_return_acquire atomic_inc_return_acquire
298 #endif
299
300 #ifndef atomic_inc_return_release
301 static __always_inline int
302 atomic_inc_return_release(atomic_t *v)
303 {
304         return atomic_add_return_release(1, v);
305 }
306 #define atomic_inc_return_release atomic_inc_return_release
307 #endif
308
309 #ifndef atomic_inc_return_relaxed
310 static __always_inline int
311 atomic_inc_return_relaxed(atomic_t *v)
312 {
313         return atomic_add_return_relaxed(1, v);
314 }
315 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
316 #endif
317
318 #else /* atomic_inc_return_relaxed */
319
320 #ifndef atomic_inc_return_acquire
321 static __always_inline int
322 atomic_inc_return_acquire(atomic_t *v)
323 {
324         int ret = atomic_inc_return_relaxed(v);
325         __atomic_acquire_fence();
326         return ret;
327 }
328 #define atomic_inc_return_acquire atomic_inc_return_acquire
329 #endif
330
331 #ifndef atomic_inc_return_release
332 static __always_inline int
333 atomic_inc_return_release(atomic_t *v)
334 {
335         __atomic_release_fence();
336         return atomic_inc_return_relaxed(v);
337 }
338 #define atomic_inc_return_release atomic_inc_return_release
339 #endif
340
341 #ifndef atomic_inc_return
342 static __always_inline int
343 atomic_inc_return(atomic_t *v)
344 {
345         int ret;
346         __atomic_pre_full_fence();
347         ret = atomic_inc_return_relaxed(v);
348         __atomic_post_full_fence();
349         return ret;
350 }
351 #define atomic_inc_return atomic_inc_return
352 #endif
353
354 #endif /* atomic_inc_return_relaxed */
355
356 #ifndef atomic_fetch_inc_relaxed
357 #ifdef atomic_fetch_inc
358 #define atomic_fetch_inc_acquire atomic_fetch_inc
359 #define atomic_fetch_inc_release atomic_fetch_inc
360 #define atomic_fetch_inc_relaxed atomic_fetch_inc
361 #endif /* atomic_fetch_inc */
362
363 #ifndef atomic_fetch_inc
364 static __always_inline int
365 atomic_fetch_inc(atomic_t *v)
366 {
367         return atomic_fetch_add(1, v);
368 }
369 #define atomic_fetch_inc atomic_fetch_inc
370 #endif
371
372 #ifndef atomic_fetch_inc_acquire
373 static __always_inline int
374 atomic_fetch_inc_acquire(atomic_t *v)
375 {
376         return atomic_fetch_add_acquire(1, v);
377 }
378 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
379 #endif
380
381 #ifndef atomic_fetch_inc_release
382 static __always_inline int
383 atomic_fetch_inc_release(atomic_t *v)
384 {
385         return atomic_fetch_add_release(1, v);
386 }
387 #define atomic_fetch_inc_release atomic_fetch_inc_release
388 #endif
389
390 #ifndef atomic_fetch_inc_relaxed
391 static __always_inline int
392 atomic_fetch_inc_relaxed(atomic_t *v)
393 {
394         return atomic_fetch_add_relaxed(1, v);
395 }
396 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
397 #endif
398
399 #else /* atomic_fetch_inc_relaxed */
400
401 #ifndef atomic_fetch_inc_acquire
402 static __always_inline int
403 atomic_fetch_inc_acquire(atomic_t *v)
404 {
405         int ret = atomic_fetch_inc_relaxed(v);
406         __atomic_acquire_fence();
407         return ret;
408 }
409 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
410 #endif
411
412 #ifndef atomic_fetch_inc_release
413 static __always_inline int
414 atomic_fetch_inc_release(atomic_t *v)
415 {
416         __atomic_release_fence();
417         return atomic_fetch_inc_relaxed(v);
418 }
419 #define atomic_fetch_inc_release atomic_fetch_inc_release
420 #endif
421
422 #ifndef atomic_fetch_inc
423 static __always_inline int
424 atomic_fetch_inc(atomic_t *v)
425 {
426         int ret;
427         __atomic_pre_full_fence();
428         ret = atomic_fetch_inc_relaxed(v);
429         __atomic_post_full_fence();
430         return ret;
431 }
432 #define atomic_fetch_inc atomic_fetch_inc
433 #endif
434
435 #endif /* atomic_fetch_inc_relaxed */
436
437 #ifndef atomic_dec
438 static __always_inline void
439 atomic_dec(atomic_t *v)
440 {
441         atomic_sub(1, v);
442 }
443 #define atomic_dec atomic_dec
444 #endif
445
446 #ifndef atomic_dec_return_relaxed
447 #ifdef atomic_dec_return
448 #define atomic_dec_return_acquire atomic_dec_return
449 #define atomic_dec_return_release atomic_dec_return
450 #define atomic_dec_return_relaxed atomic_dec_return
451 #endif /* atomic_dec_return */
452
453 #ifndef atomic_dec_return
454 static __always_inline int
455 atomic_dec_return(atomic_t *v)
456 {
457         return atomic_sub_return(1, v);
458 }
459 #define atomic_dec_return atomic_dec_return
460 #endif
461
462 #ifndef atomic_dec_return_acquire
463 static __always_inline int
464 atomic_dec_return_acquire(atomic_t *v)
465 {
466         return atomic_sub_return_acquire(1, v);
467 }
468 #define atomic_dec_return_acquire atomic_dec_return_acquire
469 #endif
470
471 #ifndef atomic_dec_return_release
472 static __always_inline int
473 atomic_dec_return_release(atomic_t *v)
474 {
475         return atomic_sub_return_release(1, v);
476 }
477 #define atomic_dec_return_release atomic_dec_return_release
478 #endif
479
480 #ifndef atomic_dec_return_relaxed
481 static __always_inline int
482 atomic_dec_return_relaxed(atomic_t *v)
483 {
484         return atomic_sub_return_relaxed(1, v);
485 }
486 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
487 #endif
488
489 #else /* atomic_dec_return_relaxed */
490
491 #ifndef atomic_dec_return_acquire
492 static __always_inline int
493 atomic_dec_return_acquire(atomic_t *v)
494 {
495         int ret = atomic_dec_return_relaxed(v);
496         __atomic_acquire_fence();
497         return ret;
498 }
499 #define atomic_dec_return_acquire atomic_dec_return_acquire
500 #endif
501
502 #ifndef atomic_dec_return_release
503 static __always_inline int
504 atomic_dec_return_release(atomic_t *v)
505 {
506         __atomic_release_fence();
507         return atomic_dec_return_relaxed(v);
508 }
509 #define atomic_dec_return_release atomic_dec_return_release
510 #endif
511
512 #ifndef atomic_dec_return
513 static __always_inline int
514 atomic_dec_return(atomic_t *v)
515 {
516         int ret;
517         __atomic_pre_full_fence();
518         ret = atomic_dec_return_relaxed(v);
519         __atomic_post_full_fence();
520         return ret;
521 }
522 #define atomic_dec_return atomic_dec_return
523 #endif
524
525 #endif /* atomic_dec_return_relaxed */
526
527 #ifndef atomic_fetch_dec_relaxed
528 #ifdef atomic_fetch_dec
529 #define atomic_fetch_dec_acquire atomic_fetch_dec
530 #define atomic_fetch_dec_release atomic_fetch_dec
531 #define atomic_fetch_dec_relaxed atomic_fetch_dec
532 #endif /* atomic_fetch_dec */
533
534 #ifndef atomic_fetch_dec
535 static __always_inline int
536 atomic_fetch_dec(atomic_t *v)
537 {
538         return atomic_fetch_sub(1, v);
539 }
540 #define atomic_fetch_dec atomic_fetch_dec
541 #endif
542
543 #ifndef atomic_fetch_dec_acquire
544 static __always_inline int
545 atomic_fetch_dec_acquire(atomic_t *v)
546 {
547         return atomic_fetch_sub_acquire(1, v);
548 }
549 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
550 #endif
551
552 #ifndef atomic_fetch_dec_release
553 static __always_inline int
554 atomic_fetch_dec_release(atomic_t *v)
555 {
556         return atomic_fetch_sub_release(1, v);
557 }
558 #define atomic_fetch_dec_release atomic_fetch_dec_release
559 #endif
560
561 #ifndef atomic_fetch_dec_relaxed
562 static __always_inline int
563 atomic_fetch_dec_relaxed(atomic_t *v)
564 {
565         return atomic_fetch_sub_relaxed(1, v);
566 }
567 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
568 #endif
569
570 #else /* atomic_fetch_dec_relaxed */
571
572 #ifndef atomic_fetch_dec_acquire
573 static __always_inline int
574 atomic_fetch_dec_acquire(atomic_t *v)
575 {
576         int ret = atomic_fetch_dec_relaxed(v);
577         __atomic_acquire_fence();
578         return ret;
579 }
580 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
581 #endif
582
583 #ifndef atomic_fetch_dec_release
584 static __always_inline int
585 atomic_fetch_dec_release(atomic_t *v)
586 {
587         __atomic_release_fence();
588         return atomic_fetch_dec_relaxed(v);
589 }
590 #define atomic_fetch_dec_release atomic_fetch_dec_release
591 #endif
592
593 #ifndef atomic_fetch_dec
594 static __always_inline int
595 atomic_fetch_dec(atomic_t *v)
596 {
597         int ret;
598         __atomic_pre_full_fence();
599         ret = atomic_fetch_dec_relaxed(v);
600         __atomic_post_full_fence();
601         return ret;
602 }
603 #define atomic_fetch_dec atomic_fetch_dec
604 #endif
605
606 #endif /* atomic_fetch_dec_relaxed */
607
608 #ifndef atomic_fetch_and_relaxed
609 #define atomic_fetch_and_acquire atomic_fetch_and
610 #define atomic_fetch_and_release atomic_fetch_and
611 #define atomic_fetch_and_relaxed atomic_fetch_and
612 #else /* atomic_fetch_and_relaxed */
613
614 #ifndef atomic_fetch_and_acquire
615 static __always_inline int
616 atomic_fetch_and_acquire(int i, atomic_t *v)
617 {
618         int ret = atomic_fetch_and_relaxed(i, v);
619         __atomic_acquire_fence();
620         return ret;
621 }
622 #define atomic_fetch_and_acquire atomic_fetch_and_acquire
623 #endif
624
625 #ifndef atomic_fetch_and_release
626 static __always_inline int
627 atomic_fetch_and_release(int i, atomic_t *v)
628 {
629         __atomic_release_fence();
630         return atomic_fetch_and_relaxed(i, v);
631 }
632 #define atomic_fetch_and_release atomic_fetch_and_release
633 #endif
634
635 #ifndef atomic_fetch_and
636 static __always_inline int
637 atomic_fetch_and(int i, atomic_t *v)
638 {
639         int ret;
640         __atomic_pre_full_fence();
641         ret = atomic_fetch_and_relaxed(i, v);
642         __atomic_post_full_fence();
643         return ret;
644 }
645 #define atomic_fetch_and atomic_fetch_and
646 #endif
647
648 #endif /* atomic_fetch_and_relaxed */
649
650 #ifndef atomic_andnot
651 static __always_inline void
652 atomic_andnot(int i, atomic_t *v)
653 {
654         atomic_and(~i, v);
655 }
656 #define atomic_andnot atomic_andnot
657 #endif
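
/*
 * atomic_andnot(i, v) atomically clears the bits of @i in @v.  An
 * illustrative (made-up) use, where FLAG_BUSY is a hypothetical mask:
 *
 *	atomic_or(FLAG_BUSY, &state);		// set the busy bit
 *	...
 *	atomic_andnot(FLAG_BUSY, &state);	// clear it again
 */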
658
659 #ifndef atomic_fetch_andnot_relaxed
660 #ifdef atomic_fetch_andnot
661 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
662 #define atomic_fetch_andnot_release atomic_fetch_andnot
663 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
664 #endif /* atomic_fetch_andnot */
665
666 #ifndef atomic_fetch_andnot
667 static __always_inline int
668 atomic_fetch_andnot(int i, atomic_t *v)
669 {
670         return atomic_fetch_and(~i, v);
671 }
672 #define atomic_fetch_andnot atomic_fetch_andnot
673 #endif
674
675 #ifndef atomic_fetch_andnot_acquire
676 static __always_inline int
677 atomic_fetch_andnot_acquire(int i, atomic_t *v)
678 {
679         return atomic_fetch_and_acquire(~i, v);
680 }
681 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
682 #endif
683
684 #ifndef atomic_fetch_andnot_release
685 static __always_inline int
686 atomic_fetch_andnot_release(int i, atomic_t *v)
687 {
688         return atomic_fetch_and_release(~i, v);
689 }
690 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
691 #endif
692
693 #ifndef atomic_fetch_andnot_relaxed
694 static __always_inline int
695 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
696 {
697         return atomic_fetch_and_relaxed(~i, v);
698 }
699 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
700 #endif
701
702 #else /* atomic_fetch_andnot_relaxed */
703
704 #ifndef atomic_fetch_andnot_acquire
705 static __always_inline int
706 atomic_fetch_andnot_acquire(int i, atomic_t *v)
707 {
708         int ret = atomic_fetch_andnot_relaxed(i, v);
709         __atomic_acquire_fence();
710         return ret;
711 }
712 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
713 #endif
714
715 #ifndef atomic_fetch_andnot_release
716 static __always_inline int
717 atomic_fetch_andnot_release(int i, atomic_t *v)
718 {
719         __atomic_release_fence();
720         return atomic_fetch_andnot_relaxed(i, v);
721 }
722 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
723 #endif
724
725 #ifndef atomic_fetch_andnot
726 static __always_inline int
727 atomic_fetch_andnot(int i, atomic_t *v)
728 {
729         int ret;
730         __atomic_pre_full_fence();
731         ret = atomic_fetch_andnot_relaxed(i, v);
732         __atomic_post_full_fence();
733         return ret;
734 }
735 #define atomic_fetch_andnot atomic_fetch_andnot
736 #endif
737
738 #endif /* atomic_fetch_andnot_relaxed */
739
740 #ifndef atomic_fetch_or_relaxed
741 #define atomic_fetch_or_acquire atomic_fetch_or
742 #define atomic_fetch_or_release atomic_fetch_or
743 #define atomic_fetch_or_relaxed atomic_fetch_or
744 #else /* atomic_fetch_or_relaxed */
745
746 #ifndef atomic_fetch_or_acquire
747 static __always_inline int
748 atomic_fetch_or_acquire(int i, atomic_t *v)
749 {
750         int ret = atomic_fetch_or_relaxed(i, v);
751         __atomic_acquire_fence();
752         return ret;
753 }
754 #define atomic_fetch_or_acquire atomic_fetch_or_acquire
755 #endif
756
757 #ifndef atomic_fetch_or_release
758 static __always_inline int
759 atomic_fetch_or_release(int i, atomic_t *v)
760 {
761         __atomic_release_fence();
762         return atomic_fetch_or_relaxed(i, v);
763 }
764 #define atomic_fetch_or_release atomic_fetch_or_release
765 #endif
766
767 #ifndef atomic_fetch_or
768 static __always_inline int
769 atomic_fetch_or(int i, atomic_t *v)
770 {
771         int ret;
772         __atomic_pre_full_fence();
773         ret = atomic_fetch_or_relaxed(i, v);
774         __atomic_post_full_fence();
775         return ret;
776 }
777 #define atomic_fetch_or atomic_fetch_or
778 #endif
779
780 #endif /* atomic_fetch_or_relaxed */
781
782 #ifndef atomic_fetch_xor_relaxed
783 #define atomic_fetch_xor_acquire atomic_fetch_xor
784 #define atomic_fetch_xor_release atomic_fetch_xor
785 #define atomic_fetch_xor_relaxed atomic_fetch_xor
786 #else /* atomic_fetch_xor_relaxed */
787
788 #ifndef atomic_fetch_xor_acquire
789 static __always_inline int
790 atomic_fetch_xor_acquire(int i, atomic_t *v)
791 {
792         int ret = atomic_fetch_xor_relaxed(i, v);
793         __atomic_acquire_fence();
794         return ret;
795 }
796 #define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
797 #endif
798
799 #ifndef atomic_fetch_xor_release
800 static __always_inline int
801 atomic_fetch_xor_release(int i, atomic_t *v)
802 {
803         __atomic_release_fence();
804         return atomic_fetch_xor_relaxed(i, v);
805 }
806 #define atomic_fetch_xor_release atomic_fetch_xor_release
807 #endif
808
809 #ifndef atomic_fetch_xor
810 static __always_inline int
811 atomic_fetch_xor(int i, atomic_t *v)
812 {
813         int ret;
814         __atomic_pre_full_fence();
815         ret = atomic_fetch_xor_relaxed(i, v);
816         __atomic_post_full_fence();
817         return ret;
818 }
819 #define atomic_fetch_xor atomic_fetch_xor
820 #endif
821
822 #endif /* atomic_fetch_xor_relaxed */
823
824 #ifndef atomic_xchg_relaxed
825 #define atomic_xchg_acquire atomic_xchg
826 #define atomic_xchg_release atomic_xchg
827 #define atomic_xchg_relaxed atomic_xchg
828 #else /* atomic_xchg_relaxed */
829
830 #ifndef atomic_xchg_acquire
831 static __always_inline int
832 atomic_xchg_acquire(atomic_t *v, int i)
833 {
834         int ret = atomic_xchg_relaxed(v, i);
835         __atomic_acquire_fence();
836         return ret;
837 }
838 #define atomic_xchg_acquire atomic_xchg_acquire
839 #endif
840
841 #ifndef atomic_xchg_release
842 static __always_inline int
843 atomic_xchg_release(atomic_t *v, int i)
844 {
845         __atomic_release_fence();
846         return atomic_xchg_relaxed(v, i);
847 }
848 #define atomic_xchg_release atomic_xchg_release
849 #endif
850
851 #ifndef atomic_xchg
852 static __always_inline int
853 atomic_xchg(atomic_t *v, int i)
854 {
855         int ret;
856         __atomic_pre_full_fence();
857         ret = atomic_xchg_relaxed(v, i);
858         __atomic_post_full_fence();
859         return ret;
860 }
861 #define atomic_xchg atomic_xchg
862 #endif
863
864 #endif /* atomic_xchg_relaxed */
865
866 #ifndef atomic_cmpxchg_relaxed
867 #define atomic_cmpxchg_acquire atomic_cmpxchg
868 #define atomic_cmpxchg_release atomic_cmpxchg
869 #define atomic_cmpxchg_relaxed atomic_cmpxchg
870 #else /* atomic_cmpxchg_relaxed */
871
872 #ifndef atomic_cmpxchg_acquire
873 static __always_inline int
874 atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
875 {
876         int ret = atomic_cmpxchg_relaxed(v, old, new);
877         __atomic_acquire_fence();
878         return ret;
879 }
880 #define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
881 #endif
882
883 #ifndef atomic_cmpxchg_release
884 static __always_inline int
885 atomic_cmpxchg_release(atomic_t *v, int old, int new)
886 {
887         __atomic_release_fence();
888         return atomic_cmpxchg_relaxed(v, old, new);
889 }
890 #define atomic_cmpxchg_release atomic_cmpxchg_release
891 #endif
892
893 #ifndef atomic_cmpxchg
894 static __always_inline int
895 atomic_cmpxchg(atomic_t *v, int old, int new)
896 {
897         int ret;
898         __atomic_pre_full_fence();
899         ret = atomic_cmpxchg_relaxed(v, old, new);
900         __atomic_post_full_fence();
901         return ret;
902 }
903 #define atomic_cmpxchg atomic_cmpxchg
904 #endif
905
906 #endif /* atomic_cmpxchg_relaxed */
907
908 #ifndef atomic_try_cmpxchg_relaxed
909 #ifdef atomic_try_cmpxchg
910 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
911 #define atomic_try_cmpxchg_release atomic_try_cmpxchg
912 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
913 #endif /* atomic_try_cmpxchg */
914
915 #ifndef atomic_try_cmpxchg
916 static __always_inline bool
917 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
918 {
919         int r, o = *old;
920         r = atomic_cmpxchg(v, o, new);
921         if (unlikely(r != o))
922                 *old = r;
923         return likely(r == o);
924 }
925 #define atomic_try_cmpxchg atomic_try_cmpxchg
926 #endif
927
928 #ifndef atomic_try_cmpxchg_acquire
929 static __always_inline bool
930 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
931 {
932         int r, o = *old;
933         r = atomic_cmpxchg_acquire(v, o, new);
934         if (unlikely(r != o))
935                 *old = r;
936         return likely(r == o);
937 }
938 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
939 #endif
940
941 #ifndef atomic_try_cmpxchg_release
942 static __always_inline bool
943 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
944 {
945         int r, o = *old;
946         r = atomic_cmpxchg_release(v, o, new);
947         if (unlikely(r != o))
948                 *old = r;
949         return likely(r == o);
950 }
951 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
952 #endif
953
954 #ifndef atomic_try_cmpxchg_relaxed
955 static __always_inline bool
956 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
957 {
958         int r, o = *old;
959         r = atomic_cmpxchg_relaxed(v, o, new);
960         if (unlikely(r != o))
961                 *old = r;
962         return likely(r == o);
963 }
964 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
965 #endif
966
967 #else /* atomic_try_cmpxchg_relaxed */
968
969 #ifndef atomic_try_cmpxchg_acquire
970 static __always_inline bool
971 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
972 {
973         bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
974         __atomic_acquire_fence();
975         return ret;
976 }
977 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
978 #endif
979
980 #ifndef atomic_try_cmpxchg_release
981 static __always_inline bool
982 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
983 {
984         __atomic_release_fence();
985         return atomic_try_cmpxchg_relaxed(v, old, new);
986 }
987 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
988 #endif
989
990 #ifndef atomic_try_cmpxchg
991 static __always_inline bool
992 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
993 {
994         bool ret;
995         __atomic_pre_full_fence();
996         ret = atomic_try_cmpxchg_relaxed(v, old, new);
997         __atomic_post_full_fence();
998         return ret;
999 }
1000 #define atomic_try_cmpxchg atomic_try_cmpxchg
1001 #endif
1002
1003 #endif /* atomic_try_cmpxchg_relaxed */
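
/*
 * try_cmpxchg() exists to make cmpxchg() loops cheaper and easier to read:
 * on failure it writes the value actually found back into *old, so the
 * loop does not have to re-read the variable.  A minimal sketch of the
 * idiom (all names illustrative):
 *
 *	int new, old = atomic_read(&counter);
 *
 *	do {
 *		new = compute(old);
 *	} while (!atomic_try_cmpxchg(&counter, &old, new));
 *
 * This is exactly the pattern used by the _unless/_if_ helpers below.
 */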
1004
1005 #ifndef atomic_sub_and_test
1006 /**
1007  * atomic_sub_and_test - subtract value from variable and test result
1008  * @i: integer value to subtract
1009  * @v: pointer of type atomic_t
1010  *
1011  * Atomically subtracts @i from @v and returns
1012  * true if the result is zero, or false for all
1013  * other cases.
1014  */
1015 static __always_inline bool
1016 atomic_sub_and_test(int i, atomic_t *v)
1017 {
1018         return atomic_sub_return(i, v) == 0;
1019 }
1020 #define atomic_sub_and_test atomic_sub_and_test
1021 #endif
1022
1023 #ifndef atomic_dec_and_test
1024 /**
1025  * atomic_dec_and_test - decrement and test
1026  * @v: pointer of type atomic_t
1027  *
1028  * Atomically decrements @v by 1 and
1029  * returns true if the result is 0, or false for all other
1030  * cases.
1031  */
1032 static __always_inline bool
1033 atomic_dec_and_test(atomic_t *v)
1034 {
1035         return atomic_dec_return(v) == 0;
1036 }
1037 #define atomic_dec_and_test atomic_dec_and_test
1038 #endif
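
/*
 * The classical use of atomic_dec_and_test() is the reference-count "put"
 * path: whoever drops the count to zero frees the object.  Sketch
 * ('obj' and 'refs' are illustrative names):
 *
 *	if (atomic_dec_and_test(&obj->refs))
 *		kfree(obj);
 */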
1039
1040 #ifndef atomic_inc_and_test
1041 /**
1042  * atomic_inc_and_test - increment and test
1043  * @v: pointer of type atomic_t
1044  *
1045  * Atomically increments @v by 1
1046  * and returns true if the result is zero, or false for all
1047  * other cases.
1048  */
1049 static __always_inline bool
1050 atomic_inc_and_test(atomic_t *v)
1051 {
1052         return atomic_inc_return(v) == 0;
1053 }
1054 #define atomic_inc_and_test atomic_inc_and_test
1055 #endif
1056
1057 #ifndef atomic_add_negative
1058 /**
1059  * atomic_add_negative - add and test if negative
1060  * @i: integer value to add
1061  * @v: pointer of type atomic_t
1062  *
1063  * Atomically adds @i to @v and returns true
1064  * if the result is negative, or false when
1065  * if the result is negative, or false when the
1066  * result is greater than or equal to zero.
1067 static __always_inline bool
1068 atomic_add_negative(int i, atomic_t *v)
1069 {
1070         return atomic_add_return(i, v) < 0;
1071 }
1072 #define atomic_add_negative atomic_add_negative
1073 #endif
1074
1075 #ifndef atomic_fetch_add_unless
1076 /**
1077  * atomic_fetch_add_unless - add unless the number is already a given value
1078  * @v: pointer of type atomic_t
1079  * @a: the amount to add to v...
1080  * @u: ...unless v is equal to u.
1081  *
1082  * Atomically adds @a to @v, so long as @v was not already @u.
1083  * Returns the original value of @v.
1084  */
1085 static __always_inline int
1086 atomic_fetch_add_unless(atomic_t *v, int a, int u)
1087 {
1088         int c = atomic_read(v);
1089
1090         do {
1091                 if (unlikely(c == u))
1092                         break;
1093         } while (!atomic_try_cmpxchg(v, &c, c + a));
1094
1095         return c;
1096 }
1097 #define atomic_fetch_add_unless atomic_fetch_add_unless
1098 #endif
1099
1100 #ifndef atomic_add_unless
1101 /**
1102  * atomic_add_unless - add unless the number is already a given value
1103  * @v: pointer of type atomic_t
1104  * @a: the amount to add to v...
1105  * @u: ...unless v is equal to u.
1106  *
1107  * Atomically adds @a to @v, if @v was not already @u.
1108  * Returns true if the addition was done.
1109  */
1110 static __always_inline bool
1111 atomic_add_unless(atomic_t *v, int a, int u)
1112 {
1113         return atomic_fetch_add_unless(v, a, u) != u;
1114 }
1115 #define atomic_add_unless atomic_add_unless
1116 #endif
1117
1118 #ifndef atomic_inc_not_zero
1119 /**
1120  * atomic_inc_not_zero - increment unless the number is zero
1121  * @v: pointer of type atomic_t
1122  *
1123  * Atomically increments @v by 1, if @v is non-zero.
1124  * Returns true if the increment was done.
1125  */
1126 static __always_inline bool
1127 atomic_inc_not_zero(atomic_t *v)
1128 {
1129         return atomic_add_unless(v, 1, 0);
1130 }
1131 #define atomic_inc_not_zero atomic_inc_not_zero
1132 #endif
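
/*
 * atomic_inc_not_zero() is the usual "take a reference only if the object
 * is still alive" primitive: a zero count means the object is being torn
 * down and must not be resurrected.  A sketch of the common RCU lookup
 * pattern (the lookup() helper and 'refs' field are illustrative):
 *
 *	rcu_read_lock();
 *	obj = lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;	// already dying, leave it alone
 *	rcu_read_unlock();
 */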
1133
1134 #ifndef atomic_inc_unless_negative
1135 static __always_inline bool
1136 atomic_inc_unless_negative(atomic_t *v)
1137 {
1138         int c = atomic_read(v);
1139
1140         do {
1141                 if (unlikely(c < 0))
1142                         return false;
1143         } while (!atomic_try_cmpxchg(v, &c, c + 1));
1144
1145         return true;
1146 }
1147 #define atomic_inc_unless_negative atomic_inc_unless_negative
1148 #endif
1149
1150 #ifndef atomic_dec_unless_positive
1151 static __always_inline bool
1152 atomic_dec_unless_positive(atomic_t *v)
1153 {
1154         int c = atomic_read(v);
1155
1156         do {
1157                 if (unlikely(c > 0))
1158                         return false;
1159         } while (!atomic_try_cmpxchg(v, &c, c - 1));
1160
1161         return true;
1162 }
1163 #define atomic_dec_unless_positive atomic_dec_unless_positive
1164 #endif
1165
1166 #ifndef atomic_dec_if_positive
1167 static __always_inline int
1168 atomic_dec_if_positive(atomic_t *v)
1169 {
1170         int dec, c = atomic_read(v);
1171
1172         do {
1173                 dec = c - 1;
1174                 if (unlikely(dec < 0))
1175                         break;
1176         } while (!atomic_try_cmpxchg(v, &c, dec));
1177
1178         return dec;
1179 }
1180 #define atomic_dec_if_positive atomic_dec_if_positive
1181 #endif
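
/*
 * atomic_dec_if_positive() suits "take one token if any are left" counters;
 * it returns the new value, which is negative when nothing was taken.
 * Illustrative (made-up) use:
 *
 *	if (atomic_dec_if_positive(&free_slots) < 0)
 *		return -EBUSY;	// no slot was available
 *	// a slot has been reserved for us
 */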
1182
1183 #define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
1184 #define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
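
/*
 * These wrap smp_cond_load_acquire()/smp_cond_load_relaxed(): spin until
 * the expression @c becomes true, with VAL standing for the value just
 * loaded from the counter.  For example (illustrative only):
 *
 *	atomic_cond_read_acquire(&users, !VAL);
 *
 * waits, possibly more efficiently than a plain read loop, until 'users'
 * reads as zero, with acquire ordering against whatever follows.
 */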
1185
1186 #ifdef CONFIG_GENERIC_ATOMIC64
1187 #include <asm-generic/atomic64.h>
1188 #endif
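
/*
 * Everything below mirrors the atomic_t fallbacks above, instantiated for
 * atomic64_t/s64: the ordering rules and fallback strategy are identical,
 * only the types differ.
 */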
1189
1190 #ifndef atomic64_read_acquire
1191 static __always_inline s64
1192 atomic64_read_acquire(const atomic64_t *v)
1193 {
1194         return smp_load_acquire(&(v)->counter);
1195 }
1196 #define atomic64_read_acquire atomic64_read_acquire
1197 #endif
1198
1199 #ifndef atomic64_set_release
1200 static __always_inline void
1201 atomic64_set_release(atomic64_t *v, s64 i)
1202 {
1203         smp_store_release(&(v)->counter, i);
1204 }
1205 #define atomic64_set_release atomic64_set_release
1206 #endif
1207
1208 #ifndef atomic64_add_return_relaxed
1209 #define atomic64_add_return_acquire atomic64_add_return
1210 #define atomic64_add_return_release atomic64_add_return
1211 #define atomic64_add_return_relaxed atomic64_add_return
1212 #else /* atomic64_add_return_relaxed */
1213
1214 #ifndef atomic64_add_return_acquire
1215 static __always_inline s64
1216 atomic64_add_return_acquire(s64 i, atomic64_t *v)
1217 {
1218         s64 ret = atomic64_add_return_relaxed(i, v);
1219         __atomic_acquire_fence();
1220         return ret;
1221 }
1222 #define atomic64_add_return_acquire atomic64_add_return_acquire
1223 #endif
1224
1225 #ifndef atomic64_add_return_release
1226 static __always_inline s64
1227 atomic64_add_return_release(s64 i, atomic64_t *v)
1228 {
1229         __atomic_release_fence();
1230         return atomic64_add_return_relaxed(i, v);
1231 }
1232 #define atomic64_add_return_release atomic64_add_return_release
1233 #endif
1234
1235 #ifndef atomic64_add_return
1236 static __always_inline s64
1237 atomic64_add_return(s64 i, atomic64_t *v)
1238 {
1239         s64 ret;
1240         __atomic_pre_full_fence();
1241         ret = atomic64_add_return_relaxed(i, v);
1242         __atomic_post_full_fence();
1243         return ret;
1244 }
1245 #define atomic64_add_return atomic64_add_return
1246 #endif
1247
1248 #endif /* atomic64_add_return_relaxed */
1249
1250 #ifndef atomic64_fetch_add_relaxed
1251 #define atomic64_fetch_add_acquire atomic64_fetch_add
1252 #define atomic64_fetch_add_release atomic64_fetch_add
1253 #define atomic64_fetch_add_relaxed atomic64_fetch_add
1254 #else /* atomic64_fetch_add_relaxed */
1255
1256 #ifndef atomic64_fetch_add_acquire
1257 static __always_inline s64
1258 atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1259 {
1260         s64 ret = atomic64_fetch_add_relaxed(i, v);
1261         __atomic_acquire_fence();
1262         return ret;
1263 }
1264 #define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
1265 #endif
1266
1267 #ifndef atomic64_fetch_add_release
1268 static __always_inline s64
1269 atomic64_fetch_add_release(s64 i, atomic64_t *v)
1270 {
1271         __atomic_release_fence();
1272         return atomic64_fetch_add_relaxed(i, v);
1273 }
1274 #define atomic64_fetch_add_release atomic64_fetch_add_release
1275 #endif
1276
1277 #ifndef atomic64_fetch_add
1278 static __always_inline s64
1279 atomic64_fetch_add(s64 i, atomic64_t *v)
1280 {
1281         s64 ret;
1282         __atomic_pre_full_fence();
1283         ret = atomic64_fetch_add_relaxed(i, v);
1284         __atomic_post_full_fence();
1285         return ret;
1286 }
1287 #define atomic64_fetch_add atomic64_fetch_add
1288 #endif
1289
1290 #endif /* atomic64_fetch_add_relaxed */
1291
1292 #ifndef atomic64_sub_return_relaxed
1293 #define atomic64_sub_return_acquire atomic64_sub_return
1294 #define atomic64_sub_return_release atomic64_sub_return
1295 #define atomic64_sub_return_relaxed atomic64_sub_return
1296 #else /* atomic64_sub_return_relaxed */
1297
1298 #ifndef atomic64_sub_return_acquire
1299 static __always_inline s64
1300 atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1301 {
1302         s64 ret = atomic64_sub_return_relaxed(i, v);
1303         __atomic_acquire_fence();
1304         return ret;
1305 }
1306 #define atomic64_sub_return_acquire atomic64_sub_return_acquire
1307 #endif
1308
1309 #ifndef atomic64_sub_return_release
1310 static __always_inline s64
1311 atomic64_sub_return_release(s64 i, atomic64_t *v)
1312 {
1313         __atomic_release_fence();
1314         return atomic64_sub_return_relaxed(i, v);
1315 }
1316 #define atomic64_sub_return_release atomic64_sub_return_release
1317 #endif
1318
1319 #ifndef atomic64_sub_return
1320 static __always_inline s64
1321 atomic64_sub_return(s64 i, atomic64_t *v)
1322 {
1323         s64 ret;
1324         __atomic_pre_full_fence();
1325         ret = atomic64_sub_return_relaxed(i, v);
1326         __atomic_post_full_fence();
1327         return ret;
1328 }
1329 #define atomic64_sub_return atomic64_sub_return
1330 #endif
1331
1332 #endif /* atomic64_sub_return_relaxed */
1333
1334 #ifndef atomic64_fetch_sub_relaxed
1335 #define atomic64_fetch_sub_acquire atomic64_fetch_sub
1336 #define atomic64_fetch_sub_release atomic64_fetch_sub
1337 #define atomic64_fetch_sub_relaxed atomic64_fetch_sub
1338 #else /* atomic64_fetch_sub_relaxed */
1339
1340 #ifndef atomic64_fetch_sub_acquire
1341 static __always_inline s64
1342 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1343 {
1344         s64 ret = atomic64_fetch_sub_relaxed(i, v);
1345         __atomic_acquire_fence();
1346         return ret;
1347 }
1348 #define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
1349 #endif
1350
1351 #ifndef atomic64_fetch_sub_release
1352 static __always_inline s64
1353 atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1354 {
1355         __atomic_release_fence();
1356         return atomic64_fetch_sub_relaxed(i, v);
1357 }
1358 #define atomic64_fetch_sub_release atomic64_fetch_sub_release
1359 #endif
1360
1361 #ifndef atomic64_fetch_sub
1362 static __always_inline s64
1363 atomic64_fetch_sub(s64 i, atomic64_t *v)
1364 {
1365         s64 ret;
1366         __atomic_pre_full_fence();
1367         ret = atomic64_fetch_sub_relaxed(i, v);
1368         __atomic_post_full_fence();
1369         return ret;
1370 }
1371 #define atomic64_fetch_sub atomic64_fetch_sub
1372 #endif
1373
1374 #endif /* atomic64_fetch_sub_relaxed */
1375
1376 #ifndef atomic64_inc
1377 static __always_inline void
1378 atomic64_inc(atomic64_t *v)
1379 {
1380         atomic64_add(1, v);
1381 }
1382 #define atomic64_inc atomic64_inc
1383 #endif
1384
1385 #ifndef atomic64_inc_return_relaxed
1386 #ifdef atomic64_inc_return
1387 #define atomic64_inc_return_acquire atomic64_inc_return
1388 #define atomic64_inc_return_release atomic64_inc_return
1389 #define atomic64_inc_return_relaxed atomic64_inc_return
1390 #endif /* atomic64_inc_return */
1391
1392 #ifndef atomic64_inc_return
1393 static __always_inline s64
1394 atomic64_inc_return(atomic64_t *v)
1395 {
1396         return atomic64_add_return(1, v);
1397 }
1398 #define atomic64_inc_return atomic64_inc_return
1399 #endif
1400
1401 #ifndef atomic64_inc_return_acquire
1402 static __always_inline s64
1403 atomic64_inc_return_acquire(atomic64_t *v)
1404 {
1405         return atomic64_add_return_acquire(1, v);
1406 }
1407 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1408 #endif
1409
1410 #ifndef atomic64_inc_return_release
1411 static __always_inline s64
1412 atomic64_inc_return_release(atomic64_t *v)
1413 {
1414         return atomic64_add_return_release(1, v);
1415 }
1416 #define atomic64_inc_return_release atomic64_inc_return_release
1417 #endif
1418
1419 #ifndef atomic64_inc_return_relaxed
1420 static __always_inline s64
1421 atomic64_inc_return_relaxed(atomic64_t *v)
1422 {
1423         return atomic64_add_return_relaxed(1, v);
1424 }
1425 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1426 #endif
1427
1428 #else /* atomic64_inc_return_relaxed */
1429
1430 #ifndef atomic64_inc_return_acquire
1431 static __always_inline s64
1432 atomic64_inc_return_acquire(atomic64_t *v)
1433 {
1434         s64 ret = atomic64_inc_return_relaxed(v);
1435         __atomic_acquire_fence();
1436         return ret;
1437 }
1438 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1439 #endif
1440
1441 #ifndef atomic64_inc_return_release
1442 static __always_inline s64
1443 atomic64_inc_return_release(atomic64_t *v)
1444 {
1445         __atomic_release_fence();
1446         return atomic64_inc_return_relaxed(v);
1447 }
1448 #define atomic64_inc_return_release atomic64_inc_return_release
1449 #endif
1450
1451 #ifndef atomic64_inc_return
1452 static __always_inline s64
1453 atomic64_inc_return(atomic64_t *v)
1454 {
1455         s64 ret;
1456         __atomic_pre_full_fence();
1457         ret = atomic64_inc_return_relaxed(v);
1458         __atomic_post_full_fence();
1459         return ret;
1460 }
1461 #define atomic64_inc_return atomic64_inc_return
1462 #endif
1463
1464 #endif /* atomic64_inc_return_relaxed */
1465
1466 #ifndef atomic64_fetch_inc_relaxed
1467 #ifdef atomic64_fetch_inc
1468 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
1469 #define atomic64_fetch_inc_release atomic64_fetch_inc
1470 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
1471 #endif /* atomic64_fetch_inc */
1472
1473 #ifndef atomic64_fetch_inc
1474 static __always_inline s64
1475 atomic64_fetch_inc(atomic64_t *v)
1476 {
1477         return atomic64_fetch_add(1, v);
1478 }
1479 #define atomic64_fetch_inc atomic64_fetch_inc
1480 #endif
1481
1482 #ifndef atomic64_fetch_inc_acquire
1483 static __always_inline s64
1484 atomic64_fetch_inc_acquire(atomic64_t *v)
1485 {
1486         return atomic64_fetch_add_acquire(1, v);
1487 }
1488 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1489 #endif
1490
1491 #ifndef atomic64_fetch_inc_release
1492 static __always_inline s64
1493 atomic64_fetch_inc_release(atomic64_t *v)
1494 {
1495         return atomic64_fetch_add_release(1, v);
1496 }
1497 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1498 #endif
1499
1500 #ifndef atomic64_fetch_inc_relaxed
1501 static __always_inline s64
1502 atomic64_fetch_inc_relaxed(atomic64_t *v)
1503 {
1504         return atomic64_fetch_add_relaxed(1, v);
1505 }
1506 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1507 #endif
1508
1509 #else /* atomic64_fetch_inc_relaxed */
1510
1511 #ifndef atomic64_fetch_inc_acquire
1512 static __always_inline s64
1513 atomic64_fetch_inc_acquire(atomic64_t *v)
1514 {
1515         s64 ret = atomic64_fetch_inc_relaxed(v);
1516         __atomic_acquire_fence();
1517         return ret;
1518 }
1519 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1520 #endif
1521
1522 #ifndef atomic64_fetch_inc_release
1523 static __always_inline s64
1524 atomic64_fetch_inc_release(atomic64_t *v)
1525 {
1526         __atomic_release_fence();
1527         return atomic64_fetch_inc_relaxed(v);
1528 }
1529 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1530 #endif
1531
1532 #ifndef atomic64_fetch_inc
1533 static __always_inline s64
1534 atomic64_fetch_inc(atomic64_t *v)
1535 {
1536         s64 ret;
1537         __atomic_pre_full_fence();
1538         ret = atomic64_fetch_inc_relaxed(v);
1539         __atomic_post_full_fence();
1540         return ret;
1541 }
1542 #define atomic64_fetch_inc atomic64_fetch_inc
1543 #endif
1544
1545 #endif /* atomic64_fetch_inc_relaxed */
1546
1547 #ifndef atomic64_dec
1548 static __always_inline void
1549 atomic64_dec(atomic64_t *v)
1550 {
1551         atomic64_sub(1, v);
1552 }
1553 #define atomic64_dec atomic64_dec
1554 #endif
1555
1556 #ifndef atomic64_dec_return_relaxed
1557 #ifdef atomic64_dec_return
1558 #define atomic64_dec_return_acquire atomic64_dec_return
1559 #define atomic64_dec_return_release atomic64_dec_return
1560 #define atomic64_dec_return_relaxed atomic64_dec_return
1561 #endif /* atomic64_dec_return */
1562
1563 #ifndef atomic64_dec_return
1564 static __always_inline s64
1565 atomic64_dec_return(atomic64_t *v)
1566 {
1567         return atomic64_sub_return(1, v);
1568 }
1569 #define atomic64_dec_return atomic64_dec_return
1570 #endif
1571
1572 #ifndef atomic64_dec_return_acquire
1573 static __always_inline s64
1574 atomic64_dec_return_acquire(atomic64_t *v)
1575 {
1576         return atomic64_sub_return_acquire(1, v);
1577 }
1578 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1579 #endif
1580
1581 #ifndef atomic64_dec_return_release
1582 static __always_inline s64
1583 atomic64_dec_return_release(atomic64_t *v)
1584 {
1585         return atomic64_sub_return_release(1, v);
1586 }
1587 #define atomic64_dec_return_release atomic64_dec_return_release
1588 #endif
1589
1590 #ifndef atomic64_dec_return_relaxed
1591 static __always_inline s64
1592 atomic64_dec_return_relaxed(atomic64_t *v)
1593 {
1594         return atomic64_sub_return_relaxed(1, v);
1595 }
1596 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1597 #endif
1598
1599 #else /* atomic64_dec_return_relaxed */
1600
1601 #ifndef atomic64_dec_return_acquire
1602 static __always_inline s64
1603 atomic64_dec_return_acquire(atomic64_t *v)
1604 {
1605         s64 ret = atomic64_dec_return_relaxed(v);
1606         __atomic_acquire_fence();
1607         return ret;
1608 }
1609 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1610 #endif
1611
1612 #ifndef atomic64_dec_return_release
1613 static __always_inline s64
1614 atomic64_dec_return_release(atomic64_t *v)
1615 {
1616         __atomic_release_fence();
1617         return atomic64_dec_return_relaxed(v);
1618 }
1619 #define atomic64_dec_return_release atomic64_dec_return_release
1620 #endif
1621
1622 #ifndef atomic64_dec_return
1623 static __always_inline s64
1624 atomic64_dec_return(atomic64_t *v)
1625 {
1626         s64 ret;
1627         __atomic_pre_full_fence();
1628         ret = atomic64_dec_return_relaxed(v);
1629         __atomic_post_full_fence();
1630         return ret;
1631 }
1632 #define atomic64_dec_return atomic64_dec_return
1633 #endif
1634
1635 #endif /* atomic64_dec_return_relaxed */
1636
1637 #ifndef atomic64_fetch_dec_relaxed
1638 #ifdef atomic64_fetch_dec
1639 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
1640 #define atomic64_fetch_dec_release atomic64_fetch_dec
1641 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
1642 #endif /* atomic64_fetch_dec */
1643
1644 #ifndef atomic64_fetch_dec
1645 static __always_inline s64
1646 atomic64_fetch_dec(atomic64_t *v)
1647 {
1648         return atomic64_fetch_sub(1, v);
1649 }
1650 #define atomic64_fetch_dec atomic64_fetch_dec
1651 #endif
1652
1653 #ifndef atomic64_fetch_dec_acquire
1654 static __always_inline s64
1655 atomic64_fetch_dec_acquire(atomic64_t *v)
1656 {
1657         return atomic64_fetch_sub_acquire(1, v);
1658 }
1659 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1660 #endif
1661
1662 #ifndef atomic64_fetch_dec_release
1663 static __always_inline s64
1664 atomic64_fetch_dec_release(atomic64_t *v)
1665 {
1666         return atomic64_fetch_sub_release(1, v);
1667 }
1668 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1669 #endif
1670
1671 #ifndef atomic64_fetch_dec_relaxed
1672 static __always_inline s64
1673 atomic64_fetch_dec_relaxed(atomic64_t *v)
1674 {
1675         return atomic64_fetch_sub_relaxed(1, v);
1676 }
1677 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1678 #endif
1679
1680 #else /* atomic64_fetch_dec_relaxed */
1681
1682 #ifndef atomic64_fetch_dec_acquire
1683 static __always_inline s64
1684 atomic64_fetch_dec_acquire(atomic64_t *v)
1685 {
1686         s64 ret = atomic64_fetch_dec_relaxed(v);
1687         __atomic_acquire_fence();
1688         return ret;
1689 }
1690 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1691 #endif
1692
1693 #ifndef atomic64_fetch_dec_release
1694 static __always_inline s64
1695 atomic64_fetch_dec_release(atomic64_t *v)
1696 {
1697         __atomic_release_fence();
1698         return atomic64_fetch_dec_relaxed(v);
1699 }
1700 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1701 #endif
1702
1703 #ifndef atomic64_fetch_dec
1704 static __always_inline s64
1705 atomic64_fetch_dec(atomic64_t *v)
1706 {
1707         s64 ret;
1708         __atomic_pre_full_fence();
1709         ret = atomic64_fetch_dec_relaxed(v);
1710         __atomic_post_full_fence();
1711         return ret;
1712 }
1713 #define atomic64_fetch_dec atomic64_fetch_dec
1714 #endif
1715
1716 #endif /* atomic64_fetch_dec_relaxed */
1717
1718 #ifndef atomic64_fetch_and_relaxed
1719 #define atomic64_fetch_and_acquire atomic64_fetch_and
1720 #define atomic64_fetch_and_release atomic64_fetch_and
1721 #define atomic64_fetch_and_relaxed atomic64_fetch_and
1722 #else /* atomic64_fetch_and_relaxed */
1723
1724 #ifndef atomic64_fetch_and_acquire
1725 static __always_inline s64
1726 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1727 {
1728         s64 ret = atomic64_fetch_and_relaxed(i, v);
1729         __atomic_acquire_fence();
1730         return ret;
1731 }
1732 #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1733 #endif
1734
1735 #ifndef atomic64_fetch_and_release
1736 static __always_inline s64
1737 atomic64_fetch_and_release(s64 i, atomic64_t *v)
1738 {
1739         __atomic_release_fence();
1740         return atomic64_fetch_and_relaxed(i, v);
1741 }
1742 #define atomic64_fetch_and_release atomic64_fetch_and_release
1743 #endif
1744
1745 #ifndef atomic64_fetch_and
1746 static __always_inline s64
1747 atomic64_fetch_and(s64 i, atomic64_t *v)
1748 {
1749         s64 ret;
1750         __atomic_pre_full_fence();
1751         ret = atomic64_fetch_and_relaxed(i, v);
1752         __atomic_post_full_fence();
1753         return ret;
1754 }
1755 #define atomic64_fetch_and atomic64_fetch_and
1756 #endif
1757
1758 #endif /* atomic64_fetch_and_relaxed */
1759
1760 #ifndef atomic64_andnot
1761 static __always_inline void
1762 atomic64_andnot(s64 i, atomic64_t *v)
1763 {
1764         atomic64_and(~i, v);
1765 }
1766 #define atomic64_andnot atomic64_andnot
1767 #endif
1768
1769 #ifndef atomic64_fetch_andnot_relaxed
1770 #ifdef atomic64_fetch_andnot
1771 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
1772 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
1773 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
1774 #endif /* atomic64_fetch_andnot */
1775
1776 #ifndef atomic64_fetch_andnot
1777 static __always_inline s64
1778 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1779 {
1780         return atomic64_fetch_and(~i, v);
1781 }
1782 #define atomic64_fetch_andnot atomic64_fetch_andnot
1783 #endif
1784
1785 #ifndef atomic64_fetch_andnot_acquire
1786 static __always_inline s64
1787 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1788 {
1789         return atomic64_fetch_and_acquire(~i, v);
1790 }
1791 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1792 #endif
1793
1794 #ifndef atomic64_fetch_andnot_release
1795 static __always_inline s64
1796 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1797 {
1798         return atomic64_fetch_and_release(~i, v);
1799 }
1800 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1801 #endif
1802
1803 #ifndef atomic64_fetch_andnot_relaxed
1804 static __always_inline s64
1805 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1806 {
1807         return atomic64_fetch_and_relaxed(~i, v);
1808 }
1809 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1810 #endif
1811
1812 #else /* atomic64_fetch_andnot_relaxed */
1813
1814 #ifndef atomic64_fetch_andnot_acquire
1815 static __always_inline s64
1816 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1817 {
1818         s64 ret = atomic64_fetch_andnot_relaxed(i, v);
1819         __atomic_acquire_fence();
1820         return ret;
1821 }
1822 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1823 #endif
1824
1825 #ifndef atomic64_fetch_andnot_release
1826 static __always_inline s64
1827 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1828 {
1829         __atomic_release_fence();
1830         return atomic64_fetch_andnot_relaxed(i, v);
1831 }
1832 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1833 #endif
1834
1835 #ifndef atomic64_fetch_andnot
1836 static __always_inline s64
1837 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1838 {
1839         s64 ret;
1840         __atomic_pre_full_fence();
1841         ret = atomic64_fetch_andnot_relaxed(i, v);
1842         __atomic_post_full_fence();
1843         return ret;
1844 }
1845 #define atomic64_fetch_andnot atomic64_fetch_andnot
1846 #endif
1847
1848 #endif /* atomic64_fetch_andnot_relaxed */
1849
1850 #ifndef atomic64_fetch_or_relaxed
1851 #define atomic64_fetch_or_acquire atomic64_fetch_or
1852 #define atomic64_fetch_or_release atomic64_fetch_or
1853 #define atomic64_fetch_or_relaxed atomic64_fetch_or
1854 #else /* atomic64_fetch_or_relaxed */
1855
1856 #ifndef atomic64_fetch_or_acquire
1857 static __always_inline s64
1858 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
1859 {
1860         s64 ret = atomic64_fetch_or_relaxed(i, v);
1861         __atomic_acquire_fence();
1862         return ret;
1863 }
1864 #define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
1865 #endif
1866
1867 #ifndef atomic64_fetch_or_release
1868 static __always_inline s64
1869 atomic64_fetch_or_release(s64 i, atomic64_t *v)
1870 {
1871         __atomic_release_fence();
1872         return atomic64_fetch_or_relaxed(i, v);
1873 }
1874 #define atomic64_fetch_or_release atomic64_fetch_or_release
1875 #endif
1876
1877 #ifndef atomic64_fetch_or
1878 static __always_inline s64
1879 atomic64_fetch_or(s64 i, atomic64_t *v)
1880 {
1881         s64 ret;
1882         __atomic_pre_full_fence();
1883         ret = atomic64_fetch_or_relaxed(i, v);
1884         __atomic_post_full_fence();
1885         return ret;
1886 }
1887 #define atomic64_fetch_or atomic64_fetch_or
1888 #endif
1889
1890 #endif /* atomic64_fetch_or_relaxed */
1891
1892 #ifndef atomic64_fetch_xor_relaxed
1893 #define atomic64_fetch_xor_acquire atomic64_fetch_xor
1894 #define atomic64_fetch_xor_release atomic64_fetch_xor
1895 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor
1896 #else /* atomic64_fetch_xor_relaxed */
1897
1898 #ifndef atomic64_fetch_xor_acquire
1899 static __always_inline s64
1900 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
1901 {
1902         s64 ret = atomic64_fetch_xor_relaxed(i, v);
1903         __atomic_acquire_fence();
1904         return ret;
1905 }
1906 #define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
1907 #endif
1908
1909 #ifndef atomic64_fetch_xor_release
1910 static __always_inline s64
1911 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
1912 {
1913         __atomic_release_fence();
1914         return atomic64_fetch_xor_relaxed(i, v);
1915 }
1916 #define atomic64_fetch_xor_release atomic64_fetch_xor_release
1917 #endif
1918
1919 #ifndef atomic64_fetch_xor
1920 static __always_inline s64
1921 atomic64_fetch_xor(s64 i, atomic64_t *v)
1922 {
1923         s64 ret;
1924         __atomic_pre_full_fence();
1925         ret = atomic64_fetch_xor_relaxed(i, v);
1926         __atomic_post_full_fence();
1927         return ret;
1928 }
1929 #define atomic64_fetch_xor atomic64_fetch_xor
1930 #endif
1931
1932 #endif /* atomic64_fetch_xor_relaxed */
1933
1934 #ifndef atomic64_xchg_relaxed
1935 #define atomic64_xchg_acquire atomic64_xchg
1936 #define atomic64_xchg_release atomic64_xchg
1937 #define atomic64_xchg_relaxed atomic64_xchg
1938 #else /* atomic64_xchg_relaxed */
1939
1940 #ifndef atomic64_xchg_acquire
1941 static __always_inline s64
1942 atomic64_xchg_acquire(atomic64_t *v, s64 i)
1943 {
1944         s64 ret = atomic64_xchg_relaxed(v, i);
1945         __atomic_acquire_fence();
1946         return ret;
1947 }
1948 #define atomic64_xchg_acquire atomic64_xchg_acquire
1949 #endif
1950
1951 #ifndef atomic64_xchg_release
1952 static __always_inline s64
1953 atomic64_xchg_release(atomic64_t *v, s64 i)
1954 {
1955         __atomic_release_fence();
1956         return atomic64_xchg_relaxed(v, i);
1957 }
1958 #define atomic64_xchg_release atomic64_xchg_release
1959 #endif
1960
1961 #ifndef atomic64_xchg
1962 static __always_inline s64
1963 atomic64_xchg(atomic64_t *v, s64 i)
1964 {
1965         s64 ret;
1966         __atomic_pre_full_fence();
1967         ret = atomic64_xchg_relaxed(v, i);
1968         __atomic_post_full_fence();
1969         return ret;
1970 }
1971 #define atomic64_xchg atomic64_xchg
1972 #endif
1973
1974 #endif /* atomic64_xchg_relaxed */
1975
1976 #ifndef atomic64_cmpxchg_relaxed
1977 #define atomic64_cmpxchg_acquire atomic64_cmpxchg
1978 #define atomic64_cmpxchg_release atomic64_cmpxchg
1979 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg
1980 #else /* atomic64_cmpxchg_relaxed */
1981
1982 #ifndef atomic64_cmpxchg_acquire
1983 static __always_inline s64
1984 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
1985 {
1986         s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
1987         __atomic_acquire_fence();
1988         return ret;
1989 }
1990 #define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
1991 #endif
1992
1993 #ifndef atomic64_cmpxchg_release
1994 static __always_inline s64
1995 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
1996 {
1997         __atomic_release_fence();
1998         return atomic64_cmpxchg_relaxed(v, old, new);
1999 }
2000 #define atomic64_cmpxchg_release atomic64_cmpxchg_release
2001 #endif
2002
2003 #ifndef atomic64_cmpxchg
2004 static __always_inline s64
2005 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2006 {
2007         s64 ret;
2008         __atomic_pre_full_fence();
2009         ret = atomic64_cmpxchg_relaxed(v, old, new);
2010         __atomic_post_full_fence();
2011         return ret;
2012 }
2013 #define atomic64_cmpxchg atomic64_cmpxchg
2014 #endif
2015
2016 #endif /* atomic64_cmpxchg_relaxed */
2017
2018 #ifndef atomic64_try_cmpxchg_relaxed
2019 #ifdef atomic64_try_cmpxchg
2020 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
2021 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
2022 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
2023 #endif /* atomic64_try_cmpxchg */
2024
2025 #ifndef atomic64_try_cmpxchg
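/*
 * Fallback in terms of atomic64_cmpxchg(): compare against the caller's
 * expected value and, on failure, copy the value actually found back
 * into *old so the caller can retry without an extra read.
 */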
2026 static __always_inline bool
2027 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2028 {
2029         s64 r, o = *old;
2030         r = atomic64_cmpxchg(v, o, new);
2031         if (unlikely(r != o))
2032                 *old = r;
2033         return likely(r == o);
2034 }
2035 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2036 #endif
2037
2038 #ifndef atomic64_try_cmpxchg_acquire
2039 static __always_inline bool
2040 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2041 {
2042         s64 r, o = *old;
2043         r = atomic64_cmpxchg_acquire(v, o, new);
2044         if (unlikely(r != o))
2045                 *old = r;
2046         return likely(r == o);
2047 }
2048 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2049 #endif
2050
2051 #ifndef atomic64_try_cmpxchg_release
2052 static __always_inline bool
2053 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2054 {
2055         s64 r, o = *old;
2056         r = atomic64_cmpxchg_release(v, o, new);
2057         if (unlikely(r != o))
2058                 *old = r;
2059         return likely(r == o);
2060 }
2061 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2062 #endif
2063
2064 #ifndef atomic64_try_cmpxchg_relaxed
2065 static __always_inline bool
2066 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2067 {
2068         s64 r, o = *old;
2069         r = atomic64_cmpxchg_relaxed(v, o, new);
2070         if (unlikely(r != o))
2071                 *old = r;
2072         return likely(r == o);
2073 }
2074 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2075 #endif
2076
2077 #else /* atomic64_try_cmpxchg_relaxed */
2078
2079 #ifndef atomic64_try_cmpxchg_acquire
2080 static __always_inline bool
2081 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2082 {
2083         bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2084         __atomic_acquire_fence();
2085         return ret;
2086 }
2087 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2088 #endif
2089
2090 #ifndef atomic64_try_cmpxchg_release
2091 static __always_inline bool
2092 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2093 {
2094         __atomic_release_fence();
2095         return atomic64_try_cmpxchg_relaxed(v, old, new);
2096 }
2097 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2098 #endif
2099
2100 #ifndef atomic64_try_cmpxchg
2101 static __always_inline bool
2102 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2103 {
2104         bool ret;
2105         __atomic_pre_full_fence();
2106         ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2107         __atomic_post_full_fence();
2108         return ret;
2109 }
2110 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2111 #endif
2112
2113 #endif /* atomic64_try_cmpxchg_relaxed */
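
/*
 * Illustrative sketch (editorial, not produced by the generator):
 * try_cmpxchg() suits retry loops because a failed attempt refreshes
 * the expected value automatically. A hypothetical lock-free
 * "record the maximum" update of an atomic64_t max_seen:
 *
 *        s64 old = atomic64_read(&max_seen);
 *
 *        do {
 *                if (old >= new)
 *                        break;
 *        } while (!atomic64_try_cmpxchg(&max_seen, &old, new));
 */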
2114
2115 #ifndef atomic64_sub_and_test
2116 /**
2117  * atomic64_sub_and_test - subtract value from variable and test result
2118  * @i: integer value to subtract
2119  * @v: pointer of type atomic64_t
2120  *
2121  * Atomically subtracts @i from @v and returns
2122  * true if the result is zero, or false for all
2123  * other cases.
2124  */
2125 static __always_inline bool
2126 atomic64_sub_and_test(s64 i, atomic64_t *v)
2127 {
2128         return atomic64_sub_return(i, v) == 0;
2129 }
2130 #define atomic64_sub_and_test atomic64_sub_and_test
2131 #endif
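
/*
 * Illustrative sketch (editorial): dropping several references at once
 * and freeing on the last one; obj->refs and release_obj() are
 * hypothetical.
 *
 *        if (atomic64_sub_and_test(nr_refs, &obj->refs))
 *                release_obj(obj);
 */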
2132
2133 #ifndef atomic64_dec_and_test
2134 /**
2135  * atomic64_dec_and_test - decrement and test
2136  * @v: pointer of type atomic64_t
2137  *
2138  * Atomically decrements @v by 1 and
2139  * returns true if the result is 0, or false for all other
2140  * cases.
2141  */
2142 static __always_inline bool
2143 atomic64_dec_and_test(atomic64_t *v)
2144 {
2145         return atomic64_dec_return(v) == 0;
2146 }
2147 #define atomic64_dec_and_test atomic64_dec_and_test
2148 #endif
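
/*
 * Illustrative sketch (editorial): the classic reference-count "put",
 * where the thread dropping the final reference frees the object;
 * obj->refs and release_obj() are hypothetical.
 *
 *        if (atomic64_dec_and_test(&obj->refs))
 *                release_obj(obj);
 */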
2149
2150 #ifndef atomic64_inc_and_test
2151 /**
2152  * atomic64_inc_and_test - increment and test
2153  * @v: pointer of type atomic64_t
2154  *
2155  * Atomically increments @v by 1
2156  * and returns true if the result is zero, or false for all
2157  * other cases.
2158  */
2159 static __always_inline bool
2160 atomic64_inc_and_test(atomic64_t *v)
2161 {
2162         return atomic64_inc_return(v) == 0;
2163 }
2164 #define atomic64_inc_and_test atomic64_inc_and_test
2165 #endif
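
/*
 * Illustrative sketch (editorial): counting completions up from a
 * negative bias and signalling once the count reaches zero; pending,
 * nr_outstanding and wake_waiters() are hypothetical.
 *
 *        atomic64_set(&pending, -nr_outstanding);
 *
 * and each completing worker then does:
 *
 *        if (atomic64_inc_and_test(&pending))
 *                wake_waiters();
 */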
2166
2167 #ifndef atomic64_add_negative
2168 /**
2169  * atomic64_add_negative - add and test if negative
2170  * @i: integer value to add
2171  * @v: pointer of type atomic64_t
2172  *
2173  * Atomically adds @i to @v and returns true
2174  * if the result is negative, or false when
2175  * the result is greater than or equal to zero.
2176  */
2177 static __always_inline bool
2178 atomic64_add_negative(s64 i, atomic64_t *v)
2179 {
2180         return atomic64_add_return(i, v) < 0;
2181 }
2182 #define atomic64_add_negative atomic64_add_negative
2183 #endif
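
/*
 * Illustrative sketch (editorial): charging bytes against a signed
 * budget and reacting once the balance goes negative; budget, nbytes
 * and throttle() are hypothetical.
 *
 *        if (atomic64_add_negative(-nbytes, &budget))
 *                throttle();
 */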
2184
2185 #ifndef atomic64_fetch_add_unless
2186 /**
2187  * atomic64_fetch_add_unless - add unless the number is already a given value
2188  * @v: pointer of type atomic64_t
2189  * @a: the amount to add to v...
2190  * @u: ...unless v is equal to u.
2191  *
2192  * Atomically adds @a to @v, so long as @v was not already @u.
2193  * Returns the original value of @v.
2194  */
2195 static __always_inline s64
2196 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2197 {
2198         s64 c = atomic64_read(v);
2199
2200         do {
2201                 if (unlikely(c == u))
2202                         break;
2203         } while (!atomic64_try_cmpxchg(v, &c, c + a));
2204
2205         return c;
2206 }
2207 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
2208 #endif
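
/*
 * Illustrative sketch (editorial): the returned old value tells the
 * caller whether the add actually happened. Taking a usage reference
 * unless the counter was already marked dead with -1; users is
 * hypothetical.
 *
 *        bool alive = atomic64_fetch_add_unless(&users, 1, -1) != -1;
 */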
2209
2210 #ifndef atomic64_add_unless
2211 /**
2212  * atomic64_add_unless - add unless the number is already a given value
2213  * @v: pointer of type atomic64_t
2214  * @a: the amount to add to v...
2215  * @u: ...unless v is equal to u.
2216  *
2217  * Atomically adds @a to @v, if @v was not already @u.
2218  * Returns true if the addition was done.
2219  */
2220 static __always_inline bool
2221 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2222 {
2223         return atomic64_fetch_add_unless(v, a, u) != u;
2224 }
2225 #define atomic64_add_unless atomic64_add_unless
2226 #endif
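
/*
 * Illustrative sketch (editorial): atomic64_inc_not_zero() below is the
 * canonical user of this helper. Another hypothetical use, refusing to
 * grow a counter past a fixed ceiling LIMIT (only exact increments of 1
 * can rely on this, since the test is for equality with @u):
 *
 *        if (!atomic64_add_unless(&active, 1, LIMIT))
 *                return false;
 */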
2227
2228 #ifndef atomic64_inc_not_zero
2229 /**
2230  * atomic64_inc_not_zero - increment unless the number is zero
2231  * @v: pointer of type atomic64_t
2232  *
2233  * Atomically increments @v by 1, if @v is non-zero.
2234  * Returns true if the increment was done.
2235  */
2236 static __always_inline bool
2237 atomic64_inc_not_zero(atomic64_t *v)
2238 {
2239         return atomic64_add_unless(v, 1, 0);
2240 }
2241 #define atomic64_inc_not_zero atomic64_inc_not_zero
2242 #endif
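
/*
 * Illustrative sketch (editorial): the usual lookup-side pattern of
 * taking a reference only if the object is not already being torn
 * down; obj->refs is hypothetical.
 *
 *        if (!atomic64_inc_not_zero(&obj->refs))
 *                return NULL;
 */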
2243
2244 #ifndef atomic64_inc_unless_negative
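/**
 * atomic64_inc_unless_negative - increment unless the number is negative
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-negative.
 * Returns true if the increment was done.
 */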
2245 static __always_inline bool
2246 atomic64_inc_unless_negative(atomic64_t *v)
2247 {
2248         s64 c = atomic64_read(v);
2249
2250         do {
2251                 if (unlikely(c < 0))
2252                         return false;
2253         } while (!atomic64_try_cmpxchg(v, &c, c + 1));
2254
2255         return true;
2256 }
2257 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
2258 #endif
2259
2260 #ifndef atomic64_dec_unless_positive
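/**
 * atomic64_dec_unless_positive - decrement unless the number is positive
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1, so long as @v is non-positive.
 * Returns true if the decrement was done.
 */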
2261 static __always_inline bool
2262 atomic64_dec_unless_positive(atomic64_t *v)
2263 {
2264         s64 c = atomic64_read(v);
2265
2266         do {
2267                 if (unlikely(c > 0))
2268                         return false;
2269         } while (!atomic64_try_cmpxchg(v, &c, c - 1));
2270
2271         return true;
2272 }
2273 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
2274 #endif
2275
2276 #ifndef atomic64_dec_if_positive
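/**
 * atomic64_dec_if_positive - decrement if the result stays non-negative
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1, so long as the result is non-negative.
 * Returns the decremented value; a negative return value means @v was
 * left unmodified.
 */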
2277 static __always_inline s64
2278 atomic64_dec_if_positive(atomic64_t *v)
2279 {
2280         s64 dec, c = atomic64_read(v);
2281
2282         do {
2283                 dec = c - 1;
2284                 if (unlikely(dec < 0))
2285                         break;
2286         } while (!atomic64_try_cmpxchg(v, &c, dec));
2287
2288         return dec;
2289 }
2290 #define atomic64_dec_if_positive atomic64_dec_if_positive
2291 #endif
2292
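/*
 * atomic64_cond_read_acquire()/_relaxed() spin until the condition
 * expression @c, evaluated with VAL bound to the current value of @v,
 * becomes true; the _acquire form additionally orders the final read
 * against later loads and stores. They map directly onto
 * smp_cond_load_acquire()/smp_cond_load_relaxed().
 */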
2293 #define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
2294 #define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
2295
2296 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2297 // baaf45f4c24ed88ceae58baca39d7fd80bb8101b