// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN/KCSAN instrumentation for atomic
 * operations. To use this functionality, an arch's atomic.h file needs to
 * define all atomic operations with an arch_ prefix (e.g. arch_atomic_read())
 * and include this file at the end. This file then provides atomic_read(),
 * which forwards to arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), then it needs to
 * use the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to
 * avoid double instrumentation.
 */
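/*
 * A hypothetical arch header might therefore end like this (sketch only;
 * the arch_atomic_read() shown is illustrative, and every other operation
 * would be defined analogously):
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *
 *	#include <asm-generic/atomic-instrumented.h>
 */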
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/kasan-checks.h>
#include <linux/kcsan-checks.h>
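
/*
 * Internal helpers: report each atomic access to KASAN and KCSAN before
 * the operation is forwarded to the arch_ implementation.
 */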
static __always_inline void __atomic_check_read(const volatile void *v, size_t size)
{
	kasan_check_read(v, size);
	kcsan_check_atomic_read(v, size);
}

static __always_inline void __atomic_check_write(const volatile void *v, size_t size)
{
	kasan_check_write(v, size);
	kcsan_check_atomic_write(v, size);
}

static __always_inline int
atomic_read(const atomic_t *v)
{
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}
#define atomic_read atomic_read
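
/*
 * Each wrapper also #defines its own name, so that later code can test
 * for its presence with the preprocessor.
 */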

#if defined(arch_atomic_read_acquire)
static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}
#define atomic_read_acquire atomic_read_acquire
#endif

static __always_inline void
atomic_set(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}
#define atomic_set atomic_set

#if defined(arch_atomic_set_release)
static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}
#define atomic_set_release atomic_set_release
#endif

static __always_inline void
atomic_add(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}
#define atomic_add atomic_add

#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}
#define atomic_add_return atomic_add_return
#endif

#if defined(arch_atomic_add_return_acquire)
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return_acquire(i, v);
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#if defined(arch_atomic_add_return_release)
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return_release(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#if defined(arch_atomic_add_return_relaxed)
static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_relaxed atomic_add_return_relaxed
#endif

#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}
#define atomic_fetch_add atomic_fetch_add
#endif

#if defined(arch_atomic_fetch_add_acquire)
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#if defined(arch_atomic_fetch_add_release)
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#if defined(arch_atomic_fetch_add_relaxed)
static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#endif

static __always_inline void
atomic_sub(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}
#define atomic_sub atomic_sub

#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}
#define atomic_sub_return atomic_sub_return
#endif

#if defined(arch_atomic_sub_return_acquire)
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#if defined(arch_atomic_sub_return_release)
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#if defined(arch_atomic_sub_return_relaxed)
static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#endif

#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#if defined(arch_atomic_fetch_sub_acquire)
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#if defined(arch_atomic_fetch_sub_release)
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#if defined(arch_atomic_fetch_sub_relaxed)
static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#endif

#if defined(arch_atomic_inc)
static __always_inline void
atomic_inc(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

#if defined(arch_atomic_dec)
static __always_inline void
atomic_dec(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

static __always_inline void
atomic_and(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}
#define atomic_and atomic_and

#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}
#define atomic_fetch_and atomic_fetch_and
#endif

#if defined(arch_atomic_fetch_and_acquire)
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#if defined(arch_atomic_fetch_and_release)
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#if defined(arch_atomic_fetch_and_relaxed)
static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#endif

#if defined(arch_atomic_andnot)
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

static __always_inline void
atomic_or(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}
#define atomic_or atomic_or

#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}
#define atomic_fetch_or atomic_fetch_or
#endif

#if defined(arch_atomic_fetch_or_acquire)
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#if defined(arch_atomic_fetch_or_release)
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#if defined(arch_atomic_fetch_or_relaxed)
static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#endif

static __always_inline void
atomic_xor(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}
#define atomic_xor atomic_xor

#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#if defined(arch_atomic_fetch_xor_acquire)
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#if defined(arch_atomic_fetch_xor_release)
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#if defined(arch_atomic_fetch_xor_relaxed)
static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif

#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}
#define atomic_xchg atomic_xchg
#endif

#if defined(arch_atomic_xchg_acquire)
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#if defined(arch_atomic_xchg_release)
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#if defined(arch_atomic_xchg_relaxed)
static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif

#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#if defined(arch_atomic_cmpxchg_acquire)
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#if defined(arch_atomic_cmpxchg_release)
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#if defined(arch_atomic_cmpxchg_relaxed)
static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif
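
/*
 * The try_cmpxchg() variants may write the observed value back to *@old on
 * failure, so @old is instrumented as a write as well.
 */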

#if defined(arch_atomic_try_cmpxchg)
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#if defined(arch_atomic_sub_and_test)
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif

#if defined(arch_atomic_fetch_add_unless)
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
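
/*
 * The atomic64_t wrappers below mirror the atomic_t wrappers above.
 */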

static __always_inline s64
atomic64_read(const atomic64_t *v)
{
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}
#define atomic64_set atomic64_set

#if defined(arch_atomic64_set_release)
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}
#define atomic64_set_release atomic64_set_release
#endif

static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}
#define atomic64_add atomic64_add

#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}
#define atomic64_add_return atomic64_add_return
#endif

#if defined(arch_atomic64_add_return_acquire)
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#if defined(arch_atomic64_add_return_release)
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#if defined(arch_atomic64_add_return_relaxed)
static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#if defined(arch_atomic64_fetch_add_acquire)
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#if defined(arch_atomic64_fetch_add_release)
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#if defined(arch_atomic64_fetch_add_relaxed)
static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#endif

static __always_inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}
#define atomic64_sub atomic64_sub

#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}
#define atomic64_sub_return atomic64_sub_return
#endif

#if defined(arch_atomic64_sub_return_acquire)
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#if defined(arch_atomic64_sub_return_release)
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#if defined(arch_atomic64_sub_return_relaxed)
static __always_inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#if defined(arch_atomic64_fetch_sub_acquire)
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#if defined(arch_atomic64_fetch_sub_release)
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#if defined(arch_atomic64_fetch_sub_relaxed)
static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
#endif

#if defined(arch_atomic64_inc)
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#if defined(arch_atomic64_fetch_inc)
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#if defined(arch_atomic64_dec)
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#if defined(arch_atomic64_fetch_dec)
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and

#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif

#if defined(arch_atomic64_andnot)
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or

#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif

static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor

#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif

#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif

#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_try_cmpxchg)
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#if defined(arch_atomic64_sub_and_test)
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif

#if defined(arch_atomic64_fetch_add_unless)
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
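
/*
 * The xchg()/cmpxchg() macro wrappers below evaluate @ptr exactly once,
 * via the __ai_ptr temporary, before instrumenting and forwarding the
 * access.
 */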

#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg_release(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
})
#endif

#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
})
#endif

#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
})
#endif

#define cmpxchg_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
})

#define sync_cmpxchg(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
})
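
/*
 * cmpxchg_double() operates on two adjacent machine words, so twice the
 * pointee size is instrumented.
 */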

#define cmpxchg_double(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_double_local(ptr, ...) \
({ \
	typeof(ptr) __ai_ptr = (ptr); \
	__atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__); \
})

#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
// 7b7e2af0e75c8ecb6f02298a7075f503f30d244c