1 // SPDX-License-Identifier: GPL-2.0
2
3 // Generated by scripts/atomic/gen-atomic-instrumented.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5
6 /*
7  * This file provides wrappers with KASAN and KCSAN instrumentation for atomic
8  * operations. To use this functionality, an arch's atomic.h file needs to
9  * define all atomic operations with an arch_ prefix (e.g. arch_atomic_read())
10  * and include this file at the end. This file then provides atomic_read(),
11  * which forwards to arch_atomic_read() for the actual atomic operation.
12  * Note: if an arch atomic operation is implemented by means of other atomic
13  * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
14  * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
15  * double instrumentation.
16  */
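
/*
 * A minimal sketch of the arch-side usage described above, assuming a
 * hypothetical arch/foo (the READ_ONCE()-based body is just one possible
 * implementation, not taken from any particular architecture):
 *
 *   // arch/foo/include/asm/atomic.h
 *   static __always_inline int arch_atomic_read(const atomic_t *v)
 *   {
 *           return READ_ONCE(v->counter);
 *   }
 *   // ... likewise for the remaining arch_atomic*()/arch_atomic64*() ops ...
 *   #include <asm-generic/atomic-instrumented.h>
 */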
17 #ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
18 #define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
19
20 #include <linux/build_bug.h>
21 #include <linux/compiler.h>
22 #include <linux/kasan-checks.h>
23 #include <linux/kcsan-checks.h>
24
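/*
 * Instrumentation helpers: sanity-check the access with KASAN and report it
 * to KCSAN as an atomic read/write before the arch_ operation runs.
 */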
25 static __always_inline void __atomic_check_read(const volatile void *v, size_t size)
26 {
27         kasan_check_read(v, size);
28         kcsan_check_atomic_read(v, size);
29 }
30
31 static __always_inline void __atomic_check_write(const volatile void *v, size_t size)
32 {
33         kasan_check_write(v, size);
34         kcsan_check_atomic_write(v, size);
35 }
36
37 static __always_inline int
38 atomic_read(const atomic_t *v)
39 {
40         __atomic_check_read(v, sizeof(*v));
41         return arch_atomic_read(v);
42 }
43 #define atomic_read atomic_read
44
45 #if defined(arch_atomic_read_acquire)
46 static __always_inline int
47 atomic_read_acquire(const atomic_t *v)
48 {
49         __atomic_check_read(v, sizeof(*v));
50         return arch_atomic_read_acquire(v);
51 }
52 #define atomic_read_acquire atomic_read_acquire
53 #endif
54
55 static __always_inline void
56 atomic_set(atomic_t *v, int i)
57 {
58         __atomic_check_write(v, sizeof(*v));
59         arch_atomic_set(v, i);
60 }
61 #define atomic_set atomic_set
62
63 #if defined(arch_atomic_set_release)
64 static __always_inline void
65 atomic_set_release(atomic_t *v, int i)
66 {
67         __atomic_check_write(v, sizeof(*v));
68         arch_atomic_set_release(v, i);
69 }
70 #define atomic_set_release atomic_set_release
71 #endif
72
73 static __always_inline void
74 atomic_add(int i, atomic_t *v)
75 {
76         __atomic_check_write(v, sizeof(*v));
77         arch_atomic_add(i, v);
78 }
79 #define atomic_add atomic_add
80
81 #if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
82 static __always_inline int
83 atomic_add_return(int i, atomic_t *v)
84 {
85         __atomic_check_write(v, sizeof(*v));
86         return arch_atomic_add_return(i, v);
87 }
88 #define atomic_add_return atomic_add_return
89 #endif
90
91 #if defined(arch_atomic_add_return_acquire)
92 static __always_inline int
93 atomic_add_return_acquire(int i, atomic_t *v)
94 {
95         __atomic_check_write(v, sizeof(*v));
96         return arch_atomic_add_return_acquire(i, v);
97 }
98 #define atomic_add_return_acquire atomic_add_return_acquire
99 #endif
100
101 #if defined(arch_atomic_add_return_release)
102 static __always_inline int
103 atomic_add_return_release(int i, atomic_t *v)
104 {
105         __atomic_check_write(v, sizeof(*v));
106         return arch_atomic_add_return_release(i, v);
107 }
108 #define atomic_add_return_release atomic_add_return_release
109 #endif
110
111 #if defined(arch_atomic_add_return_relaxed)
112 static __always_inline int
113 atomic_add_return_relaxed(int i, atomic_t *v)
114 {
115         __atomic_check_write(v, sizeof(*v));
116         return arch_atomic_add_return_relaxed(i, v);
117 }
118 #define atomic_add_return_relaxed atomic_add_return_relaxed
119 #endif
120
121 #if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
122 static __always_inline int
123 atomic_fetch_add(int i, atomic_t *v)
124 {
125         __atomic_check_write(v, sizeof(*v));
126         return arch_atomic_fetch_add(i, v);
127 }
128 #define atomic_fetch_add atomic_fetch_add
129 #endif
130
131 #if defined(arch_atomic_fetch_add_acquire)
132 static __always_inline int
133 atomic_fetch_add_acquire(int i, atomic_t *v)
134 {
135         __atomic_check_write(v, sizeof(*v));
136         return arch_atomic_fetch_add_acquire(i, v);
137 }
138 #define atomic_fetch_add_acquire atomic_fetch_add_acquire
139 #endif
140
141 #if defined(arch_atomic_fetch_add_release)
142 static __always_inline int
143 atomic_fetch_add_release(int i, atomic_t *v)
144 {
145         __atomic_check_write(v, sizeof(*v));
146         return arch_atomic_fetch_add_release(i, v);
147 }
148 #define atomic_fetch_add_release atomic_fetch_add_release
149 #endif
150
151 #if defined(arch_atomic_fetch_add_relaxed)
152 static __always_inline int
153 atomic_fetch_add_relaxed(int i, atomic_t *v)
154 {
155         __atomic_check_write(v, sizeof(*v));
156         return arch_atomic_fetch_add_relaxed(i, v);
157 }
158 #define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
159 #endif
160
161 static __always_inline void
162 atomic_sub(int i, atomic_t *v)
163 {
164         __atomic_check_write(v, sizeof(*v));
165         arch_atomic_sub(i, v);
166 }
167 #define atomic_sub atomic_sub
168
169 #if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
170 static __always_inline int
171 atomic_sub_return(int i, atomic_t *v)
172 {
173         __atomic_check_write(v, sizeof(*v));
174         return arch_atomic_sub_return(i, v);
175 }
176 #define atomic_sub_return atomic_sub_return
177 #endif
178
179 #if defined(arch_atomic_sub_return_acquire)
180 static __always_inline int
181 atomic_sub_return_acquire(int i, atomic_t *v)
182 {
183         __atomic_check_write(v, sizeof(*v));
184         return arch_atomic_sub_return_acquire(i, v);
185 }
186 #define atomic_sub_return_acquire atomic_sub_return_acquire
187 #endif
188
189 #if defined(arch_atomic_sub_return_release)
190 static __always_inline int
191 atomic_sub_return_release(int i, atomic_t *v)
192 {
193         __atomic_check_write(v, sizeof(*v));
194         return arch_atomic_sub_return_release(i, v);
195 }
196 #define atomic_sub_return_release atomic_sub_return_release
197 #endif
198
199 #if defined(arch_atomic_sub_return_relaxed)
200 static __always_inline int
201 atomic_sub_return_relaxed(int i, atomic_t *v)
202 {
203         __atomic_check_write(v, sizeof(*v));
204         return arch_atomic_sub_return_relaxed(i, v);
205 }
206 #define atomic_sub_return_relaxed atomic_sub_return_relaxed
207 #endif
208
209 #if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
210 static __always_inline int
211 atomic_fetch_sub(int i, atomic_t *v)
212 {
213         __atomic_check_write(v, sizeof(*v));
214         return arch_atomic_fetch_sub(i, v);
215 }
216 #define atomic_fetch_sub atomic_fetch_sub
217 #endif
218
219 #if defined(arch_atomic_fetch_sub_acquire)
220 static __always_inline int
221 atomic_fetch_sub_acquire(int i, atomic_t *v)
222 {
223         __atomic_check_write(v, sizeof(*v));
224         return arch_atomic_fetch_sub_acquire(i, v);
225 }
226 #define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
227 #endif
228
229 #if defined(arch_atomic_fetch_sub_release)
230 static __always_inline int
231 atomic_fetch_sub_release(int i, atomic_t *v)
232 {
233         __atomic_check_write(v, sizeof(*v));
234         return arch_atomic_fetch_sub_release(i, v);
235 }
236 #define atomic_fetch_sub_release atomic_fetch_sub_release
237 #endif
238
239 #if defined(arch_atomic_fetch_sub_relaxed)
240 static __always_inline int
241 atomic_fetch_sub_relaxed(int i, atomic_t *v)
242 {
243         __atomic_check_write(v, sizeof(*v));
244         return arch_atomic_fetch_sub_relaxed(i, v);
245 }
246 #define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
247 #endif
248
249 #if defined(arch_atomic_inc)
250 static __always_inline void
251 atomic_inc(atomic_t *v)
252 {
253         __atomic_check_write(v, sizeof(*v));
254         arch_atomic_inc(v);
255 }
256 #define atomic_inc atomic_inc
257 #endif
258
259 #if defined(arch_atomic_inc_return)
260 static __always_inline int
261 atomic_inc_return(atomic_t *v)
262 {
263         __atomic_check_write(v, sizeof(*v));
264         return arch_atomic_inc_return(v);
265 }
266 #define atomic_inc_return atomic_inc_return
267 #endif
268
269 #if defined(arch_atomic_inc_return_acquire)
270 static __always_inline int
271 atomic_inc_return_acquire(atomic_t *v)
272 {
273         __atomic_check_write(v, sizeof(*v));
274         return arch_atomic_inc_return_acquire(v);
275 }
276 #define atomic_inc_return_acquire atomic_inc_return_acquire
277 #endif
278
279 #if defined(arch_atomic_inc_return_release)
280 static __always_inline int
281 atomic_inc_return_release(atomic_t *v)
282 {
283         __atomic_check_write(v, sizeof(*v));
284         return arch_atomic_inc_return_release(v);
285 }
286 #define atomic_inc_return_release atomic_inc_return_release
287 #endif
288
289 #if defined(arch_atomic_inc_return_relaxed)
290 static __always_inline int
291 atomic_inc_return_relaxed(atomic_t *v)
292 {
293         __atomic_check_write(v, sizeof(*v));
294         return arch_atomic_inc_return_relaxed(v);
295 }
296 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
297 #endif
298
299 #if defined(arch_atomic_fetch_inc)
300 static __always_inline int
301 atomic_fetch_inc(atomic_t *v)
302 {
303         __atomic_check_write(v, sizeof(*v));
304         return arch_atomic_fetch_inc(v);
305 }
306 #define atomic_fetch_inc atomic_fetch_inc
307 #endif
308
309 #if defined(arch_atomic_fetch_inc_acquire)
310 static __always_inline int
311 atomic_fetch_inc_acquire(atomic_t *v)
312 {
313         __atomic_check_write(v, sizeof(*v));
314         return arch_atomic_fetch_inc_acquire(v);
315 }
316 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
317 #endif
318
319 #if defined(arch_atomic_fetch_inc_release)
320 static __always_inline int
321 atomic_fetch_inc_release(atomic_t *v)
322 {
323         __atomic_check_write(v, sizeof(*v));
324         return arch_atomic_fetch_inc_release(v);
325 }
326 #define atomic_fetch_inc_release atomic_fetch_inc_release
327 #endif
328
329 #if defined(arch_atomic_fetch_inc_relaxed)
330 static __always_inline int
331 atomic_fetch_inc_relaxed(atomic_t *v)
332 {
333         __atomic_check_write(v, sizeof(*v));
334         return arch_atomic_fetch_inc_relaxed(v);
335 }
336 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
337 #endif
338
339 #if defined(arch_atomic_dec)
340 static __always_inline void
341 atomic_dec(atomic_t *v)
342 {
343         __atomic_check_write(v, sizeof(*v));
344         arch_atomic_dec(v);
345 }
346 #define atomic_dec atomic_dec
347 #endif
348
349 #if defined(arch_atomic_dec_return)
350 static __always_inline int
351 atomic_dec_return(atomic_t *v)
352 {
353         __atomic_check_write(v, sizeof(*v));
354         return arch_atomic_dec_return(v);
355 }
356 #define atomic_dec_return atomic_dec_return
357 #endif
358
359 #if defined(arch_atomic_dec_return_acquire)
360 static __always_inline int
361 atomic_dec_return_acquire(atomic_t *v)
362 {
363         __atomic_check_write(v, sizeof(*v));
364         return arch_atomic_dec_return_acquire(v);
365 }
366 #define atomic_dec_return_acquire atomic_dec_return_acquire
367 #endif
368
369 #if defined(arch_atomic_dec_return_release)
370 static __always_inline int
371 atomic_dec_return_release(atomic_t *v)
372 {
373         __atomic_check_write(v, sizeof(*v));
374         return arch_atomic_dec_return_release(v);
375 }
376 #define atomic_dec_return_release atomic_dec_return_release
377 #endif
378
379 #if defined(arch_atomic_dec_return_relaxed)
380 static __always_inline int
381 atomic_dec_return_relaxed(atomic_t *v)
382 {
383         __atomic_check_write(v, sizeof(*v));
384         return arch_atomic_dec_return_relaxed(v);
385 }
386 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
387 #endif
388
389 #if defined(arch_atomic_fetch_dec)
390 static __always_inline int
391 atomic_fetch_dec(atomic_t *v)
392 {
393         __atomic_check_write(v, sizeof(*v));
394         return arch_atomic_fetch_dec(v);
395 }
396 #define atomic_fetch_dec atomic_fetch_dec
397 #endif
398
399 #if defined(arch_atomic_fetch_dec_acquire)
400 static __always_inline int
401 atomic_fetch_dec_acquire(atomic_t *v)
402 {
403         __atomic_check_write(v, sizeof(*v));
404         return arch_atomic_fetch_dec_acquire(v);
405 }
406 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
407 #endif
408
409 #if defined(arch_atomic_fetch_dec_release)
410 static __always_inline int
411 atomic_fetch_dec_release(atomic_t *v)
412 {
413         __atomic_check_write(v, sizeof(*v));
414         return arch_atomic_fetch_dec_release(v);
415 }
416 #define atomic_fetch_dec_release atomic_fetch_dec_release
417 #endif
418
419 #if defined(arch_atomic_fetch_dec_relaxed)
420 static __always_inline int
421 atomic_fetch_dec_relaxed(atomic_t *v)
422 {
423         __atomic_check_write(v, sizeof(*v));
424         return arch_atomic_fetch_dec_relaxed(v);
425 }
426 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
427 #endif
428
429 static __always_inline void
430 atomic_and(int i, atomic_t *v)
431 {
432         __atomic_check_write(v, sizeof(*v));
433         arch_atomic_and(i, v);
434 }
435 #define atomic_and atomic_and
436
437 #if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
438 static __always_inline int
439 atomic_fetch_and(int i, atomic_t *v)
440 {
441         __atomic_check_write(v, sizeof(*v));
442         return arch_atomic_fetch_and(i, v);
443 }
444 #define atomic_fetch_and atomic_fetch_and
445 #endif
446
447 #if defined(arch_atomic_fetch_and_acquire)
448 static __always_inline int
449 atomic_fetch_and_acquire(int i, atomic_t *v)
450 {
451         __atomic_check_write(v, sizeof(*v));
452         return arch_atomic_fetch_and_acquire(i, v);
453 }
454 #define atomic_fetch_and_acquire atomic_fetch_and_acquire
455 #endif
456
457 #if defined(arch_atomic_fetch_and_release)
458 static __always_inline int
459 atomic_fetch_and_release(int i, atomic_t *v)
460 {
461         __atomic_check_write(v, sizeof(*v));
462         return arch_atomic_fetch_and_release(i, v);
463 }
464 #define atomic_fetch_and_release atomic_fetch_and_release
465 #endif
466
467 #if defined(arch_atomic_fetch_and_relaxed)
468 static __always_inline int
469 atomic_fetch_and_relaxed(int i, atomic_t *v)
470 {
471         __atomic_check_write(v, sizeof(*v));
472         return arch_atomic_fetch_and_relaxed(i, v);
473 }
474 #define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
475 #endif
476
477 #if defined(arch_atomic_andnot)
478 static __always_inline void
479 atomic_andnot(int i, atomic_t *v)
480 {
481         __atomic_check_write(v, sizeof(*v));
482         arch_atomic_andnot(i, v);
483 }
484 #define atomic_andnot atomic_andnot
485 #endif
486
487 #if defined(arch_atomic_fetch_andnot)
488 static __always_inline int
489 atomic_fetch_andnot(int i, atomic_t *v)
490 {
491         __atomic_check_write(v, sizeof(*v));
492         return arch_atomic_fetch_andnot(i, v);
493 }
494 #define atomic_fetch_andnot atomic_fetch_andnot
495 #endif
496
497 #if defined(arch_atomic_fetch_andnot_acquire)
498 static __always_inline int
499 atomic_fetch_andnot_acquire(int i, atomic_t *v)
500 {
501         __atomic_check_write(v, sizeof(*v));
502         return arch_atomic_fetch_andnot_acquire(i, v);
503 }
504 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
505 #endif
506
507 #if defined(arch_atomic_fetch_andnot_release)
508 static __always_inline int
509 atomic_fetch_andnot_release(int i, atomic_t *v)
510 {
511         __atomic_check_write(v, sizeof(*v));
512         return arch_atomic_fetch_andnot_release(i, v);
513 }
514 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
515 #endif
516
517 #if defined(arch_atomic_fetch_andnot_relaxed)
518 static __always_inline int
519 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
520 {
521         __atomic_check_write(v, sizeof(*v));
522         return arch_atomic_fetch_andnot_relaxed(i, v);
523 }
524 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
525 #endif
526
527 static __always_inline void
528 atomic_or(int i, atomic_t *v)
529 {
530         __atomic_check_write(v, sizeof(*v));
531         arch_atomic_or(i, v);
532 }
533 #define atomic_or atomic_or
534
535 #if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
536 static __always_inline int
537 atomic_fetch_or(int i, atomic_t *v)
538 {
539         __atomic_check_write(v, sizeof(*v));
540         return arch_atomic_fetch_or(i, v);
541 }
542 #define atomic_fetch_or atomic_fetch_or
543 #endif
544
545 #if defined(arch_atomic_fetch_or_acquire)
546 static __always_inline int
547 atomic_fetch_or_acquire(int i, atomic_t *v)
548 {
549         __atomic_check_write(v, sizeof(*v));
550         return arch_atomic_fetch_or_acquire(i, v);
551 }
552 #define atomic_fetch_or_acquire atomic_fetch_or_acquire
553 #endif
554
555 #if defined(arch_atomic_fetch_or_release)
556 static __always_inline int
557 atomic_fetch_or_release(int i, atomic_t *v)
558 {
559         __atomic_check_write(v, sizeof(*v));
560         return arch_atomic_fetch_or_release(i, v);
561 }
562 #define atomic_fetch_or_release atomic_fetch_or_release
563 #endif
564
565 #if defined(arch_atomic_fetch_or_relaxed)
566 static __always_inline int
567 atomic_fetch_or_relaxed(int i, atomic_t *v)
568 {
569         __atomic_check_write(v, sizeof(*v));
570         return arch_atomic_fetch_or_relaxed(i, v);
571 }
572 #define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
573 #endif
574
575 static __always_inline void
576 atomic_xor(int i, atomic_t *v)
577 {
578         __atomic_check_write(v, sizeof(*v));
579         arch_atomic_xor(i, v);
580 }
581 #define atomic_xor atomic_xor
582
583 #if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
584 static __always_inline int
585 atomic_fetch_xor(int i, atomic_t *v)
586 {
587         __atomic_check_write(v, sizeof(*v));
588         return arch_atomic_fetch_xor(i, v);
589 }
590 #define atomic_fetch_xor atomic_fetch_xor
591 #endif
592
593 #if defined(arch_atomic_fetch_xor_acquire)
594 static __always_inline int
595 atomic_fetch_xor_acquire(int i, atomic_t *v)
596 {
597         __atomic_check_write(v, sizeof(*v));
598         return arch_atomic_fetch_xor_acquire(i, v);
599 }
600 #define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
601 #endif
602
603 #if defined(arch_atomic_fetch_xor_release)
604 static __always_inline int
605 atomic_fetch_xor_release(int i, atomic_t *v)
606 {
607         __atomic_check_write(v, sizeof(*v));
608         return arch_atomic_fetch_xor_release(i, v);
609 }
610 #define atomic_fetch_xor_release atomic_fetch_xor_release
611 #endif
612
613 #if defined(arch_atomic_fetch_xor_relaxed)
614 static __always_inline int
615 atomic_fetch_xor_relaxed(int i, atomic_t *v)
616 {
617         __atomic_check_write(v, sizeof(*v));
618         return arch_atomic_fetch_xor_relaxed(i, v);
619 }
620 #define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
621 #endif
622
623 #if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
624 static __always_inline int
625 atomic_xchg(atomic_t *v, int i)
626 {
627         __atomic_check_write(v, sizeof(*v));
628         return arch_atomic_xchg(v, i);
629 }
630 #define atomic_xchg atomic_xchg
631 #endif
632
633 #if defined(arch_atomic_xchg_acquire)
634 static __always_inline int
635 atomic_xchg_acquire(atomic_t *v, int i)
636 {
637         __atomic_check_write(v, sizeof(*v));
638         return arch_atomic_xchg_acquire(v, i);
639 }
640 #define atomic_xchg_acquire atomic_xchg_acquire
641 #endif
642
643 #if defined(arch_atomic_xchg_release)
644 static __always_inline int
645 atomic_xchg_release(atomic_t *v, int i)
646 {
647         __atomic_check_write(v, sizeof(*v));
648         return arch_atomic_xchg_release(v, i);
649 }
650 #define atomic_xchg_release atomic_xchg_release
651 #endif
652
653 #if defined(arch_atomic_xchg_relaxed)
654 static __always_inline int
655 atomic_xchg_relaxed(atomic_t *v, int i)
656 {
657         __atomic_check_write(v, sizeof(*v));
658         return arch_atomic_xchg_relaxed(v, i);
659 }
660 #define atomic_xchg_relaxed atomic_xchg_relaxed
661 #endif
662
663 #if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
664 static __always_inline int
665 atomic_cmpxchg(atomic_t *v, int old, int new)
666 {
667         __atomic_check_write(v, sizeof(*v));
668         return arch_atomic_cmpxchg(v, old, new);
669 }
670 #define atomic_cmpxchg atomic_cmpxchg
671 #endif
672
673 #if defined(arch_atomic_cmpxchg_acquire)
674 static __always_inline int
675 atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
676 {
677         __atomic_check_write(v, sizeof(*v));
678         return arch_atomic_cmpxchg_acquire(v, old, new);
679 }
680 #define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
681 #endif
682
683 #if defined(arch_atomic_cmpxchg_release)
684 static __always_inline int
685 atomic_cmpxchg_release(atomic_t *v, int old, int new)
686 {
687         __atomic_check_write(v, sizeof(*v));
688         return arch_atomic_cmpxchg_release(v, old, new);
689 }
690 #define atomic_cmpxchg_release atomic_cmpxchg_release
691 #endif
692
693 #if defined(arch_atomic_cmpxchg_relaxed)
694 static __always_inline int
695 atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
696 {
697         __atomic_check_write(v, sizeof(*v));
698         return arch_atomic_cmpxchg_relaxed(v, old, new);
699 }
700 #define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
701 #endif
702
703 #if defined(arch_atomic_try_cmpxchg)
704 static __always_inline bool
705 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
706 {
707         __atomic_check_write(v, sizeof(*v));
708         __atomic_check_write(old, sizeof(*old));
709         return arch_atomic_try_cmpxchg(v, old, new);
710 }
711 #define atomic_try_cmpxchg atomic_try_cmpxchg
712 #endif
713
714 #if defined(arch_atomic_try_cmpxchg_acquire)
715 static __always_inline bool
716 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
717 {
718         __atomic_check_write(v, sizeof(*v));
719         __atomic_check_write(old, sizeof(*old));
720         return arch_atomic_try_cmpxchg_acquire(v, old, new);
721 }
722 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
723 #endif
724
725 #if defined(arch_atomic_try_cmpxchg_release)
726 static __always_inline bool
727 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
728 {
729         __atomic_check_write(v, sizeof(*v));
730         __atomic_check_write(old, sizeof(*old));
731         return arch_atomic_try_cmpxchg_release(v, old, new);
732 }
733 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
734 #endif
735
736 #if defined(arch_atomic_try_cmpxchg_relaxed)
737 static __always_inline bool
738 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
739 {
740         __atomic_check_write(v, sizeof(*v));
741         __atomic_check_write(old, sizeof(*old));
742         return arch_atomic_try_cmpxchg_relaxed(v, old, new);
743 }
744 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
745 #endif
746
747 #if defined(arch_atomic_sub_and_test)
748 static __always_inline bool
749 atomic_sub_and_test(int i, atomic_t *v)
750 {
751         __atomic_check_write(v, sizeof(*v));
752         return arch_atomic_sub_and_test(i, v);
753 }
754 #define atomic_sub_and_test atomic_sub_and_test
755 #endif
756
757 #if defined(arch_atomic_dec_and_test)
758 static __always_inline bool
759 atomic_dec_and_test(atomic_t *v)
760 {
761         __atomic_check_write(v, sizeof(*v));
762         return arch_atomic_dec_and_test(v);
763 }
764 #define atomic_dec_and_test atomic_dec_and_test
765 #endif
766
767 #if defined(arch_atomic_inc_and_test)
768 static __always_inline bool
769 atomic_inc_and_test(atomic_t *v)
770 {
771         __atomic_check_write(v, sizeof(*v));
772         return arch_atomic_inc_and_test(v);
773 }
774 #define atomic_inc_and_test atomic_inc_and_test
775 #endif
776
777 #if defined(arch_atomic_add_negative)
778 static __always_inline bool
779 atomic_add_negative(int i, atomic_t *v)
780 {
781         __atomic_check_write(v, sizeof(*v));
782         return arch_atomic_add_negative(i, v);
783 }
784 #define atomic_add_negative atomic_add_negative
785 #endif
786
787 #if defined(arch_atomic_fetch_add_unless)
788 static __always_inline int
789 atomic_fetch_add_unless(atomic_t *v, int a, int u)
790 {
791         __atomic_check_write(v, sizeof(*v));
792         return arch_atomic_fetch_add_unless(v, a, u);
793 }
794 #define atomic_fetch_add_unless atomic_fetch_add_unless
795 #endif
796
797 #if defined(arch_atomic_add_unless)
798 static __always_inline bool
799 atomic_add_unless(atomic_t *v, int a, int u)
800 {
801         __atomic_check_write(v, sizeof(*v));
802         return arch_atomic_add_unless(v, a, u);
803 }
804 #define atomic_add_unless atomic_add_unless
805 #endif
806
807 #if defined(arch_atomic_inc_not_zero)
808 static __always_inline bool
809 atomic_inc_not_zero(atomic_t *v)
810 {
811         __atomic_check_write(v, sizeof(*v));
812         return arch_atomic_inc_not_zero(v);
813 }
814 #define atomic_inc_not_zero atomic_inc_not_zero
815 #endif
816
817 #if defined(arch_atomic_inc_unless_negative)
818 static __always_inline bool
819 atomic_inc_unless_negative(atomic_t *v)
820 {
821         __atomic_check_write(v, sizeof(*v));
822         return arch_atomic_inc_unless_negative(v);
823 }
824 #define atomic_inc_unless_negative atomic_inc_unless_negative
825 #endif
826
827 #if defined(arch_atomic_dec_unless_positive)
828 static __always_inline bool
829 atomic_dec_unless_positive(atomic_t *v)
830 {
831         __atomic_check_write(v, sizeof(*v));
832         return arch_atomic_dec_unless_positive(v);
833 }
834 #define atomic_dec_unless_positive atomic_dec_unless_positive
835 #endif
836
837 #if defined(arch_atomic_dec_if_positive)
838 static __always_inline int
839 atomic_dec_if_positive(atomic_t *v)
840 {
841         __atomic_check_write(v, sizeof(*v));
842         return arch_atomic_dec_if_positive(v);
843 }
844 #define atomic_dec_if_positive atomic_dec_if_positive
845 #endif
846
847 static __always_inline s64
848 atomic64_read(const atomic64_t *v)
849 {
850         __atomic_check_read(v, sizeof(*v));
851         return arch_atomic64_read(v);
852 }
853 #define atomic64_read atomic64_read
854
855 #if defined(arch_atomic64_read_acquire)
856 static __always_inline s64
857 atomic64_read_acquire(const atomic64_t *v)
858 {
859         __atomic_check_read(v, sizeof(*v));
860         return arch_atomic64_read_acquire(v);
861 }
862 #define atomic64_read_acquire atomic64_read_acquire
863 #endif
864
865 static __always_inline void
866 atomic64_set(atomic64_t *v, s64 i)
867 {
868         __atomic_check_write(v, sizeof(*v));
869         arch_atomic64_set(v, i);
870 }
871 #define atomic64_set atomic64_set
872
873 #if defined(arch_atomic64_set_release)
874 static __always_inline void
875 atomic64_set_release(atomic64_t *v, s64 i)
876 {
877         __atomic_check_write(v, sizeof(*v));
878         arch_atomic64_set_release(v, i);
879 }
880 #define atomic64_set_release atomic64_set_release
881 #endif
882
883 static __always_inline void
884 atomic64_add(s64 i, atomic64_t *v)
885 {
886         __atomic_check_write(v, sizeof(*v));
887         arch_atomic64_add(i, v);
888 }
889 #define atomic64_add atomic64_add
890
891 #if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
892 static __always_inline s64
893 atomic64_add_return(s64 i, atomic64_t *v)
894 {
895         __atomic_check_write(v, sizeof(*v));
896         return arch_atomic64_add_return(i, v);
897 }
898 #define atomic64_add_return atomic64_add_return
899 #endif
900
901 #if defined(arch_atomic64_add_return_acquire)
902 static __always_inline s64
903 atomic64_add_return_acquire(s64 i, atomic64_t *v)
904 {
905         __atomic_check_write(v, sizeof(*v));
906         return arch_atomic64_add_return_acquire(i, v);
907 }
908 #define atomic64_add_return_acquire atomic64_add_return_acquire
909 #endif
910
911 #if defined(arch_atomic64_add_return_release)
912 static __always_inline s64
913 atomic64_add_return_release(s64 i, atomic64_t *v)
914 {
915         __atomic_check_write(v, sizeof(*v));
916         return arch_atomic64_add_return_release(i, v);
917 }
918 #define atomic64_add_return_release atomic64_add_return_release
919 #endif
920
921 #if defined(arch_atomic64_add_return_relaxed)
922 static __always_inline s64
923 atomic64_add_return_relaxed(s64 i, atomic64_t *v)
924 {
925         __atomic_check_write(v, sizeof(*v));
926         return arch_atomic64_add_return_relaxed(i, v);
927 }
928 #define atomic64_add_return_relaxed atomic64_add_return_relaxed
929 #endif
930
931 #if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
932 static __always_inline s64
933 atomic64_fetch_add(s64 i, atomic64_t *v)
934 {
935         __atomic_check_write(v, sizeof(*v));
936         return arch_atomic64_fetch_add(i, v);
937 }
938 #define atomic64_fetch_add atomic64_fetch_add
939 #endif
940
941 #if defined(arch_atomic64_fetch_add_acquire)
942 static __always_inline s64
943 atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
944 {
945         __atomic_check_write(v, sizeof(*v));
946         return arch_atomic64_fetch_add_acquire(i, v);
947 }
948 #define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
949 #endif
950
951 #if defined(arch_atomic64_fetch_add_release)
952 static __always_inline s64
953 atomic64_fetch_add_release(s64 i, atomic64_t *v)
954 {
955         __atomic_check_write(v, sizeof(*v));
956         return arch_atomic64_fetch_add_release(i, v);
957 }
958 #define atomic64_fetch_add_release atomic64_fetch_add_release
959 #endif
960
961 #if defined(arch_atomic64_fetch_add_relaxed)
962 static __always_inline s64
963 atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
964 {
965         __atomic_check_write(v, sizeof(*v));
966         return arch_atomic64_fetch_add_relaxed(i, v);
967 }
968 #define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
969 #endif
970
971 static __always_inline void
972 atomic64_sub(s64 i, atomic64_t *v)
973 {
974         __atomic_check_write(v, sizeof(*v));
975         arch_atomic64_sub(i, v);
976 }
977 #define atomic64_sub atomic64_sub
978
979 #if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
980 static __always_inline s64
981 atomic64_sub_return(s64 i, atomic64_t *v)
982 {
983         __atomic_check_write(v, sizeof(*v));
984         return arch_atomic64_sub_return(i, v);
985 }
986 #define atomic64_sub_return atomic64_sub_return
987 #endif
988
989 #if defined(arch_atomic64_sub_return_acquire)
990 static __always_inline s64
991 atomic64_sub_return_acquire(s64 i, atomic64_t *v)
992 {
993         __atomic_check_write(v, sizeof(*v));
994         return arch_atomic64_sub_return_acquire(i, v);
995 }
996 #define atomic64_sub_return_acquire atomic64_sub_return_acquire
997 #endif
998
999 #if defined(arch_atomic64_sub_return_release)
1000 static __always_inline s64
1001 atomic64_sub_return_release(s64 i, atomic64_t *v)
1002 {
1003         __atomic_check_write(v, sizeof(*v));
1004         return arch_atomic64_sub_return_release(i, v);
1005 }
1006 #define atomic64_sub_return_release atomic64_sub_return_release
1007 #endif
1008
1009 #if defined(arch_atomic64_sub_return_relaxed)
1010 static __always_inline s64
1011 atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
1012 {
1013         __atomic_check_write(v, sizeof(*v));
1014         return arch_atomic64_sub_return_relaxed(i, v);
1015 }
1016 #define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
1017 #endif
1018
1019 #if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
1020 static __always_inline s64
1021 atomic64_fetch_sub(s64 i, atomic64_t *v)
1022 {
1023         __atomic_check_write(v, sizeof(*v));
1024         return arch_atomic64_fetch_sub(i, v);
1025 }
1026 #define atomic64_fetch_sub atomic64_fetch_sub
1027 #endif
1028
1029 #if defined(arch_atomic64_fetch_sub_acquire)
1030 static __always_inline s64
1031 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1032 {
1033         __atomic_check_write(v, sizeof(*v));
1034         return arch_atomic64_fetch_sub_acquire(i, v);
1035 }
1036 #define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
1037 #endif
1038
1039 #if defined(arch_atomic64_fetch_sub_release)
1040 static __always_inline s64
1041 atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1042 {
1043         __atomic_check_write(v, sizeof(*v));
1044         return arch_atomic64_fetch_sub_release(i, v);
1045 }
1046 #define atomic64_fetch_sub_release atomic64_fetch_sub_release
1047 #endif
1048
1049 #if defined(arch_atomic64_fetch_sub_relaxed)
1050 static __always_inline s64
1051 atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
1052 {
1053         __atomic_check_write(v, sizeof(*v));
1054         return arch_atomic64_fetch_sub_relaxed(i, v);
1055 }
1056 #define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
1057 #endif
1058
1059 #if defined(arch_atomic64_inc)
1060 static __always_inline void
1061 atomic64_inc(atomic64_t *v)
1062 {
1063         __atomic_check_write(v, sizeof(*v));
1064         arch_atomic64_inc(v);
1065 }
1066 #define atomic64_inc atomic64_inc
1067 #endif
1068
1069 #if defined(arch_atomic64_inc_return)
1070 static __always_inline s64
1071 atomic64_inc_return(atomic64_t *v)
1072 {
1073         __atomic_check_write(v, sizeof(*v));
1074         return arch_atomic64_inc_return(v);
1075 }
1076 #define atomic64_inc_return atomic64_inc_return
1077 #endif
1078
1079 #if defined(arch_atomic64_inc_return_acquire)
1080 static __always_inline s64
1081 atomic64_inc_return_acquire(atomic64_t *v)
1082 {
1083         __atomic_check_write(v, sizeof(*v));
1084         return arch_atomic64_inc_return_acquire(v);
1085 }
1086 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1087 #endif
1088
1089 #if defined(arch_atomic64_inc_return_release)
1090 static __always_inline s64
1091 atomic64_inc_return_release(atomic64_t *v)
1092 {
1093         __atomic_check_write(v, sizeof(*v));
1094         return arch_atomic64_inc_return_release(v);
1095 }
1096 #define atomic64_inc_return_release atomic64_inc_return_release
1097 #endif
1098
1099 #if defined(arch_atomic64_inc_return_relaxed)
1100 static __always_inline s64
1101 atomic64_inc_return_relaxed(atomic64_t *v)
1102 {
1103         __atomic_check_write(v, sizeof(*v));
1104         return arch_atomic64_inc_return_relaxed(v);
1105 }
1106 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1107 #endif
1108
1109 #if defined(arch_atomic64_fetch_inc)
1110 static __always_inline s64
1111 atomic64_fetch_inc(atomic64_t *v)
1112 {
1113         __atomic_check_write(v, sizeof(*v));
1114         return arch_atomic64_fetch_inc(v);
1115 }
1116 #define atomic64_fetch_inc atomic64_fetch_inc
1117 #endif
1118
1119 #if defined(arch_atomic64_fetch_inc_acquire)
1120 static __always_inline s64
1121 atomic64_fetch_inc_acquire(atomic64_t *v)
1122 {
1123         __atomic_check_write(v, sizeof(*v));
1124         return arch_atomic64_fetch_inc_acquire(v);
1125 }
1126 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1127 #endif
1128
1129 #if defined(arch_atomic64_fetch_inc_release)
1130 static __always_inline s64
1131 atomic64_fetch_inc_release(atomic64_t *v)
1132 {
1133         __atomic_check_write(v, sizeof(*v));
1134         return arch_atomic64_fetch_inc_release(v);
1135 }
1136 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1137 #endif
1138
1139 #if defined(arch_atomic64_fetch_inc_relaxed)
1140 static __always_inline s64
1141 atomic64_fetch_inc_relaxed(atomic64_t *v)
1142 {
1143         __atomic_check_write(v, sizeof(*v));
1144         return arch_atomic64_fetch_inc_relaxed(v);
1145 }
1146 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1147 #endif
1148
1149 #if defined(arch_atomic64_dec)
1150 static __always_inline void
1151 atomic64_dec(atomic64_t *v)
1152 {
1153         __atomic_check_write(v, sizeof(*v));
1154         arch_atomic64_dec(v);
1155 }
1156 #define atomic64_dec atomic64_dec
1157 #endif
1158
1159 #if defined(arch_atomic64_dec_return)
1160 static __always_inline s64
1161 atomic64_dec_return(atomic64_t *v)
1162 {
1163         __atomic_check_write(v, sizeof(*v));
1164         return arch_atomic64_dec_return(v);
1165 }
1166 #define atomic64_dec_return atomic64_dec_return
1167 #endif
1168
1169 #if defined(arch_atomic64_dec_return_acquire)
1170 static __always_inline s64
1171 atomic64_dec_return_acquire(atomic64_t *v)
1172 {
1173         __atomic_check_write(v, sizeof(*v));
1174         return arch_atomic64_dec_return_acquire(v);
1175 }
1176 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1177 #endif
1178
1179 #if defined(arch_atomic64_dec_return_release)
1180 static __always_inline s64
1181 atomic64_dec_return_release(atomic64_t *v)
1182 {
1183         __atomic_check_write(v, sizeof(*v));
1184         return arch_atomic64_dec_return_release(v);
1185 }
1186 #define atomic64_dec_return_release atomic64_dec_return_release
1187 #endif
1188
1189 #if defined(arch_atomic64_dec_return_relaxed)
1190 static __always_inline s64
1191 atomic64_dec_return_relaxed(atomic64_t *v)
1192 {
1193         __atomic_check_write(v, sizeof(*v));
1194         return arch_atomic64_dec_return_relaxed(v);
1195 }
1196 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1197 #endif
1198
1199 #if defined(arch_atomic64_fetch_dec)
1200 static __always_inline s64
1201 atomic64_fetch_dec(atomic64_t *v)
1202 {
1203         __atomic_check_write(v, sizeof(*v));
1204         return arch_atomic64_fetch_dec(v);
1205 }
1206 #define atomic64_fetch_dec atomic64_fetch_dec
1207 #endif
1208
1209 #if defined(arch_atomic64_fetch_dec_acquire)
1210 static __always_inline s64
1211 atomic64_fetch_dec_acquire(atomic64_t *v)
1212 {
1213         __atomic_check_write(v, sizeof(*v));
1214         return arch_atomic64_fetch_dec_acquire(v);
1215 }
1216 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1217 #endif
1218
1219 #if defined(arch_atomic64_fetch_dec_release)
1220 static __always_inline s64
1221 atomic64_fetch_dec_release(atomic64_t *v)
1222 {
1223         __atomic_check_write(v, sizeof(*v));
1224         return arch_atomic64_fetch_dec_release(v);
1225 }
1226 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1227 #endif
1228
1229 #if defined(arch_atomic64_fetch_dec_relaxed)
1230 static __always_inline s64
1231 atomic64_fetch_dec_relaxed(atomic64_t *v)
1232 {
1233         __atomic_check_write(v, sizeof(*v));
1234         return arch_atomic64_fetch_dec_relaxed(v);
1235 }
1236 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1237 #endif
1238
1239 static __always_inline void
1240 atomic64_and(s64 i, atomic64_t *v)
1241 {
1242         __atomic_check_write(v, sizeof(*v));
1243         arch_atomic64_and(i, v);
1244 }
1245 #define atomic64_and atomic64_and
1246
1247 #if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
1248 static __always_inline s64
1249 atomic64_fetch_and(s64 i, atomic64_t *v)
1250 {
1251         __atomic_check_write(v, sizeof(*v));
1252         return arch_atomic64_fetch_and(i, v);
1253 }
1254 #define atomic64_fetch_and atomic64_fetch_and
1255 #endif
1256
1257 #if defined(arch_atomic64_fetch_and_acquire)
1258 static __always_inline s64
1259 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1260 {
1261         __atomic_check_write(v, sizeof(*v));
1262         return arch_atomic64_fetch_and_acquire(i, v);
1263 }
1264 #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1265 #endif
1266
1267 #if defined(arch_atomic64_fetch_and_release)
1268 static __always_inline s64
1269 atomic64_fetch_and_release(s64 i, atomic64_t *v)
1270 {
1271         __atomic_check_write(v, sizeof(*v));
1272         return arch_atomic64_fetch_and_release(i, v);
1273 }
1274 #define atomic64_fetch_and_release atomic64_fetch_and_release
1275 #endif
1276
1277 #if defined(arch_atomic64_fetch_and_relaxed)
1278 static __always_inline s64
1279 atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
1280 {
1281         __atomic_check_write(v, sizeof(*v));
1282         return arch_atomic64_fetch_and_relaxed(i, v);
1283 }
1284 #define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
1285 #endif
1286
1287 #if defined(arch_atomic64_andnot)
1288 static __always_inline void
1289 atomic64_andnot(s64 i, atomic64_t *v)
1290 {
1291         __atomic_check_write(v, sizeof(*v));
1292         arch_atomic64_andnot(i, v);
1293 }
1294 #define atomic64_andnot atomic64_andnot
1295 #endif
1296
1297 #if defined(arch_atomic64_fetch_andnot)
1298 static __always_inline s64
1299 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1300 {
1301         __atomic_check_write(v, sizeof(*v));
1302         return arch_atomic64_fetch_andnot(i, v);
1303 }
1304 #define atomic64_fetch_andnot atomic64_fetch_andnot
1305 #endif
1306
1307 #if defined(arch_atomic64_fetch_andnot_acquire)
1308 static __always_inline s64
1309 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1310 {
1311         __atomic_check_write(v, sizeof(*v));
1312         return arch_atomic64_fetch_andnot_acquire(i, v);
1313 }
1314 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1315 #endif
1316
1317 #if defined(arch_atomic64_fetch_andnot_release)
1318 static __always_inline s64
1319 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1320 {
1321         __atomic_check_write(v, sizeof(*v));
1322         return arch_atomic64_fetch_andnot_release(i, v);
1323 }
1324 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1325 #endif
1326
1327 #if defined(arch_atomic64_fetch_andnot_relaxed)
1328 static __always_inline s64
1329 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1330 {
1331         __atomic_check_write(v, sizeof(*v));
1332         return arch_atomic64_fetch_andnot_relaxed(i, v);
1333 }
1334 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1335 #endif
1336
1337 static __always_inline void
1338 atomic64_or(s64 i, atomic64_t *v)
1339 {
1340         __atomic_check_write(v, sizeof(*v));
1341         arch_atomic64_or(i, v);
1342 }
1343 #define atomic64_or atomic64_or
1344
1345 #if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
1346 static __always_inline s64
1347 atomic64_fetch_or(s64 i, atomic64_t *v)
1348 {
1349         __atomic_check_write(v, sizeof(*v));
1350         return arch_atomic64_fetch_or(i, v);
1351 }
1352 #define atomic64_fetch_or atomic64_fetch_or
1353 #endif
1354
1355 #if defined(arch_atomic64_fetch_or_acquire)
1356 static __always_inline s64
1357 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
1358 {
1359         __atomic_check_write(v, sizeof(*v));
1360         return arch_atomic64_fetch_or_acquire(i, v);
1361 }
1362 #define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
1363 #endif
1364
1365 #if defined(arch_atomic64_fetch_or_release)
1366 static __always_inline s64
1367 atomic64_fetch_or_release(s64 i, atomic64_t *v)
1368 {
1369         __atomic_check_write(v, sizeof(*v));
1370         return arch_atomic64_fetch_or_release(i, v);
1371 }
1372 #define atomic64_fetch_or_release atomic64_fetch_or_release
1373 #endif
1374
1375 #if defined(arch_atomic64_fetch_or_relaxed)
1376 static __always_inline s64
1377 atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
1378 {
1379         __atomic_check_write(v, sizeof(*v));
1380         return arch_atomic64_fetch_or_relaxed(i, v);
1381 }
1382 #define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
1383 #endif
1384
1385 static __always_inline void
1386 atomic64_xor(s64 i, atomic64_t *v)
1387 {
1388         __atomic_check_write(v, sizeof(*v));
1389         arch_atomic64_xor(i, v);
1390 }
1391 #define atomic64_xor atomic64_xor
1392
1393 #if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
1394 static __always_inline s64
1395 atomic64_fetch_xor(s64 i, atomic64_t *v)
1396 {
1397         __atomic_check_write(v, sizeof(*v));
1398         return arch_atomic64_fetch_xor(i, v);
1399 }
1400 #define atomic64_fetch_xor atomic64_fetch_xor
1401 #endif
1402
1403 #if defined(arch_atomic64_fetch_xor_acquire)
1404 static __always_inline s64
1405 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
1406 {
1407         __atomic_check_write(v, sizeof(*v));
1408         return arch_atomic64_fetch_xor_acquire(i, v);
1409 }
1410 #define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
1411 #endif
1412
1413 #if defined(arch_atomic64_fetch_xor_release)
1414 static __always_inline s64
1415 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
1416 {
1417         __atomic_check_write(v, sizeof(*v));
1418         return arch_atomic64_fetch_xor_release(i, v);
1419 }
1420 #define atomic64_fetch_xor_release atomic64_fetch_xor_release
1421 #endif
1422
1423 #if defined(arch_atomic64_fetch_xor_relaxed)
1424 static __always_inline s64
1425 atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
1426 {
1427         __atomic_check_write(v, sizeof(*v));
1428         return arch_atomic64_fetch_xor_relaxed(i, v);
1429 }
1430 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
1431 #endif
1432
1433 #if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
1434 static __always_inline s64
1435 atomic64_xchg(atomic64_t *v, s64 i)
1436 {
1437         __atomic_check_write(v, sizeof(*v));
1438         return arch_atomic64_xchg(v, i);
1439 }
1440 #define atomic64_xchg atomic64_xchg
1441 #endif
1442
1443 #if defined(arch_atomic64_xchg_acquire)
1444 static __always_inline s64
1445 atomic64_xchg_acquire(atomic64_t *v, s64 i)
1446 {
1447         __atomic_check_write(v, sizeof(*v));
1448         return arch_atomic64_xchg_acquire(v, i);
1449 }
1450 #define atomic64_xchg_acquire atomic64_xchg_acquire
1451 #endif
1452
1453 #if defined(arch_atomic64_xchg_release)
1454 static __always_inline s64
1455 atomic64_xchg_release(atomic64_t *v, s64 i)
1456 {
1457         __atomic_check_write(v, sizeof(*v));
1458         return arch_atomic64_xchg_release(v, i);
1459 }
1460 #define atomic64_xchg_release atomic64_xchg_release
1461 #endif
1462
1463 #if defined(arch_atomic64_xchg_relaxed)
1464 static __always_inline s64
1465 atomic64_xchg_relaxed(atomic64_t *v, s64 i)
1466 {
1467         __atomic_check_write(v, sizeof(*v));
1468         return arch_atomic64_xchg_relaxed(v, i);
1469 }
1470 #define atomic64_xchg_relaxed atomic64_xchg_relaxed
1471 #endif
1472
1473 #if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
1474 static __always_inline s64
1475 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
1476 {
1477         __atomic_check_write(v, sizeof(*v));
1478         return arch_atomic64_cmpxchg(v, old, new);
1479 }
1480 #define atomic64_cmpxchg atomic64_cmpxchg
1481 #endif
1482
1483 #if defined(arch_atomic64_cmpxchg_acquire)
1484 static __always_inline s64
1485 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
1486 {
1487         __atomic_check_write(v, sizeof(*v));
1488         return arch_atomic64_cmpxchg_acquire(v, old, new);
1489 }
1490 #define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
1491 #endif
1492
1493 #if defined(arch_atomic64_cmpxchg_release)
1494 static __always_inline s64
1495 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
1496 {
1497         __atomic_check_write(v, sizeof(*v));
1498         return arch_atomic64_cmpxchg_release(v, old, new);
1499 }
1500 #define atomic64_cmpxchg_release atomic64_cmpxchg_release
1501 #endif
1502
1503 #if defined(arch_atomic64_cmpxchg_relaxed)
1504 static __always_inline s64
1505 atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
1506 {
1507         __atomic_check_write(v, sizeof(*v));
1508         return arch_atomic64_cmpxchg_relaxed(v, old, new);
1509 }
1510 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
1511 #endif
1512
1513 #if defined(arch_atomic64_try_cmpxchg)
1514 static __always_inline bool
1515 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
1516 {
1517         __atomic_check_write(v, sizeof(*v));
1518         __atomic_check_write(old, sizeof(*old));
1519         return arch_atomic64_try_cmpxchg(v, old, new);
1520 }
1521 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
1522 #endif
1523
1524 #if defined(arch_atomic64_try_cmpxchg_acquire)
1525 static __always_inline bool
1526 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
1527 {
1528         __atomic_check_write(v, sizeof(*v));
1529         __atomic_check_write(old, sizeof(*old));
1530         return arch_atomic64_try_cmpxchg_acquire(v, old, new);
1531 }
1532 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
1533 #endif
1534
1535 #if defined(arch_atomic64_try_cmpxchg_release)
1536 static __always_inline bool
1537 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
1538 {
1539         __atomic_check_write(v, sizeof(*v));
1540         __atomic_check_write(old, sizeof(*old));
1541         return arch_atomic64_try_cmpxchg_release(v, old, new);
1542 }
1543 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
1544 #endif
1545
1546 #if defined(arch_atomic64_try_cmpxchg_relaxed)
1547 static __always_inline bool
1548 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
1549 {
1550         __atomic_check_write(v, sizeof(*v));
1551         __atomic_check_write(old, sizeof(*old));
1552         return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
1553 }
1554 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
1555 #endif
1556
1557 #if defined(arch_atomic64_sub_and_test)
1558 static __always_inline bool
1559 atomic64_sub_and_test(s64 i, atomic64_t *v)
1560 {
1561         __atomic_check_write(v, sizeof(*v));
1562         return arch_atomic64_sub_and_test(i, v);
1563 }
1564 #define atomic64_sub_and_test atomic64_sub_and_test
1565 #endif
1566
1567 #if defined(arch_atomic64_dec_and_test)
1568 static __always_inline bool
1569 atomic64_dec_and_test(atomic64_t *v)
1570 {
1571         __atomic_check_write(v, sizeof(*v));
1572         return arch_atomic64_dec_and_test(v);
1573 }
1574 #define atomic64_dec_and_test atomic64_dec_and_test
1575 #endif
1576
1577 #if defined(arch_atomic64_inc_and_test)
1578 static __always_inline bool
1579 atomic64_inc_and_test(atomic64_t *v)
1580 {
1581         __atomic_check_write(v, sizeof(*v));
1582         return arch_atomic64_inc_and_test(v);
1583 }
1584 #define atomic64_inc_and_test atomic64_inc_and_test
1585 #endif
1586
1587 #if defined(arch_atomic64_add_negative)
1588 static __always_inline bool
1589 atomic64_add_negative(s64 i, atomic64_t *v)
1590 {
1591         __atomic_check_write(v, sizeof(*v));
1592         return arch_atomic64_add_negative(i, v);
1593 }
1594 #define atomic64_add_negative atomic64_add_negative
1595 #endif
1596
1597 #if defined(arch_atomic64_fetch_add_unless)
1598 static __always_inline s64
1599 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
1600 {
1601         __atomic_check_write(v, sizeof(*v));
1602         return arch_atomic64_fetch_add_unless(v, a, u);
1603 }
1604 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
1605 #endif
1606
1607 #if defined(arch_atomic64_add_unless)
1608 static __always_inline bool
1609 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
1610 {
1611         __atomic_check_write(v, sizeof(*v));
1612         return arch_atomic64_add_unless(v, a, u);
1613 }
1614 #define atomic64_add_unless atomic64_add_unless
1615 #endif
1616
1617 #if defined(arch_atomic64_inc_not_zero)
1618 static __always_inline bool
1619 atomic64_inc_not_zero(atomic64_t *v)
1620 {
1621         __atomic_check_write(v, sizeof(*v));
1622         return arch_atomic64_inc_not_zero(v);
1623 }
1624 #define atomic64_inc_not_zero atomic64_inc_not_zero
1625 #endif
1626
1627 #if defined(arch_atomic64_inc_unless_negative)
1628 static __always_inline bool
1629 atomic64_inc_unless_negative(atomic64_t *v)
1630 {
1631         __atomic_check_write(v, sizeof(*v));
1632         return arch_atomic64_inc_unless_negative(v);
1633 }
1634 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
1635 #endif
1636
1637 #if defined(arch_atomic64_dec_unless_positive)
1638 static __always_inline bool
1639 atomic64_dec_unless_positive(atomic64_t *v)
1640 {
1641         __atomic_check_write(v, sizeof(*v));
1642         return arch_atomic64_dec_unless_positive(v);
1643 }
1644 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
1645 #endif
1646
1647 #if defined(arch_atomic64_dec_if_positive)
1648 static __always_inline s64
1649 atomic64_dec_if_positive(atomic64_t *v)
1650 {
1651         __atomic_check_write(v, sizeof(*v));
1652         return arch_atomic64_dec_if_positive(v);
1653 }
1654 #define atomic64_dec_if_positive atomic64_dec_if_positive
1655 #endif
1656
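/*
 * For the macro-based ops below, ptr is evaluated exactly once into __ai_ptr,
 * so any side effects in the ptr expression are not duplicated between the
 * instrumentation check and the arch_ operation.
 */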
1657 #if !defined(arch_xchg_relaxed) || defined(arch_xchg)
1658 #define xchg(ptr, ...)                                          \
1659 ({                                                                      \
1660         typeof(ptr) __ai_ptr = (ptr);                                   \
1661         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1662         arch_xchg(__ai_ptr, __VA_ARGS__);                               \
1663 })
1664 #endif
1665
1666 #if defined(arch_xchg_acquire)
1667 #define xchg_acquire(ptr, ...)                                          \
1668 ({                                                                      \
1669         typeof(ptr) __ai_ptr = (ptr);                                   \
1670         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1671         arch_xchg_acquire(__ai_ptr, __VA_ARGS__);                               \
1672 })
1673 #endif
1674
1675 #if defined(arch_xchg_release)
1676 #define xchg_release(ptr, ...)                                          \
1677 ({                                                                      \
1678         typeof(ptr) __ai_ptr = (ptr);                                   \
1679         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1680         arch_xchg_release(__ai_ptr, __VA_ARGS__);                               \
1681 })
1682 #endif
1683
1684 #if defined(arch_xchg_relaxed)
1685 #define xchg_relaxed(ptr, ...)                                          \
1686 ({                                                                      \
1687         typeof(ptr) __ai_ptr = (ptr);                                   \
1688         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1689         arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);                               \
1690 })
1691 #endif
1692
1693 #if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
1694 #define cmpxchg(ptr, ...)                                               \
1695 ({                                                                      \
1696         typeof(ptr) __ai_ptr = (ptr);                                   \
1697         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1698         arch_cmpxchg(__ai_ptr, __VA_ARGS__);                            \
1699 })
1700 #endif
1701
1702 #if defined(arch_cmpxchg_acquire)
1703 #define cmpxchg_acquire(ptr, ...)                                               \
1704 ({                                                                      \
1705         typeof(ptr) __ai_ptr = (ptr);                                   \
1706         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1707         arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);                            \
1708 })
1709 #endif
1710
1711 #if defined(arch_cmpxchg_release)
1712 #define cmpxchg_release(ptr, ...)                                               \
1713 ({                                                                      \
1714         typeof(ptr) __ai_ptr = (ptr);                                   \
1715         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1716         arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);                            \
1717 })
1718 #endif
1719
1720 #if defined(arch_cmpxchg_relaxed)
1721 #define cmpxchg_relaxed(ptr, ...)                                               \
1722 ({                                                                      \
1723         typeof(ptr) __ai_ptr = (ptr);                                   \
1724         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1725         arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);                            \
1726 })
1727 #endif
1728
1729 #if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
1730 #define cmpxchg64(ptr, ...)                                             \
1731 ({                                                                      \
1732         typeof(ptr) __ai_ptr = (ptr);                                   \
1733         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1734         arch_cmpxchg64(__ai_ptr, __VA_ARGS__);                          \
1735 })
1736 #endif
1737
1738 #if defined(arch_cmpxchg64_acquire)
1739 #define cmpxchg64_acquire(ptr, ...)                                             \
1740 ({                                                                      \
1741         typeof(ptr) __ai_ptr = (ptr);                                   \
1742         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1743         arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);                          \
1744 })
1745 #endif
1746
1747 #if defined(arch_cmpxchg64_release)
1748 #define cmpxchg64_release(ptr, ...)                                             \
1749 ({                                                                      \
1750         typeof(ptr) __ai_ptr = (ptr);                                   \
1751         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1752         arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);                          \
1753 })
1754 #endif
1755
1756 #if defined(arch_cmpxchg64_relaxed)
1757 #define cmpxchg64_relaxed(ptr, ...)                                             \
1758 ({                                                                      \
1759         typeof(ptr) __ai_ptr = (ptr);                                   \
1760         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1761         arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);                          \
1762 })
1763 #endif
1764
1765 #define cmpxchg_local(ptr, ...)                                         \
1766 ({                                                                      \
1767         typeof(ptr) __ai_ptr = (ptr);                                   \
1768         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1769         arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);                              \
1770 })
1771
1772 #define cmpxchg64_local(ptr, ...)                                               \
1773 ({                                                                      \
1774         typeof(ptr) __ai_ptr = (ptr);                                   \
1775         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1776         arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);                            \
1777 })
1778
1779 #define sync_cmpxchg(ptr, ...)                                          \
1780 ({                                                                      \
1781         typeof(ptr) __ai_ptr = (ptr);                                   \
1782         __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));              \
1783         arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);                               \
1784 })
1785
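/*
 * cmpxchg_double() operates on a pair of adjacent words, hence the
 * instrumentation covers 2 * sizeof(*__ai_ptr) bytes.
 */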
1786 #define cmpxchg_double(ptr, ...)                                                \
1787 ({                                                                      \
1788         typeof(ptr) __ai_ptr = (ptr);                                   \
1789         __atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));          \
1790         arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);                             \
1791 })
1792
1793
1794 #define cmpxchg_double_local(ptr, ...)                                          \
1795 ({                                                                      \
1796         typeof(ptr) __ai_ptr = (ptr);                                   \
1797         __atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));          \
1798         arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);                               \
1799 })
1800
1801 #endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
1802 // 7b7e2af0e75c8ecb6f02298a7075f503f30d244c