// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality, an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file then provides atomic_read(), which forwards
 * to arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), then it needs to
 * use the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to
 * avoid double instrumentation.
 */
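
/*
 * Illustrative sketch only (the arch and file below are hypothetical, not
 * part of this header): an arch's asm/atomic.h is expected to look roughly
 * like
 *
 *   static __always_inline int arch_atomic_read(const atomic_t *v)
 *   {
 *           return READ_ONCE(v->counter);
 *   }
 *
 *   static __always_inline void arch_atomic_set(atomic_t *v, int i)
 *   {
 *           WRITE_ONCE(v->counter, i);
 *   }
 *
 *   ... the remaining arch_atomic*() and arch_(cmp)xchg() operations ...
 *
 *   #include <asm-generic/atomic-instrumented.h>
 *
 * so that the instrumented wrappers defined below can call the
 * instrument_atomic_*() hooks and then forward to the arch_ implementations.
 */
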
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>

static __always_inline int
atomic_read(const atomic_t *v)
{
        instrument_atomic_read(v, sizeof(*v));
        return arch_atomic_read(v);
}

static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
        instrument_atomic_read(v, sizeof(*v));
        return arch_atomic_read_acquire(v);
}

static __always_inline void
atomic_set(atomic_t *v, int i)
{
        instrument_atomic_write(v, sizeof(*v));
        arch_atomic_set(v, i);
}

static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
        instrument_atomic_write(v, sizeof(*v));
        arch_atomic_set_release(v, i);
}

static __always_inline void
atomic_add(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_add(i, v);
}

static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_add_return(i, v);
}

static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_add_return_acquire(i, v);
}

static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_add_return_release(i, v);
}

static __always_inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_add_return_relaxed(i, v);
}

static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_add(i, v);
}

static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_add_acquire(i, v);
}

static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_add_release(i, v);
}

static __always_inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
atomic_sub(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_sub(i, v);
}

static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_sub_return(i, v);
}

static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_sub_return_acquire(i, v);
}

static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_sub_return_release(i, v);
}

static __always_inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_sub_return_relaxed(i, v);
}

static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_sub(i, v);
}

static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_sub_acquire(i, v);
}

static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_sub_release(i, v);
}

static __always_inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
atomic_inc(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_inc(v);
}

static __always_inline int
atomic_inc_return(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_inc_return(v);
}

static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_inc_return_acquire(v);
}

static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_inc_return_release(v);
}

static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_inc_return_relaxed(v);
}

static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_inc(v);
}

static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_inc_acquire(v);
}

static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_inc_release(v);
}

static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_inc_relaxed(v);
}

static __always_inline void
atomic_dec(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_dec(v);
}

static __always_inline int
atomic_dec_return(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_dec_return(v);
}

static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_dec_return_acquire(v);
}

static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_dec_return_release(v);
}

static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_dec_return_relaxed(v);
}

static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_dec(v);
}

static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_dec_acquire(v);
}

static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_dec_release(v);
}

static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_dec_relaxed(v);
}

static __always_inline void
atomic_and(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_and(i, v);
}

static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_and(i, v);
}

static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_and_acquire(i, v);
}

static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_and_release(i, v);
}

static __always_inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_andnot(i, v);
}

static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot(i, v);
}

static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot_release(i, v);
}

static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic_or(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_or(i, v);
}

static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_or(i, v);
}

static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_or_acquire(i, v);
}

static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_or_release(i, v);
}

static __always_inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic_xor(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic_xor(i, v);
}

static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_xor(i, v);
}

static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_xor_acquire(i, v);
}

static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_xor_release(i, v);
}

static __always_inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_xor_relaxed(i, v);
}

static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_xchg(v, i);
}

static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_xchg_acquire(v, i);
}

static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_xchg_release(v, i);
}

static __always_inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_xchg_relaxed(v, i);
}

static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_cmpxchg_release(v, old, new);
}

static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg_acquire(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg_release(v, old, new);
}

static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_dec_and_test(v);
}

static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_inc_and_test(v);
}

static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_add_negative(i, v);
}

static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_add_unless(v, a, u);
}

static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_inc_not_zero(v);
}

static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_inc_unless_negative(v);
}

static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_dec_unless_positive(v);
}

static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic_dec_if_positive(v);
}

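/*
 * The atomic64_t wrappers below mirror the atomic_t wrappers above: they are
 * instrumented in the same way and forward to the arch_atomic64_*() ops.
 */
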
static __always_inline s64
atomic64_read(const atomic64_t *v)
{
        instrument_atomic_read(v, sizeof(*v));
        return arch_atomic64_read(v);
}

static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
        instrument_atomic_read(v, sizeof(*v));
        return arch_atomic64_read_acquire(v);
}

static __always_inline void
atomic64_set(atomic64_t *v, s64 i)
{
        instrument_atomic_write(v, sizeof(*v));
        arch_atomic64_set(v, i);
}

static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
        instrument_atomic_write(v, sizeof(*v));
        arch_atomic64_set_release(v, i);
}

static __always_inline void
atomic64_add(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_add(i, v);
}

static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_add_return(i, v);
}

static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_add_return_acquire(i, v);
}

static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_add_return_release(i, v);
}

static __always_inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_add_return_relaxed(i, v);
}

static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_add(i, v);
}

static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_release(i, v);
}

static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
atomic64_sub(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_sub(i, v);
}

static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_sub_return(i, v);
}

static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_sub_return_acquire(i, v);
}

static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_sub_return_release(i, v);
}

static __always_inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_sub_return_relaxed(i, v);
}

static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub(i, v);
}

static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub_release(i, v);
}

static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_sub_relaxed(i, v);
}

static __always_inline void
atomic64_inc(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_inc(v);
}

static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_inc_return(v);
}

static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_inc_return_acquire(v);
}

static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_inc_return_release(v);
}

static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_inc_return_relaxed(v);
}

static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc(v);
}

static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc_acquire(v);
}

static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc_release(v);
}

static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
atomic64_dec(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_dec(v);
}

static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_dec_return(v);
}

static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_dec_return_acquire(v);
}

static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_dec_return_release(v);
}

static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_dec_return_relaxed(v);
}

static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec(v);
}

static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec_acquire(v);
}

static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec_release(v);
}

static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_dec_relaxed(v);
}

static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_and(i, v);
}

static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_and(i, v);
}

static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_and_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_and_release(i, v);
}

static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_andnot(i, v);
}

static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot_release(i, v);
}

static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_or(i, v);
}

static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_or(i, v);
}

static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_or_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_or_release(i, v);
}

static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        arch_atomic64_xor(i, v);
}

static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor(i, v);
}

static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor_release(i, v);
}

static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_xor_relaxed(i, v);
}

static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_xchg(v, i);
}

static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_xchg_acquire(v, i);
}

static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_xchg_release(v, i);
}

static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_xchg_relaxed(v, i);
}

static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg_release(v, old, new);
}

static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
        instrument_atomic_read_write(v, sizeof(*v));
        instrument_atomic_read_write(old, sizeof(*old));
        return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}

static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_dec_and_test(v);
}

static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_inc_and_test(v);
}

static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_add_negative(i, v);
}

static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_add_unless(v, a, u);
}

static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_inc_not_zero(v);
}

static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_inc_unless_negative(v);
}

static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_dec_unless_positive(v);
}

static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
        instrument_atomic_read_write(v, sizeof(*v));
        return arch_atomic64_dec_if_positive(v);
}

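/*
 * The type-generic xchg()/cmpxchg() family below is implemented as macros:
 * each evaluates its pointer argument exactly once, instruments the access,
 * and then forwards to the corresponding arch_ macro.
 */
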
#define xchg(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_xchg(__ai_ptr, __VA_ARGS__); \
})

#define xchg_acquire(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_xchg_acquire(__ai_ptr, __VA_ARGS__); \
})

#define xchg_release(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_xchg_release(__ai_ptr, __VA_ARGS__); \
})

#define xchg_relaxed(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_xchg_relaxed(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_acquire(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_release(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg_release(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_relaxed(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg64(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_acquire(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_release(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_relaxed(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__); \
})

#define try_cmpxchg(ptr, oldp, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        typeof(oldp) __ai_oldp = (oldp); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
        arch_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_acquire(ptr, oldp, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        typeof(oldp) __ai_oldp = (oldp); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
        arch_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_release(ptr, oldp, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        typeof(oldp) __ai_oldp = (oldp); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
        arch_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define try_cmpxchg_relaxed(ptr, oldp, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        typeof(oldp) __ai_oldp = (oldp); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp)); \
        arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__); \
})

#define cmpxchg_local(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg_local(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg64_local(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \
})

#define sync_cmpxchg(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr)); \
        arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \
})

#define cmpxchg_double(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
        arch_cmpxchg_double(__ai_ptr, __VA_ARGS__); \
})


#define cmpxchg_double_local(ptr, ...) \
({ \
        typeof(ptr) __ai_ptr = (ptr); \
        instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \
        arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__); \
})

#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
// 1d7c3a25aca5c7fb031c307be4c3d24c7b48fcd5