87584379da43a2083adf1e9bfbc26b8b128738dc
[linux-2.6-microblaze.git] / include / asm-generic / rwonce.h
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Prevent the compiler from merging or refetching reads or writes. The
4  * compiler is also forbidden from reordering successive instances of
5  * READ_ONCE and WRITE_ONCE, but only when the compiler is aware of some
6  * particular ordering. One way to make the compiler aware of ordering is to
7  * put the two invocations of READ_ONCE or WRITE_ONCE in different C
8  * statements.
9  *
10  * These two macros will also work on aggregate data types like structs or
11  * unions.
12  *
13  * Their two major use cases are: (1) Mediating communication between
14  * process-level code and irq/NMI handlers, all running on the same CPU,
15  * and (2) Ensuring that the compiler does not fold, spindle, or otherwise
16  * mutilate accesses that either do not require ordering or that interact
17  * with an explicit memory barrier or atomic instruction that provides the
18  * required ordering.
19  */
20 #ifndef __ASM_GENERIC_RWONCE_H
21 #define __ASM_GENERIC_RWONCE_H
22
23 #ifndef __ASSEMBLY__
24
25 #include <linux/compiler_types.h>
26 #include <linux/kasan-checks.h>
27 #include <linux/kcsan-checks.h>
28
29 #include <asm/barrier.h>
30
31 /*
32  * Yes, this permits 64-bit accesses on 32-bit architectures. These will
33  * actually be atomic in some cases (namely Armv7 + LPAE), but for others we
34  * rely on the access being split into 2x32-bit accesses for a 32-bit quantity
35  * (e.g. a virtual address) and a strong prevailing wind.
36  */
/*
 * Build-time guard used by READ_ONCE()/WRITE_ONCE(): the accessed object
 * must be a native machine word (per __native_word()) or exactly the size
 * of long long (64 bits, see the comment above about 32-bit architectures).
 * Any other size fails the build with the message below.
 */
37 #define compiletime_assert_rwonce_type(t)                                       \
38         compiletime_assert(__native_word(t) || sizeof(t) == sizeof(long long),  \
39                 "Unsupported access size for {READ,WRITE}_ONCE().")
40
41 /*
42  * Use __READ_ONCE() instead of READ_ONCE() if you do not require any
43  * atomicity or dependency ordering guarantees. Note that this may result
44  * in tears!
45  */
46 #define __READ_ONCE(x)  (*(const volatile __unqual_scalar_typeof(x) *)&(x))
47
/*
 * READ_ONCE() body for scalar types: do the volatile load, then issue
 * smp_read_barrier_depends() to provide address-dependency ordering on
 * architectures that need it (historically Alpha; a no-op elsewhere).
 * The final cast restores the expression to typeof(x) for the caller.
 */
48 #define __READ_ONCE_SCALAR(x)                                           \
49 ({                                                                      \
50         __unqual_scalar_typeof(x) __x = __READ_ONCE(x);                 \
51         smp_read_barrier_depends();                                     \
52         (typeof(x))__x;                                                 \
53 })
54
/*
 * READ_ONCE(x): read x exactly once, with the size of the access checked
 * at build time (native word or 64 bits) before delegating to
 * __READ_ONCE_SCALAR(). See the header comment for the full contract.
 */
55 #define READ_ONCE(x)                                                    \
56 ({                                                                      \
57         compiletime_assert_rwonce_type(x);                              \
58         __READ_ONCE_SCALAR(x);                                          \
59 })
60
/*
 * Unchecked single store: write val to x through a volatile pointer so
 * the compiler cannot merge, tear (for native word sizes), or reorder
 * the store with other volatile accesses. No size check is performed
 * here; use WRITE_ONCE() for the checked variant.
 */
61 #define __WRITE_ONCE(x, val)                                            \
62 do {                                                                    \
63         *(volatile typeof(x) *)&(x) = (val);                            \
64 } while (0)
65
/*
 * WRITE_ONCE(x, val): write x exactly once, with the size of the access
 * checked at build time (native word or 64 bits) before delegating to
 * __WRITE_ONCE(). See the header comment for the full contract.
 */
66 #define WRITE_ONCE(x, val)                                              \
67 do {                                                                    \
68         compiletime_assert_rwonce_type(x);                              \
69         __WRITE_ONCE(x, val);                                           \
70 } while (0)
71
/*
 * Load one unsigned long from @addr via __READ_ONCE() without sanitizer
 * instrumentation: __no_sanitize_or_inline suppresses KASAN/KCSAN checks
 * on this access (or forces inlining when sanitizers are off). Helper
 * for READ_ONCE_NOCHECK() below.
 */
72 static __no_sanitize_or_inline
73 unsigned long __read_once_word_nocheck(const void *addr)
74 {
75         return __READ_ONCE(*(unsigned long *)addr);
76 }
77
78 /*
79  * Use READ_ONCE_NOCHECK() instead of READ_ONCE() if you need to load a
80  * word from memory atomically but without telling KASAN/KCSAN. This is
81  * usually used by unwinding code when walking the stack of a running process.
82  */
/*
 * Like READ_ONCE() but restricted to exactly word-sized objects and
 * performed through the uninstrumented helper above, so KASAN/KCSAN do
 * not report the access. The dependency barrier and the cast back to
 * typeof(x) mirror __READ_ONCE_SCALAR().
 */
83 #define READ_ONCE_NOCHECK(x)                                            \
84 ({                                                                      \
85         unsigned long __x;                                              \
86         compiletime_assert(sizeof(x) == sizeof(__x),                    \
87                 "Unsupported access size for READ_ONCE_NOCHECK().");    \
88         __x = __read_once_word_nocheck(&(x));                           \
89         smp_read_barrier_depends();                                     \
90         (typeof(x))__x;                                                 \
91 })
92
/*
 * Load one unsigned long from @addr with KASAN instrumentation disabled
 * for the load itself (__no_kasan_or_inline), but with an explicit
 * kasan_check_read() of a single byte first.
 *
 * NOTE(review): only the first byte is KASAN-checked, not the full word;
 * presumably callers (word-at-a-time string handling) may intentionally
 * read past the end of the object while only the start is guaranteed
 * valid — confirm against callers before relying on this.
 */
93 static __no_kasan_or_inline
94 unsigned long read_word_at_a_time(const void *addr)
95 {
96         kasan_check_read(addr, 1);
97         return *(unsigned long *)addr;
98 }
99
100 #endif /* __ASSEMBLY__ */
101 #endif  /* __ASM_GENERIC_RWONCE_H */