arch/arm64/include/asm/barrier.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

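/*
 * __nops(n) expands to an assembler .rept block that emits 'n' NOP
 * instructions; nops(n) wraps it in an inline asm statement.
 */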
#define __nops(n)       ".rept  " #n "\nnop\n.endr\n"
#define nops(n)         asm volatile(__nops(n))

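/*
 * Wrappers around the SEV (send event), WFE (wait for event) and
 * WFI (wait for interrupt) hint instructions.
 */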
#define sev()           asm volatile("sev" : : : "memory")
#define wfe()           asm volatile("wfe" : : : "memory")
#define wfi()           asm volatile("wfi" : : : "memory")

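/*
 * ISB is an instruction synchronization barrier; DMB and DSB are the data
 * memory/synchronization barriers, parameterised by a shareability domain
 * and access-type option (sy, ld, st, ish, ishld, ishst, osh, ...).
 */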
#define isb()           asm volatile("isb" : : : "memory")
#define dmb(opt)        asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)        asm volatile("dsb " #opt : : : "memory")

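/*
 * "hint #17" encodes PSB CSYNC (profiling synchronization barrier, used by
 * the Statistical Profiling Extension) and "hint #20" encodes CSDB
 * (consumption of speculative data barrier); both execute as NOPs on CPUs
 * that do not implement them.
 */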
#define psb_csync()     asm volatile("hint #17" : : : "memory")
#define csdb()          asm volatile("hint #20" : : : "memory")

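/*
 * Speculation barrier: the SB instruction (plus a NOP to keep the
 * alternative the same size) where the CPU advertises ARM64_HAS_SB,
 * otherwise a DSB NSH; ISB sequence.
 */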
#define spec_bar()      asm volatile(ALTERNATIVE("dsb nsh\nisb\n",              \
                                                 SB_BARRIER_INSN"nop\n",        \
                                                 ARM64_HAS_SB))

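/*
 * Synchronize an update to the GIC priority mask (ICC_PMR_EL1) when
 * pseudo-NMIs are in use; the DSB is only emitted on systems where the
 * gic_pmr_sync static key says it is required.
 */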
#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()                                              \
        do {                                                    \
                extern struct static_key_false gic_pmr_sync;    \
                                                                \
                if (static_branch_unlikely(&gic_pmr_sync))      \
                        dsb(sy);                                \
        } while (0)
#else
#define pmr_sync()      do {} while (0)
#endif

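/*
 * Mandatory barriers map to full-system DSBs; the dma_* variants below only
 * need to order accesses against DMA-capable devices and use outer-shareable
 * DMBs instead.
 */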
#define mb()            dsb(sy)
#define rmb()           dsb(ld)
#define wmb()           dsb(st)

#define dma_mb()        dmb(osh)
#define dma_rmb()       dmb(oshld)
#define dma_wmb()       dmb(oshst)

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
                                                    unsigned long sz)
{
        unsigned long mask;

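        /*
         * CMP sets the carry flag iff idx >= sz (unsigned); SBC with xzr
         * then produces all-ones when the carry is clear (idx < sz) and
         * zero otherwise. The CSDB afterwards prevents the mask from being
         * consumed under speculation based on a mispredicted comparison.
         */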
        asm volatile(
        "       cmp     %1, %2\n"
        "       sbc     %0, xzr, xzr\n"
        : "=r" (mask)
        : "r" (idx), "Ir" (sz)
        : "cc");

        csdb();
        return mask;
}

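/* SMP barriers only need to order accesses within the inner-shareable domain. */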
#define __smp_mb()      dmb(ish)
#define __smp_rmb()     dmb(ishld)
#define __smp_wmb()     dmb(ishst)

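/*
 * __smp_store_release() dispatches on the size of *p and uses the STLR
 * family of store-release instructions; the union holds the value with any
 * 'const'/'volatile' qualifiers stripped (via __unqual_scalar_typeof) so it
 * can be handed to the asm as a plain register operand.
 */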
#define __smp_store_release(p, v)                                       \
do {                                                                    \
        typeof(p) __p = (p);                                            \
        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =  \
                { .__val = (__force __unqual_scalar_typeof(*p)) (v) };  \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_write(__p, sizeof(*p));                             \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("stlrb %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u8 *)__u.__c)                \
                                : "memory");                            \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("stlrh %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u16 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u32 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("stlr %1, %0"                             \
                                : "=Q" (*__p)                           \
                                : "r" (*(__u64 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        }                                                               \
} while (0)

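/*
 * __smp_load_acquire() is the mirror image: a size-dispatched LDAR
 * (load-acquire) into a local union, returned as the unqualified type.
 */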
#define __smp_load_acquire(p)                                           \
({                                                                      \
        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;   \
        typeof(p) __p = (p);                                            \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_read(__p, sizeof(*p));                              \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("ldarb %w0, %1"                           \
                        : "=r" (*(__u8 *)__u.__c)                       \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("ldarh %w0, %1"                           \
                        : "=r" (*(__u16 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (*(__u32 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("ldar %0, %1"                             \
                        : "=r" (*(__u64 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        }                                                               \
        (typeof(*p))__u.__val;                                          \
})

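/*
 * The smp_cond_load_*() loops below avoid busy-polling: when the condition
 * is not yet true, __cmpwait_relaxed() uses an exclusive load plus WFE so
 * the CPU can sleep until *ptr is written (or an event is sent).
 * Illustrative usage, waiting for another CPU to set a flag with acquire
 * semantics:
 *
 *	smp_cond_load_acquire(&flag, VAL != 0);
 */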
#define smp_cond_load_relaxed(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = READ_ONCE(*__PTR);                                \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        (typeof(*ptr))VAL;                                              \
})

#define smp_cond_load_acquire(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = smp_load_acquire(__PTR);                          \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        (typeof(*ptr))VAL;                                              \
})

#include <asm-generic/barrier.h>

#endif  /* __ASSEMBLY__ */

#endif  /* __ASM_BARRIER_H */