/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/llsc.h>
#include <asm/war.h>

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile time due to __compiletime_error, if supported by
 *   your compiler,
 *
 * or:
 *
 * - Get an error at link time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
        __compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
        __compiletime_error("Bad argument size for xchg");
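
/*
 * Illustrative example (not part of this header's API): on a 32-bit
 * kernel the case-8 branch of __xchg() below resolves to the missing
 * function, so a 64-bit exchange fails at build time instead of
 * silently tearing the access:
 *
 *	u64 v;
 *	xchg(&v, 0ULL);		--> "Bad argument size for xchg"
 */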

#define __xchg_asm(ld, st, m, val)                                      \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                loongson_llsc_mb();                                     \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "1:     " ld "  %0, %2          # __xchg_asm    \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z3                         \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __SC_BEQZ  "$1, 1b                         \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)                  \
                : __LLSC_CLOBBER);                                      \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                *m = val;                                               \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})
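
/*
 * Conceptually (an illustrative C sketch, not the generated code), the
 * LL/SC branch of __xchg_asm() behaves like the loop below, where the
 * conditional store only succeeds if nothing wrote *m since the linked
 * load ("try_store" is a made-up name for sc's semantics):
 *
 *	do {
 *		__ret = *m;			// ll: load-linked
 *	} while (!try_store(m, val));		// sc: 0 on intervening write
 */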

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
                                  unsigned int size);
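
/*
 * (Defined in arch/mips/kernel/cmpxchg.c: 1- and 2-byte exchanges are
 * emulated with a 32-bit ll/sc on the naturally aligned word containing
 * the value, shifting and masking the sub-word lane.)
 */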

static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
                                   int size)
{
        switch (size) {
        case 1:
        case 2:
                return __xchg_small(ptr, x, size);

        case 4:
                return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

        case 8:
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __xchg_called_with_bad_pointer();

                return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

        default:
                return __xchg_called_with_bad_pointer();
        }
}

#define xchg(ptr, x)                                                    \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        smp_mb__before_llsc();                                          \
                                                                        \
        __res = (__typeof__(*(ptr)))                                    \
                __xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));      \
                                                                        \
        smp_llsc_mb();                                                  \
                                                                        \
        __res;                                                          \
})
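
/*
 * Usage sketch (illustrative only, with a hypothetical lock variable):
 * because of the barriers above, xchg() is fully ordered and can stand
 * in for a simple test-and-set lock:
 *
 *	static unsigned int slock;
 *
 *	while (xchg(&slock, 1))		// returns the previous value
 *		cpu_relax();
 *	// ...critical section...
 *	smp_store_release(&slock, 0);
 */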

#define __cmpxchg_asm(ld, st, m, old, new)                              \
({                                                                      \
        __typeof(*(m)) __ret;                                           \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                loongson_llsc_mb();                                     \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    noat                            \n"     \
                "       .set    push                            \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "1:     " ld "  %0, %2          # __cmpxchg_asm \n"     \
                "       bne     %0, %z3, 2f                     \n"     \
                "       .set    pop                             \n"     \
                "       move    $1, %z4                         \n"     \
                "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"     \
                "       " st "  $1, %1                          \n"     \
                "\t" __SC_BEQZ  "$1, 1b                         \n"     \
                "       .set    pop                             \n"     \
                "2:                                             \n"     \
                : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)           \
                : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)      \
                : __LLSC_CLOBBER);                                      \
                loongson_llsc_mb();                                     \
        } else {                                                        \
                unsigned long __flags;                                  \
                                                                        \
                raw_local_irq_save(__flags);                            \
                __ret = *m;                                             \
                if (__ret == old)                                       \
                        *m = new;                                       \
                raw_local_irq_restore(__flags);                         \
        }                                                               \
                                                                        \
        __ret;                                                          \
})
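
/*
 * Note the "bne %0, %z3, 2f" above: on a compare failure, execution
 * falls through to label 2 with the freshly loaded value in __ret, so
 * callers detect success by comparing the return value against 'old':
 *
 *	prev = __cmpxchg_asm("ll", "sc", p, o, n);
 *	success = (prev == o);
 */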

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
                                     unsigned long new, unsigned int size);

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
                                      unsigned long new, unsigned int size)
{
        switch (size) {
        case 1:
        case 2:
                return __cmpxchg_small(ptr, old, new, size);

        case 4:
                return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
                                     (u32)old, new);

        case 8:
                /* lld/scd are only available for MIPS64 */
                if (!IS_ENABLED(CONFIG_64BIT))
                        return __cmpxchg_called_with_bad_pointer();

                return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
                                     (u64)old, new);

        default:
                return __cmpxchg_called_with_bad_pointer();
        }
}

#define cmpxchg_local(ptr, old, new)                                    \
        ((__typeof__(*(ptr)))                                           \
                __cmpxchg((ptr),                                        \
                          (unsigned long)(__typeof__(*(ptr)))(old),     \
                          (unsigned long)(__typeof__(*(ptr)))(new),     \
                          sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)                                          \
({                                                                      \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        smp_mb__before_llsc();                                          \
        __res = cmpxchg_local((ptr), (old), (new));                     \
        smp_llsc_mb();                                                  \
                                                                        \
        __res;                                                          \
})
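
/*
 * Usage sketch (illustrative only, with a hypothetical counter): the
 * classic cmpxchg() retry loop, here a saturating increment:
 *
 *	u32 old, new;
 *	do {
 *		old = READ_ONCE(counter);
 *		new = (old == U32_MAX) ? old : old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */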
188
189 #ifdef CONFIG_64BIT
190 #define cmpxchg64_local(ptr, o, n)                                      \
191   ({                                                                    \
192         BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
193         cmpxchg_local((ptr), (o), (n));                                 \
194   })
195
196 #define cmpxchg64(ptr, o, n)                                            \
197   ({                                                                    \
198         BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
199         cmpxchg((ptr), (o), (n));                                       \
200   })
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
                                        unsigned long long old,
                                        unsigned long long new)
{
        unsigned long long tmp, ret;
        unsigned long flags;

        /*
         * The assembly below has to combine 32 bit values into a 64 bit
         * register, and split 64 bit values from one register into two. If we
         * were to take an interrupt in the middle of this we'd only save the
         * least significant 32 bits of each register & probably clobber the
         * most significant 32 bits of the 64 bit values we're using. In order
         * to avoid this we must disable interrupts.
         */
        local_irq_save(flags);

        loongson_llsc_mb();
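        /*
         * In the asm below, the %L<n> and %M<n> operand modifiers pick
         * the registers holding the low and high 32-bit halves of a
         * 64-bit (two-register) operand.
         */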
        asm volatile(
        "       .set    push                            \n"
        "       .set    " MIPS_ISA_ARCH_LEVEL "         \n"
        /* Load 64 bits from ptr */
        "1:     lld     %L0, %3         # __cmpxchg64   \n"
        /*
         * Split the 64 bit value we loaded into the 2 registers that hold the
         * ret variable.
         */
        "       dsra    %M0, %L0, 32                    \n"
        "       sll     %L0, %L0, 0                     \n"
        /*
         * Compare ret against old, breaking out of the loop if they don't
         * match.
         */
        "       bne     %M0, %M4, 2f                    \n"
        "       bne     %L0, %L4, 2f                    \n"
        /*
         * Combine the 32 bit halves from the 2 registers that hold the new
         * variable into a single 64 bit register.
         */
#  if MIPS_ISA_REV >= 2
        "       move    %L1, %L5                        \n"
        "       dins    %L1, %M5, 32, 32                \n"
#  else
        "       dsll    %L1, %L5, 32                    \n"
        "       dsrl    %L1, %L1, 32                    \n"
        "       .set    noat                            \n"
        "       dsll    $at, %M5, 32                    \n"
        "       or      %L1, %L1, $at                   \n"
        "       .set    at                              \n"
#  endif
        /* Attempt to store new at ptr */
        "       scd     %L1, %2                         \n"
        /* If we failed, loop! */
        "\t" __SC_BEQZ "%L1, 1b                         \n"
        "       .set    pop                             \n"
        "2:                                             \n"
        : "=&r"(ret),
          "=&r"(tmp),
          "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
        : GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
          "r" (old),
          "r" (new)
        : "memory");
        loongson_llsc_mb();

        local_irq_restore(flags);
        return ret;
}

#  define cmpxchg64(ptr, o, n) ({                                       \
        unsigned long long __old = (__typeof__(*(ptr)))(o);             \
        unsigned long long __new = (__typeof__(*(ptr)))(n);             \
        __typeof__(*(ptr)) __res;                                       \
                                                                        \
        /*                                                              \
         * We can only use cmpxchg64 if we know that the CPU supports   \
         * 64-bits, i.e. lld & scd. Our call to __cmpxchg64_unsupported \
         * will cause a build error unless cpu_has_64bits is a          \
         * compile-time constant 1.                                     \
         */                                                             \
        if (cpu_has_64bits && kernel_uses_llsc) {                       \
                smp_mb__before_llsc();                                  \
                __res = __cmpxchg64((ptr), __old, __new);               \
                smp_llsc_mb();                                          \
        } else {                                                        \
                __res = __cmpxchg64_unsupported();                      \
        }                                                               \
                                                                        \
        __res;                                                          \
})
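
/*
 * Usage sketch (illustrative only, with a hypothetical 64-bit counter
 * on a 32-bit SMP kernel). A torn 64-bit read here is harmless: it
 * merely fails the compare and the loop retries:
 *
 *	u64 old, new;
 *	do {
 *		old = READ_ONCE(counter64);
 *		new = old + 1;
 *	} while (cmpxchg64(&counter64, old, new) != old);
 */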

# else /* !CONFIG_SMP */
#  define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#endif /* __ASM_CMPXCHG_H */