MIPS: atomic: Fix whitespace in ATOMIC_OP macros
arch/mips/include/asm/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/llsc.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)        WRITE_ONCE((v)->counter, (i))

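/*
 * Illustrative usage (a sketch, not part of this header; the names below
 * are hypothetical):
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	atomic_set(&example_count, 5);
 *	pr_info("count = %d\n", atomic_read(&example_count));
 */
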
#define ATOMIC_OP(op, c_op, asm_op)                                     \
static __inline__ void atomic_##op(int i, atomic_t * v)                 \
{                                                                       \
        if (kernel_uses_llsc) {                                         \
                int temp;                                               \
                                                                        \
                loongson_llsc_mb();                                     \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    "MIPS_ISA_LEVEL"                \n"     \
                "1:     ll      %0, %1  # atomic_" #op "        \n"     \
                "       " #asm_op " %0, %2                      \n"     \
                "       sc      %0, %1                          \n"     \
                "\t" __SC_BEQZ "%0, 1b                          \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)    \
                : "Ir" (i) : __LLSC_CLOBBER);                           \
        } else {                                                        \
                unsigned long flags;                                    \
                                                                        \
                raw_local_irq_save(flags);                              \
                v->counter c_op i;                                      \
                raw_local_irq_restore(flags);                           \
        }                                                               \
}

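/*
 * What the LL/SC loop above does, as a C-level sketch (illustrative only;
 * the real implementation is the inline asm):
 *
 *	do {
 *		temp = v->counter;		// ll: load-linked
 *		temp = temp <op> i;		// asm_op on the loaded value
 *	} while (!sc(&v->counter, temp));	// sc fails, and we retry, if
 *						// another CPU wrote v->counter
 *						// since the ll
 *
 * The store-conditional only succeeding when the location is untouched is
 * what makes the whole read-modify-write atomic.
 */
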
#define ATOMIC_OP_RETURN(op, c_op, asm_op)                              \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
{                                                                       \
        int result;                                                     \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                int temp;                                               \
                                                                        \
                loongson_llsc_mb();                                     \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    "MIPS_ISA_LEVEL"                \n"     \
                "1:     ll      %1, %2  # atomic_" #op "_return \n"     \
                "       " #asm_op " %0, %1, %3                  \n"     \
                "       sc      %0, %2                          \n"     \
                "\t" __SC_BEQZ "%0, 1b                          \n"     \
                "       " #asm_op " %0, %1, %3                  \n"     \
                "       .set    pop                             \n"     \
                : "=&r" (result), "=&r" (temp),                         \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                  \
                : "Ir" (i) : __LLSC_CLOBBER);                           \
        } else {                                                        \
                unsigned long flags;                                    \
                                                                        \
                raw_local_irq_save(flags);                              \
                result = v->counter;                                    \
                result c_op i;                                          \
                v->counter = result;                                    \
                raw_local_irq_restore(flags);                           \
        }                                                               \
                                                                        \
        return result;                                                  \
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op)                               \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)  \
{                                                                       \
        int result;                                                     \
                                                                        \
        if (kernel_uses_llsc) {                                         \
                int temp;                                               \
                                                                        \
                loongson_llsc_mb();                                     \
                __asm__ __volatile__(                                   \
                "       .set    push                            \n"     \
                "       .set    "MIPS_ISA_LEVEL"                \n"     \
                "1:     ll      %1, %2  # atomic_fetch_" #op "  \n"     \
                "       " #asm_op " %0, %1, %3                  \n"     \
                "       sc      %0, %2                          \n"     \
                "\t" __SC_BEQZ "%0, 1b                          \n"     \
                "       .set    pop                             \n"     \
                "       move    %0, %1                          \n"     \
                : "=&r" (result), "=&r" (temp),                         \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                  \
                : "Ir" (i) : __LLSC_CLOBBER);                           \
        } else {                                                        \
                unsigned long flags;                                    \
                                                                        \
                raw_local_irq_save(flags);                              \
                result = v->counter;                                    \
                v->counter c_op i;                                      \
                raw_local_irq_restore(flags);                           \
        }                                                               \
                                                                        \
        return result;                                                  \
}

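/*
 * The *_return variants hand back the new value, the fetch_* variants the
 * old one. An illustrative sketch, assuming a starting value of 1:
 *
 *	atomic_t x = ATOMIC_INIT(1);
 *
 *	atomic_add_return_relaxed(2, &x);	// returns 3, x is now 3
 *	atomic_fetch_add_relaxed(2, &x);	// returns 3, x is now 5
 */
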
#define ATOMIC_OPS(op, c_op, asm_op)                                    \
        ATOMIC_OP(op, c_op, asm_op)                                     \
        ATOMIC_OP_RETURN(op, c_op, asm_op)                              \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed       atomic_add_return_relaxed
#define atomic_sub_return_relaxed       atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed        atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed        atomic_fetch_sub_relaxed

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)                                    \
        ATOMIC_OP(op, c_op, asm_op)                                     \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed        atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed         atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed        atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

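/*
 * Illustrative usage of the generated bitwise operations (a sketch; the
 * flag values are hypothetical):
 *
 *	atomic_t flags = ATOMIC_INIT(0);
 *
 *	atomic_or(0x4, &flags);		// set bit 2
 *	atomic_and(~0x4, &flags);	// clear it again
 *	if (atomic_fetch_or_relaxed(0x1, &flags) & 0x1)
 *		;			// bit 0 was already set
 */
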
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc) {
                int temp;

                loongson_llsc_mb();
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       .set    pop                                     \n"
                "       subu    %0, %1, %3                              \n"
                "       move    %1, %0                                  \n"
                "       bltz    %0, 2f                                  \n"
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       sc      %1, %2                                  \n"
                "\t" __SC_BEQZ "%1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i) : __LLSC_CLOBBER);
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

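/*
 * Illustrative atomic_cmpxchg() retry loop (a sketch; bounded_inc() and
 * its limit are hypothetical): increment, but never past a cap.
 *
 *	static int bounded_inc(atomic_t *v, int limit)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			if (old >= limit)
 *				return old;	// already at the cap
 *			new = old + 1;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *
 *		return new;
 *	}
 */
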
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)      WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op)                                         \
static __inline__ void atomic64_##op(s64 i, atomic64_t * v)                   \
{                                                                             \
        if (kernel_uses_llsc) {                                               \
                s64 temp;                                                     \
                                                                              \
                loongson_llsc_mb();                                           \
                __asm__ __volatile__(                                         \
                "       .set    push                                    \n"   \
                "       .set    "MIPS_ISA_LEVEL"                        \n"   \
                "1:     lld     %0, %1          # atomic64_" #op "      \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       scd     %0, %1                                  \n"   \
                "\t" __SC_BEQZ "%0, 1b                                  \n"   \
                "       .set    pop                                     \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i) : __LLSC_CLOBBER);                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v)   \
{                                                                             \
        s64 result;                                                           \
                                                                              \
        if (kernel_uses_llsc) {                                               \
                s64 temp;                                                     \
                                                                              \
                loongson_llsc_mb();                                           \
                __asm__ __volatile__(                                         \
                "       .set    push                                    \n"   \
                "       .set    "MIPS_ISA_LEVEL"                        \n"   \
                "1:     lld     %1, %2          # atomic64_" #op "_return\n"  \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "\t" __SC_BEQZ "%0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    pop                                     \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i) : __LLSC_CLOBBER);                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op)                                   \
static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v)    \
{                                                                             \
        s64 result;                                                           \
                                                                              \
        if (kernel_uses_llsc) {                                               \
                s64 temp;                                                     \
                                                                              \
                loongson_llsc_mb();                                           \
                __asm__ __volatile__(                                         \
                "       .set    push                                    \n"   \
                "       .set    "MIPS_ISA_LEVEL"                        \n"   \
                "1:     lld     %1, %2          # atomic64_fetch_" #op "\n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "\t" __SC_BEQZ "%0, 1b                                  \n"   \
                "       move    %0, %1                                  \n"   \
                "       .set    pop                                     \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i) : __LLSC_CLOBBER);                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed     atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed     atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed      atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed      atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed      atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed       atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed      atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ s64 atomic64_sub_if_positive(s64 i, atomic64_t * v)
{
        s64 result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc) {
                s64 temp;

                loongson_llsc_mb();
                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       move    %1, %0                                  \n"
                "       bltz    %0, 2f                                  \n"
                "       scd     %1, %2                                  \n"
                "\t" __SC_BEQZ "%1, 1b                                  \n"
                "2:                                                     \n"
                "       .set    pop                                     \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i) : __LLSC_CLOBBER);
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */