arch/alpha/include/asm/atomic.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_ATOMIC_H
#define _ALPHA_ATOMIC_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

9 /*
10  * Atomic operations that C can't guarantee us.  Useful for
11  * resource counting etc...
12  *
13  * But use these as seldom as possible since they are much slower
14  * than regular operations.
15  */
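/*
 * Caller-side sketch of the resource-counting use case (not part of
 * this header; free_the_object() is a hypothetical cleanup helper, and
 * atomic_inc()/atomic_dec_and_test() are supplied by the generic
 * atomic layer on top of the primitives below):
 *
 *	static atomic_t users = ATOMIC_INIT(0);
 *
 *	atomic_inc(&users);
 *	if (atomic_dec_and_test(&users))
 *		free_the_object();
 */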

/*
 * To ensure dependency ordering is preserved for the _relaxed and
 * _release atomics, an smp_mb() is unconditionally inserted into the
 * _relaxed variants, which are used to build the barriered versions.
 * Avoid redundant back-to-back fences in the _acquire and _fence
 * versions.
 */
#define __atomic_acquire_fence()
#define __atomic_post_full_fence()

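/*
 * For illustration, the generic atomic layer builds the ordered
 * variants from the _relaxed ones defined here roughly as follows
 * (sketch of the fallback pattern, not part of this header):
 *
 *	static inline int atomic_add_return_acquire(int i, atomic_t *v)
 *	{
 *		int ret = atomic_add_return_relaxed(i, v);
 *		__atomic_acquire_fence();
 *		return ret;
 *	}
 *
 * Because the Alpha _relaxed variants below already end in smp_mb(),
 * the two fence hooks above can safely expand to nothing.
 */
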
#define ATOMIC64_INIT(i)        { (i) }

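/*
 * Note that READ_ONCE()/WRITE_ONCE() below only keep the compiler from
 * tearing, fusing or caching the access; atomic_read() and
 * atomic_set() imply no memory ordering of their own.
 */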
#define atomic_read(v)          READ_ONCE((v)->counter)
#define atomic64_read(v)        READ_ONCE((v)->counter)

#define atomic_set(v,i)         WRITE_ONCE((v)->counter, (i))
#define atomic64_set(v,i)       WRITE_ONCE((v)->counter, (i))

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op, asm_op)                                           \
static __inline__ void atomic_##op(int i, atomic_t * v)                 \
{                                                                       \
        unsigned long temp;                                             \
        __asm__ __volatile__(                                           \
        "1:     ldl_l %0,%1\n"                                          \
        "       " #asm_op " %0,%2,%0\n"                                 \
        "       stl_c %0,%1\n"                                          \
        "       beq %0,2f\n"                                            \
        ".subsection 2\n"                                               \
        "2:     br 1b\n"                                                \
        ".previous"                                                     \
        :"=&r" (temp), "=m" (v->counter)                                \
        :"Ir" (i), "m" (v->counter));                                   \
}                                                                       \

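/*
 * For reference, ATOMIC_OP(add, addl) expands to roughly the following
 * LL/SC loop; the retry path lives out of line in .subsection 2, so
 * the forward beq is statically predicted not-taken:
 *
 *	1:	ldl_l	%0,v->counter		load-locked
 *		addl	%0,i,%0			apply the operation
 *		stl_c	%0,v->counter		store-conditional; %0 = 0 on failure
 *		beq	%0,2f			rarely taken
 *	.subsection 2
 *	2:	br	1b			retry, out of line
 *	.previous
 */
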
#define ATOMIC_OP_RETURN(op, asm_op)                                    \
static inline int atomic_##op##_return_relaxed(int i, atomic_t *v)      \
{                                                                       \
        long temp, result;                                              \
        __asm__ __volatile__(                                           \
        "1:     ldl_l %0,%1\n"                                          \
        "       " #asm_op " %0,%3,%2\n"                                 \
        "       " #asm_op " %0,%3,%0\n"                                 \
        "       stl_c %0,%1\n"                                          \
        "       beq %0,2f\n"                                            \
        ".subsection 2\n"                                               \
        "2:     br 1b\n"                                                \
        ".previous"                                                     \
        :"=&r" (temp), "=m" (v->counter), "=&r" (result)                \
        :"Ir" (i), "m" (v->counter) : "memory");                        \
        smp_mb();                                                       \
        return result;                                                  \
}

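/*
 * Note the op is issued twice in ATOMIC_OP_RETURN: once into "result"
 * (the value handed back to the caller) and once into "temp" (the
 * value actually stored), because stl_c reuses %0 as its success flag.
 * The trailing smp_mb() is what makes even the _relaxed variant
 * preserve dependency ordering, per the comment near the top of this
 * file.
 */
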
#define ATOMIC_FETCH_OP(op, asm_op)                                     \
static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v)       \
{                                                                       \
        long temp, result;                                              \
        __asm__ __volatile__(                                           \
        "1:     ldl_l %2,%1\n"                                          \
        "       " #asm_op " %2,%3,%0\n"                                 \
        "       stl_c %0,%1\n"                                          \
        "       beq %0,2f\n"                                            \
        ".subsection 2\n"                                               \
        "2:     br 1b\n"                                                \
        ".previous"                                                     \
        :"=&r" (temp), "=m" (v->counter), "=&r" (result)                \
        :"Ir" (i), "m" (v->counter) : "memory");                        \
        smp_mb();                                                       \
        return result;                                                  \
}

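/*
 * Return-value convention, as a sketch: with v->counter == 5,
 * atomic_fetch_add_relaxed(1, v) returns 5 (the old value), while
 * atomic_add_return_relaxed(1, v) returns 6 (the new one); both leave
 * v->counter at 6.
 */
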
#define ATOMIC64_OP(op, asm_op)                                         \
static __inline__ void atomic64_##op(s64 i, atomic64_t * v)             \
{                                                                       \
        s64 temp;                                                       \
        __asm__ __volatile__(                                           \
        "1:     ldq_l %0,%1\n"                                          \
        "       " #asm_op " %0,%2,%0\n"                                 \
        "       stq_c %0,%1\n"                                          \
        "       beq %0,2f\n"                                            \
        ".subsection 2\n"                                               \
        "2:     br 1b\n"                                                \
        ".previous"                                                     \
        :"=&r" (temp), "=m" (v->counter)                                \
        :"Ir" (i), "m" (v->counter));                                   \
}                                                                       \

#define ATOMIC64_OP_RETURN(op, asm_op)                                  \
static __inline__ s64 atomic64_##op##_return_relaxed(s64 i, atomic64_t * v)     \
{                                                                       \
        s64 temp, result;                                               \
        __asm__ __volatile__(                                           \
        "1:     ldq_l %0,%1\n"                                          \
        "       " #asm_op " %0,%3,%2\n"                                 \
        "       " #asm_op " %0,%3,%0\n"                                 \
        "       stq_c %0,%1\n"                                          \
        "       beq %0,2f\n"                                            \
        ".subsection 2\n"                                               \
        "2:     br 1b\n"                                                \
        ".previous"                                                     \
        :"=&r" (temp), "=m" (v->counter), "=&r" (result)                \
        :"Ir" (i), "m" (v->counter) : "memory");                        \
        smp_mb();                                                       \
        return result;                                                  \
}

#define ATOMIC64_FETCH_OP(op, asm_op)                                   \
static __inline__ s64 atomic64_fetch_##op##_relaxed(s64 i, atomic64_t * v)      \
{                                                                       \
        s64 temp, result;                                               \
        __asm__ __volatile__(                                           \
        "1:     ldq_l %2,%1\n"                                          \
        "       " #asm_op " %2,%3,%0\n"                                 \
        "       stq_c %0,%1\n"                                          \
        "       beq %0,2f\n"                                            \
        ".subsection 2\n"                                               \
        "2:     br 1b\n"                                                \
        ".previous"                                                     \
        :"=&r" (temp), "=m" (v->counter), "=&r" (result)                \
        :"Ir" (i), "m" (v->counter) : "memory");                        \
        smp_mb();                                                       \
        return result;                                                  \
}

#define ATOMIC_OPS(op)                                                  \
        ATOMIC_OP(op, op##l)                                            \
        ATOMIC_OP_RETURN(op, op##l)                                     \
        ATOMIC_FETCH_OP(op, op##l)                                      \
        ATOMIC64_OP(op, op##q)                                          \
        ATOMIC64_OP_RETURN(op, op##q)                                   \
        ATOMIC64_FETCH_OP(op, op##q)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

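/*
 * So ATOMIC_OPS(add), for example, generates atomic_add(),
 * atomic_add_return_relaxed() and atomic_fetch_add_relaxed() from the
 * addl instruction, plus the atomic64_*() counterparts from addq; the
 * "sub" family comes from subl/subq in the same way.
 */
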
#define atomic_add_return_relaxed       atomic_add_return_relaxed
#define atomic_sub_return_relaxed       atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed        atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed        atomic_fetch_sub_relaxed

#define atomic64_add_return_relaxed     atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed     atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed      atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed      atomic64_fetch_sub_relaxed

#define atomic_andnot atomic_andnot
#define atomic64_andnot atomic64_andnot

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, asm)                                             \
        ATOMIC_OP(op, asm)                                              \
        ATOMIC_FETCH_OP(op, asm)                                        \
        ATOMIC64_OP(op, asm)                                            \
        ATOMIC64_FETCH_OP(op, asm)

ATOMIC_OPS(and, and)
ATOMIC_OPS(andnot, bic)
ATOMIC_OPS(or, bis)
ATOMIC_OPS(xor, xor)

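/*
 * Alpha spells two of the bitwise operations with its own mnemonics:
 * bic (bit clear) implements andnot, and bis (bit set) implements or.
 * Caller-side sketch (FLAG_A and FLAG_B are hypothetical bit masks;
 * atomic_or() and atomic_andnot() are generated by the ATOMIC_OPS()
 * lines just above):
 *
 *	static atomic_t flags = ATOMIC_INIT(0);
 *
 *	atomic_or(FLAG_A | FLAG_B, &flags);	set bits (bis)
 *	atomic_andnot(FLAG_A, &flags);		clear FLAG_A (bic)
 */
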
#define atomic_fetch_and_relaxed        atomic_fetch_and_relaxed
#define atomic_fetch_andnot_relaxed     atomic_fetch_andnot_relaxed
#define atomic_fetch_or_relaxed         atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed        atomic_fetch_xor_relaxed

#define atomic64_fetch_and_relaxed      atomic64_fetch_and_relaxed
#define atomic64_fetch_andnot_relaxed   atomic64_fetch_andnot_relaxed
#define atomic64_fetch_or_relaxed       atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed      atomic64_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

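/*
 * Caller-side sketch of the usual retry loop built on the cmpxchg()
 * wrapper above (LIMIT is a hypothetical bound; v is an atomic_t):
 *
 *	int old = atomic_read(&v);
 *
 *	for (;;) {
 *		int prev;
 *
 *		if (old >= LIMIT)
 *			break;
 *		prev = atomic_cmpxchg(&v, old, old + 1);
 *		if (prev == old)
 *			break;		the increment won the race
 *		old = prev;		v changed under us; retry
 *	}
 */
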
/**
 * atomic_fetch_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        int c, new, old;
        smp_mb();
        __asm__ __volatile__(
        "1:     ldl_l   %[old],%[mem]\n"
        "       cmpeq   %[old],%[u],%[c]\n"
        "       addl    %[old],%[a],%[new]\n"
        "       bne     %[c],2f\n"
        "       stl_c   %[new],%[mem]\n"
        "       beq     %[new],3f\n"
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
        : [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
        : "memory");
        smp_mb();
        return old;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless

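/*
 * The generic layer builds atomic_inc_not_zero() and friends on top of
 * this: atomic_fetch_add_unless(v, 1, 0) bumps the count only while it
 * is still non-zero, so the caller can tell from the returned old
 * value whether a reference was actually taken.
 */
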
/**
 * atomic64_fetch_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ s64 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
        s64 c, new, old;
        smp_mb();
        __asm__ __volatile__(
        "1:     ldq_l   %[old],%[mem]\n"
        "       cmpeq   %[old],%[u],%[c]\n"
        "       addq    %[old],%[a],%[new]\n"
        "       bne     %[c],2f\n"
        "       stq_c   %[new],%[mem]\n"
        "       beq     %[new],3f\n"
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
        : [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
        : "memory");
        smp_mb();
        return old;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless

/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * Returns the old value of *v minus 1, even if the atomic variable @v
 * was not decremented.
 */
static inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
        s64 old, tmp;
        smp_mb();
        __asm__ __volatile__(
        "1:     ldq_l   %[old],%[mem]\n"
        "       subq    %[old],1,%[tmp]\n"
        "       ble     %[old],2f\n"
        "       stq_c   %[tmp],%[mem]\n"
        "       beq     %[tmp],3f\n"
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : [old] "=&r"(old), [tmp] "=&r"(tmp)
        : [mem] "m"(*v)
        : "memory");
        smp_mb();
        return old - 1;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
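
/*
 * Return-convention example (sketch): with *v == 3 the store happens
 * and the call returns 2; with *v == 0 the ble skips the store, *v is
 * left untouched, and the call returns -1.  A non-negative result
 * therefore tells the caller the decrement took place.
 */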

#endif /* _ALPHA_ATOMIC_H */