1 /* SPDX-License-Identifier: GPL-2.0 */
3 #ifndef __ASM_CSKY_ATOMIC_H
4 #define __ASM_CSKY_ATOMIC_H
6 #include <linux/version.h>
7 #include <asm/cmpxchg.h>
8 #include <asm/barrier.h>
10 #ifdef CONFIG_CPU_HAS_LDSTEX
12 #define __atomic_add_unless __atomic_add_unless
13 static inline int __atomic_add_unless(atomic_t *v, int a, int u)
15 unsigned long tmp, ret;
20 "1: ldex.w %0, (%3) \n"
28 : "=&r" (tmp), "=&r" (ret)
29 : "r" (a), "r"(&v->counter), "r"(u)
/*
 * ATOMIC_OP - generate a void atomic read-modify-write, e.g.
 * atomic_and(), as an ldex.w/stex.w retry loop.  No barriers here:
 * the plain (non-return) atomic ops carry no ordering guarantee.
 * @c_op is unused on this path; it keeps the signature in step with
 * the !CONFIG_CPU_HAS_LDSTEX variant below.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
									\
	asm volatile (							\
	"1:	ldex.w		%0, (%2) \n"				\
	"	" #op "		%0, %1   \n"				\
	"	stex.w		%0, (%2) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp)						\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
}
/*
 * ATOMIC_OP_RETURN - generate atomic_##op##_return(), which applies
 * the op and returns the NEW value.  Fully ordered: smp_mb() on both
 * sides of the exclusive loop, as the atomic_t API requires for
 * value-returning ops.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret;						\
									\
	smp_mb();							\
	asm volatile (							\
	"1:	ldex.w		%0, (%3) \n"				\
	"	" #op "		%0, %2   \n"				\
	"	mov		%1, %0   \n"				\
	"	stex.w		%0, (%3) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
	smp_mb();							\
									\
	return ret;							\
}
/*
 * ATOMIC_FETCH_OP - generate atomic_fetch_##op(), which applies the
 * op and returns the OLD value (captured by the mov before the op).
 * Fully ordered, like ATOMIC_OP_RETURN.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret;						\
									\
	smp_mb();							\
	asm volatile (							\
	"1:	ldex.w		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stex.w		%0, (%3) \n"				\
	"	bez		%0, 1b   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
	smp_mb();							\
									\
	return ret;							\
}
93 #else /* CONFIG_CPU_HAS_LDSTEX */
95 #include <linux/irqflags.h>
97 #define __atomic_add_unless __atomic_add_unless
98 static inline int __atomic_add_unless(atomic_t *v, int a, int u)
100 unsigned long tmp, ret, flags;
102 raw_local_irq_save(flags);
112 : "=&r" (tmp), "=&r" (ret)
113 : "r" (a), "r"(&v->counter), "r"(u)
116 raw_local_irq_restore(flags);
/*
 * ATOMIC_OP - void atomic read-modify-write for CPUs without
 * ldex/stex: the ldw/op/stw sequence is made atomic by masking
 * local interrupts.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%2) \n"				\
	"	" #op "		%0, %1   \n"				\
	"	stw		%0, (%2) \n"				\
		: "=&r" (tmp)						\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
}
/*
 * ATOMIC_OP_RETURN - atomic_##op##_return() returning the NEW value,
 * irq-disable variant.  The mov after the op copies the updated
 * value into %1 for the return.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%3) \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stw		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
/*
 * ATOMIC_FETCH_OP - atomic_fetch_##op() returning the OLD value
 * (mov captures it before the op), irq-disable variant.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long tmp, ret, flags;					\
									\
	raw_local_irq_save(flags);					\
									\
	asm volatile (							\
	"	ldw		%0, (%3) \n"				\
	"	mov		%1, %0   \n"				\
	"	" #op "		%0, %2   \n"				\
	"	stw		%0, (%3) \n"				\
		: "=&r" (tmp), "=&r" (ret)				\
		: "r" (i), "r"(&v->counter)				\
		: "memory");						\
									\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
181 #endif /* CONFIG_CPU_HAS_LDSTEX */
183 #define atomic_add_return atomic_add_return
184 ATOMIC_OP_RETURN(add, +)
185 #define atomic_sub_return atomic_sub_return
186 ATOMIC_OP_RETURN(sub, -)
188 #define atomic_fetch_add atomic_fetch_add
189 ATOMIC_FETCH_OP(add, +)
190 #define atomic_fetch_sub atomic_fetch_sub
191 ATOMIC_FETCH_OP(sub, -)
192 #define atomic_fetch_and atomic_fetch_and
193 ATOMIC_FETCH_OP(and, &)
194 #define atomic_fetch_or atomic_fetch_or
195 ATOMIC_FETCH_OP(or, |)
196 #define atomic_fetch_xor atomic_fetch_xor
197 ATOMIC_FETCH_OP(xor, ^)
199 #define atomic_and atomic_and
201 #define atomic_or atomic_or
203 #define atomic_xor atomic_xor
206 #undef ATOMIC_FETCH_OP
207 #undef ATOMIC_OP_RETURN
210 #include <asm-generic/atomic.h>
212 #endif /* __ASM_CSKY_ATOMIC_H */