/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level functions for atomic operations
 *
 * Copyright IBM Corp. 1999, 2016
 */

#ifndef __ARCH_S390_ATOMIC_OPS__
#define __ARCH_S390_ATOMIC_OPS__

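/*
 * On s390 an aligned load or store of a 32-bit or 64-bit value is
 * atomic by architecture, so plain L/ST (and LG/STG below) implement
 * atomic read/set. The "R"/"RT" memory constraints only restrict the
 * operand to addressing modes the respective instruction can encode
 * (short vs. long displacement).
 */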
static inline int __atomic_read(const atomic_t *v)
{
        int c;

        asm volatile(
                "       l       %0,%1\n"
                : "=d" (c) : "R" (v->counter));
        return c;
}

static inline void __atomic_set(atomic_t *v, int i)
{
        asm volatile(
                "       st      %1,%0\n"
                : "=R" (v->counter) : "d" (i));
}

static inline s64 __atomic64_read(const atomic64_t *v)
{
        s64 c;

        asm volatile(
                "       lg      %0,%1\n"
                : "=d" (c) : "RT" (v->counter));
        return c;
}

static inline void __atomic64_set(atomic64_t *v, s64 i)
{
        asm volatile(
                "       stg     %1,%0\n"
                : "=RT" (v->counter) : "d" (i));
}

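/*
 * Two implementations follow: machines with the z196
 * interlocked-access facility use single instructions that atomically
 * update storage and return the old value (LAA and friends), while
 * older machines fall back to compare-and-swap retry loops.
 */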
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

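/*
 * Generate a fetch-and-<op> helper around one of the interlocked-access
 * instructions: the instruction atomically applies <op> to *ptr and
 * leaves the previous contents in %[old]. "+QS" permits any storage
 * operand without an index register, matching what these instructions
 * can address.
 */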
#define __ATOMIC_OP(op_name, op_type, op_string, op_barrier)            \
static inline op_type op_name(op_type val, op_type *ptr)                \
{                                                                       \
        op_type old;                                                    \
                                                                        \
        asm volatile(                                                   \
                op_string "     %[old],%[val],%[ptr]\n"                 \
                op_barrier                                              \
                : [old] "=d" (old), [ptr] "+QS" (*ptr)                  \
                : [val] "d" (val) : "cc", "memory");                    \
        return old;                                                     \
}                                                                       \

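/*
 * Each operation comes in two flavors: a plain one and a _barrier
 * variant that appends "bcr 14,0", the fast-BCR-serialization form
 * used as a full memory barrier on z196 and newer machines.
 */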
#define __ATOMIC_OPS(op_name, op_type, op_string)                       \
        __ATOMIC_OP(op_name, op_type, op_string, "\n")                  \
        __ATOMIC_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_OPS(__atomic_add, int, "laa")
__ATOMIC_OPS(__atomic_and, int, "lan")
__ATOMIC_OPS(__atomic_or,  int, "lao")
__ATOMIC_OPS(__atomic_xor, int, "lax")

__ATOMIC_OPS(__atomic64_add, long, "laag")
__ATOMIC_OPS(__atomic64_and, long, "lang")
__ATOMIC_OPS(__atomic64_or,  long, "laog")
__ATOMIC_OPS(__atomic64_xor, long, "laxg")

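/*
 * For illustration, __ATOMIC_OPS(__atomic_add, int, "laa") above
 * expands to roughly:
 *
 *      static inline int __atomic_add(int val, int *ptr)
 *      {
 *              int old;
 *
 *              asm volatile("laa       %[old],%[val],%[ptr]\n"
 *                           : [old] "=d" (old), [ptr] "+QS" (*ptr)
 *                           : [val] "d" (val) : "cc", "memory");
 *              return old;
 *      }
 *
 * plus an __atomic_add_barrier() variant with a trailing "bcr 14,0".
 */
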
#undef __ATOMIC_OPS
#undef __ATOMIC_OP

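/*
 * ASI/AGSI add a signed 8-bit immediate directly to a location in
 * storage; with the interlocked-access facility the update is atomic
 * for aligned operands, which is why these helpers exist only on the
 * z196+ side. The "i" constraint requires a compile-time constant,
 * hence the _const name and __always_inline.
 */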
#define __ATOMIC_CONST_OP(op_name, op_type, op_string, op_barrier)      \
static __always_inline void op_name(op_type val, op_type *ptr)          \
{                                                                       \
        asm volatile(                                                   \
                op_string "     %[ptr],%[val]\n"                        \
                op_barrier                                              \
                : [ptr] "+QS" (*ptr) : [val] "i" (val) : "cc", "memory");\
}

#define __ATOMIC_CONST_OPS(op_name, op_type, op_string)                 \
        __ATOMIC_CONST_OP(op_name, op_type, op_string, "\n")            \
        __ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_CONST_OPS(__atomic_add_const, int, "asi")
__ATOMIC_CONST_OPS(__atomic64_add_const, long, "agsi")

#undef __ATOMIC_CONST_OPS
#undef __ATOMIC_CONST_OP

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

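/*
 * Without the interlocked-access facility, fetch-and-<op> is built
 * from a classic compare-and-swap loop: load the old value, compute
 * old <op> val into %[new], then CS the result back. CS sets
 * condition code 1 if *ptr no longer matched %[old] (and reloads
 * %[old] with the current contents), so "jl 0b" retries until the
 * swap succeeds. The "0" (*ptr) input ties the initial load of
 * %[old] to operand 0.
 */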
#define __ATOMIC_OP(op_name, op_string)                                 \
static inline int op_name(int val, int *ptr)                            \
{                                                                       \
        int old, new;                                                   \
                                                                        \
        asm volatile(                                                   \
                "0:     lr      %[new],%[old]\n"                        \
                op_string "     %[new],%[val]\n"                        \
                "       cs      %[old],%[new],%[ptr]\n"                 \
                "       jl      0b"                                     \
                : [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr)\
                : [val] "d" (val), "0" (*ptr) : "cc", "memory");        \
        return old;                                                     \
}

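/*
 * CS/CSG are themselves serializing instructions, so the _barrier
 * variants generated here are identical to the plain ones.
 */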
#define __ATOMIC_OPS(op_name, op_string)                                \
        __ATOMIC_OP(op_name, op_string)                                 \
        __ATOMIC_OP(op_name##_barrier, op_string)

__ATOMIC_OPS(__atomic_add, "ar")
__ATOMIC_OPS(__atomic_and, "nr")
__ATOMIC_OPS(__atomic_or,  "or")
__ATOMIC_OPS(__atomic_xor, "xr")

#undef __ATOMIC_OPS

#define __ATOMIC64_OP(op_name, op_string)                               \
static inline long op_name(long val, long *ptr)                         \
{                                                                       \
        long old, new;                                                  \
                                                                        \
        asm volatile(                                                   \
                "0:     lgr     %[new],%[old]\n"                        \
                op_string "     %[new],%[val]\n"                        \
                "       csg     %[old],%[new],%[ptr]\n"                 \
                "       jl      0b"                                     \
                : [old] "=d" (old), [new] "=&d" (new), [ptr] "+QS" (*ptr)\
                : [val] "d" (val), "0" (*ptr) : "cc", "memory");        \
        return old;                                                     \
}

#define __ATOMIC64_OPS(op_name, op_string)                              \
        __ATOMIC64_OP(op_name, op_string)                               \
        __ATOMIC64_OP(op_name##_barrier, op_string)

__ATOMIC64_OPS(__atomic64_add, "agr")
__ATOMIC64_OPS(__atomic64_and, "ngr")
__ATOMIC64_OPS(__atomic64_or,  "ogr")
__ATOMIC64_OPS(__atomic64_xor, "xgr")

#undef __ATOMIC64_OPS

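/*
 * No add-immediate-to-storage shortcut exists here, so the _const
 * helpers simply reuse the CS-loop add and discard the returned old
 * value; no extra barrier is needed since CS serializes anyway.
 */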
#define __atomic_add_const(val, ptr)            __atomic_add(val, ptr)
#define __atomic_add_const_barrier(val, ptr)    __atomic_add(val, ptr)
#define __atomic64_add_const(val, ptr)          __atomic64_add(val, ptr)
#define __atomic64_add_const_barrier(val, ptr)  __atomic64_add(val, ptr)

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

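/*
 * Compare-and-swap primitives built directly on CS/CSG. The *_cmpxchg
 * variants return the value found at *ptr (the caller compares it with
 * old to detect success), while the *_cmpxchg_bool variants return
 * whether the swap actually happened.
 */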
static inline int __atomic_cmpxchg(int *ptr, int old, int new)
{
        asm volatile(
                "       cs      %[old],%[new],%[ptr]"
                : [old] "+d" (old), [ptr] "+Q" (*ptr)
                : [new] "d" (new)
                : "cc", "memory");
        return old;
}

static inline bool __atomic_cmpxchg_bool(int *ptr, int old, int new)
{
        int old_expected = old;

        asm volatile(
                "       cs      %[old],%[new],%[ptr]"
                : [old] "+d" (old), [ptr] "+Q" (*ptr)
                : [new] "d" (new)
                : "cc", "memory");
        return old == old_expected;
}

static inline long __atomic64_cmpxchg(long *ptr, long old, long new)
{
        asm volatile(
                "       csg     %[old],%[new],%[ptr]"
                : [old] "+d" (old), [ptr] "+QS" (*ptr)
                : [new] "d" (new)
                : "cc", "memory");
        return old;
}

static inline bool __atomic64_cmpxchg_bool(long *ptr, long old, long new)
{
        long old_expected = old;

        asm volatile(
                "       csg     %[old],%[new],%[ptr]"
                : [old] "+d" (old), [ptr] "+QS" (*ptr)
                : [new] "d" (new)
                : "cc", "memory");
        return old == old_expected;
}

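/*
 * Usage sketch (illustrative only, not part of this header): an
 * optimistic read-modify-CS loop built on __atomic_cmpxchg, e.g. a
 * hypothetical increment that is clamped at some maximum:
 *
 *      static inline int example_inc_below(int *ptr, int max)
 *      {
 *              int old, new;
 *
 *              do {
 *                      old = READ_ONCE(*ptr);
 *                      if (old >= max)
 *                              return old;
 *                      new = old + 1;
 *              } while (__atomic_cmpxchg(ptr, old, new) != old);
 *              return new;
 *      }
 */
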
#endif /* __ARCH_S390_ATOMIC_OPS__ */