/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/cmpxchg.h>
#include <asm/loongarch.h>
/* Use r21 for fast access */
register unsigned long __my_cpu_offset __asm__("$r21");

static inline void set_my_cpu_offset(unsigned long off)
{
	__my_cpu_offset = off;
	csr_write64(off, PERCPU_BASE_KS);
}
#define __my_cpu_offset __my_cpu_offset
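/*
 * The self-referential #define is deliberate: asm-generic/percpu.h only
 * installs its fallback offset lookup when __my_cpu_offset is undefined,
 * so this makes the generic code use the r21 register variable instead.
 */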
#define PERCPU_OP(op, asm_op, c_op)					\
static inline unsigned long __percpu_##op(void *ptr,			\
			unsigned long val, int size)			\
{									\
	unsigned long ret;						\
									\
	switch (size) {							\
	case 4:								\
		__asm__ __volatile__(					\
		"am"#asm_op".w"	" %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u32 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	case 8:								\
		__asm__ __volatile__(					\
		"am"#asm_op".d" " %[ret], %[val], %[ptr]	\n"	\
		: [ret] "=&r" (ret), [ptr] "+ZB"(*(u64 *)ptr)		\
		: [val] "r" (val));					\
		break;							\
	default:							\
		ret = 0;						\
		BUILD_BUG();						\
	}								\
									\
	return ret c_op val;						\
}

PERCPU_OP(add, add, +)
PERCPU_OP(and, and, &)
PERCPU_OP(or, or, |)
#undef PERCPU_OP
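/*
 * Illustrative note: PERCPU_OP(add, add, +) above generates __percpu_add(),
 * where the AM instruction returns the *old* value in %[ret] and stores
 * old + val; "ret c_op val" then recomputes the new value, which is why
 * _percpu_add_return() below can simply alias _percpu_add().
 */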
static inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		__asm__ __volatile__ ("ldx.b %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__ ("ldx.h %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__ ("ldx.w %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__ ("ldx.d %[ret], $r21, %[ptr]	\n"
		: [ret] "=&r"(ret)
		: [ptr] "r"(ptr)
		: "memory");
		break;
	default:
		ret = 0;
		BUILD_BUG();
	}

	return ret;
}
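/*
 * The single-instruction ldx forms above need no preemption guard: if the
 * task migrates, the instruction still reads a coherent value from the CPU
 * whose offset $r21 held at that instant.
 */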
static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__("stx.b %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 2:
		__asm__ __volatile__("stx.h %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 4:
		__asm__ __volatile__("stx.w %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	case 8:
		__asm__ __volatile__("stx.d %[val], $r21, %[ptr]	\n"
		:
		: [val] "r" (val), [ptr] "r" (ptr)
		: "memory");
		break;
	default:
		BUILD_BUG();
	}
}
static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						int size)
{
	switch (size) {
	case 4:
		return __xchg_asm("amswap.w", (volatile u32 *)ptr, (u32)val);

	case 8:
		return __xchg_asm("amswap.d", (volatile u64 *)ptr, (u64)val);

	default:
		BUILD_BUG();
	}

	return 0;
}
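/*
 * Note: the base ISA only provides amswap.w/amswap.d, so 1- and 2-byte
 * xchg is left to the generic fallback included at the end of this file.
 */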
/* this_cpu_cmpxchg */
#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable_notrace();				\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable_notrace();				\
	__ret;							\
})
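/*
 * cmpxchg_local() above is only atomic w.r.t. the local CPU, so preemption
 * is disabled across the raw_cpu_ptr() access and the cmpxchg itself; the
 * _notrace variants keep this safe to use from function-tracing paths.
 */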
#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	__retval = (typeof(pcp))__percpu_read(&(pcp), sizeof(pcp));	\
	__retval;							\
})
#define _percpu_write(pcp, val)						\
do {									\
	__percpu_write(&(pcp), (unsigned long)(val), sizeof(pcp));	\
} while (0)
#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable_notrace();				\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable_notrace();				\
	__retval;						\
})
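/*
 * Disabling preemption pins us to one CPU, so raw_cpu_ptr() and the
 * atomic AM operation are guaranteed to target the same CPU's data.
 */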
#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) ((typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val)))
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
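/*
 * Any this_cpu_* size/operation not overridden above (e.g. 1- and 2-byte
 * add, and, or, xchg and cmpxchg) falls back to the generic versions.
 */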
#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */