1 /* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 Regents of the University of California
 */
6 #ifndef _ASM_RISCV_BITOPS_H
7 #define _ASM_RISCV_BITOPS_H
9 #ifndef _LINUX_BITOPS_H
10 #error "Only <linux/bitops.h> can be included directly"
11 #endif /* _LINUX_BITOPS_H */
13 #include <linux/compiler.h>
14 #include <linux/irqflags.h>
15 #include <asm/barrier.h>
16 #include <asm/bitsperlong.h>
18 #if !defined(CONFIG_RISCV_ISA_ZBB) || defined(NO_ALTERNATIVE)
19 #include <asm-generic/bitops/__ffs.h>
20 #include <asm-generic/bitops/__fls.h>
21 #include <asm-generic/bitops/ffs.h>
22 #include <asm-generic/bitops/fls.h>
25 #include <asm/alternative-macros.h>
26 #include <asm/hwcap.h>
28 #if (BITS_PER_LONG == 64)
31 #elif (BITS_PER_LONG == 32)
35 #error "Unexpected BITS_PER_LONG"
38 static __always_inline unsigned long variable__ffs(unsigned long word)
42 asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
46 asm volatile (".option push\n"
50 : "=r" (word) : "r" (word) :);
56 #if BITS_PER_LONG == 64
57 if ((word & 0xffffffff) == 0) {
62 if ((word & 0xffff) == 0) {
66 if ((word & 0xff) == 0) {
70 if ((word & 0xf) == 0) {
74 if ((word & 0x3) == 0) {
78 if ((word & 0x1) == 0)
/**
 * __ffs - find first set bit in a long word
 * @word: The word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
#define __ffs(word)				\
	(__builtin_constant_p(word) ?		\
	 (unsigned long)__builtin_ctzl(word) :	\
	 variable__ffs(word))
94 static __always_inline unsigned long variable__fls(unsigned long word)
98 asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
102 asm volatile (".option push\n"
103 ".option arch,+zbb\n"
106 : "=r" (word) : "r" (word) :);
108 return BITS_PER_LONG - 1 - word;
111 num = BITS_PER_LONG - 1;
112 #if BITS_PER_LONG == 64
113 if (!(word & (~0ul << 32))) {
118 if (!(word & (~0ul << (BITS_PER_LONG - 16)))) {
122 if (!(word & (~0ul << (BITS_PER_LONG - 8)))) {
126 if (!(word & (~0ul << (BITS_PER_LONG - 4)))) {
130 if (!(word & (~0ul << (BITS_PER_LONG - 2)))) {
134 if (!(word & (~0ul << (BITS_PER_LONG - 1))))
/**
 * __fls - find last set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
#define __fls(word)							\
	(__builtin_constant_p(word) ?					\
	 (unsigned long)(BITS_PER_LONG - 1 - __builtin_clzl(word)) :	\
	 variable__fls(word))
150 static __always_inline int variable_ffs(int x)
157 asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
158 RISCV_ISA_EXT_ZBB, 1)
161 asm volatile (".option push\n"
162 ".option arch,+zbb\n"
165 : "=r" (r) : "r" (x) :);
/**
 * ffs - find first set bit in a word
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs routines.
 *
 * ffs(value) returns 0 if value is 0 or the position of the first set bit if
 * value is nonzero. The first (least significant) bit is at position 1.
 */
#define ffs(x) (__builtin_constant_p(x) ? __builtin_ffs(x) : variable_ffs(x))
205 static __always_inline int variable_fls(unsigned int x)
212 asm_volatile_goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
213 RISCV_ISA_EXT_ZBB, 1)
216 asm volatile (".option push\n"
217 ".option arch,+zbb\n"
220 : "=r" (r) : "r" (x) :);
226 if (!(x & 0xffff0000u)) {
230 if (!(x & 0xff000000u)) {
234 if (!(x & 0xf0000000u)) {
238 if (!(x & 0xc0000000u)) {
242 if (!(x & 0x80000000u)) {
/**
 * fls - find last set bit in a word
 * @x: the word to search
 *
 * This is defined in a similar way as ffs, but returns the position of the most
 * significant set bit.
 *
 * fls(value) returns 0 if value is 0 or the position of the last set bit if
 * value is nonzero. The last (most significant) bit is at position 32.
 */
#define fls(x)							\
({								\
	typeof(x) x_ = (x);					\
	__builtin_constant_p(x_) ?				\
	 (int)((x_ != 0) ? (32 - __builtin_clz(x_)) : 0)	\
	 :							\
	 variable_fls(x_);					\
})
268 #endif /* !defined(CONFIG_RISCV_ISA_ZBB) || defined(NO_ALTERNATIVE) */
270 #include <asm-generic/bitops/ffz.h>
271 #include <asm-generic/bitops/fls64.h>
272 #include <asm-generic/bitops/sched.h>
274 #include <asm-generic/bitops/hweight.h>
276 #if (BITS_PER_LONG == 64)
277 #define __AMO(op) "amo" #op ".d"
278 #elif (BITS_PER_LONG == 32)
279 #define __AMO(op) "amo" #op ".w"
281 #error "Unexpected BITS_PER_LONG"
/*
 * Atomically apply AMO "op" with operand mod(BIT_MASK(nr)) to the word
 * holding bit @nr, with the given ordering suffix (.aq/.rl/.aqrl/empty),
 * and evaluate to true iff that bit was previously set.
 */
#define __test_and_op_bit_ord(op, mod, nr, addr, ord)		\
({								\
	unsigned long __res, __mask;				\
	__mask = BIT_MASK(nr);					\
	__asm__ __volatile__ (					\
		__AMO(op) #ord " %0, %2, %1"			\
		: "=r" (__res), "+A" (addr[BIT_WORD(nr)])	\
		: "r" (mod(__mask))				\
		: "memory");					\
	((__res & __mask) != 0);				\
})
/*
 * Atomically apply AMO "op" with operand mod(BIT_MASK(nr)) to the word
 * holding bit @nr, discarding the old value (destination register zero).
 */
#define __op_bit_ord(op, mod, nr, addr, ord)			\
	__asm__ __volatile__ (					\
		__AMO(op) #ord " zero, %1, %0"			\
		: "+A" (addr[BIT_WORD(nr)])			\
		: "r" (mod(BIT_MASK(nr)))			\
		: "memory");
/* Fully-ordered (.aqrl) test-and-modify of a single bit. */
#define __test_and_op_bit(op, mod, nr, addr)			\
	__test_and_op_bit_ord(op, mod, nr, addr, .aqrl)
/* Relaxed (no ordering suffix) modify of a single bit. */
#define __op_bit(op, mod, nr, addr)				\
	__op_bit_ord(op, mod, nr, addr, )
/* Bitmask modifiers: __NOP passes the mask through, __NOT inverts it
 * (used for the and-based clear operations).
 */
#define __NOP(x)	(x)
#define __NOT(x)	(~(x))
313 * test_and_set_bit - Set a bit and return its old value
315 * @addr: Address to count from
317 * This operation may be reordered on other architectures than x86.
319 static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
321 return __test_and_op_bit(or, __NOP, nr, addr);
325 * test_and_clear_bit - Clear a bit and return its old value
327 * @addr: Address to count from
329 * This operation can be reordered on other architectures other than x86.
331 static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
333 return __test_and_op_bit(and, __NOT, nr, addr);
337 * test_and_change_bit - Change a bit and return its old value
339 * @addr: Address to count from
341 * This operation is atomic and cannot be reordered.
342 * It also implies a memory barrier.
344 static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
346 return __test_and_op_bit(xor, __NOP, nr, addr);
350 * set_bit - Atomically set a bit in memory
351 * @nr: the bit to set
352 * @addr: the address to start counting from
354 * Note: there are no guarantees that this function will not be reordered
355 * on non x86 architectures, so if you are writing portable code,
356 * make sure not to rely on its reordering guarantees.
358 * Note that @nr may be almost arbitrarily large; this function is not
359 * restricted to acting on a single-word quantity.
361 static inline void set_bit(int nr, volatile unsigned long *addr)
363 __op_bit(or, __NOP, nr, addr);
367 * clear_bit - Clears a bit in memory
369 * @addr: Address to start counting from
371 * Note: there are no guarantees that this function will not be reordered
372 * on non x86 architectures, so if you are writing portable code,
373 * make sure not to rely on its reordering guarantees.
375 static inline void clear_bit(int nr, volatile unsigned long *addr)
377 __op_bit(and, __NOT, nr, addr);
381 * change_bit - Toggle a bit in memory
383 * @addr: Address to start counting from
385 * change_bit() may be reordered on other architectures than x86.
386 * Note that @nr may be almost arbitrarily large; this function is not
387 * restricted to acting on a single-word quantity.
389 static inline void change_bit(int nr, volatile unsigned long *addr)
391 __op_bit(xor, __NOP, nr, addr);
395 * test_and_set_bit_lock - Set a bit and return its old value, for lock
397 * @addr: Address to count from
399 * This operation is atomic and provides acquire barrier semantics.
400 * It can be used to implement bit locks.
402 static inline int test_and_set_bit_lock(
403 unsigned long nr, volatile unsigned long *addr)
405 return __test_and_op_bit_ord(or, __NOP, nr, addr, .aq);
409 * clear_bit_unlock - Clear a bit in memory, for unlock
410 * @nr: the bit to set
411 * @addr: the address to start counting from
413 * This operation is atomic and provides release barrier semantics.
415 static inline void clear_bit_unlock(
416 unsigned long nr, volatile unsigned long *addr)
418 __op_bit_ord(and, __NOT, nr, addr, .rl);
/**
 * __clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This operation is like clear_bit_unlock, however it is not atomic.
 * It does provide release barrier semantics so it can be used to unlock
 * a bit lock, however it would only be used if no other CPU can modify
 * any bits in the memory until the lock is released (a good example is
 * if the bit lock itself protects access to the other bits in the word).
 *
 * On RISC-V systems there seems to be no benefit to taking advantage of the
 * non-atomic property here: it's a lot more instructions and we still have to
 * provide release semantics anyway.
 */
static inline void __clear_bit_unlock(
	unsigned long nr, volatile unsigned long *addr)
{
	clear_bit_unlock(nr, addr);
}
442 static inline bool xor_unlock_is_negative_byte(unsigned long mask,
443 volatile unsigned long *addr)
446 __asm__ __volatile__ (
447 __AMO(xor) ".rl %0, %2, %1"
448 : "=r" (res), "+A" (*addr)
451 return (res & BIT(7)) != 0;
/* Drop the implementation-internal helper macros from the namespace. */
#undef __test_and_op_bit
#undef __op_bit
#undef __NOP
#undef __NOT
#undef __AMO
460 #include <asm-generic/bitops/non-atomic.h>
461 #include <asm-generic/bitops/le.h>
462 #include <asm-generic/bitops/ext2-atomic.h>
464 #endif /* _ASM_RISCV_BITOPS_H */