2 * include/asm-xtensa/bitops.h
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
6 * This file is subject to the terms and conditions of the GNU General Public
7 * License. See the file "COPYING" in the main directory of this archive
10 * Copyright (C) 2001 - 2007 Tensilica Inc.
13 #ifndef _XTENSA_BITOPS_H
14 #define _XTENSA_BITOPS_H
16 #ifndef _LINUX_BITOPS_H
17 #error only <linux/bitops.h> can be included directly
20 #include <asm/processor.h>
21 #include <asm/byteorder.h>
22 #include <asm/barrier.h>
24 #include <asm-generic/bitops/non-atomic.h>
28 static inline unsigned long __cntlz (unsigned long x)
31 asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
/*
 * ffz: Find first zero in word. Undefined if no zero exists.
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */
static inline int ffz(unsigned long x)
{
	/* ~x & -~x isolates the lowest set bit of ~x (= lowest zero of x). */
	return 31 - __cntlz(~x & -~x);
}
/*
 * __ffs: Find first bit set in word. Return 0 for bit 0.
 * Undefined if no bit is set (x == 0).
 */
static inline unsigned long __ffs(unsigned long x)
{
	/* x & -x isolates the lowest set bit; nsau converts it to an index. */
	return 31 - __cntlz(x & -x);
}
/*
 * ffs: Find first bit set in word. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs):
 * bits are numbered from 1, and ffs(0) == 0.
 */
static inline int ffs(unsigned long x)
{
	/* For x == 0, __cntlz returns 32, giving the required result 0. */
	return 32 - __cntlz(x & -x);
}
/*
 * fls: Find last (most-significant) bit set in word.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls (unsigned int x)
{
	return 32 - __cntlz(x);
}
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return 31 - __cntlz(word);
}
87 /* Use the generic implementation if we don't have the nsa/nsau instructions. */
89 # include <asm-generic/bitops/ffs.h>
90 # include <asm-generic/bitops/__ffs.h>
91 # include <asm-generic/bitops/ffz.h>
92 # include <asm-generic/bitops/fls.h>
93 # include <asm-generic/bitops/__fls.h>
97 #include <asm-generic/bitops/fls64.h>
99 #if XCHAL_HAVE_EXCLUSIVE
/*
 * set_bit: atomically set bit @bit in the bitmap at @p, using the
 * Xtensa exclusive-access instructions (l32ex/s32ex/getex).
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %0, %2\n"
			"       or      %0, %0, %1\n"
			"       s32ex   %0, %2\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"	/* retry if the exclusive store failed */
			: "=&a" (tmp)
			: "a" (mask), "a" (p)
			: "memory");
}
/*
 * clear_bit: atomically clear bit @bit in the bitmap at @p, using the
 * Xtensa exclusive-access instructions. Note the inverted mask input.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %0, %2\n"
			"       and     %0, %0, %1\n"
			"       s32ex   %0, %2\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"	/* retry if the exclusive store failed */
			: "=&a" (tmp)
			: "a" (~mask), "a" (p)
			: "memory");
}
/*
 * change_bit: atomically toggle bit @bit in the bitmap at @p, using the
 * Xtensa exclusive-access instructions.
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %0, %2\n"
			"       xor     %0, %0, %1\n"
			"       s32ex   %0, %2\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"	/* retry if the exclusive store failed */
			: "=&a" (tmp)
			: "a" (mask), "a" (p)
			: "memory");
}
/*
 * test_and_set_bit: atomically set bit @bit at @p; return nonzero if the
 * bit was previously set. Exclusive-access (l32ex/s32ex) variant.
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %1, %3\n"
			"       or      %0, %1, %2\n"
			"       s32ex   %0, %3\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"	/* retry if the exclusive store failed */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* 'value' holds the word as loaded before the update. */
	return value & mask;
}
/*
 * test_and_clear_bit: atomically clear bit @bit at @p; return nonzero if
 * the bit was previously set. Exclusive-access (l32ex/s32ex) variant.
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %1, %3\n"
			"       and     %0, %1, %2\n"
			"       s32ex   %0, %3\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"	/* retry if the exclusive store failed */
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");

	/* 'value' holds the word as loaded before the update. */
	return value & mask;
}
/*
 * test_and_change_bit: atomically toggle bit @bit at @p; return nonzero
 * if the bit was previously set. Exclusive-access (l32ex/s32ex) variant.
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %1, %3\n"
			"       xor     %0, %1, %2\n"
			"       s32ex   %0, %3\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"	/* retry if the exclusive store failed */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* 'value' holds the word as loaded before the update. */
	return value & mask;
}
218 #elif XCHAL_HAVE_S32C1I
/*
 * set_bit: atomically set bit @bit in the bitmap at @p, using the
 * Xtensa S32C1I compare-and-swap loop (via the SCOMPARE1 register).
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       or      %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"	/* retry if word changed under us */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}
/*
 * clear_bit: atomically clear bit @bit in the bitmap at @p, using the
 * Xtensa S32C1I compare-and-swap loop. Note the inverted mask input.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       and     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"	/* retry if word changed under us */
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");
}
/*
 * change_bit: atomically toggle bit @bit in the bitmap at @p, using the
 * Xtensa S32C1I compare-and-swap loop.
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       xor     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"	/* retry if word changed under us */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}
/*
 * test_and_set_bit: atomically set bit @bit at @p; return nonzero if the
 * bit was previously set. S32C1I compare-and-swap variant.
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       or      %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"	/* retry if word changed under us */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* After a successful s32c1i, 'tmp' holds the previous word value. */
	return tmp & mask;
}
/*
 * test_and_clear_bit: atomically clear bit @bit at @p; return nonzero if
 * the bit was previously set. S32C1I compare-and-swap variant.
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       and     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"	/* retry if word changed under us */
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");

	/* After a successful s32c1i, 'tmp' holds the previous word value. */
	return tmp & mask;
}
/*
 * test_and_change_bit: atomically toggle bit @bit at @p; return nonzero
 * if the bit was previously set. S32C1I compare-and-swap variant.
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	/* Step to the 32-bit word containing the bit. */
	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       xor     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"	/* retry if word changed under us */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* After a successful s32c1i, 'tmp' holds the previous word value. */
	return tmp & mask;
}
339 #include <asm-generic/bitops/atomic.h>
341 #endif /* XCHAL_HAVE_S32C1I */
343 #include <asm-generic/bitops/find.h>
344 #include <asm-generic/bitops/le.h>
346 #include <asm-generic/bitops/ext2-atomic-setbit.h>
348 #include <asm-generic/bitops/hweight.h>
349 #include <asm-generic/bitops/lock.h>
350 #include <asm-generic/bitops/sched.h>
352 #endif /* _XTENSA_BITOPS_H */