/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
			    volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# set_bit	\n"
			"	" __INS "%0, %3, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit), "r" (~0));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# set_bit	\n"
			"	or	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		__mips_set_bit(nr, addr);
}
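
/*
 * Illustrative usage sketch (editor's example; the bitmap name is
 * hypothetical, not part of this header). Because @nr may span words,
 * set_bit() is normally used on bitmaps declared as arrays of longs:
 *
 *	static DECLARE_BITMAP(pending, 128);
 *
 *	set_bit(70, pending);	// word 1, bit 6 on a 64-bit kernel
 */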

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# clear_bit	\n"
			"	and	%0, %2				\n"
			"	" __SC "%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (~(1UL << bit)));
		} while (unlikely(!temp));
	} else
		__mips_clear_bit(nr, addr);
}
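
/*
 * Barrier pairing sketch (editor's example; all names hypothetical).
 * Since clear_bit() has no implicit barrier, a hand-off to another CPU
 * must order its stores explicitly:
 *
 *	data->result = res;
 *	smp_mb__before_atomic();	// publish result before the clear
 *	clear_bit(WORK_PENDING, &data->flags);
 */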

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}
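
/*
 * Lock-bit pattern sketch (editor's example; LOCK_BIT and word are
 * hypothetical). clear_bit_unlock() pairs with test_and_set_bit_lock(),
 * defined later in this file:
 *
 *	while (test_and_set_bit_lock(LOCK_BIT, &word))	// acquire
 *		cpu_relax();
 *	...critical section...
 *	clear_bit_unlock(LOCK_BIT, &word);		// release
 */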

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	pop				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit));
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# change_bit	\n"
			"	xor	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		__mips_change_bit(nr, addr);
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
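
/*
 * Return-value sketch (editor's example; flags is hypothetical). The
 * old bit value is returned, so only one of two racing callers sees 0:
 *
 *	unsigned long flags = 0;
 *
 *	test_and_set_bit(3, &flags);	// returns 0, flags now 0x8
 *	test_and_set_bit(3, &flags);	// returns 1, flags unchanged
 */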

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1	# test_and_set_bit_lock	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit_lock	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit_lock(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
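
/*
 * Acquire-only sketch (editor's example; BUSY_BIT and state are
 * hypothetical). Unlike test_and_set_bit(), the _lock variant omits
 * the leading barrier; it only guarantees that accesses in the
 * critical section cannot move before the successful set:
 *
 *	if (!test_and_set_bit_lock(BUSY_BIT, &state))
 *		do_work();	// runs strictly after the bit is owned
 */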

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
			"	" __EXT "%2, %0, %3, 1			\n"
			"	" __INS "%0, $0, %3, 1			\n"
			"	" __SC	"%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (bit)
			: "memory");
		} while (unlikely(!temp));
#endif
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_clear_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
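
/*
 * Consume-a-flag sketch (editor's example; IRQ_PENDING and pending are
 * hypothetical). The returned old value makes the clear race-free, so
 * exactly one caller wins the flag:
 *
 *	if (test_and_clear_bit(IRQ_PENDING, &pending))
 *		handle_event();	// only the winning CPU gets here
 */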

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	push					\n"
		"	.set	arch=r4000				\n"
		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	pop					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	push				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1	# test_and_change_bit	\n"
			"	xor	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	pop				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_change_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();
	__clear_bit(nr, addr);
	nudge_writes();
}
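
/*
 * Single-writer unlock sketch (editor's example; obj is hypothetical).
 * __clear_bit_unlock() is only safe when no other CPU can write other
 * bits of the same word while the lock is held:
 *
 *	// bit 0 is the lock; the remaining bits are owned by the holder
 *	__clear_bit_unlock(0, &obj->lock_word);
 */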

/*
 * Return the bit position (0..BITS_PER_LONG-1) of the most significant
 * 1 bit in a word. The result is undefined if no 1 bit exists, so
 * callers should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	int num;

	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}
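
/*
 * Worked values for __fls() (editor's illustration):
 *
 *	__fls(1)    == 0		// only bit 0 is set
 *	__fls(0x90) == 7		// highest set bit of 1001_0000
 *	__fls(~0ul) == BITS_PER_LONG - 1
 */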

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..BITS_PER_LONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __fls(word & -word);
}
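
/*
 * Why __fls(word & -word) works (editor's note): in two's complement,
 * word & -word isolates the lowest set bit, and for a single-bit value
 * the highest set bit is also the lowest one:
 *
 *	word = 0b0110  ->  word & -word = 0b0010  ->  __ffs() == 1
 */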

/*
 * fls - find last bit set.
 * @x: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
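
/*
 * Worked values for fls() (editor's illustration), matching the note
 * in the comment above:
 *
 *	fls(0)          == 0
 *	fls(1)          == 1	// 1-based position of bit 0
 *	fls(0x80000000) == 32
 */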

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, and therefore differs in spirit from ffz() (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}
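
/*
 * Relation to fls() (editor's note): ffs() isolates the lowest set bit
 * with word & -word and reuses fls(), giving libc-style 1-based results:
 *
 *	ffs(0) == 0		// no bit set
 *	ffs(8) == 4		// lowest set bit is bit 3 -> 3 + 1
 */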

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */