Merge tag 'locking-core-2021-08-30' of git://git.kernel.org/pub/scm/linux/kernel...
diff --git a/include/asm-generic/bitops/atomic.h b/include/asm-generic/bitops/atomic.h
index 0e7316a..3096f08 100644
--- a/include/asm-generic/bitops/atomic.h
+++ b/include/asm-generic/bitops/atomic.h
@@ -11,25 +11,29 @@
  * See Documentation/atomic_bitops.txt for details.
  */
 
-static __always_inline void set_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline void
+arch_set_bit(unsigned int nr, volatile unsigned long *p)
 {
 	p += BIT_WORD(nr);
-	atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
+	arch_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
 }
 
-static __always_inline void clear_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline void
+arch_clear_bit(unsigned int nr, volatile unsigned long *p)
 {
 	p += BIT_WORD(nr);
-	atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
+	arch_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
 }
 
-static __always_inline void change_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline void
+arch_change_bit(unsigned int nr, volatile unsigned long *p)
 {
 	p += BIT_WORD(nr);
-	atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
+	arch_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
 }
 
-static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline int
+arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
 {
 	long old;
 	unsigned long mask = BIT_MASK(nr);
@@ -38,11 +42,12 @@ static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
 	if (READ_ONCE(*p) & mask)
 		return 1;
 
-	old = atomic_long_fetch_or(mask, (atomic_long_t *)p);
+	old = arch_atomic_long_fetch_or(mask, (atomic_long_t *)p);
 	return !!(old & mask);
 }
 
-static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline int
+arch_test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
 {
 	long old;
 	unsigned long mask = BIT_MASK(nr);
@@ -51,18 +56,21 @@ static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
 	if (!(READ_ONCE(*p) & mask))
 		return 0;
 
-	old = atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
+	old = arch_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
 	return !!(old & mask);
 }
 
-static inline int test_and_change_bit(unsigned int nr, volatile unsigned long *p)
+static __always_inline int
+arch_test_and_change_bit(unsigned int nr, volatile unsigned long *p)
 {
 	long old;
 	unsigned long mask = BIT_MASK(nr);
 	p += BIT_WORD(nr);
 
-	old = atomic_long_fetch_xor(mask, (atomic_long_t *)p);
+	old = arch_atomic_long_fetch_xor(mask, (atomic_long_t *)p);
 	return !!(old & mask);
 }
 
+#include <asm-generic/bitops/instrumented-atomic.h>
+
 #endif /* _ASM_GENERIC_BITOPS_ATOMIC_H */
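
For context: after this change the arch_*() bitops above carry the raw atomic implementation, and the plain set_bit()/test_and_set_bit() names are provided by the newly included <asm-generic/bitops/instrumented-atomic.h>, which instruments the memory access for sanitizers before deferring to the arch_*() variant. A minimal sketch of that wrapper pattern (simplified from the instrumented header, not the verbatim kernel source; treat the exact signature details as illustrative):

/*
 * Sketch of the wrapper pattern used by
 * <asm-generic/bitops/instrumented-atomic.h>: report the atomic
 * write to KASAN/KCSAN, then call the arch_set_bit() defined in
 * the diff above.
 */
static __always_inline void set_bit(long nr, volatile unsigned long *addr)
{
	instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
	arch_set_bit(nr, addr);
}

Callers throughout the kernel keep using the unprefixed set_bit()/clear_bit() names as before; only architecture code and the instrumentation layer see the arch_ prefix.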