locking/atomic: cmpxchg: make `generic` a prefix
author Mark Rutland <mark.rutland@arm.com>
Tue, 25 May 2021 14:02:10 +0000 (15:02 +0100)
committer Peter Zijlstra <peterz@infradead.org>
Wed, 26 May 2021 11:20:50 +0000 (13:20 +0200)
The asm-generic implementations of cmpxchg_local() and cmpxchg64_local()
use a `_generic` suffix to distinguish themselves from arch code or
wrappers used elsewhere.

Subsequent patches will add ARCH_ATOMIC support to these
implementations, and will distinguish more functions with a `generic`
portion in their names. To align with how ARCH_ATOMIC uses an `arch_`
prefix, it would be helpful to use a `generic_` prefix rather than a
`_generic` suffix.

In preparation for this, this patch renames the existing functions to
make `generic` a prefix rather than a suffix. There should be no
functional change as a result of this patch.
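
For illustration, the arch-side fallback pattern after this rename looks
roughly as follows. This is a condensed sketch drawn from the parisc and
xtensa hunks below, not a verbatim copy of any single header;
__cmpxchg_u32() stands in for whatever native primitive an architecture
provides:

#include <asm-generic/cmpxchg-local.h>

/*
 * Sketch of an arch wrapper: use the CPU's native primitive for the
 * sizes it supports, and fall back to the renamed interrupt-disabling
 * generic helper for everything else.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
		unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	default:
		return __generic_cmpxchg_local(ptr, old, new, size);
	}
}

#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))

/* No native 64-bit primitive here, so use the generic helper directly. */
#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))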

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Will Deacon <will@kernel.org>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lore.kernel.org/r/20210525140232.53872-12-mark.rutland@arm.com
arch/arm/include/asm/cmpxchg.h
arch/m68k/include/asm/cmpxchg.h
arch/mips/include/asm/cmpxchg.h
arch/parisc/include/asm/cmpxchg.h
arch/powerpc/include/asm/cmpxchg.h
arch/sparc/include/asm/cmpxchg_32.h
arch/sparc/include/asm/cmpxchg_64.h
arch/xtensa/include/asm/cmpxchg.h
include/asm-generic/cmpxchg-local.h
include/asm-generic/cmpxchg.h

diff --git a/arch/arm/include/asm/cmpxchg.h b/arch/arm/include/asm/cmpxchg.h
index 8b701f8..06bd8ce 100644
@@ -135,13 +135,13 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
  * them available.
  */
 #define cmpxchg_local(ptr, o, n) ({                                    \
-       (__typeof(*ptr))__cmpxchg_local_generic((ptr),                  \
+       (__typeof(*ptr))__generic_cmpxchg_local((ptr),                  \
                                                (unsigned long)(o),     \
                                                (unsigned long)(n),     \
                                                sizeof(*(ptr)));        \
 })
 
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 #include <asm-generic/cmpxchg.h>
 
@@ -224,7 +224,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 #ifdef CONFIG_CPU_V6   /* min ARCH == ARMv6 */
        case 1:
        case 2:
-               ret = __cmpxchg_local_generic(ptr, old, new, size);
+               ret = __generic_cmpxchg_local(ptr, old, new, size);
                break;
 #endif
        default:
diff --git a/arch/m68k/include/asm/cmpxchg.h b/arch/m68k/include/asm/cmpxchg.h
index a4aa820..7629c9c 100644
@@ -80,7 +80,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
 
 #include <asm-generic/cmpxchg-local.h>
 
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 extern unsigned long __invalid_cmpxchg_size(volatile void *,
                                            unsigned long, unsigned long, int);
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index ed8f3f3..c7e0455 100644
@@ -222,7 +222,7 @@ unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
 #else
 
 # include <asm-generic/cmpxchg-local.h>
-# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+# define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 # ifdef CONFIG_SMP
 
diff --git a/arch/parisc/include/asm/cmpxchg.h b/arch/parisc/include/asm/cmpxchg.h
index 84ee232..c201565 100644
@@ -98,7 +98,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
 #endif
        case 4: return __cmpxchg_u32(ptr, old, new_);
        default:
-               return __cmpxchg_local_generic(ptr, old, new_, size);
+               return __generic_cmpxchg_local(ptr, old, new_, size);
        }
 }
 
@@ -116,7 +116,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
        cmpxchg_local((ptr), (o), (n));                                 \
 })
 #else
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #endif
 
 #define cmpxchg64(ptr, o, n) __cmpxchg_u64(ptr, o, n)
diff --git a/arch/powerpc/include/asm/cmpxchg.h b/arch/powerpc/include/asm/cmpxchg.h
index cf091c4..69f52fd 100644
@@ -524,7 +524,7 @@ __cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
 })
 #else
 #include <asm-generic/cmpxchg-local.h>
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #endif
 
 #endif /* __KERNEL__ */
diff --git a/arch/sparc/include/asm/cmpxchg_32.h b/arch/sparc/include/asm/cmpxchg_32.h
index a53d744..86e3da1 100644
@@ -73,8 +73,8 @@ u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new);
  * them available.
  */
 #define cmpxchg_local(ptr, o, n)                                              \
-       ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+       ((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\
                        (unsigned long)(n), sizeof(*(ptr))))
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 
 #endif /* __ARCH_SPARC_CMPXCHG__ */
diff --git a/arch/sparc/include/asm/cmpxchg_64.h b/arch/sparc/include/asm/cmpxchg_64.h
index 316faa0..8915b57 100644
@@ -189,7 +189,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
        case 4:
        case 8: return __cmpxchg(ptr, old, new, size);
        default:
-               return __cmpxchg_local_generic(ptr, old, new, size);
+               return __generic_cmpxchg_local(ptr, old, new, size);
        }
 
        return old;
diff --git a/arch/xtensa/include/asm/cmpxchg.h b/arch/xtensa/include/asm/cmpxchg.h
index a175f8a..9c4d6e5 100644
@@ -97,7 +97,7 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
        case 4:
                return __cmpxchg_u32(ptr, old, new);
        default:
-               return __cmpxchg_local_generic(ptr, old, new, size);
+               return __generic_cmpxchg_local(ptr, old, new, size);
        }
 
        return old;
@@ -108,9 +108,9 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
  * them available.
  */
 #define cmpxchg_local(ptr, o, n)                                              \
-       ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+       ((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\
                        (unsigned long)(n), sizeof(*(ptr))))
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #define cmpxchg64(ptr, o, n)    cmpxchg64_local((ptr), (o), (n))
 
 /*
diff --git a/include/asm-generic/cmpxchg-local.h b/include/asm-generic/cmpxchg-local.h
index f17f14f..380cdc8 100644
@@ -12,7 +12,7 @@ extern unsigned long wrong_size_cmpxchg(volatile void *ptr)
  * Generic version of __cmpxchg_local (disables interrupts). Takes an unsigned
  * long parameter, supporting various types of architectures.
  */
-static inline unsigned long __cmpxchg_local_generic(volatile void *ptr,
+static inline unsigned long __generic_cmpxchg_local(volatile void *ptr,
                unsigned long old, unsigned long new, int size)
 {
        unsigned long flags, prev;
@@ -51,7 +51,7 @@ static inline unsigned long __cmpxchg_local_generic(volatile void *ptr,
 /*
  * Generic version of __cmpxchg64_local. Takes an u64 parameter.
  */
-static inline u64 __cmpxchg64_local_generic(volatile void *ptr,
+static inline u64 __generic_cmpxchg64_local(volatile void *ptr,
                u64 old, u64 new)
 {
        u64 prev;
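
The hunks above change only the helpers' names; their bodies are
untouched. For context, a condensed sketch of what the renamed
__generic_cmpxchg_local() does (the real helper in
include/asm-generic/cmpxchg-local.h also handles 1-, 2- and 8-byte
accesses; only the 4-byte case is shown here):

#include <linux/irqflags.h>	/* raw_local_irq_save()/raw_local_irq_restore() */
#include <linux/types.h>	/* u32, u64 */

/* Deliberately never defined, so unsupported sizes fail at link time. */
extern unsigned long wrong_size_cmpxchg(volatile void *ptr);

static inline unsigned long __generic_cmpxchg_local(volatile void *ptr,
		unsigned long old, unsigned long new, int size)
{
	unsigned long flags, prev;

	/* Disabling interrupts makes the read-compare-write atomic on this CPU. */
	raw_local_irq_save(flags);
	switch (size) {
	case 4:
		prev = *(u32 *)ptr;
		if (prev == old)
			*(u32 *)ptr = (u32)new;
		break;
	default:
		prev = wrong_size_cmpxchg(ptr);
	}
	raw_local_irq_restore(flags);

	return prev;
}
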
diff --git a/include/asm-generic/cmpxchg.h b/include/asm-generic/cmpxchg.h
index 9a24510..b9d54c7 100644
@@ -94,13 +94,13 @@ unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 
 #ifndef cmpxchg_local
 #define cmpxchg_local(ptr, o, n) ({                                           \
-       ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
+       ((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\
                        (unsigned long)(n), sizeof(*(ptr))));                  \
 })
 #endif
 
 #ifndef cmpxchg64_local
-#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
 #endif
 
 #define cmpxchg(ptr, o, n)     cmpxchg_local((ptr), (o), (n))