powerpc: Implement smp_cond_load_relaxed()
author      Nicholas Piggin <npiggin@gmail.com>
Fri, 24 Jul 2020 13:14:23 +0000 (23:14 +1000)
committer   Michael Ellerman <mpe@ellerman.id.au>
Sun, 26 Jul 2020 14:01:29 +0000 (00:01 +1000)
This implements smp_cond_load_relaxed() with the slowpath busy loop
using the preferred SMT priority pattern.
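
For readers unfamiliar with the pattern, here is a minimal open-coded sketch of
what the slow path amounts to (illustrative only, not part of the patch):
spin_begin() drops the hardware thread to low SMT priority so sibling threads
on the core get more execution resources while we poll, and spin_end() restores
normal (medium) priority once the condition becomes true. wait_for_flag() below
is a hypothetical caller used purely for illustration.

    /*
     * Illustrative sketch only: the SMT priority pattern the description
     * refers to, open-coded for a simple flag wait.  spin_begin() and
     * spin_end() are the existing powerpc helpers that lower and restore
     * the hardware thread's SMT priority around a busy-wait.
     */
    #include <linux/compiler.h>     /* READ_ONCE(), likely() */
    #include <asm/processor.h>      /* spin_begin(), spin_end() */

    static void wait_for_flag(unsigned long *flag)
    {
            if (likely(READ_ONCE(*flag)))
                    return;         /* fast path: no priority change */

            spin_begin();           /* drop to low SMT priority */
            while (!READ_ONCE(*flag))
                    ;               /* poll without starving siblings */
            spin_end();             /* back to medium priority */
    }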

Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Acked-by: Waiman Long <longman@redhat.com>
[mpe: Make it 64-bit only to fix build errors on 32-bit]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20200724131423.1362108-7-npiggin@gmail.com
arch/powerpc/include/asm/barrier.h

index 35c1b8f..f53c423 100644
@@ -80,6 +80,22 @@ do {                                                                 \
        ___p1;                                                          \
 })
 
+#ifdef CONFIG_PPC64
+#define smp_cond_load_relaxed(ptr, cond_expr) ({               \
+       typeof(ptr) __PTR = (ptr);                              \
+       __unqual_scalar_typeof(*ptr) VAL;                       \
+       VAL = READ_ONCE(*__PTR);                                \
+       if (unlikely(!(cond_expr))) {                           \
+               spin_begin();                                   \
+               do {                                            \
+                       VAL = READ_ONCE(*__PTR);                \
+               } while (!(cond_expr));                         \
+               spin_end();                                     \
+       }                                                       \
+       (typeof(*ptr))VAL;                                      \
+})
+#endif
+
 #ifdef CONFIG_PPC_BOOK3S_64
 #define NOSPEC_BARRIER_SLOT   nop
 #elif defined(CONFIG_PPC_FSL_BOOK3E)
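
For reference, a hypothetical caller of the new helper (not from this series):
as with the generic implementation in include/asm-generic/barrier.h, the second
argument is a condition evaluated with VAL bound to the most recently loaded
value, and the macro returns the value that satisfied the condition. No
acquire ordering is implied; use smp_cond_load_acquire() where that is needed.

    /*
     * Hypothetical example: spin, with relaxed ordering, until another
     * CPU publishes a non-zero sequence number, returning that value.
     */
    #include <asm/barrier.h>

    static unsigned long wait_for_seq(unsigned long *seq)
    {
            return smp_cond_load_relaxed(seq, VAL != 0);
    }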