ARM: smp: elide HWCAP_TLS checks or __entry_task updates on SMP+v6
author: Ard Biesheuvel <ardb@kernel.org>
Mon, 24 Jan 2022 18:28:58 +0000 (19:28 +0100)
committer: Ard Biesheuvel <ardb@kernel.org>
Tue, 25 Jan 2022 08:53:52 +0000 (09:53 +0100)
Use the SMP_ON_UP patching framework to elide HWCAP_TLS tests from the
context switch and return to userspace code paths, as SMP systems are
guaranteed to have this h/w capability.

At the same time, omit the update of __entry_task if the system is
detected to be UP at runtime, as in that case, the value is never used.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
arch/arm/include/asm/switch_to.h
arch/arm/include/asm/tls.h
arch/arm/kernel/entry-header.S

index a482c99..f67ae94 100644 (file)
@@ -3,6 +3,7 @@
 #define __ASM_ARM_SWITCH_TO_H
 
 #include <linux/thread_info.h>
+#include <asm/smp_plat.h>
 
 /*
  * For v7 SMP cores running a preemptible kernel we may be pre-empted
@@ -40,8 +41,7 @@ static inline void set_ti_cpu(struct task_struct *p)
 do {                                                                   \
        __complete_pending_tlbi();                                      \
        set_ti_cpu(next);                                               \
-       if (IS_ENABLED(CONFIG_CURRENT_POINTER_IN_TPIDRURO) ||           \
-           IS_ENABLED(CONFIG_SMP))                                     \
+       if (IS_ENABLED(CONFIG_CURRENT_POINTER_IN_TPIDRURO) || is_smp()) \
                __this_cpu_write(__entry_task, next);                   \
        last = __switch_to(prev,task_thread_info(prev), task_thread_info(next));        \
 } while (0)
index d712c17..3dcd0f7 100644 (file)
        .endm
 
        .macro switch_tls_v6, base, tp, tpuser, tmp1, tmp2
+#ifdef CONFIG_SMP
+ALT_SMP(nop)
+ALT_UP_B(.L0_\@)
+       .subsection 1
+#endif
+.L0_\@:
        ldr_va  \tmp1, elf_hwcap
        mov     \tmp2, #0xffff0fff
        tst     \tmp1, #HWCAP_TLS               @ hardware TLS available?
        streq   \tp, [\tmp2, #-15]              @ set TLS value at 0xffff0ff0
-       mrcne   p15, 0, \tmp2, c13, c0, 2       @ get the user r/w register
-#ifndef CONFIG_SMP
-       mcrne   p15, 0, \tp, c13, c0, 3         @ yes, set TLS register
+       beq     .L2_\@
+       mcr     p15, 0, \tp, c13, c0, 3         @ yes, set TLS register
+#ifdef CONFIG_SMP
+       b       .L1_\@
+       .previous
 #endif
-       mcrne   p15, 0, \tpuser, c13, c0, 2     @ set user r/w register
-       strne   \tmp2, [\base, #TI_TP_VALUE + 4] @ save it
+.L1_\@: switch_tls_v6k \base, \tp, \tpuser, \tmp1, \tmp2
+.L2_\@:
        .endm
 
        .macro switch_tls_software, base, tp, tpuser, tmp1, tmp2
        mov     \tmp1, #0xffff0fff
        str     \tp, [\tmp1, #-15]              @ set TLS value at 0xffff0ff0
        .endm
+#else
+#include <asm/smp_plat.h>
 #endif
 
 #ifdef CONFIG_TLS_REG_EMUL
@@ -44,7 +54,7 @@
 #elif defined(CONFIG_CPU_V6)
 #define tls_emu                0
 #define has_tls_reg            (elf_hwcap & HWCAP_TLS)
-#define defer_tls_reg_update   IS_ENABLED(CONFIG_SMP)
+#define defer_tls_reg_update   is_smp()
 #define switch_tls     switch_tls_v6
 #elif defined(CONFIG_CPU_32v6K)
 #define tls_emu                0
index cb82ff5..9a1dc14 100644 (file)
 
 
        .macro  restore_user_regs, fast = 0, offset = 0
-#if defined(CONFIG_CPU_32v6K) || defined(CONFIG_SMP)
-#if defined(CONFIG_CPU_V6) && defined(CONFIG_SMP)
-ALT_SMP(b      .L1_\@  )
-ALT_UP( nop            )
-       ldr_va  r1, elf_hwcap
-       tst     r1, #HWCAP_TLS                  @ hardware TLS available?
-       beq     .L2_\@
-.L1_\@:
+#if defined(CONFIG_CPU_32v6K) && \
+    (!defined(CONFIG_CPU_V6) || defined(CONFIG_SMP))
+#ifdef CONFIG_CPU_V6
+ALT_SMP(nop)
+ALT_UP_B(.L1_\@)
 #endif
        @ The TLS register update is deferred until return to user space so we
        @ can use it for other things while running in the kernel
-       get_thread_info r1
+       mrc     p15, 0, r1, c13, c0, 3          @ get current_thread_info pointer
        ldr     r1, [r1, #TI_TP_VALUE]
        mcr     p15, 0, r1, c13, c0, 3          @ set TLS register
-.L2_\@:
+.L1_\@:
 #endif
 
        uaccess_enable r1, isb=0