arm64: compat: Get sigreturn trampolines from vDSO
author Vincenzo Frascino <vincenzo.frascino@arm.com>
Fri, 21 Jun 2019 09:52:41 +0000 (10:52 +0100)
committer Thomas Gleixner <tglx@linutronix.de>
Sat, 22 Jun 2019 19:21:09 +0000 (21:21 +0200)
When the compat vDSO is enabled, the sigreturn trampolines are no longer
provided through [sigpage] but through [vdso].

Add the relevant code to enable the feature.
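
The trampoline addresses are looked up with VDSO_SYMBOL(). As a rough
sketch of the idea (an assumption about the macro's shape, not its
exact arm64 definition), the build records each exported symbol's
link-time offset in the generated vdso-offsets headers, and the macro
adds that offset to the base of the task's vDSO mapping:

	/* Illustrative sketch only; the offset value is a placeholder. */
	#define vdso_offset_compat_sigreturn_arm	0x0

	#define VDSO_SYMBOL(base, name) \
		((void *)((unsigned long)(base) + vdso_offset_##name))

	/* vdso_trampoline = VDSO_SYMBOL(vdso_base, compat_sigreturn_arm); */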

Signed-off-by: Vincenzo Frascino <vincenzo.frascino@arm.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Tested-by: Shijith Thotton <sthotton@marvell.com>
Tested-by: Andre Przywara <andre.przywara@arm.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-arm-kernel@lists.infradead.org
Cc: linux-mips@vger.kernel.org
Cc: linux-kselftest@vger.kernel.org
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Will Deacon <will.deacon@arm.com>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Russell King <linux@armlinux.org.uk>
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: Paul Burton <paul.burton@mips.com>
Cc: Daniel Lezcano <daniel.lezcano@linaro.org>
Cc: Mark Salyzyn <salyzyn@android.com>
Cc: Peter Collingbourne <pcc@google.com>
Cc: Shuah Khan <shuah@kernel.org>
Cc: Dmitry Safonov <0x7f454c46@gmail.com>
Cc: Rasmus Villemoes <linux@rasmusvillemoes.dk>
Cc: Huw Davies <huw@codeweavers.com>
Link: https://lkml.kernel.org/r/20190621095252.32307-15-vincenzo.frascino@arm.com
arch/arm64/include/asm/vdso.h
arch/arm64/kernel/signal32.c

diff --git a/arch/arm64/include/asm/vdso.h b/arch/arm64/include/asm/vdso.h
index 1f94ec1..9c15e0a 100644
--- a/arch/arm64/include/asm/vdso.h
+++ b/arch/arm64/include/asm/vdso.h
@@ -17,6 +17,9 @@
 #ifndef __ASSEMBLY__
 
 #include <generated/vdso-offsets.h>
+#ifdef CONFIG_COMPAT_VDSO
+#include <generated/vdso32-offsets.h>
+#endif
 
 #define VDSO_SYMBOL(base, name)                                                   \
 ({                                                                        \
diff --git a/arch/arm64/kernel/signal32.c b/arch/arm64/kernel/signal32.c
index 8a9a5ce..12a5853 100644
--- a/arch/arm64/kernel/signal32.c
+++ b/arch/arm64/kernel/signal32.c
@@ -18,6 +18,7 @@
 #include <asm/traps.h>
 #include <linux/uaccess.h>
 #include <asm/unistd.h>
+#include <asm/vdso.h>
 
 struct compat_vfp_sigframe {
        compat_ulong_t  magic;
@@ -341,6 +342,30 @@ static void compat_setup_return(struct pt_regs *regs, struct k_sigaction *ka,
                retcode = ptr_to_compat(ka->sa.sa_restorer);
        } else {
                /* Set up sigreturn pointer */
+#ifdef CONFIG_COMPAT_VDSO
+               void *vdso_base = current->mm->context.vdso;
+               void *vdso_trampoline;
+
+               if (ka->sa.sa_flags & SA_SIGINFO) {
+                       if (thumb) {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_rt_sigreturn_thumb);
+                       } else {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_rt_sigreturn_arm);
+                       }
+               } else {
+                       if (thumb) {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_sigreturn_thumb);
+                       } else {
+                               vdso_trampoline = VDSO_SYMBOL(vdso_base,
+                                                       compat_sigreturn_arm);
+                       }
+               }
+
+               retcode = ptr_to_compat(vdso_trampoline) + thumb;
+#else
                unsigned int idx = thumb << 1;
 
                if (ka->sa.sa_flags & SA_SIGINFO)
@@ -348,6 +373,7 @@ static void compat_setup_return(struct pt_regs *regs, struct k_sigaction *ka,
 
                retcode = (unsigned long)current->mm->context.vdso +
                          (idx << 2) + thumb;
+#endif
        }
 
        regs->regs[0]   = usig;
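
On the '+ thumb' term: for AArch32, bit 0 of an interworking branch
target (e.g. the handler's return through lr) selects Thumb state, so
the Thumb trampoline address gets its low bit set before it is written
back as the return address, just as the old sigpage-based computation
did. A hypothetical stand-alone helper showing the same arithmetic
(not kernel code):

	#include <stdint.h>

	/* thumb is 0 or 1; adding it sets bit 0 for a Thumb-state return. */
	static inline uint32_t compat_retcode(uint32_t trampoline, unsigned int thumb)
	{
		return trampoline + thumb;
	}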