Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

arm64: compat: Get sigreturn trampolines from vDSO

When the compat vDSO is enabled, the sigreturn trampolines are no
longer exposed through [sigpage] but through [vdso].

Add the relevant code to enable the feature.

Signed-off-by: Vincenzo Frascino <vincenzo.frascino@arm.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Tested-by: Shijith Thotton <sthotton@marvell.com>
Tested-by: Andre Przywara <andre.przywara@arm.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-arm-kernel@lists.infradead.org
Cc: linux-mips@vger.kernel.org
Cc: linux-kselftest@vger.kernel.org
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Will Deacon <will.deacon@arm.com>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Russell King <linux@armlinux.org.uk>
Cc: Ralf Baechle <ralf@linux-mips.org>
Cc: Paul Burton <paul.burton@mips.com>
Cc: Daniel Lezcano <daniel.lezcano@linaro.org>
Cc: Mark Salyzyn <salyzyn@android.com>
Cc: Peter Collingbourne <pcc@google.com>
Cc: Shuah Khan <shuah@kernel.org>
Cc: Dmitry Safonov <0x7f454c46@gmail.com>
Cc: Rasmus Villemoes <linux@rasmusvillemoes.dk>
Cc: Huw Davies <huw@codeweavers.com>
Link: https://lkml.kernel.org/r/20190621095252.32307-15-vincenzo.frascino@arm.com

authored by

Vincenzo Frascino and committed by
Thomas Gleixner
f01703b3 1e3f17f5

+29
+3
arch/arm64/include/asm/vdso.h
··· 17 17 #ifndef __ASSEMBLY__ 18 18 19 19 #include <generated/vdso-offsets.h> 20 + #ifdef CONFIG_COMPAT_VDSO 21 + #include <generated/vdso32-offsets.h> 22 + #endif 20 23 21 24 #define VDSO_SYMBOL(base, name) \ 22 25 ({ \
+26
arch/arm64/kernel/signal32.c
··· 18 18 #include <asm/traps.h> 19 19 #include <linux/uaccess.h> 20 20 #include <asm/unistd.h> 21 + #include <asm/vdso.h> 21 22 22 23 struct compat_vfp_sigframe { 23 24 compat_ulong_t magic; ··· 342 341 retcode = ptr_to_compat(ka->sa.sa_restorer); 343 342 } else { 344 343 /* Set up sigreturn pointer */ 344 + #ifdef CONFIG_COMPAT_VDSO 345 + void *vdso_base = current->mm->context.vdso; 346 + void *vdso_trampoline; 347 + 348 + if (ka->sa.sa_flags & SA_SIGINFO) { 349 + if (thumb) { 350 + vdso_trampoline = VDSO_SYMBOL(vdso_base, 351 + compat_rt_sigreturn_thumb); 352 + } else { 353 + vdso_trampoline = VDSO_SYMBOL(vdso_base, 354 + compat_rt_sigreturn_arm); 355 + } 356 + } else { 357 + if (thumb) { 358 + vdso_trampoline = VDSO_SYMBOL(vdso_base, 359 + compat_sigreturn_thumb); 360 + } else { 361 + vdso_trampoline = VDSO_SYMBOL(vdso_base, 362 + compat_sigreturn_arm); 363 + } 364 + } 365 + 366 + retcode = ptr_to_compat(vdso_trampoline) + thumb; 367 + #else 345 368 unsigned int idx = thumb << 1; 346 369 347 370 if (ka->sa.sa_flags & SA_SIGINFO) ··· 373 348 374 349 retcode = (unsigned long)current->mm->context.vdso + 375 350 (idx << 2) + thumb; 351 + #endif 376 352 } 377 353 378 354 regs->regs[0] = usig;