Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

arm64: use a common .arch preamble for inline assembly

Commit 7c78f67e9bd9 ("arm64: enable tlbi range instructions") breaks
LLVM's integrated assembler, because -Wa,-march is only passed to
external assemblers and therefore, the new instructions are not enabled
when IAS is used.

This change adds a common architecture version preamble, which can be
used in inline assembly blocks that contain instructions that require
a newer architecture version, and uses it to fix __TLBI_0 and __TLBI_1
with ARM64_TLB_RANGE.

Fixes: 7c78f67e9bd9 ("arm64: enable tlbi range instructions")
Signed-off-by: Sami Tolvanen <samitolvanen@google.com>
Tested-by: Nathan Chancellor <natechancellor@gmail.com>
Reviewed-by: Nathan Chancellor <natechancellor@gmail.com>
Link: https://github.com/ClangBuiltLinux/linux/issues/1106
Link: https://lore.kernel.org/r/20200827203608.1225689-1-samitolvanen@google.com
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>

Authored by Sami Tolvanen and committed by Catalin Marinas
(commit 1764c3ed; parent d012a719)

Diffstat: +18 -5 across 3 files; this file: +8 -3
arch/arm64/Makefile
···
 82  82   # compiler to generate them and consequently to break the single image contract
 83  83   # we pass it only to the assembler. This option is utilized only in case of non
 84  84   # integrated assemblers.
 85      - ifneq ($(CONFIG_AS_HAS_ARMV8_4), y)
 86      - branch-prot-flags-$(CONFIG_AS_HAS_PAC) += -Wa,-march=armv8.3-a
     85  + ifeq ($(CONFIG_AS_HAS_PAC), y)
     86  + asm-arch := armv8.3-a
 87  87   endif
 88  88   endif
 89  89   
···
 91  91   
 92  92   ifeq ($(CONFIG_AS_HAS_ARMV8_4), y)
 93  93   # make sure to pass the newest target architecture to -march.
 94      - KBUILD_CFLAGS += -Wa,-march=armv8.4-a
     94  + asm-arch := armv8.4-a
     95  + endif
     96  + 
     97  + ifdef asm-arch
     98  + KBUILD_CFLAGS += -Wa,-march=$(asm-arch) \
     99  +                  -DARM64_ASM_ARCH='"$(asm-arch)"'
 95 100   endif
 96 101   
 97 102   ifeq ($(CONFIG_SHADOW_CALL_STACK), y)
+6
arch/arm64/include/asm/compiler.h
···
  2   2   #ifndef __ASM_COMPILER_H
  3   3   #define __ASM_COMPILER_H
  4   4   
      5   + #ifdef ARM64_ASM_ARCH
      6   + #define ARM64_ASM_PREAMBLE ".arch " ARM64_ASM_ARCH "\n"
      7   + #else
      8   + #define ARM64_ASM_PREAMBLE
      9   + #endif
     10   + 
  5  11   /*
  6  12    * The EL0/EL1 pointer bits used by a pointer authentication code.
  7  13    * This is dependent on TBI0/TBI1 being enabled, or bits 63:56 would also apply.
+4 -2
arch/arm64/include/asm/tlbflush.h
···
 28  28   * not. The macros handles invoking the asm with or without the
 29  29   * register argument as appropriate.
 30  30   */
 31      - #define __TLBI_0(op, arg) asm ("tlbi " #op "\n" \
     31  + #define __TLBI_0(op, arg) asm (ARM64_ASM_PREAMBLE \
     32  + "tlbi " #op "\n" \
 32  33   ALTERNATIVE("nop\n nop", \
 33  34   "dsb ish\n tlbi " #op, \
 34  35   ARM64_WORKAROUND_REPEAT_TLBI, \
 35  36   CONFIG_ARM64_WORKAROUND_REPEAT_TLBI) \
 36  37   : : )
 37  38   
 38      - #define __TLBI_1(op, arg) asm ("tlbi " #op ", %0\n" \
     39  + #define __TLBI_1(op, arg) asm (ARM64_ASM_PREAMBLE \
     40  + "tlbi " #op ", %0\n" \
 39  41   ALTERNATIVE("nop\n nop", \
 40  42   "dsb ish\n tlbi " #op ", %0", \
 41  43   ARM64_WORKAROUND_REPEAT_TLBI, \