Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

powerpc: prepare string/mem functions for KASAN

CONFIG_KASAN implements wrappers for memcpy(), memmove() and memset().
Those wrappers do the verification, then call __memcpy(), __memmove()
and __memset() respectively. The arches are therefore expected to
rename their optimised functions that way.

For files on which KASAN is inhibited, #defines are used to allow
them to directly call optimised versions of the functions without
going through the KASAN wrappers.

See commit 393f203f5fd5 ("x86_64: kasan: add interceptors for
memset/memmove/memcpy functions") for details.

Other string / mem functions do not (yet) have KASAN wrappers;
we therefore have to fall back to the generic versions when
KASAN is active, otherwise KASAN checks will be skipped.

Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
[mpe: Fixups to keep selftests working]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>

authored by

Christophe Leroy and committed by
Michael Ellerman
26deb043 d69ca6ba

+82 -13
+15
arch/powerpc/include/asm/kasan.h
··· 1 + /* SPDX-License-Identifier: GPL-2.0 */ 2 + #ifndef __ASM_KASAN_H 3 + #define __ASM_KASAN_H 4 + 5 + #ifdef CONFIG_KASAN 6 + #define _GLOBAL_KASAN(fn) _GLOBAL(__##fn) 7 + #define _GLOBAL_TOC_KASAN(fn) _GLOBAL_TOC(__##fn) 8 + #define EXPORT_SYMBOL_KASAN(fn) EXPORT_SYMBOL(__##fn) 9 + #else 10 + #define _GLOBAL_KASAN(fn) _GLOBAL(fn) 11 + #define _GLOBAL_TOC_KASAN(fn) _GLOBAL_TOC(fn) 12 + #define EXPORT_SYMBOL_KASAN(fn) 13 + #endif 14 + 15 + #endif
+29 -3
arch/powerpc/include/asm/string.h
··· 4 4 5 5 #ifdef __KERNEL__ 6 6 7 + #ifndef CONFIG_KASAN 7 8 #define __HAVE_ARCH_STRNCPY 8 9 #define __HAVE_ARCH_STRNCMP 10 + #define __HAVE_ARCH_MEMCHR 11 + #define __HAVE_ARCH_MEMCMP 12 + #define __HAVE_ARCH_MEMSET16 13 + #endif 14 + 9 15 #define __HAVE_ARCH_MEMSET 10 16 #define __HAVE_ARCH_MEMCPY 11 17 #define __HAVE_ARCH_MEMMOVE 12 - #define __HAVE_ARCH_MEMCMP 13 - #define __HAVE_ARCH_MEMCHR 14 - #define __HAVE_ARCH_MEMSET16 15 18 #define __HAVE_ARCH_MEMCPY_FLUSHCACHE 16 19 17 20 extern char * strcpy(char *,const char *); ··· 30 27 extern void * memchr(const void *,int,__kernel_size_t); 31 28 extern void * memcpy_flushcache(void *,const void *,__kernel_size_t); 32 29 30 + void *__memset(void *s, int c, __kernel_size_t count); 31 + void *__memcpy(void *to, const void *from, __kernel_size_t n); 32 + void *__memmove(void *to, const void *from, __kernel_size_t n); 33 + 34 + #if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__) 35 + /* 36 + * For files that are not instrumented (e.g. mm/slub.c) we 37 + * should use not instrumented version of mem* functions. 38 + */ 39 + #define memcpy(dst, src, len) __memcpy(dst, src, len) 40 + #define memmove(dst, src, len) __memmove(dst, src, len) 41 + #define memset(s, c, n) __memset(s, c, n) 42 + 43 + #ifndef __NO_FORTIFY 44 + #define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */ 45 + #endif 46 + 47 + #endif 48 + 33 49 #ifdef CONFIG_PPC64 50 + #ifndef CONFIG_KASAN 34 51 #define __HAVE_ARCH_MEMSET32 35 52 #define __HAVE_ARCH_MEMSET64 36 53 ··· 72 49 { 73 50 return __memset64(p, v, n * 8); 74 51 } 52 + #endif 75 53 #else 54 + #ifndef CONFIG_KASAN 76 55 #define __HAVE_ARCH_STRLEN 56 + #endif 77 57 78 58 extern void *memset16(uint16_t *, uint16_t, __kernel_size_t); 79 59 #endif
+9 -1
arch/powerpc/kernel/prom_init_check.sh
··· 16 16 # If you really need to reference something from prom_init.o add 17 17 # it to the list below: 18 18 19 + grep "^CONFIG_KASAN=y$" .config >/dev/null 20 + if [ $? -eq 0 ] 21 + then 22 + MEM_FUNCS="__memcpy __memset" 23 + else 24 + MEM_FUNCS="memcpy memset" 25 + fi 26 + 19 27 WHITELIST="add_reloc_offset __bss_start __bss_stop copy_and_flush 20 - _end enter_prom memcpy memset reloc_offset __secondary_hold 28 + _end enter_prom $MEM_FUNCS reloc_offset __secondary_hold 21 29 __secondary_hold_acknowledge __secondary_hold_spinloop __start 22 30 strcmp strcpy strlcpy strlen strncmp strstr kstrtobool logo_linux_clut224 23 31 reloc_got2 kernstart_addr memstart_addr linux_banner _stext
+8 -3
arch/powerpc/lib/Makefile
··· 8 8 CFLAGS_REMOVE_code-patching.o = $(CC_FLAGS_FTRACE) 9 9 CFLAGS_REMOVE_feature-fixups.o = $(CC_FLAGS_FTRACE) 10 10 11 - obj-y += string.o alloc.o code-patching.o feature-fixups.o 11 + obj-y += alloc.o code-patching.o feature-fixups.o 12 12 13 - obj-$(CONFIG_PPC32) += div64.o copy_32.o crtsavres.o strlen_32.o 13 + ifndef CONFIG_KASAN 14 + obj-y += string.o memcmp_$(BITS).o 15 + obj-$(CONFIG_PPC32) += strlen_32.o 16 + endif 17 + 18 + obj-$(CONFIG_PPC32) += div64.o copy_32.o crtsavres.o 14 19 15 20 obj-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o 16 21 ··· 39 34 test_emulate_step_exec_instr.o 40 35 41 36 obj-y += checksum_$(BITS).o checksum_wrappers.o \ 42 - string_$(BITS).o memcmp_$(BITS).o 37 + string_$(BITS).o 43 38 44 39 obj-y += sstep.o ldstfp.o quad.o 45 40 obj64-y += quad.o
+9 -3
arch/powerpc/lib/copy_32.S
··· 14 14 #include <asm/ppc_asm.h> 15 15 #include <asm/export.h> 16 16 #include <asm/code-patching-asm.h> 17 + #include <asm/kasan.h> 17 18 18 19 #define COPY_16_BYTES \ 19 20 lwz r7,4(r4); \ ··· 69 68 LG_CACHELINE_BYTES = L1_CACHE_SHIFT 70 69 CACHELINE_MASK = (L1_CACHE_BYTES-1) 71 70 71 + #ifndef CONFIG_KASAN 72 72 _GLOBAL(memset16) 73 73 rlwinm. r0 ,r5, 31, 1, 31 74 74 addi r6, r3, -4 ··· 83 81 sth r4, 4(r6) 84 82 blr 85 83 EXPORT_SYMBOL(memset16) 84 + #endif 86 85 87 86 /* 88 87 * Use dcbz on the complete cache lines in the destination ··· 94 91 * We therefore skip the optimised bloc that uses dcbz. This jump is 95 92 * replaced by a nop once cache is active. This is done in machine_init() 96 93 */ 97 - _GLOBAL(memset) 94 + _GLOBAL_KASAN(memset) 98 95 cmplwi 0,r5,4 99 96 blt 7f 100 97 ··· 154 151 bdnz 9b 155 152 blr 156 153 EXPORT_SYMBOL(memset) 154 + EXPORT_SYMBOL_KASAN(memset) 157 155 158 156 /* 159 157 * This version uses dcbz on the complete cache lines in the ··· 167 163 * We therefore jump to generic_memcpy which doesn't use dcbz. This jump is 168 164 * replaced by a nop once cache is active. This is done in machine_init() 169 165 */ 170 - _GLOBAL(memmove) 166 + _GLOBAL_KASAN(memmove) 171 167 cmplw 0,r3,r4 172 168 bgt backwards_memcpy 173 169 /* fall through */ 174 170 175 - _GLOBAL(memcpy) 171 + _GLOBAL_KASAN(memcpy) 176 172 1: b generic_memcpy 177 173 patch_site 1b, patch__memcpy_nocache 178 174 ··· 248 244 65: blr 249 245 EXPORT_SYMBOL(memcpy) 250 246 EXPORT_SYMBOL(memmove) 247 + EXPORT_SYMBOL_KASAN(memcpy) 248 + EXPORT_SYMBOL_KASAN(memmove) 251 249 252 250 generic_memcpy: 253 251 srwi. r7,r5,3
+7 -2
arch/powerpc/lib/mem_64.S
··· 12 12 #include <asm/errno.h> 13 13 #include <asm/ppc_asm.h> 14 14 #include <asm/export.h> 15 + #include <asm/kasan.h> 15 16 17 + #ifndef CONFIG_KASAN 16 18 _GLOBAL(__memset16) 17 19 rlwimi r4,r4,16,0,15 18 20 /* fall through */ ··· 31 29 EXPORT_SYMBOL(__memset16) 32 30 EXPORT_SYMBOL(__memset32) 33 31 EXPORT_SYMBOL(__memset64) 32 + #endif 34 33 35 - _GLOBAL(memset) 34 + _GLOBAL_KASAN(memset) 36 35 neg r0,r3 37 36 rlwimi r4,r4,8,16,23 38 37 andi. r0,r0,7 /* # bytes to be 8-byte aligned */ ··· 99 96 stb r4,0(r6) 100 97 blr 101 98 EXPORT_SYMBOL(memset) 99 + EXPORT_SYMBOL_KASAN(memset) 102 100 103 - _GLOBAL_TOC(memmove) 101 + _GLOBAL_TOC_KASAN(memmove) 104 102 cmplw 0,r3,r4 105 103 bgt backwards_memcpy 106 104 b memcpy ··· 143 139 mtctr r7 144 140 b 1b 145 141 EXPORT_SYMBOL(memmove) 142 + EXPORT_SYMBOL_KASAN(memmove)
+3 -1
arch/powerpc/lib/memcpy_64.S
··· 11 11 #include <asm/export.h> 12 12 #include <asm/asm-compat.h> 13 13 #include <asm/feature-fixups.h> 14 + #include <asm/kasan.h> 14 15 15 16 #ifndef SELFTEST_CASE 16 17 /* For big-endian, 0 == most CPUs, 1 == POWER6, 2 == Cell */ ··· 19 18 #endif 20 19 21 20 .align 7 22 - _GLOBAL_TOC(memcpy) 21 + _GLOBAL_TOC_KASAN(memcpy) 23 22 BEGIN_FTR_SECTION 24 23 #ifdef __LITTLE_ENDIAN__ 25 24 cmpdi cr7,r5,0 ··· 231 230 blr 232 231 #endif 233 232 EXPORT_SYMBOL(memcpy) 233 + EXPORT_SYMBOL_KASAN(memcpy)
+1
tools/testing/selftests/powerpc/copyloops/asm/export.h
··· 1 1 /* SPDX-License-Identifier: GPL-2.0 */ 2 2 #define EXPORT_SYMBOL(x) 3 + #define EXPORT_SYMBOL_KASAN(x)
tools/testing/selftests/powerpc/copyloops/asm/kasan.h
+1
tools/testing/selftests/powerpc/copyloops/asm/ppc_asm.h
··· 25 25 26 26 #define _GLOBAL(A) FUNC_START(test_ ## A) 27 27 #define _GLOBAL_TOC(A) _GLOBAL(A) 28 + #define _GLOBAL_TOC_KASAN(A) _GLOBAL(A) 28 29 29 30 #define PPC_MTOCRF(A, B) mtocrf A, B 30 31