Commit 26deb043 authored by Christophe Leroy, committed by Michael Ellerman

powerpc: prepare string/mem functions for KASAN

CONFIG_KASAN implements wrappers for memcpy(), memmove() and memset().
Those wrappers perform the verification and then call __memcpy(),
__memmove() and __memset() respectively. Architectures are therefore
expected to rename their optimised functions accordingly.

For files where KASAN instrumentation is disabled, #defines are used so
that they call the optimised versions of the functions directly, without
going through the KASAN wrappers.

See commit 393f203f ("x86_64: kasan: add interceptors for
memset/memmove/memcpy functions") for details.
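
For illustration, the generic KASAN interceptors follow roughly the
pattern below (a simplified sketch based on the generic mm/kasan code of
that era, not part of this patch): the exported name checks both buffers
against the shadow memory, then tails into the arch-provided
double-underscore variant.

/*
 * Simplified sketch of a KASAN interceptor; check_memory_region() and
 * _RET_IP_ are the helpers used by the generic KASAN code.
 */
#undef memcpy
void *memcpy(void *dest, const void *src, size_t len)
{
	check_memory_region((unsigned long)src, len, false, _RET_IP_);
	check_memory_region((unsigned long)dest, len, true, _RET_IP_);

	return __memcpy(dest, src, len);	/* arch-optimised copy */
}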

Other string/mem functions do not (yet) have KASAN wrappers, so we have
to fall back to the generic versions when KASAN is active; otherwise
KASAN checks would be skipped.
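
The fallback relies on the usual lib/string.c mechanism: when the
architecture does not define the corresponding __HAVE_ARCH_* macro, the
generic C implementation is built instead, and being plain C it gets
instrumented by KASAN. A rough sketch of that mechanism for memcmp()
(generic lib/string.c-style code, not part of this patch):

#ifndef __HAVE_ARCH_MEMCMP
int memcmp(const void *cs, const void *ct, size_t count)
{
	const unsigned char *su1, *su2;
	int res = 0;

	/* byte-wise compare; stops at the first differing byte */
	for (su1 = cs, su2 = ct; 0 < count; ++su1, ++su2, count--)
		if ((res = *su1 - *su2) != 0)
			break;
	return res;
}
#endif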
Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
[mpe: Fixups to keep selftests working]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
parent d69ca6ba
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_KASAN_H
#define __ASM_KASAN_H
#ifdef CONFIG_KASAN
#define _GLOBAL_KASAN(fn) _GLOBAL(__##fn)
#define _GLOBAL_TOC_KASAN(fn) _GLOBAL_TOC(__##fn)
#define EXPORT_SYMBOL_KASAN(fn) EXPORT_SYMBOL(__##fn)
#else
#define _GLOBAL_KASAN(fn) _GLOBAL(fn)
#define _GLOBAL_TOC_KASAN(fn) _GLOBAL_TOC(fn)
#define EXPORT_SYMBOL_KASAN(fn)
#endif
#endif
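
With this new header (pulled in as <asm/kasan.h> by the assembly files
below), each optimised routine keeps a single definition site. Expanding
the macros above, a typical use looks like this sketch:

_GLOBAL_KASAN(memset)          /* KASAN=y: _GLOBAL(__memset), else _GLOBAL(memset) */
	/* ... optimised implementation ... */
	blr
EXPORT_SYMBOL(memset)          /* existing export of the plain name, unchanged here */
EXPORT_SYMBOL_KASAN(memset)    /* KASAN=y: EXPORT_SYMBOL(__memset), else nothing */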
@@ -4,14 +4,17 @@
 #ifdef __KERNEL__
+#ifndef CONFIG_KASAN
 #define __HAVE_ARCH_STRNCPY
 #define __HAVE_ARCH_STRNCMP
+#define __HAVE_ARCH_MEMCHR
+#define __HAVE_ARCH_MEMCMP
+#define __HAVE_ARCH_MEMSET16
+#endif
 #define __HAVE_ARCH_MEMSET
 #define __HAVE_ARCH_MEMCPY
 #define __HAVE_ARCH_MEMMOVE
-#define __HAVE_ARCH_MEMCMP
-#define __HAVE_ARCH_MEMCHR
-#define __HAVE_ARCH_MEMSET16
 #define __HAVE_ARCH_MEMCPY_FLUSHCACHE
 extern char * strcpy(char *,const char *);
@@ -27,7 +30,27 @@ extern int memcmp(const void *,const void *,__kernel_size_t);
 extern void * memchr(const void *,int,__kernel_size_t);
 extern void * memcpy_flushcache(void *,const void *,__kernel_size_t);
+void *__memset(void *s, int c, __kernel_size_t count);
+void *__memcpy(void *to, const void *from, __kernel_size_t n);
+void *__memmove(void *to, const void *from, __kernel_size_t n);
+#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
+/*
+ * For files that are not instrumented (e.g. mm/slub.c) we
+ * should use not instrumented version of mem* functions.
+ */
+#define memcpy(dst, src, len) __memcpy(dst, src, len)
+#define memmove(dst, src, len) __memmove(dst, src, len)
+#define memset(s, c, n) __memset(s, c, n)
+#ifndef __NO_FORTIFY
+#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
+#endif
+#endif
 #ifdef CONFIG_PPC64
+#ifndef CONFIG_KASAN
 #define __HAVE_ARCH_MEMSET32
 #define __HAVE_ARCH_MEMSET64
@@ -49,8 +72,11 @@ static inline void *memset64(uint64_t *p, uint64_t v, __kernel_size_t n)
 {
 	return __memset64(p, v, n * 8);
 }
+#endif
 #else
+#ifndef CONFIG_KASAN
 #define __HAVE_ARCH_STRLEN
+#endif
 extern void *memset16(uint16_t *, uint16_t, __kernel_size_t);
 #endif
...
@@ -16,8 +16,16 @@
 # If you really need to reference something from prom_init.o add
 # it to the list below:
+grep "^CONFIG_KASAN=y$" .config >/dev/null
+if [ $? -eq 0 ]
+then
+	MEM_FUNCS="__memcpy __memset"
+else
+	MEM_FUNCS="memcpy memset"
+fi
 WHITELIST="add_reloc_offset __bss_start __bss_stop copy_and_flush
-_end enter_prom memcpy memset reloc_offset __secondary_hold
+_end enter_prom $MEM_FUNCS reloc_offset __secondary_hold
 __secondary_hold_acknowledge __secondary_hold_spinloop __start
 strcmp strcpy strlcpy strlen strncmp strstr kstrtobool logo_linux_clut224
 reloc_got2 kernstart_addr memstart_addr linux_banner _stext
...
@@ -8,9 +8,14 @@ ccflags-$(CONFIG_PPC64) := $(NO_MINIMAL_TOC)
 CFLAGS_REMOVE_code-patching.o = $(CC_FLAGS_FTRACE)
 CFLAGS_REMOVE_feature-fixups.o = $(CC_FLAGS_FTRACE)
-obj-y += string.o alloc.o code-patching.o feature-fixups.o
+obj-y += alloc.o code-patching.o feature-fixups.o
-obj-$(CONFIG_PPC32) += div64.o copy_32.o crtsavres.o strlen_32.o
+ifndef CONFIG_KASAN
+obj-y += string.o memcmp_$(BITS).o
+obj-$(CONFIG_PPC32) += strlen_32.o
+endif
+obj-$(CONFIG_PPC32) += div64.o copy_32.o crtsavres.o
 obj-$(CONFIG_FUNCTION_ERROR_INJECTION) += error-inject.o
@@ -34,7 +39,7 @@ obj64-$(CONFIG_KPROBES_SANITY_TEST) += test_emulate_step.o \
 	test_emulate_step_exec_instr.o
 obj-y += checksum_$(BITS).o checksum_wrappers.o \
-	string_$(BITS).o memcmp_$(BITS).o
+	string_$(BITS).o
 obj-y += sstep.o ldstfp.o quad.o
 obj64-y += quad.o
...
@@ -14,6 +14,7 @@
 #include <asm/ppc_asm.h>
 #include <asm/export.h>
 #include <asm/code-patching-asm.h>
+#include <asm/kasan.h>
 #define COPY_16_BYTES \
 	lwz r7,4(r4); \
@@ -68,6 +69,7 @@ CACHELINE_BYTES = L1_CACHE_BYTES
 LG_CACHELINE_BYTES = L1_CACHE_SHIFT
 CACHELINE_MASK = (L1_CACHE_BYTES-1)
+#ifndef CONFIG_KASAN
 _GLOBAL(memset16)
 	rlwinm. r0 ,r5, 31, 1, 31
 	addi r6, r3, -4
@@ -81,6 +83,7 @@ _GLOBAL(memset16)
 	sth r4, 4(r6)
 	blr
 EXPORT_SYMBOL(memset16)
+#endif
 /*
  * Use dcbz on the complete cache lines in the destination
@@ -91,7 +94,7 @@ EXPORT_SYMBOL(memset16)
  * We therefore skip the optimised bloc that uses dcbz. This jump is
  * replaced by a nop once cache is active. This is done in machine_init()
  */
-_GLOBAL(memset)
+_GLOBAL_KASAN(memset)
 	cmplwi 0,r5,4
 	blt 7f
@@ -151,6 +154,7 @@ _GLOBAL(memset)
 	bdnz 9b
 	blr
 EXPORT_SYMBOL(memset)
+EXPORT_SYMBOL_KASAN(memset)
 /*
  * This version uses dcbz on the complete cache lines in the
@@ -163,12 +167,12 @@ EXPORT_SYMBOL(memset)
  * We therefore jump to generic_memcpy which doesn't use dcbz. This jump is
  * replaced by a nop once cache is active. This is done in machine_init()
  */
-_GLOBAL(memmove)
+_GLOBAL_KASAN(memmove)
 	cmplw 0,r3,r4
 	bgt backwards_memcpy
 	/* fall through */
-_GLOBAL(memcpy)
+_GLOBAL_KASAN(memcpy)
1:	b generic_memcpy
 	patch_site 1b, patch__memcpy_nocache
@@ -244,6 +248,8 @@ _GLOBAL(memcpy)
65:	blr
 EXPORT_SYMBOL(memcpy)
 EXPORT_SYMBOL(memmove)
+EXPORT_SYMBOL_KASAN(memcpy)
+EXPORT_SYMBOL_KASAN(memmove)
 generic_memcpy:
 	srwi. r7,r5,3
...
@@ -12,7 +12,9 @@
 #include <asm/errno.h>
 #include <asm/ppc_asm.h>
 #include <asm/export.h>
+#include <asm/kasan.h>
+#ifndef CONFIG_KASAN
 _GLOBAL(__memset16)
 	rlwimi r4,r4,16,0,15
 	/* fall through */
@@ -29,8 +31,9 @@ _GLOBAL(__memset64)
 EXPORT_SYMBOL(__memset16)
 EXPORT_SYMBOL(__memset32)
 EXPORT_SYMBOL(__memset64)
+#endif
-_GLOBAL(memset)
+_GLOBAL_KASAN(memset)
 	neg r0,r3
 	rlwimi r4,r4,8,16,23
 	andi. r0,r0,7 /* # bytes to be 8-byte aligned */
@@ -96,8 +99,9 @@ _GLOBAL(memset)
 	stb r4,0(r6)
 	blr
 EXPORT_SYMBOL(memset)
+EXPORT_SYMBOL_KASAN(memset)
-_GLOBAL_TOC(memmove)
+_GLOBAL_TOC_KASAN(memmove)
 	cmplw 0,r3,r4
 	bgt backwards_memcpy
 	b memcpy
@@ -139,3 +143,4 @@ _GLOBAL(backwards_memcpy)
 	mtctr r7
 	b 1b
 EXPORT_SYMBOL(memmove)
+EXPORT_SYMBOL_KASAN(memmove)
@@ -11,6 +11,7 @@
 #include <asm/export.h>
 #include <asm/asm-compat.h>
 #include <asm/feature-fixups.h>
+#include <asm/kasan.h>
 #ifndef SELFTEST_CASE
 /* For big-endian, 0 == most CPUs, 1 == POWER6, 2 == Cell */
@@ -18,7 +19,7 @@
 #endif
 .align 7
-_GLOBAL_TOC(memcpy)
+_GLOBAL_TOC_KASAN(memcpy)
BEGIN_FTR_SECTION
 #ifdef __LITTLE_ENDIAN__
 	cmpdi cr7,r5,0
@@ -230,3 +231,4 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	blr
 #endif
 EXPORT_SYMBOL(memcpy)
+EXPORT_SYMBOL_KASAN(memcpy)
 /* SPDX-License-Identifier: GPL-2.0 */
 #define EXPORT_SYMBOL(x)
+#define EXPORT_SYMBOL_KASAN(x)
@@ -25,6 +25,7 @@
 #define _GLOBAL(A) FUNC_START(test_ ## A)
 #define _GLOBAL_TOC(A) _GLOBAL(A)
+#define _GLOBAL_TOC_KASAN(A) _GLOBAL(A)
 #define PPC_MTOCRF(A, B) mtocrf A, B
...