author      Christophe Leroy <christophe.leroy@c-s.fr>   2019-04-26 16:23:26 +0000
committer   Michael Ellerman <mpe@ellerman.id.au>        2019-05-03 01:20:25 +1000
commit      26deb04342e343ac58ab05bc7d2345ff0be9b667 (patch)
tree        39040b7c409757661ecd6c7d82dce9e919072625 /arch/powerpc/lib/mem_64.S
parent      d69ca6bab39e84a84781535b977c7e62c8f84d37 (diff)
powerpc: prepare string/mem functions for KASAN
CONFIG_KASAN implements wrappers for memcpy(), memmove() and memset().
Those wrappers perform the verification and then call __memcpy(),
__memmove() and __memset() respectively. Architectures are therefore
expected to rename their optimised functions accordingly.
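For illustration, such a KASAN wrapper is roughly of the following shape.
This is a simplified sketch only; the name of the check helper
(check_memory_region() here) is an assumption and has varied across
kernel versions.

    /* Sketch: validate the accessed range, then call the renamed
     * architecture implementation which does the actual work. */
    #undef memset
    void *memset(void *addr, int c, size_t len)
    {
        check_memory_region((unsigned long)addr, len, true, _RET_IP_);
        return __memset(addr, c, len);
    }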
For files in which KASAN is inhibited, #defines are used so that they
call the optimised versions of the functions directly, without going
through the KASAN wrappers.
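As a sketch of what such #defines can look like (the placement in
arch/powerpc/include/asm/string.h and the use of the compiler's
__SANITIZE_ADDRESS__ marker are assumptions here):

    #if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
    /* This file is built without KASAN instrumentation: bypass the
     * wrappers and call the optimised implementations directly. */
    #define memcpy(dst, src, len)   __memcpy(dst, src, len)
    #define memmove(dst, src, len)  __memmove(dst, src, len)
    #define memset(p, v, n)         __memset(p, v, n)
    #endif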
See commit 393f203f5fd5 ("x86_64: kasan: add interceptors for
memset/memmove/memcpy functions") for details.
Other string/mem functions do not (yet) have KASAN wrappers, so we
have to fall back to the generic versions when KASAN is active;
otherwise KASAN checks would be skipped.
Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
[mpe: Fixups to keep selftests working]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Diffstat (limited to 'arch/powerpc/lib/mem_64.S')
-rw-r--r--   arch/powerpc/lib/mem_64.S   9
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/arch/powerpc/lib/mem_64.S b/arch/powerpc/lib/mem_64.S
index 3c3be02f33b7..7f6bd031c306 100644
--- a/arch/powerpc/lib/mem_64.S
+++ b/arch/powerpc/lib/mem_64.S
@@ -12,7 +12,9 @@
 #include <asm/errno.h>
 #include <asm/ppc_asm.h>
 #include <asm/export.h>
+#include <asm/kasan.h>
 
+#ifndef CONFIG_KASAN
 _GLOBAL(__memset16)
 	rlwimi	r4,r4,16,0,15
 	/* fall through */
@@ -29,8 +31,9 @@ _GLOBAL(__memset64)
 EXPORT_SYMBOL(__memset16)
 EXPORT_SYMBOL(__memset32)
 EXPORT_SYMBOL(__memset64)
+#endif
 
-_GLOBAL(memset)
+_GLOBAL_KASAN(memset)
 	neg	r0,r3
 	rlwimi	r4,r4,8,16,23
 	andi.	r0,r0,7		/* # bytes to be 8-byte aligned */
@@ -96,8 +99,9 @@ _GLOBAL(memset)
 	stb	r4,0(r6)
 	blr
 EXPORT_SYMBOL(memset)
+EXPORT_SYMBOL_KASAN(memset)
 
-_GLOBAL_TOC(memmove)
+_GLOBAL_TOC_KASAN(memmove)
 	cmplw	0,r3,r4
 	bgt	backwards_memcpy
 	b	memcpy
@@ -139,3 +143,4 @@ _GLOBAL(backwards_memcpy)
 	mtctr	r7
 	b	1b
 EXPORT_SYMBOL(memmove)
+EXPORT_SYMBOL_KASAN(memmove)
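The _GLOBAL_KASAN(), _GLOBAL_TOC_KASAN() and EXPORT_SYMBOL_KASAN() macros
used above are presumably defined along the following lines (a sketch; the
exact header and definitions come from the rest of this series, not from
this file):

    /* With KASAN, the assembly routine is emitted under its
     * double-underscore name so that the C wrapper intercepts regular
     * callers; without KASAN it keeps its usual name and is called
     * directly. */
    #ifdef CONFIG_KASAN
    #define _GLOBAL_KASAN(fn)       _GLOBAL(__##fn)
    #define _GLOBAL_TOC_KASAN(fn)   _GLOBAL_TOC(__##fn)
    #define EXPORT_SYMBOL_KASAN(fn) EXPORT_SYMBOL(__##fn)
    #else
    #define _GLOBAL_KASAN(fn)       _GLOBAL(fn)
    #define _GLOBAL_TOC_KASAN(fn)   _GLOBAL_TOC(fn)
    #define EXPORT_SYMBOL_KASAN(fn) EXPORT_SYMBOL(fn)
    #endif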