summaryrefslogtreecommitdiffstats
path: root/arch/arm64/crypto/Kconfig
diff options
context:
space:
mode:
authorArd Biesheuvel <ard.biesheuvel@linaro.org>2017-01-11 16:41:52 +0000
committerHerbert Xu <herbert@gondor.apana.org.au>2017-01-13 00:26:49 +0800
commitbed593c0e852f5c1efd3ca4e984fd744c51cf6ee (patch)
treec3ef5fd3b1f202288c63d037f1b0bf62ea26fd4f /arch/arm64/crypto/Kconfig
parent293614ce3eda94a3c9b38d5c18fdc06eb1397221 (diff)
downloadlinux-bed593c0e852f5c1efd3ca4e984fd744c51cf6ee.tar.gz
linux-bed593c0e852f5c1efd3ca4e984fd744c51cf6ee.tar.bz2
linux-bed593c0e852f5c1efd3ca4e984fd744c51cf6ee.zip
crypto: arm64/aes - add scalar implementation
This adds a scalar implementation of AES, based on the precomputed tables that are exposed by the generic AES code. Since rotates are cheap on arm64, this implementation only uses the 4 core tables (of 1 KB each), and avoids the prerotated ones, reducing the D-cache footprint by 75%. On Cortex-A57, this code manages 13.0 cycles per byte, which is ~34% faster than the generic C code. (Note that this is still >13x slower than the code that uses the optional ARMv8 Crypto Extensions, which manages <1 cycles per byte.) Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/arm64/crypto/Kconfig')
-rw-r--r--arch/arm64/crypto/Kconfig4
1 file changed, 4 insertions, 0 deletions
diff --git a/arch/arm64/crypto/Kconfig b/arch/arm64/crypto/Kconfig
index 0bf0f531f539..0826f8e599a6 100644
--- a/arch/arm64/crypto/Kconfig
+++ b/arch/arm64/crypto/Kconfig
@@ -41,6 +41,10 @@ config CRYPTO_CRC32_ARM64_CE
depends on KERNEL_MODE_NEON && CRC32
select CRYPTO_HASH
+config CRYPTO_AES_ARM64
+ tristate "AES core cipher using scalar instructions"
+ select CRYPTO_AES
+
config CRYPTO_AES_ARM64_CE
tristate "AES core cipher using ARMv8 Crypto Extensions"
depends on ARM64 && KERNEL_MODE_NEON