author     Ard Biesheuvel <ardb@kernel.org>           2019-11-08 13:22:08 +0100
committer  Herbert Xu <herbert@gondor.apana.org.au>   2019-11-17 09:02:39 +0800
commit     5fb8ef25803ef33e2eb60b626435828b937bed75 (patch)
tree       156401ad072c1acc18f74cbdf8eb31859191b9ac /crypto
parent     746b2e024c67aa605ac12d135cd7085a49cf9dc4 (diff)
crypto: chacha - move existing library code into lib/crypto
Currently, our generic ChaCha implementation consists of a permute function in lib/chacha.c that operates on the 64-byte ChaCha state directly [and which is always included into the core kernel since it is used by the /dev/random driver], and the crypto API plumbing to expose it as a skcipher.

In order to support in-kernel users that need the ChaCha streamcipher but have no need [or tolerance] for going through the abstractions of the crypto API, let's expose the streamcipher bits via a library API as well, in a way that permits the implementation to be superseded by an architecture specific one if provided.

So move the streamcipher code into a separate module in lib/crypto, and expose the init() and crypt() routines to users of the library.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
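[Editor's note: for in-kernel users, the upshot is that the streamcipher can be driven directly through the library helpers whose call sites appear in the diff below (chacha_init_generic, chacha_crypt_generic), with no skcipher allocation or scatterlist walking. A minimal sketch of such a caller, assuming a 32-byte key already expressed as eight little-endian words and a 16-byte IV; the helper name chacha20_crypt_buf and its surrounding context are illustrative, not part of this commit:]

	#include <crypto/chacha.h>

	/*
	 * Encrypt or decrypt a buffer in place with 20-round ChaCha,
	 * bypassing the crypto API entirely. Signatures are inferred
	 * from the call sites in the diff below.
	 */
	static void chacha20_crypt_buf(const u32 key[CHACHA_KEY_SIZE / sizeof(u32)],
				       const u8 iv[CHACHA_IV_SIZE],
				       u8 *buf, unsigned int len)
	{
		u32 state[16];

		/* words 0-3: constants, 4-11: key, 12-15: counter + nonce */
		chacha_init_generic(state, key, iv);
		/* keystream XOR; src == dst gives in-place operation */
		chacha_crypt_generic(state, buf, buf, len, 20);
	}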
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig          |  1
-rw-r--r--  crypto/chacha_generic.c | 60
2 files changed, 6 insertions(+), 55 deletions(-)
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 9def945e9549..ae4495ae003f 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1412,6 +1412,7 @@ config CRYPTO_SALSA20
 config CRYPTO_CHACHA20
 	tristate "ChaCha stream cipher algorithms"
+	select CRYPTO_LIB_CHACHA_GENERIC
 	select CRYPTO_SKCIPHER
 	help
 	  The ChaCha20, XChaCha20, and XChaCha12 stream cipher algorithms.
diff --git a/crypto/chacha_generic.c b/crypto/chacha_generic.c
index 085d8d219987..ebae6d9d9b32 100644
--- a/crypto/chacha_generic.c
+++ b/crypto/chacha_generic.c
@@ -8,29 +8,10 @@
 #include <asm/unaligned.h>
 #include <crypto/algapi.h>
-#include <crypto/chacha.h>
+#include <crypto/internal/chacha.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/module.h>
 
-static void chacha_docrypt(u32 *state, u8 *dst, const u8 *src,
-			   unsigned int bytes, int nrounds)
-{
-	/* aligned to potentially speed up crypto_xor() */
-	u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
-
-	while (bytes >= CHACHA_BLOCK_SIZE) {
-		chacha_block(state, stream, nrounds);
-		crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
-		bytes -= CHACHA_BLOCK_SIZE;
-		dst += CHACHA_BLOCK_SIZE;
-		src += CHACHA_BLOCK_SIZE;
-	}
-	if (bytes) {
-		chacha_block(state, stream, nrounds);
-		crypto_xor_cpy(dst, src, stream, bytes);
-	}
-}
-
 static int chacha_stream_xor(struct skcipher_request *req,
 			     const struct chacha_ctx *ctx, const u8 *iv)
 {
@@ -48,8 +29,8 @@ static int chacha_stream_xor(struct skcipher_request *req,
 		if (nbytes < walk.total)
 			nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
 
-		chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
-			       nbytes, ctx->nrounds);
+		chacha_crypt_generic(state, walk.dst.virt.addr,
+				     walk.src.virt.addr, nbytes, ctx->nrounds);
 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
@@ -58,41 +39,10 @@ static int chacha_stream_xor(struct skcipher_request *req,
 void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv)
 {
-	state[0]  = 0x61707865; /* "expa" */
-	state[1]  = 0x3320646e; /* "nd 3" */
-	state[2]  = 0x79622d32; /* "2-by" */
-	state[3]  = 0x6b206574; /* "te k" */
-	state[4]  = ctx->key[0];
-	state[5]  = ctx->key[1];
-	state[6]  = ctx->key[2];
-	state[7]  = ctx->key[3];
-	state[8]  = ctx->key[4];
-	state[9]  = ctx->key[5];
-	state[10] = ctx->key[6];
-	state[11] = ctx->key[7];
-	state[12] = get_unaligned_le32(iv +  0);
-	state[13] = get_unaligned_le32(iv +  4);
-	state[14] = get_unaligned_le32(iv +  8);
-	state[15] = get_unaligned_le32(iv + 12);
+	chacha_init_generic(state, ctx->key, iv);
 }
 EXPORT_SYMBOL_GPL(crypto_chacha_init);
 
-static int chacha_setkey(struct crypto_skcipher *tfm, const u8 *key,
-			 unsigned int keysize, int nrounds)
-{
-	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
-	int i;
-
-	if (keysize != CHACHA_KEY_SIZE)
-		return -EINVAL;
-
-	for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
-		ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
-
-	ctx->nrounds = nrounds;
-	return 0;
-}
-
 int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
 			   unsigned int keysize)
 {
@@ -126,7 +76,7 @@ int crypto_xchacha_crypt(struct skcipher_request *req)
 	/* Compute the subkey given the original key and first 128 nonce bits */
 	crypto_chacha_init(state, ctx, req->iv);
-	hchacha_block(state, subctx.key, ctx->nrounds);
+	hchacha_block_generic(state, subctx.key, ctx->nrounds);
 	subctx.nrounds = ctx->nrounds;
 
 	/* Build the real IV */
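
[Editor's note: this last hunk is the XChaCha construction: HChaCha turns the key and the first 128 bits of the extended nonce into a one-off subkey, and plain ChaCha then runs with that subkey and the remaining nonce bits. A sketch of that derivation using the generic helpers named above; the function xchacha_init_state is hypothetical, and the IV layout (stream position in bytes 24-31, remaining nonce bits in 16-23) is an assumption based on crypto_xchacha_crypt, whose body is truncated here:]

	#include <crypto/chacha.h>
	#include <linux/string.h>

	static void xchacha_init_state(u32 state[16], const u32 *key,
				       const u8 *iv, int nrounds)
	{
		u32 subkey[CHACHA_KEY_SIZE / sizeof(u32)];
		u8 real_iv[CHACHA_IV_SIZE];

		/* HChaCha: key + first 128 nonce bits -> 256-bit subkey */
		chacha_init_generic(state, key, iv);
		hchacha_block_generic(state, subkey, nrounds);

		/* Real IV: 64-bit stream position, then the last 64 nonce bits */
		memcpy(&real_iv[0], iv + 24, 8);
		memcpy(&real_iv[8], iv + 16, 8);

		/* Re-key the state with the derived subkey and real IV */
		chacha_init_generic(state, subkey, real_iv);
	}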