author     Dave Watson <davejwatson@fb.com>           2018-12-10 19:58:19 +0000
committer  Herbert Xu <herbert@gondor.apana.org.au>   2018-12-23 11:52:42 +0800
commit     38003cd26c9f59da77d98927fb9af58732da207a (patch)
tree       2e973dd27e7c8ce3d92da1a6bb02f36d3528eb48 /arch/x86/crypto/aesni-intel_glue.c
parent     e377bedb09d6970ad27d7714b0a6365ee7e4d732 (diff)
crypto: aesni - Split AAD hash calculation to separate macro
AAD hash only needs to be calculated once for each scatter/gather
operation. Move it to its own macro, and call it from GCM_INIT instead
of INITIAL_BLOCKS.

Signed-off-by: Dave Watson <davejwatson@fb.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
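For background, the computation being hoisted is the GHASH absorption of the
AAD, which in GCM happens before any ciphertext is processed; that ordering is
what allows it to run once in GCM_INIT and have the resulting state reused
across scatter/gather segments. A rough plain-C sketch follows, assuming
nothing beyond NIST SP 800-38D; gf128_mul and ghash_aad are hypothetical
helper names, not the kernel's optimized assembly, and the final length block
of full GHASH is omitted.

#include <stdint.h>
#include <string.h>

/*
 * Illustrative GF(2^128) multiply as used by GHASH (NIST SP 800-38D):
 * MSB-first bit order, reduction polynomial x^128 + x^7 + x^2 + x + 1.
 */
static void gf128_mul(uint8_t r[16], const uint8_t x[16], const uint8_t h[16])
{
	uint8_t z[16] = { 0 };
	uint8_t v[16];

	memcpy(v, x, 16);
	for (int i = 0; i < 128; i++) {
		/* If bit i of h (MSB first) is set, z ^= v. */
		if (h[i / 8] & (0x80 >> (i % 8)))
			for (int k = 0; k < 16; k++)
				z[k] ^= v[k];
		/* v = v * x: shift right one bit, reduce on carry-out. */
		int carry = v[15] & 1;
		for (int k = 15; k > 0; k--)
			v[k] = (v[k] >> 1) | (v[k - 1] << 7);
		v[0] >>= 1;
		if (carry)
			v[0] ^= 0xe1;	/* reduction term 11100001 */
	}
	memcpy(r, z, 16);
}

/*
 * Absorb the AAD into a fresh GHASH state x, one zero-padded 16-byte
 * block at a time: x = (x ^ A_i) * H. This is the work that moves
 * into GCM_INIT so it runs once per request.
 */
static void ghash_aad(uint8_t x[16], const uint8_t h[16],
		      const uint8_t *aad, unsigned long aad_len)
{
	uint8_t block[16];

	memset(x, 0, 16);
	while (aad_len) {
		unsigned long n = aad_len < 16 ? aad_len : 16;

		memset(block, 0, 16);	/* zero-pad the final partial block */
		memcpy(block, aad, n);
		for (int k = 0; k < 16; k++)
			block[k] ^= x[k];
		gf128_mul(x, block, h);
		aad += n;
		aad_len -= n;
	}
}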
Diffstat (limited to 'arch/x86/crypto/aesni-intel_glue.c')
-rw-r--r--  arch/x86/crypto/aesni-intel_glue.c | 28 ++++++++++++++++++++--------
1 file changed, 20 insertions(+), 8 deletions(-)
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 7d1259feb0f9..2648842f1c3f 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -189,7 +189,10 @@ asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
*/
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data,
struct gcm_context_data *gdata,
- u8 *hash_subkey);
+ u8 *hash_subkey,
+ u8 *iv,
+ const u8 *aad,
+ unsigned long aad_len);
asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
struct gcm_context_data *gdata, u8 *out,
@@ -214,7 +217,8 @@ static void aesni_gcm_enc_avx(void *ctx,
plaintext_len, iv, hash_subkey, aad,
aad_len, auth_tag, auth_tag_len);
} else {
- aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey);
+ aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey, iv,
+ aad, aad_len);
aesni_gcm_enc_avx_gen2(ctx, data, out, in, plaintext_len, iv,
aad, aad_len, auth_tag, auth_tag_len);
}
@@ -231,7 +235,8 @@ static void aesni_gcm_dec_avx(void *ctx,
ciphertext_len, iv, hash_subkey, aad,
aad_len, auth_tag, auth_tag_len);
} else {
- aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey);
+ aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey, iv,
+ aad, aad_len);
aesni_gcm_dec_avx_gen2(ctx, data, out, in, ciphertext_len, iv,
aad, aad_len, auth_tag, auth_tag_len);
}
@@ -246,7 +251,10 @@ static void aesni_gcm_dec_avx(void *ctx,
*/
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data,
struct gcm_context_data *gdata,
- u8 *hash_subkey);
+ u8 *hash_subkey,
+ u8 *iv,
+ const u8 *aad,
+ unsigned long aad_len);
asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
struct gcm_context_data *gdata, u8 *out,
@@ -271,11 +279,13 @@ static void aesni_gcm_enc_avx2(void *ctx,
plaintext_len, iv, hash_subkey, aad,
aad_len, auth_tag, auth_tag_len);
} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
- aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey);
+ aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey, iv,
+ aad, aad_len);
aesni_gcm_enc_avx_gen2(ctx, data, out, in, plaintext_len, iv,
aad, aad_len, auth_tag, auth_tag_len);
} else {
- aesni_gcm_precomp_avx_gen4(ctx, data, hash_subkey);
+ aesni_gcm_precomp_avx_gen4(ctx, data, hash_subkey, iv,
+ aad, aad_len);
aesni_gcm_enc_avx_gen4(ctx, data, out, in, plaintext_len, iv,
aad, aad_len, auth_tag, auth_tag_len);
}
@@ -292,11 +302,13 @@ static void aesni_gcm_dec_avx2(void *ctx,
ciphertext_len, iv, hash_subkey,
aad, aad_len, auth_tag, auth_tag_len);
} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
- aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey);
+ aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey, iv,
+ aad, aad_len);
aesni_gcm_dec_avx_gen2(ctx, data, out, in, ciphertext_len, iv,
aad, aad_len, auth_tag, auth_tag_len);
} else {
- aesni_gcm_precomp_avx_gen4(ctx, data, hash_subkey);
+ aesni_gcm_precomp_avx_gen4(ctx, data, hash_subkey, iv,
+ aad, aad_len);
aesni_gcm_dec_avx_gen4(ctx, data, out, in, ciphertext_len, iv,
aad, aad_len, auth_tag, auth_tag_len);
}
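To make the new calling convention concrete, here is a minimal sketch of how a
caller drives a request after this change; the two asm entry points and their
argument lists are taken from the diff above, while the wrapper itself is
hypothetical:

/* Hypothetical wrapper; only the asm entry points are real. */
static void example_gcm_enc(void *ctx, struct gcm_context_data *data,
			    u8 *out, const u8 *in,
			    unsigned long plaintext_len, u8 *iv,
			    u8 *hash_subkey, const u8 *aad,
			    unsigned long aad_len, u8 *auth_tag,
			    unsigned long auth_tag_len)
{
	/*
	 * GCM_INIT now performs the one-time AAD hash, so the IV and
	 * AAD are handed over up front...
	 */
	aesni_gcm_precomp_avx_gen2(ctx, data, hash_subkey, iv,
				   aad, aad_len);
	/*
	 * ...and the bulk routine reuses the AAD hash carried in
	 * struct gcm_context_data instead of recomputing it.
	 */
	aesni_gcm_enc_avx_gen2(ctx, data, out, in, plaintext_len, iv,
			       aad, aad_len, auth_tag, auth_tag_len);
}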