author		Eric Biggers <ebiggers@google.com>	2017-12-29 10:10:24 -0600
committer	Herbert Xu <herbert@gondor.apana.org.au>	2018-01-05 18:43:10 +1100
commit		fcfbeedf79adc7abaea35b0f88ec23cf546d3b77 (patch)
tree		f63e386916ea32ed7b6e5f9f543fabf73baa51c6 /crypto/poly1305_generic.c
parent		8b55107c57f763c7ca393e72f7ce6f89ea1ba49a (diff)
download	linux-stable-fcfbeedf79adc7abaea35b0f88ec23cf546d3b77.tar.gz
		linux-stable-fcfbeedf79adc7abaea35b0f88ec23cf546d3b77.tar.bz2
		linux-stable-fcfbeedf79adc7abaea35b0f88ec23cf546d3b77.zip
crypto: poly1305 - use unaligned access macros to output digest
Currently the only part of poly1305-generic that assumes special alignment is the part where the final digest is written. Switch this over to the unaligned access macros so that we'll be able to remove the cra_alignmask.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
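The removed line casts the caller-supplied dst to __le32 * and writes through it with mac[i] = cpu_to_le32(f), which is only safe when dst is suitably aligned; that alignment was what cra_alignmask had to guarantee. put_unaligned_le32() makes no such assumption. As a rough illustration only (a userspace sketch with a made-up helper name, assuming a memcpy-based fallback; the kernel's real put_unaligned_le32() comes from <asm/unaligned.h> and may compile down to a plain store on architectures that tolerate unaligned access), an unaligned little-endian 32-bit store amounts to:

	#include <stdint.h>
	#include <string.h>

	/* Hypothetical stand-in for put_unaligned_le32(): store val
	 * little-endian at any byte address, no alignment required. */
	static void sketch_put_unaligned_le32(uint32_t val, void *p)
	{
		uint8_t b[4] = {
			(uint8_t)(val >>  0), (uint8_t)(val >>  8),
			(uint8_t)(val >> 16), (uint8_t)(val >> 24),
		};

		memcpy(p, b, sizeof(b));	/* byte-wise copy, never faults on alignment */
	}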
Diffstat (limited to 'crypto/poly1305_generic.c')
-rw-r--r--	crypto/poly1305_generic.c	9
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/crypto/poly1305_generic.c b/crypto/poly1305_generic.c
index b1c2d57dc734..d752901ba0bc 100644
--- a/crypto/poly1305_generic.c
+++ b/crypto/poly1305_generic.c
@@ -210,7 +210,6 @@ EXPORT_SYMBOL_GPL(crypto_poly1305_update);
 int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
 {
 	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
-	__le32 *mac = (__le32 *)dst;
 	u32 h0, h1, h2, h3, h4;
 	u32 g0, g1, g2, g3, g4;
 	u32 mask;
@@ -267,10 +266,10 @@ int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
 	h3 = (h3 >> 18) | (h4 <<  8);
 
 	/* mac = (h + s) % (2^128) */
-	f = (f >> 32) + h0 + dctx->s[0]; mac[0] = cpu_to_le32(f);
-	f = (f >> 32) + h1 + dctx->s[1]; mac[1] = cpu_to_le32(f);
-	f = (f >> 32) + h2 + dctx->s[2]; mac[2] = cpu_to_le32(f);
-	f = (f >> 32) + h3 + dctx->s[3]; mac[3] = cpu_to_le32(f);
+	f = (f >> 32) + h0 + dctx->s[0]; put_unaligned_le32(f, dst +  0);
+	f = (f >> 32) + h1 + dctx->s[1]; put_unaligned_le32(f, dst +  4);
+	f = (f >> 32) + h2 + dctx->s[2]; put_unaligned_le32(f, dst +  8);
+	f = (f >> 32) + h3 + dctx->s[3]; put_unaligned_le32(f, dst + 12);
 
 	return 0;
 }
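For reference, a self-contained sketch of the output step in the hunk above (hypothetical function and parameter names, not the kernel code): f acts as a 64-bit accumulator, each step folds the carry from the previous word together with one 32-bit word of h and the corresponding key word s, and the low 32 bits are emitted little-endian one byte at a time, so dst may have any alignment.

	#include <stdint.h>

	/* Hypothetical sketch of "(h + s) % 2^128"; h[0..3] and s[0..3]
	 * stand in for the 32-bit words the real crypto_poly1305_final()
	 * has already assembled from its 26-bit limbs. */
	static void poly1305_write_mac_sketch(uint8_t *dst, const uint32_t h[4],
					      const uint32_t s[4])
	{
		uint64_t f = 0;

		for (int i = 0; i < 4; i++) {
			f = (f >> 32) + h[i] + s[i];	/* carry + limb + key word */

			/* little-endian byte stores: no alignment assumption on dst */
			dst[4 * i + 0] = (uint8_t)(f >>  0);
			dst[4 * i + 1] = (uint8_t)(f >>  8);
			dst[4 * i + 2] = (uint8_t)(f >> 16);
			dst[4 * i + 3] = (uint8_t)(f >> 24);
		}
	}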