author		Eric Biggers <ebiggers@google.com>	2019-01-31 23:51:36 -0800
committer	Greg Kroah-Hartman <gregkh@linuxfoundation.org>	2019-03-23 20:09:54 +0100
commit		4c152af938cec61614955a631b8f70278428ac34 (patch)
tree		767bcd9b95f4b4a9505430f803e5d7dbab761a33 /crypto
parent		736807d6915735c0e8f6ac5bae45e9ce04fe8c50 (diff)
crypto: aegis - fix handling chunked inputs
commit 0f533e67d26f228ea5dfdacc8a4bdeb487af5208 upstream.

The generic AEGIS implementations all fail the improved AEAD tests
because they produce the wrong result with some data layouts.  The issue
is that they assume that if the skcipher_walk API gives 'nbytes' not
aligned to the walksize (a.k.a. walk.stride), then it is the end of the
data.  In fact, this can happen before the end.  Fix them.

Fixes: f606a88e5823 ("crypto: aegis - Add generic AEGIS AEAD implementations")
Cc: <stable@vger.kernel.org> # v4.18+
Cc: Ondrej Mosnacek <omosnace@redhat.com>
Signed-off-by: Eric Biggers <ebiggers@google.com>
Reviewed-by: Ondrej Mosnacek <omosnace@redhat.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
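For readability, the walk-loop pattern that the fix establishes is sketched below in standalone form. This is an illustrative sketch, not the patched code: example_walk_crypt() and crypt_chunk_fn are hypothetical stand-ins for the per-algorithm AEGIS state and the ops->crypt_chunk handler, while struct skcipher_walk, round_down() and skcipher_walk_done() are the kernel interfaces actually used in the diff below.

#include <linux/kernel.h>		/* round_down() */
#include <crypto/internal/skcipher.h>	/* struct skcipher_walk, skcipher_walk_done() */

/* Hypothetical stand-in for the per-algorithm ops->crypt_chunk handler. */
typedef void (*crypt_chunk_fn)(void *state, u8 *dst, const u8 *src,
			       unsigned int size);

/* 'walk' is assumed already initialized, e.g. via skcipher_walk_aead_encrypt(). */
static void example_walk_crypt(void *state, struct skcipher_walk *walk,
			       crypt_chunk_fn crypt_chunk)
{
	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		/*
		 * A short chunk marks the end of the data only when it
		 * covers everything that remains (walk->total).  Otherwise,
		 * process whole stride units now and leave the tail for the
		 * next iteration of the walk.
		 */
		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt_chunk(state, walk->dst.virt.addr, walk->src.virt.addr,
			    nbytes);

		/* Report the unprocessed remainder so the walk revisits it. */
		skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
}

The essential change is the second argument to skcipher_walk_done(): passing walk.nbytes - nbytes instead of 0 tells the walk how many bytes were left unprocessed, so a partial chunk that shows up before the end of the data is revisited rather than being treated as the final block.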
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/aegis128.c	14
-rw-r--r--	crypto/aegis128l.c	14
-rw-r--r--	crypto/aegis256.c	14
3 files changed, 21 insertions, 21 deletions
diff --git a/crypto/aegis128.c b/crypto/aegis128.c
index c22f4414856d..789716f92e4c 100644
--- a/crypto/aegis128.c
+++ b/crypto/aegis128.c
@@ -290,19 +290,19 @@ static void crypto_aegis128_process_crypt(struct aegis_state *state,
 					   const struct aegis128_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
diff --git a/crypto/aegis128l.c b/crypto/aegis128l.c
index b6fb21ebdc3e..73811448cb6b 100644
--- a/crypto/aegis128l.c
+++ b/crypto/aegis128l.c
@@ -353,19 +353,19 @@ static void crypto_aegis128l_process_crypt(struct aegis_state *state,
 					   const struct aegis128l_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
diff --git a/crypto/aegis256.c b/crypto/aegis256.c
index 11f0f8ec9c7c..8a71e9c06193 100644
--- a/crypto/aegis256.c
+++ b/crypto/aegis256.c
@@ -303,19 +303,19 @@ static void crypto_aegis256_process_crypt(struct aegis_state *state,
 					   const struct aegis256_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }