From b81126e01a8c6048249955feea46c8217ebefa91 Mon Sep 17 00:00:00 2001
From: Ingo Franzki
Date: Mon, 27 Aug 2018 14:28:47 +0200
Subject: s390/crypto: Fix return code checking in cbc_paes_crypt()

The return code of cpacf_kmc() is less than the number of bytes to
process in case of an error, not greater. The crypt routines for the
other cipher modes already check this correctly.

Cc: stable@vger.kernel.org # v4.11+
Fixes: 279378430768 ("s390/crypt: Add protected key AES module")
Signed-off-by: Ingo Franzki
Acked-by: Harald Freudenberger
Signed-off-by: Martin Schwidefsky
---
 arch/s390/crypto/paes_s390.c | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/arch/s390/crypto/paes_s390.c b/arch/s390/crypto/paes_s390.c
index 80b27294c1de..ab9a0ebecc19 100644
--- a/arch/s390/crypto/paes_s390.c
+++ b/arch/s390/crypto/paes_s390.c
@@ -208,7 +208,7 @@ static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
 			      walk->dst.virt.addr, walk->src.virt.addr, n);
 		if (k)
 			ret = blkcipher_walk_done(desc, walk, nbytes - k);
-		if (n < k) {
+		if (k < n) {
 			if (__cbc_paes_set_key(ctx) != 0)
 				return blkcipher_walk_done(desc, walk, -EIO);
 			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
--
cgit v1.2.3
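
Editor's note: the convention being fixed here is that cpacf_kmc() returns the
number of bytes it actually processed, so a return value smaller than the
requested length indicates an error, not a larger one. The standalone C sketch
below models that convention with a hypothetical process_blocks() helper; it is
illustration only, not kernel code and not part of the patch.

	/*
	 * Minimal user-space model of the check fixed by this patch.
	 * process_blocks() stands in for cpacf_kmc(): it returns the number
	 * of bytes it actually handled, which is less than the requested
	 * length n when it fails partway through.
	 */
	#include <stdio.h>
	#include <string.h>

	/* Hypothetical stand-in for cpacf_kmc(); 'limit' simulates a failure point. */
	static unsigned int process_blocks(const unsigned char *src, unsigned char *dst,
					   unsigned int n, unsigned int limit)
	{
		unsigned int k = n < limit ? n : limit;

		memcpy(dst, src, k);	/* "encrypt" by copying, for illustration only */
		return k;		/* number of bytes actually processed */
	}

	int main(void)
	{
		unsigned char src[64] = { 0 }, dst[64] = { 0 };
		unsigned int n = sizeof(src);
		unsigned int k = process_blocks(src, dst, n, 32);	/* fail after 32 bytes */

		if (k)
			printf("consumed %u of %u bytes\n", k, n);
		if (k < n)	/* correct direction: a partial result means an error */
			printf("error path: re-derive the protected key and retry\n");
		return 0;
	}

In the real cbc_paes_crypt() the "k < n" branch re-converts the protected key
via __cbc_paes_set_key() and retries, which is why the comparison direction
matters: with the old "n < k" test the error path could never be taken.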