author     Herbert Xu <herbert@gondor.apana.org.au>   2018-07-13 16:12:32 +0800
committer  Herbert Xu <herbert@gondor.apana.org.au>   2018-07-20 13:47:42 +0800
commit     46d8c4b28652d35dc6cfb5adf7f54e102fc04384
tree       3c1d6b4111feb002b39f27cea7039a9d8090ffb0
parent     2546da99212f22034aecf279da9c47cbfac6c981
crypto: padlock-aes - Fix Nano workaround data corruption
This was detected by the self-test thanks to Ard's chunking patch.

I finally got around to testing this out on my ancient Via box.  It
turns out that the workaround got the assembly wrong and we end up
doing count + initial cycles of the loop instead of just count.

This obviously causes corruption, either by overwriting the source
that is yet to be processed, or writing over the end of the buffer.

On CPUs that don't require the workaround only ECB is affected.
On Nano CPUs both ECB and CBC are affected.

This patch fixes it by doing the subtraction prior to the assembly.

Fixes: a76c1c23d0c3 ("crypto: padlock-aes - work around Nano CPU...")
Cc: <stable@vger.kernel.org>
Reported-by: Jamie Heilman <jamie@audible.transient.net>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
 drivers/crypto/padlock-aes.c | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c
index 1c6cbda56afe..09d823d36d3a 100644
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
@@ -266,6 +266,8 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,
 		return;
 	}
 
+	count -= initial;
+
 	if (initial)
 		asm volatile (".byte 0xf3,0x0f,0xa7,0xc8"	/* rep xcryptecb */
 			      : "+S"(input), "+D"(output)
@@ -273,7 +275,7 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,
 
 	asm volatile (".byte 0xf3,0x0f,0xa7,0xc8"	/* rep xcryptecb */
 		      : "+S"(input), "+D"(output)
-		      : "d"(control_word), "b"(key), "c"(count - initial));
+		      : "d"(control_word), "b"(key), "c"(count));
 }
 
 static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
@@ -284,6 +286,8 @@ static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
 	if (count < cbc_fetch_blocks)
 		return cbc_crypt(input, output, key, iv, control_word, count);
 
+	count -= initial;
+
 	if (initial)
 		asm volatile (".byte 0xf3,0x0f,0xa7,0xd0"	/* rep xcryptcbc */
 			      : "+S" (input), "+D" (output), "+a" (iv)
@@ -291,7 +295,7 @@ static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
 
 	asm volatile (".byte 0xf3,0x0f,0xa7,0xd0"	/* rep xcryptcbc */
 		      : "+S" (input), "+D" (output), "+a" (iv)
-		      : "d" (control_word), "b" (key), "c" (count-initial));
+		      : "d" (control_word), "b" (key), "c" (count));
 	return iv;
 }
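
For readers without a VIA box to test on, here is a minimal, hypothetical C
model of the chunk arithmetic the hunks above correct.  It is not the driver
code: fake_rep_xcrypt(), ECB_FETCH_BLOCKS and xcrypt_ecb_fixed() are names
invented for this sketch, and a plain memcpy stands in for the rep xcryptecb
instruction.  The only point is the bookkeeping: once "count -= initial" runs
before either chunk, the leftover chunk plus the bulk chunk touch exactly
count blocks.

/*
 * Hypothetical model of the chunking fixed by this patch; not kernel code.
 */
#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE   16
#define ECB_FETCH_BLOCKS 8          /* assumed prefetch window, power of two */

static unsigned long blocks_done;   /* total blocks the "hardware" processed */

static void fake_rep_xcrypt(const unsigned char **in, unsigned char **out,
			    unsigned int count)
{
	/* Stand-in for the instruction: process 'count' blocks and advance
	 * the source/destination pointers, as the real insn advances ESI/EDI.
	 */
	memcpy(*out, *in, (size_t)count * AES_BLOCK_SIZE);
	*in  += (size_t)count * AES_BLOCK_SIZE;
	*out += (size_t)count * AES_BLOCK_SIZE;
	blocks_done += count;
}

/* Mirrors the post-fix control flow of padlock_xcrypt_ecb(). */
static void xcrypt_ecb_fixed(const unsigned char *in, unsigned char *out,
			     unsigned int count)
{
	unsigned int initial = count & (ECB_FETCH_BLOCKS - 1);

	count -= initial;                    /* the line added by this patch */

	if (initial)                         /* leftover blocks first (Nano workaround) */
		fake_rep_xcrypt(&in, &out, initial);

	fake_rep_xcrypt(&in, &out, count);   /* then the aligned bulk */
}

int main(void)
{
	unsigned char src[19 * AES_BLOCK_SIZE] = { 0 };
	unsigned char dst[19 * AES_BLOCK_SIZE];

	xcrypt_ecb_fixed(src, dst, 19);

	/* 19 = 3 leftover + 16 bulk blocks.  Any total above 19 would be the
	 * kind of overrun the commit message describes. */
	printf("blocks processed: %lu (expected 19)\n", blocks_done);
	return 0;
}

Per the commit message, the pre-fix arrangement could end up issuing
count + initial block iterations in total, overwriting source data that had
not yet been processed or running past the end of the destination buffer;
moving the subtraction into C, before any of the assembly runs, keeps the
two chunks summing to exactly count.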