From 2117eaa62aa92776d1465abcfaa5becfd8eaf3f9 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Thu, 1 Sep 2016 14:25:41 +0100
Subject: crypto: arm/sha1-neon - add support for building in Thumb2 mode

The ARMv7 NEON module is explicitly built in ARM mode, which is not
supported by the Thumb2 kernel. So remove the explicit override, and
leave it up to the build environment to decide whether the core SHA1
routines are assembled as ARM or as Thumb2 code.

Signed-off-by: Ard Biesheuvel
Signed-off-by: Herbert Xu
---
 arch/arm/crypto/sha1-armv7-neon.S | 1 -
 1 file changed, 1 deletion(-)

(limited to 'arch')

diff --git a/arch/arm/crypto/sha1-armv7-neon.S b/arch/arm/crypto/sha1-armv7-neon.S
index dcd01f3f0bb0..2468fade49cf 100644
--- a/arch/arm/crypto/sha1-armv7-neon.S
+++ b/arch/arm/crypto/sha1-armv7-neon.S
@@ -12,7 +12,6 @@
 #include

 .syntax unified
-.code 32
 .fpu neon

 .text
-- cgit v1.2.3

From ed4767d612fd2c39e2c4c69eba484c1219dcddb6 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Thu, 1 Sep 2016 14:25:42 +0100
Subject: crypto: arm/ghash-ce - add missing async import/export

Since commit 8996eafdcbad ("crypto: ahash - ensure statesize is non-zero"),
all ahash drivers are required to implement import()/export(), and must
have a non-zero statesize. Fix this for the ARM Crypto Extensions GHASH
implementation.

Fixes: 8996eafdcbad ("crypto: ahash - ensure statesize is non-zero")
Signed-off-by: Ard Biesheuvel
Signed-off-by: Herbert Xu
---
 arch/arm/crypto/ghash-ce-glue.c | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

(limited to 'arch')

diff --git a/arch/arm/crypto/ghash-ce-glue.c b/arch/arm/crypto/ghash-ce-glue.c
index 1568cb5cd870..b88364aa149a 100644
--- a/arch/arm/crypto/ghash-ce-glue.c
+++ b/arch/arm/crypto/ghash-ce-glue.c
@@ -220,6 +220,27 @@ static int ghash_async_digest(struct ahash_request *req)
 	}
 }
 
+static int ghash_async_import(struct ahash_request *req, const void *in)
+{
+	struct ahash_request *cryptd_req = ahash_request_ctx(req);
+	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
+
+	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);
+	desc->flags = req->base.flags;
+
+	return crypto_shash_import(desc, in);
+}
+
+static int ghash_async_export(struct ahash_request *req, void *out)
+{
+	struct ahash_request *cryptd_req = ahash_request_ctx(req);
+	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
+
+	return crypto_shash_export(desc, out);
+}
+
 static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
 			      unsigned int keylen)
 {
@@ -268,7 +289,10 @@ static struct ahash_alg ghash_async_alg = {
 	.final = ghash_async_final,
 	.setkey = ghash_async_setkey,
 	.digest = ghash_async_digest,
+	.import = ghash_async_import,
+	.export = ghash_async_export,
 	.halg.digestsize = GHASH_DIGEST_SIZE,
+	.halg.statesize = sizeof(struct ghash_desc_ctx),
 	.halg.base = {
 		.cra_name = "ghash",
 		.cra_driver_name = "ghash-ce",
-- cgit v1.2.3
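[Editor's aside, not part of the patch above: the new hooks simply forward to
the underlying shash via cryptd. The sketch below is a hypothetical caller
(the function name and buffers are made up) showing how the export()/import()
pair might be exercised on the async "ghash" transform once statesize is
non-zero; asynchronous completion handling (-EINPROGRESS/-EBUSY) and the
requirement that the data buffer be in the kernel linear mapping are glossed
over for brevity.]

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical illustration only: snapshot and resume partial GHASH state. */
static int ghash_export_import_example(const u8 *key, unsigned int keylen,
				       const u8 *data, unsigned int len)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req = NULL;
	struct scatterlist sg;
	void *state = NULL;
	int err;

	tfm = crypto_alloc_ahash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* statesize is now non-zero, so this buffer has a meaningful size */
	state = kmalloc(crypto_ahash_statesize(tfm), GFP_KERNEL);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!state || !req) {
		err = -ENOMEM;
		goto out;
	}

	err = crypto_ahash_setkey(tfm, key, keylen);
	if (err)
		goto out;

	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_crypt(req, &sg, NULL, len);

	/* hash some data, then save the partial state ... */
	err = crypto_ahash_init(req) ?:
	      crypto_ahash_update(req) ?:
	      crypto_ahash_export(req, state);
	if (err)
		goto out;

	/* ... and restore it later, e.g. into a freshly allocated request */
	err = crypto_ahash_import(req, state);
out:
	ahash_request_free(req);
	kfree(state);
	crypto_free_ahash(tfm);
	return err;
}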
From 71f89917ff96c03191b19c2bb8cd587b61004e62 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel
Date: Mon, 5 Sep 2016 12:58:44 +0100
Subject: crypto: arm/ghash - change internal cra_name to "__ghash"

The fact that the internal synchronous hash implementation is called
"ghash" like the publicly visible one is causing the testmgr code to
misidentify it as an algorithm that requires testing at boottime. So
rename it to "__ghash" to prevent this.

Signed-off-by: Ard Biesheuvel
Signed-off-by: Herbert Xu
---
 arch/arm/crypto/ghash-ce-glue.c | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'arch')

diff --git a/arch/arm/crypto/ghash-ce-glue.c b/arch/arm/crypto/ghash-ce-glue.c
index b88364aa149a..7546b3c02466 100644
--- a/arch/arm/crypto/ghash-ce-glue.c
+++ b/arch/arm/crypto/ghash-ce-glue.c
@@ -138,7 +138,7 @@ static struct shash_alg ghash_alg = {
 	.setkey = ghash_setkey,
 	.descsize = sizeof(struct ghash_desc_ctx),
 	.base = {
-		.cra_name = "ghash",
+		.cra_name = "__ghash",
 		.cra_driver_name = "__driver-ghash-ce",
 		.cra_priority = 0,
 		.cra_flags = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_INTERNAL,
-- cgit v1.2.3

From 74ff6cb3aa438490ad8f8432e7b68dbcfa5ca449 Mon Sep 17 00:00:00 2001
From: Marcelo Cerri
Date: Fri, 23 Sep 2016 16:31:56 -0300
Subject: crypto: sha1-powerpc - little-endian support

The driver does not handle endianness properly when loading the input
data.

Signed-off-by: Marcelo Cerri
Signed-off-by: Herbert Xu
---
 arch/powerpc/crypto/sha1-powerpc-asm.S | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

(limited to 'arch')

diff --git a/arch/powerpc/crypto/sha1-powerpc-asm.S b/arch/powerpc/crypto/sha1-powerpc-asm.S
index 125e16520061..82ddc9bdfeb1 100644
--- a/arch/powerpc/crypto/sha1-powerpc-asm.S
+++ b/arch/powerpc/crypto/sha1-powerpc-asm.S
@@ -7,6 +7,15 @@
 #include
 #include
 
+#ifdef __BIG_ENDIAN__
+#define LWZ(rt, d, ra)	\
+	lwz	rt,d(ra)
+#else
+#define LWZ(rt, d, ra)	\
+	li	rt,d;	\
+	lwbrx	rt,rt,ra
+#endif
+
 /*
  * We roll the registers for T, A, B, C, D, E around on each
  * iteration; T on iteration t is A on iteration t+1, and so on.
@@ -23,7 +32,7 @@
 #define W(t)	(((t)%16)+16)
 
 #define LOADW(t)	\
-	lwz	W(t),(t)*4(r4)
+	LWZ(W(t),(t)*4,r4)
 
 #define STEPD0_LOAD(t)	\
 	andc	r0,RD(t),RB(t);	\
@@ -33,7 +42,7 @@
 	add	r0,RE(t),r15;	\
 	add	RT(t),RT(t),r6;	\
 	add	r14,r0,W(t);	\
-	lwz	W((t)+4),((t)+4)*4(r4);	\
+	LWZ(W((t)+4),((t)+4)*4,r4);	\
 	rotlwi	RB(t),RB(t),30;	\
 	add	RT(t),RT(t),r14
 
-- cgit v1.2.3
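[Editor's aside, not part of the patch above: SHA-1 consumes its input as
big-endian 32-bit words, which is what the LWZ macro provides on either byte
order — a plain lwz on big-endian, and a byte-reversed indexed load (lwbrx)
on little-endian. As a reference-only sketch, a portable C equivalent of that
load (the helper name is invented) would be:]

#include <stdint.h>

/* Load one 32-bit SHA-1 message word, MSB first, on any host byte order. */
static inline uint32_t sha1_load_be32(const uint8_t *p)
{
	return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
	       ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
}

[In kernel C code the same effect is normally obtained with
get_unaligned_be32() or be32_to_cpu() rather than open-coded shifts.]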