author     Eric Biggers <ebiggers@google.com>        2018-06-30 15:16:13 -0700
committer  Herbert Xu <herbert@gondor.apana.org.au>  2018-07-09 00:30:26 +0800
commit     c87a405e3bacaae324bb05ee9a48aa9844688469 (patch)
tree       27f6da4f88b3c55969e39c066815847624c3c162 /arch
parent     6a38f62245c9d5217b696ec5aca6a5cf6351f615 (diff)
crypto: ahash - remove useless setting of cra_type
Some ahash algorithms set .cra_type = &crypto_ahash_type. But this is
redundant with the C structure type ('struct ahash_alg'), and
crypto_register_ahash() already sets the .cra_type automatically.
Apparently the useless assignment has just been copied and pasted around,
so remove it from all the ahash algorithms.
This patch shouldn't change any actual behavior.
Signed-off-by: Eric Biggers <ebiggers@google.com>
Acked-by: Gilad Ben-Yossef <gilad@benyossef.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
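
[Editor's note] For reference, the assignment is redundant because the registration path sets the type itself: crypto_register_ahash() runs a prepare step on the algorithm before handing its base crypto_alg to the generic registration code, and that step unconditionally writes .cra_type (and the AHASH type flag), overwriting anything a driver put there. The sketch below paraphrases that logic from crypto/ahash.c; it is simplified and not a line-for-line copy of the kernel source.

/* Simplified sketch of the ahash registration path (paraphrased from
 * crypto/ahash.c): whatever a driver assigned to .cra_type is
 * overwritten here before the algorithm is registered.
 */
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;

	/* Basic sanity checks on the hash sizes (details omitted). */
	if (alg->halg.digestsize > PAGE_SIZE / 8 ||
	    alg->halg.statesize > PAGE_SIZE / 8 ||
	    alg->halg.statesize == 0)
		return -EINVAL;

	/* Registration decides the type, so per-driver .cra_type
	 * assignments have no effect.
	 */
	base->cra_type = &crypto_ahash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	int err = ahash_prepare_alg(alg);

	if (err)
		return err;

	return crypto_register_alg(&alg->halg.base);
}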
Diffstat (limited to 'arch')
-rw-r--r--  arch/arm/crypto/ghash-ce-glue.c             | 1
-rw-r--r--  arch/x86/crypto/ghash-clmulni-intel_glue.c  | 1
-rw-r--r--  arch/x86/crypto/sha1-mb/sha1_mb.c           | 1
-rw-r--r--  arch/x86/crypto/sha256-mb/sha256_mb.c       | 1
-rw-r--r--  arch/x86/crypto/sha512-mb/sha512_mb.c       | 1
5 files changed, 0 insertions, 5 deletions
diff --git a/arch/arm/crypto/ghash-ce-glue.c b/arch/arm/crypto/ghash-ce-glue.c
index 124fee03246e..8930fc4e7c22 100644
--- a/arch/arm/crypto/ghash-ce-glue.c
+++ b/arch/arm/crypto/ghash-ce-glue.c
@@ -310,7 +310,6 @@ static struct ahash_alg ghash_async_alg = {
 		.cra_priority		= 300,
 		.cra_flags		= CRYPTO_ALG_ASYNC,
 		.cra_blocksize		= GHASH_BLOCK_SIZE,
-		.cra_type		= &crypto_ahash_type,
 		.cra_ctxsize		= sizeof(struct ghash_async_ctx),
 		.cra_module		= THIS_MODULE,
 		.cra_init		= ghash_async_init_tfm,
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c
index a3de43b5e20a..3582ae885ee1 100644
--- a/arch/x86/crypto/ghash-clmulni-intel_glue.c
+++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c
@@ -316,7 +316,6 @@ static struct ahash_alg ghash_async_alg = {
 		.cra_ctxsize		= sizeof(struct ghash_async_ctx),
 		.cra_flags		= CRYPTO_ALG_ASYNC,
 		.cra_blocksize		= GHASH_BLOCK_SIZE,
-		.cra_type		= &crypto_ahash_type,
 		.cra_module		= THIS_MODULE,
 		.cra_init		= ghash_async_init_tfm,
 		.cra_exit		= ghash_async_exit_tfm,
diff --git a/arch/x86/crypto/sha1-mb/sha1_mb.c b/arch/x86/crypto/sha1-mb/sha1_mb.c
index f7929ba6cfb4..b93805664c1d 100644
--- a/arch/x86/crypto/sha1-mb/sha1_mb.c
+++ b/arch/x86/crypto/sha1-mb/sha1_mb.c
@@ -880,7 +880,6 @@ static struct ahash_alg sha1_mb_async_alg = {
 		.cra_priority		= 50,
 		.cra_flags		= CRYPTO_ALG_ASYNC,
 		.cra_blocksize		= SHA1_BLOCK_SIZE,
-		.cra_type		= &crypto_ahash_type,
 		.cra_module		= THIS_MODULE,
 		.cra_list		= LIST_HEAD_INIT(sha1_mb_async_alg.halg.base.cra_list),
 		.cra_init		= sha1_mb_async_init_tfm,
diff --git a/arch/x86/crypto/sha256-mb/sha256_mb.c b/arch/x86/crypto/sha256-mb/sha256_mb.c
index 59a47048920a..97c5fc43e115 100644
--- a/arch/x86/crypto/sha256-mb/sha256_mb.c
+++ b/arch/x86/crypto/sha256-mb/sha256_mb.c
@@ -879,7 +879,6 @@ static struct ahash_alg sha256_mb_async_alg = {
 		.cra_priority		= 50,
 		.cra_flags		= CRYPTO_ALG_ASYNC,
 		.cra_blocksize		= SHA256_BLOCK_SIZE,
-		.cra_type		= &crypto_ahash_type,
 		.cra_module		= THIS_MODULE,
 		.cra_list		= LIST_HEAD_INIT
 				(sha256_mb_async_alg.halg.base.cra_list),
diff --git a/arch/x86/crypto/sha512-mb/sha512_mb.c b/arch/x86/crypto/sha512-mb/sha512_mb.c
index d3a758ac3ade..26b85678012d 100644
--- a/arch/x86/crypto/sha512-mb/sha512_mb.c
+++ b/arch/x86/crypto/sha512-mb/sha512_mb.c
@@ -913,7 +913,6 @@ static struct ahash_alg sha512_mb_async_alg = {
 		.cra_priority		= 50,
 		.cra_flags		= CRYPTO_ALG_ASYNC,
 		.cra_blocksize		= SHA512_BLOCK_SIZE,
-		.cra_type		= &crypto_ahash_type,
 		.cra_module		= THIS_MODULE,
 		.cra_list		= LIST_HEAD_INIT
 				(sha512_mb_async_alg.halg.base.cra_list),