author		Linus Torvalds <torvalds@linux-foundation.org>	2018-04-04 17:11:08 -0700
committer	Linus Torvalds <torvalds@linux-foundation.org>	2018-04-04 17:11:08 -0700
commit		9eb31227cbccd3a37da0f42604f1ab5fc556bc53 (patch)
tree		9aa467e620e002bf01cecdd98e3908e0cc3e7221 /crypto
parent		527cd20771888443b5d8707debe98f62c7a1f596 (diff)
parent		f444ec106407d600f17fa1a4bd14f84577401dec (diff)
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
"API:
- add AEAD support to crypto engine
- allow batch registration in simd
Algorithms:
- add CFB mode
- add speck block cipher
- add sm4 block cipher
- new test case for crct10dif
- improve scheduling latency on ARM
- scatter/gather support to gcm in aesni
- convert x86 crypto algorithms to skcipher
Drivers:
- hmac(sha224/sha256) support in inside-secure
- aes gcm/ccm support in stm32
- stm32mp1 support in stm32
- ccree driver from staging tree
- gcm support over QI in caam
- add ks-sa hwrng driver"
* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (212 commits)
crypto: ccree - remove unused enums
crypto: ahash - Fix early termination in hash walk
crypto: brcm - explicitly cast cipher to hash type
crypto: talitos - don't leak pointers to authenc keys
crypto: qat - don't leak pointers to authenc keys
crypto: picoxcell - don't leak pointers to authenc keys
crypto: ixp4xx - don't leak pointers to authenc keys
crypto: chelsio - don't leak pointers to authenc keys
crypto: caam/qi - don't leak pointers to authenc keys
crypto: caam - don't leak pointers to authenc keys
crypto: lrw - Free rctx->ext with kzfree
crypto: talitos - fix IPsec cipher in length
crypto: Deduplicate le32_to_cpu_array() and cpu_to_le32_array()
crypto: doc - clarify hash callbacks state machine
crypto: api - Keep failed instances alive
crypto: api - Make crypto_alg_lookup static
crypto: api - Remove unused crypto_type lookup function
crypto: chelsio - Remove declaration of static function from header
crypto: inside-secure - hmac(sha224) support
crypto: inside-secure - hmac(sha256) support
..
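The crypto engine rework pulled in here replaces the engine's per-type hash/ablkcipher callbacks with request-type-agnostic operations hung off each tfm context (see the crypto/crypto_engine.c hunks below). As a rough driver-side sketch only -- the my_* identifiers are hypothetical; only the crypto_engine_ctx layout and the transfer/finalize helpers come from this merge:

	#include <crypto/engine.h>
	#include <crypto/internal/skcipher.h>

	/*
	 * Hypothetical driver context. The engine core fetches
	 * struct crypto_engine_ctx through crypto_tfm_ctx(), so it
	 * must be the first member of the tfm context.
	 */
	struct my_skcipher_ctx {
		struct crypto_engine_ctx enginectx;	/* must come first */
		struct crypto_engine *engine;		/* set at probe time */
	};

	/* Called from the engine kworker with the hardware to itself. */
	static int my_do_one_request(struct crypto_engine *engine, void *areq)
	{
		struct skcipher_request *req =
			container_of(areq, struct skcipher_request, base);

		/*
		 * Program the hardware here; when the completion interrupt
		 * fires, report the result with:
		 *   crypto_finalize_skcipher_request(engine, req, err);
		 */
		(void)req;
		return 0;
	}

	static int my_init_tfm(struct crypto_skcipher *tfm)
	{
		struct my_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

		ctx->enginectx.op.do_one_request = my_do_one_request;
		ctx->enginectx.op.prepare_request = NULL;   /* optional hooks */
		ctx->enginectx.op.unprepare_request = NULL;
		return 0;
	}

	/* The algorithm's .encrypt just queues the request on the engine. */
	static int my_encrypt(struct skcipher_request *req)
	{
		struct my_skcipher_ctx *ctx =
			crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));

		return crypto_transfer_skcipher_request_to_engine(ctx->engine,
								  req);
	}

Because every transfer helper now funnels into the same internal crypto_transfer_request(), the AEAD and akcipher variants added below are each a one-line wrapper around it.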
Diffstat (limited to 'crypto')
 -rw-r--r--  crypto/Kconfig           129
 -rw-r--r--  crypto/Makefile            4
 -rw-r--r--  crypto/ablk_helper.c     150
 -rw-r--r--  crypto/ahash.c            25
 -rw-r--r--  crypto/algapi.c            8
 -rw-r--r--  crypto/api.c              34
 -rw-r--r--  crypto/cfb.c             353
 -rw-r--r--  crypto/crypto_engine.c   301
 -rw-r--r--  crypto/crypto_user.c       2
 -rw-r--r--  crypto/ecc.c              23
 -rw-r--r--  crypto/ecdh.c             23
 -rw-r--r--  crypto/internal.h          1
 -rw-r--r--  crypto/lrw.c             154
 -rw-r--r--  crypto/mcryptd.c          34
 -rw-r--r--  crypto/md4.c              17
 -rw-r--r--  crypto/md5.c              17
 -rw-r--r--  crypto/rsa-pkcs1pad.c      2
 -rw-r--r--  crypto/simd.c             50
 -rw-r--r--  crypto/sm4_generic.c     244
 -rw-r--r--  crypto/speck.c           307
 -rw-r--r--  crypto/tcrypt.c            3
 -rw-r--r--  crypto/testmgr.c          45
 -rw-r--r--  crypto/testmgr.h        1882
 -rw-r--r--  crypto/xts.c              72
24 files changed, 3219 insertions(+), 661 deletions(-)
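The new CFB template is reached through the ordinary skcipher API once CRYPTO_CFB is enabled, e.g. as "cfb(aes)". A minimal in-kernel sketch of assumed usage (not part of this merge; error paths trimmed, 16-byte key for AES-128, and buf must be scatterlist-addressable, i.e. not stack memory):

	#include <crypto/skcipher.h>
	#include <linux/scatterlist.h>

	static int cfb_demo(u8 *buf, unsigned int len, const u8 *key, u8 *iv)
	{
		struct crypto_skcipher *tfm;
		struct skcipher_request *req;
		struct scatterlist sg;
		DECLARE_CRYPTO_WAIT(wait);
		int err;

		tfm = crypto_alloc_skcipher("cfb(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_skcipher_setkey(tfm, key, 16);
		if (err)
			goto out_free_tfm;

		req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			err = -ENOMEM;
			goto out_free_tfm;
		}

		sg_init_one(&sg, buf, len);
		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					      crypto_req_done, &wait);
		skcipher_request_set_crypt(req, &sg, &sg, len, iv);

		/* encrypt in place, waiting synchronously for completion */
		err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

		skcipher_request_free(req);
	out_free_tfm:
		crypto_free_skcipher(tfm);
		return err;
	}

Since the template advertises cra_blocksize = 1 (see crypto/cfb.c below), len here need not be a multiple of the underlying cipher's block size.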
diff --git a/crypto/Kconfig b/crypto/Kconfig
index b75264b09a46..c0dabed5122e 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -245,10 +245,6 @@ config CRYPTO_TEST
 	help
 	  Quick & dirty crypto test module.
 
-config CRYPTO_ABLK_HELPER
-	tristate
-	select CRYPTO_CRYPTD
-
 config CRYPTO_SIMD
 	tristate
 	select CRYPTO_CRYPTD
@@ -324,6 +320,14 @@ config CRYPTO_CBC
 	  CBC: Cipher Block Chaining mode
 	  This block cipher algorithm is required for IPSec.
 
+config CRYPTO_CFB
+	tristate "CFB support"
+	select CRYPTO_BLKCIPHER
+	select CRYPTO_MANAGER
+	help
+	  CFB: Cipher FeedBack mode
+	  This block cipher algorithm is required for TPM2 Cryptography.
+
 config CRYPTO_CTR
 	tristate "CTR support"
 	select CRYPTO_BLKCIPHER
@@ -1114,7 +1118,7 @@ config CRYPTO_BLOWFISH_COMMON
 config CRYPTO_BLOWFISH_X86_64
 	tristate "Blowfish cipher algorithm (x86_64)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_BLOWFISH_COMMON
 	help
 	  Blowfish cipher algorithm (x86_64), by Bruce Schneier.
@@ -1145,10 +1149,8 @@ config CRYPTO_CAMELLIA_X86_64
 	tristate "Camellia cipher algorithm (x86_64)"
 	depends on X86 && 64BIT
 	depends on CRYPTO
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Camellia cipher algorithm module (x86_64).
@@ -1164,12 +1166,10 @@ config CRYPTO_CAMELLIA_AESNI_AVX_X86_64
 	tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX)"
 	depends on X86 && 64BIT
 	depends on CRYPTO
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_CAMELLIA_X86_64
-	select CRYPTO_LRW
+	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_SIMD
 	select CRYPTO_XTS
 	help
 	  Camellia cipher algorithm module (x86_64/AES-NI/AVX).
@@ -1186,14 +1186,7 @@ config CRYPTO_CAMELLIA_AESNI_AVX2_X86_64
 	tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX2)"
 	depends on X86 && 64BIT
 	depends on CRYPTO
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_CAMELLIA_X86_64
 	select CRYPTO_CAMELLIA_AESNI_AVX_X86_64
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Camellia cipher algorithm module (x86_64/AES-NI/AVX2).
@@ -1238,11 +1231,10 @@ config CRYPTO_CAST5
 config CRYPTO_CAST5_AVX_X86_64
 	tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_CAST_COMMON
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_CAST5
+	select CRYPTO_CAST_COMMON
+	select CRYPTO_SIMD
 	help
 	  The CAST5 encryption algorithm (synonymous with CAST-128) is
 	  described in RFC2144.
@@ -1261,13 +1253,11 @@ config CRYPTO_CAST6
 config CRYPTO_CAST6_AVX_X86_64
 	tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_CAST_COMMON
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_CAST6
-	select CRYPTO_LRW
+	select CRYPTO_CAST_COMMON
+	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_SIMD
 	select CRYPTO_XTS
 	help
 	  The CAST6 encryption algorithm (synonymous with CAST-256) is
@@ -1294,7 +1284,7 @@ config CRYPTO_DES_SPARC64
 config CRYPTO_DES3_EDE_X86_64
 	tristate "Triple DES EDE cipher algorithm (x86-64)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_DES
 	help
 	  Triple DES EDE (FIPS 46-3) algorithm.
@@ -1422,13 +1412,10 @@ config CRYPTO_SERPENT
 config CRYPTO_SERPENT_SSE2_X86_64
 	tristate "Serpent cipher algorithm (x86_64/SSE2)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
 	select CRYPTO_SERPENT
-	select CRYPTO_LRW
-	select CRYPTO_XTS
+	select CRYPTO_SIMD
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1444,13 +1431,10 @@ config CRYPTO_SERPENT_SSE2_X86_64
 config CRYPTO_SERPENT_SSE2_586
 	tristate "Serpent cipher algorithm (i586/SSE2)"
 	depends on X86 && !64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
 	select CRYPTO_SERPENT
-	select CRYPTO_LRW
-	select CRYPTO_XTS
+	select CRYPTO_SIMD
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1466,12 +1450,10 @@ config CRYPTO_SERPENT_SSE2_586
 config CRYPTO_SERPENT_AVX_X86_64
 	tristate "Serpent cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
 	select CRYPTO_SERPENT
-	select CRYPTO_LRW
+	select CRYPTO_SIMD
 	select CRYPTO_XTS
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1488,14 +1470,7 @@ config CRYPTO_SERPENT_AVX_X86_64
 config CRYPTO_SERPENT_AVX2_X86_64
 	tristate "Serpent cipher algorithm (x86_64/AVX2)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
-	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_SERPENT
 	select CRYPTO_SERPENT_AVX_X86_64
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1508,6 +1483,45 @@ config CRYPTO_SERPENT_AVX2_X86_64
 	  See also:
 	  <http://www.cl.cam.ac.uk/~rja14/serpent.html>
 
+config CRYPTO_SM4
+	tristate "SM4 cipher algorithm"
+	select CRYPTO_ALGAPI
+	help
+	  SM4 cipher algorithms (OSCCA GB/T 32907-2016).
+
+	  SM4 (GBT.32907-2016) is a cryptographic standard issued by the
+	  Organization of State Commercial Administration of China (OSCCA)
+	  as an authorized cryptographic algorithms for the use within China.
+
+	  SMS4 was originally created for use in protecting wireless
+	  networks, and is mandated in the Chinese National Standard for
+	  Wireless LAN WAPI (Wired Authentication and Privacy Infrastructure)
+	  (GB.15629.11-2003).
+
+	  The latest SM4 standard (GBT.32907-2016) was proposed by OSCCA and
+	  standardized through TC 260 of the Standardization Administration
+	  of the People's Republic of China (SAC).
+
+	  The input, output, and key of SMS4 are each 128 bits.
+
+	  See also: <https://eprint.iacr.org/2008/329.pdf>
+
+	  If unsure, say N.
+
+config CRYPTO_SPECK
+	tristate "Speck cipher algorithm"
+	select CRYPTO_ALGAPI
+	help
+	  Speck is a lightweight block cipher that is tuned for optimal
+	  performance in software (rather than hardware).
+
+	  Speck may not be as secure as AES, and should only be used on systems
+	  where AES is not fast enough.
+
+	  See also: <https://eprint.iacr.org/2013/404.pdf>
+
+	  If unsure, say N.
+
 config CRYPTO_TEA
 	tristate "TEA, XTEA and XETA cipher algorithms"
 	select CRYPTO_ALGAPI
@@ -1581,12 +1595,10 @@ config CRYPTO_TWOFISH_X86_64
 config CRYPTO_TWOFISH_X86_64_3WAY
 	tristate "Twofish cipher algorithm (x86_64, 3-way parallel)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_TWOFISH_COMMON
 	select CRYPTO_TWOFISH_X86_64
 	select CRYPTO_GLUE_HELPER_X86
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Twofish cipher algorithm (x86_64, 3-way parallel).
@@ -1604,15 +1616,12 @@ config CRYPTO_TWOFISH_X86_64_3WAY
 config CRYPTO_TWOFISH_AVX_X86_64
 	tristate "Twofish cipher algorithm (x86_64/AVX)"
 	depends on X86 && 64BIT
-	select CRYPTO_ALGAPI
-	select CRYPTO_CRYPTD
-	select CRYPTO_ABLK_HELPER
+	select CRYPTO_BLKCIPHER
 	select CRYPTO_GLUE_HELPER_X86
+	select CRYPTO_SIMD
 	select CRYPTO_TWOFISH_COMMON
 	select CRYPTO_TWOFISH_X86_64
 	select CRYPTO_TWOFISH_X86_64_3WAY
-	select CRYPTO_LRW
-	select CRYPTO_XTS
 	help
 	  Twofish cipher algorithm (x86_64/AVX).
diff --git a/crypto/Makefile b/crypto/Makefile
index cdbc03b35510..4fc69fe94e6a 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -78,6 +78,7 @@ obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o
 obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o
 obj-$(CONFIG_CRYPTO_ECB) += ecb.o
 obj-$(CONFIG_CRYPTO_CBC) += cbc.o
+obj-$(CONFIG_CRYPTO_CFB) += cfb.o
 obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o
 obj-$(CONFIG_CRYPTO_CTS) += cts.o
 obj-$(CONFIG_CRYPTO_LRW) += lrw.o
@@ -100,6 +101,7 @@ obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o
 CFLAGS_serpent_generic.o := $(call cc-option,-fsched-pressure)  # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79149
 obj-$(CONFIG_CRYPTO_AES) += aes_generic.o
 CFLAGS_aes_generic.o := $(call cc-option,-fno-code-hoisting) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83356
+obj-$(CONFIG_CRYPTO_SM4) += sm4_generic.o
 obj-$(CONFIG_CRYPTO_AES_TI) += aes_ti.o
 obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o
 obj-$(CONFIG_CRYPTO_CAST_COMMON) += cast_common.o
@@ -110,6 +112,7 @@ obj-$(CONFIG_CRYPTO_TEA) += tea.o
 obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
 obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
 obj-$(CONFIG_CRYPTO_SEED) += seed.o
+obj-$(CONFIG_CRYPTO_SPECK) += speck.o
 obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o
 obj-$(CONFIG_CRYPTO_CHACHA20) += chacha20_generic.o
 obj-$(CONFIG_CRYPTO_POLY1305) += poly1305_generic.o
@@ -149,6 +152,5 @@ obj-$(CONFIG_XOR_BLOCKS) += xor.o
 obj-$(CONFIG_ASYNC_CORE) += async_tx/
 obj-$(CONFIG_ASYMMETRIC_KEY_TYPE) += asymmetric_keys/
 obj-$(CONFIG_CRYPTO_HASH_INFO) += hash_info.o
-obj-$(CONFIG_CRYPTO_ABLK_HELPER) += ablk_helper.o
 crypto_simd-y := simd.o
 obj-$(CONFIG_CRYPTO_SIMD) += crypto_simd.o
diff --git a/crypto/ablk_helper.c b/crypto/ablk_helper.c
deleted file mode 100644
index 09776bb1360e..000000000000
--- a/crypto/ablk_helper.c
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Shared async block cipher helpers
- *
- * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
- *
- * Based on aesni-intel_glue.c by:
- *  Copyright (C) 2008, Intel Corp.
- *    Author: Huang Ying <ying.huang@intel.com>
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- *
- */
-
-#include <linux/kernel.h>
-#include <linux/crypto.h>
-#include <linux/init.h>
-#include <linux/module.h>
-#include <crypto/algapi.h>
-#include <crypto/cryptd.h>
-#include <crypto/ablk_helper.h>
-#include <asm/simd.h>
-
-int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
-		 unsigned int key_len)
-{
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
-	int err;
-
-	crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
-	crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
-				    & CRYPTO_TFM_REQ_MASK);
-	err = crypto_ablkcipher_setkey(child, key, key_len);
-	crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
-				    & CRYPTO_TFM_RES_MASK);
-	return err;
-}
-EXPORT_SYMBOL_GPL(ablk_set_key);
-
-int __ablk_encrypt(struct ablkcipher_request *req)
-{
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-	struct blkcipher_desc desc;
-
-	desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
-	desc.info = req->info;
-	desc.flags = 0;
-
-	return crypto_blkcipher_crt(desc.tfm)->encrypt(
-		&desc, req->dst, req->src, req->nbytes);
-}
-EXPORT_SYMBOL_GPL(__ablk_encrypt);
-
-int ablk_encrypt(struct ablkcipher_request *req)
-{
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-
-	if (!may_use_simd() ||
-	    (in_atomic() && cryptd_ablkcipher_queued(ctx->cryptd_tfm))) {
-		struct ablkcipher_request *cryptd_req =
-			ablkcipher_request_ctx(req);
-
-		*cryptd_req = *req;
-		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-
-		return crypto_ablkcipher_encrypt(cryptd_req);
-	} else {
-		return __ablk_encrypt(req);
-	}
-}
-EXPORT_SYMBOL_GPL(ablk_encrypt);
-
-int ablk_decrypt(struct ablkcipher_request *req)
-{
-	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
-	struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
-
-	if (!may_use_simd() ||
-	    (in_atomic() && cryptd_ablkcipher_queued(ctx->cryptd_tfm))) {
-		struct ablkcipher_request *cryptd_req =
-			ablkcipher_request_ctx(req);
-
-		*cryptd_req = *req;
-		ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-
-		return crypto_ablkcipher_decrypt(cryptd_req);
-	} else {
-		struct blkcipher_desc desc;
-
-		desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
-		desc.info = req->info;
-		desc.flags = 0;
-
-		return crypto_blkcipher_crt(desc.tfm)->decrypt(
-			&desc, req->dst, req->src, req->nbytes);
-	}
-}
-EXPORT_SYMBOL_GPL(ablk_decrypt);
-
-void ablk_exit(struct crypto_tfm *tfm)
-{
-	struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	cryptd_free_ablkcipher(ctx->cryptd_tfm);
-}
-EXPORT_SYMBOL_GPL(ablk_exit);
-
-int ablk_init_common(struct crypto_tfm *tfm, const char *drv_name)
-{
-	struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
-	struct cryptd_ablkcipher *cryptd_tfm;
-
-	cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, CRYPTO_ALG_INTERNAL,
-					     CRYPTO_ALG_INTERNAL);
-	if (IS_ERR(cryptd_tfm))
-		return PTR_ERR(cryptd_tfm);
-
-	ctx->cryptd_tfm = cryptd_tfm;
-	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
-		crypto_ablkcipher_reqsize(&cryptd_tfm->base);
-
-	return 0;
-}
-EXPORT_SYMBOL_GPL(ablk_init_common);
-
-int ablk_init(struct crypto_tfm *tfm)
-{
-	char drv_name[CRYPTO_MAX_ALG_NAME];
-
-	snprintf(drv_name, sizeof(drv_name), "__driver-%s",
-		 crypto_tfm_alg_driver_name(tfm));
-
-	return ablk_init_common(tfm, drv_name);
-}
-EXPORT_SYMBOL_GPL(ablk_init);
-
-MODULE_LICENSE("GPL");
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 266fc1d64f61..a64c143165b1 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -92,13 +92,14 @@ int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
 
 	if (nbytes && walk->offset & alignmask && !err) {
 		walk->offset = ALIGN(walk->offset, alignmask + 1);
-		walk->data += walk->offset;
-
 		nbytes = min(nbytes,
 			     ((unsigned int)(PAGE_SIZE)) - walk->offset);
 		walk->entrylen -= nbytes;
 
-		return nbytes;
+		if (nbytes) {
+			walk->data += walk->offset;
+			return nbytes;
+		}
 	}
 
 	if (walk->flags & CRYPTO_ALG_ASYNC)
@@ -446,24 +447,12 @@ static int ahash_def_finup(struct ahash_request *req)
 	return ahash_def_finup_finish1(req, err);
 }
 
-static int ahash_no_export(struct ahash_request *req, void *out)
-{
-	return -ENOSYS;
-}
-
-static int ahash_no_import(struct ahash_request *req, const void *in)
-{
-	return -ENOSYS;
-}
-
 static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
 	struct ahash_alg *alg = crypto_ahash_alg(hash);
 
 	hash->setkey = ahash_nosetkey;
-	hash->export = ahash_no_export;
-	hash->import = ahash_no_import;
 
 	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
 		return crypto_init_shash_ops_async(tfm);
@@ -473,16 +462,14 @@ static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
 	hash->final = alg->final;
 	hash->finup = alg->finup ?: ahash_def_finup;
 	hash->digest = alg->digest;
+	hash->export = alg->export;
+	hash->import = alg->import;
 
 	if (alg->setkey) {
 		hash->setkey = alg->setkey;
 		if (!(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
 			crypto_ahash_set_flags(hash, CRYPTO_TFM_NEED_KEY);
 	}
-	if (alg->export)
-		hash->export = alg->export;
-	if (alg->import)
-		hash->import = alg->import;
 
 	return 0;
 }
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 395b082d03a9..2a0271b5f62a 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -543,9 +543,6 @@ int crypto_register_instance(struct crypto_template *tmpl,
 	inst->alg.cra_module = tmpl->module;
 	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
 
-	if (unlikely(!crypto_mod_get(&inst->alg)))
-		return -EAGAIN;
-
 	down_write(&crypto_alg_sem);
 
 	larval = __crypto_register_alg(&inst->alg);
@@ -563,14 +560,9 @@ unlock:
 		goto err;
 
 	crypto_wait_for_test(larval);
-
-	/* Remove instance if test failed */
-	if (!(inst->alg.cra_flags & CRYPTO_ALG_TESTED))
-		crypto_unregister_instance(inst);
 	err = 0;
 
 err:
-	crypto_mod_put(&inst->alg);
 	return err;
 }
 EXPORT_SYMBOL_GPL(crypto_register_instance);
diff --git a/crypto/api.c b/crypto/api.c
index 70a894e52ff3..1d5290c67108 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -193,17 +193,24 @@ static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
 	return alg;
 }
 
-struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
+static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
+					    u32 mask)
 {
 	struct crypto_alg *alg;
+	u32 test = 0;
+
+	if (!((type | mask) & CRYPTO_ALG_TESTED))
+		test |= CRYPTO_ALG_TESTED;
 
 	down_read(&crypto_alg_sem);
-	alg = __crypto_alg_lookup(name, type, mask);
+	alg = __crypto_alg_lookup(name, type | test, mask | test);
+	if (!alg && test)
+		alg = __crypto_alg_lookup(name, type, mask) ?
+		      ERR_PTR(-ELIBBAD) : NULL;
 	up_read(&crypto_alg_sem);
 
 	return alg;
 }
-EXPORT_SYMBOL_GPL(crypto_alg_lookup);
 
 static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
 					       u32 mask)
@@ -227,10 +234,12 @@ static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
 		alg = crypto_alg_lookup(name, type, mask);
 	}
 
-	if (alg)
-		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;
+	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
+		alg = crypto_larval_wait(alg);
+	else if (!alg)
+		alg = crypto_larval_add(name, type, mask);
 
-	return crypto_larval_add(name, type, mask);
+	return alg;
 }
 
 int crypto_probing_notify(unsigned long val, void *v)
@@ -253,11 +262,6 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
 	struct crypto_alg *larval;
 	int ok;
 
-	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
-		type |= CRYPTO_ALG_TESTED;
-		mask |= CRYPTO_ALG_TESTED;
-	}
-
 	/*
 	 * If the internal flag is set for a cipher, require a caller to
 	 * to invoke the cipher with the internal flag to use that cipher.
@@ -485,20 +489,14 @@ struct crypto_alg *crypto_find_alg(const char *alg_name,
 				   const struct crypto_type *frontend,
 				   u32 type, u32 mask)
 {
-	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
-		crypto_alg_mod_lookup;
-
 	if (frontend) {
 		type &= frontend->maskclear;
 		mask &= frontend->maskclear;
 		type |= frontend->type;
 		mask |= frontend->maskset;
-
-		if (frontend->lookup)
-			lookup = frontend->lookup;
 	}
 
-	return lookup(alg_name, type, mask);
+	return crypto_alg_mod_lookup(alg_name, type, mask);
 }
 EXPORT_SYMBOL_GPL(crypto_find_alg);
diff --git a/crypto/cfb.c b/crypto/cfb.c
new file mode 100644
index 000000000000..94ee39bed758
--- /dev/null
+++ b/crypto/cfb.c
@@ -0,0 +1,353 @@
+//SPDX-License-Identifier: GPL-2.0
+/*
+ * CFB: Cipher FeedBack mode
+ *
+ * Copyright (c) 2018 James.Bottomley@HansenPartnership.com
+ *
+ * CFB is a stream cipher mode which is layered on to a block
+ * encryption scheme.  It works very much like a one time pad where
+ * the pad is generated initially from the encrypted IV and then
+ * subsequently from the encrypted previous block of ciphertext.  The
+ * pad is XOR'd into the plain text to get the final ciphertext.
+ *
+ * The scheme of CFB is best described by wikipedia:
+ *
+ * https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#CFB
+ *
+ * Note that since the pad for both encryption and decryption is
+ * generated by an encryption operation, CFB never uses the block
+ * decryption function.
+ */
+
+#include <crypto/algapi.h>
+#include <crypto/internal/skcipher.h>
+#include <linux/err.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+#include <linux/string.h>
+#include <linux/types.h>
+
+struct crypto_cfb_ctx {
+	struct crypto_cipher *child;
+};
+
+static unsigned int crypto_cfb_bsize(struct crypto_skcipher *tfm)
+{
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct crypto_cipher *child = ctx->child;
+
+	return crypto_cipher_blocksize(child);
+}
+
+static void crypto_cfb_encrypt_one(struct crypto_skcipher *tfm,
+				   const u8 *src, u8 *dst)
+{
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	crypto_cipher_encrypt_one(ctx->child, dst, src);
+}
+
+/* final encrypt and decrypt is the same */
+static void crypto_cfb_final(struct skcipher_walk *walk,
+			     struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	const unsigned long alignmask = crypto_skcipher_alignmask(tfm);
+	u8 tmp[bsize + alignmask];
+	u8 *stream = PTR_ALIGN(tmp + 0, alignmask + 1);
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	u8 *iv = walk->iv;
+	unsigned int nbytes = walk->nbytes;
+
+	crypto_cfb_encrypt_one(tfm, iv, stream);
+	crypto_xor_cpy(dst, stream, src, nbytes);
+}
+
+static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	u8 *iv = walk->iv;
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, dst);
+		crypto_xor(dst, src, bsize);
+		memcpy(iv, dst, bsize);
+
+		src += bsize;
+		dst += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_encrypt_inplace(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *iv = walk->iv;
+	u8 tmp[bsize];
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, tmp);
+		crypto_xor(src, tmp, bsize);
+		iv = src;
+
+		src += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	memcpy(walk->iv, iv, bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_encrypt(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_walk walk;
+	unsigned int bsize = crypto_cfb_bsize(tfm);
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+
+	while (walk.nbytes >= bsize) {
+		if (walk.src.virt.addr == walk.dst.virt.addr)
+			err = crypto_cfb_encrypt_inplace(&walk, tfm);
+		else
+			err = crypto_cfb_encrypt_segment(&walk, tfm);
+		err = skcipher_walk_done(&walk, err);
+	}
+
+	if (walk.nbytes) {
+		crypto_cfb_final(&walk, tfm);
+		err = skcipher_walk_done(&walk, 0);
+	}
+
+	return err;
+}
+
+static int crypto_cfb_decrypt_segment(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	u8 *iv = walk->iv;
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, dst);
+		crypto_xor(dst, iv, bsize);
+		iv = src;
+
+		src += bsize;
+		dst += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	memcpy(walk->iv, iv, bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_decrypt_inplace(struct skcipher_walk *walk,
+				      struct crypto_skcipher *tfm)
+{
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	unsigned int nbytes = walk->nbytes;
+	u8 *src = walk->src.virt.addr;
+	u8 *iv = walk->iv;
+	u8 tmp[bsize];
+
+	do {
+		crypto_cfb_encrypt_one(tfm, iv, tmp);
+		memcpy(iv, src, bsize);
+		crypto_xor(src, tmp, bsize);
+		src += bsize;
+	} while ((nbytes -= bsize) >= bsize);
+
+	memcpy(walk->iv, iv, bsize);
+
+	return nbytes;
+}
+
+static int crypto_cfb_decrypt_blocks(struct skcipher_walk *walk,
+				     struct crypto_skcipher *tfm)
+{
+	if (walk->src.virt.addr == walk->dst.virt.addr)
+		return crypto_cfb_decrypt_inplace(walk, tfm);
+	else
+		return crypto_cfb_decrypt_segment(walk, tfm);
+}
+
+static int crypto_cfb_setkey(struct crypto_skcipher *parent, const u8 *key,
+			     unsigned int keylen)
+{
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(parent);
+	struct crypto_cipher *child = ctx->child;
+	int err;
+
+	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
+				       CRYPTO_TFM_REQ_MASK);
+	err = crypto_cipher_setkey(child, key, keylen);
+	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
+					  CRYPTO_TFM_RES_MASK);
+	return err;
+}
+
+static int crypto_cfb_decrypt(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct skcipher_walk walk;
+	const unsigned int bsize = crypto_cfb_bsize(tfm);
+	int err;
+
+	err = skcipher_walk_virt(&walk, req, false);
+
+	while (walk.nbytes >= bsize) {
+		err = crypto_cfb_decrypt_blocks(&walk, tfm);
+		err = skcipher_walk_done(&walk, err);
+	}
+
+	if (walk.nbytes) {
+		crypto_cfb_final(&walk, tfm);
+		err = skcipher_walk_done(&walk, 0);
+	}
+
+	return err;
+}
+
+static int crypto_cfb_init_tfm(struct crypto_skcipher *tfm)
+{
+	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
+	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct crypto_cipher *cipher;
+
+	cipher = crypto_spawn_cipher(spawn);
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	ctx->child = cipher;
+	return 0;
+}
+
+static void crypto_cfb_exit_tfm(struct crypto_skcipher *tfm)
+{
+	struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	crypto_free_cipher(ctx->child);
+}
+
+static void crypto_cfb_free(struct skcipher_instance *inst)
+{
+	crypto_drop_skcipher(skcipher_instance_ctx(inst));
+	kfree(inst);
+}
+
+static int crypto_cfb_create(struct crypto_template *tmpl, struct rtattr **tb)
+{
+	struct skcipher_instance *inst;
+	struct crypto_attr_type *algt;
+	struct crypto_spawn *spawn;
+	struct crypto_alg *alg;
+	u32 mask;
+	int err;
+
+	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER);
+	if (err)
+		return err;
+
+	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+	if (!inst)
+		return -ENOMEM;
+
+	algt = crypto_get_attr_type(tb);
+	err = PTR_ERR(algt);
+	if (IS_ERR(algt))
+		goto err_free_inst;
+
+	mask = CRYPTO_ALG_TYPE_MASK |
+		crypto_requires_off(algt->type, algt->mask,
+				    CRYPTO_ALG_NEED_FALLBACK);
+
+	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, mask);
+	err = PTR_ERR(alg);
+	if (IS_ERR(alg))
+		goto err_free_inst;
+
+	spawn = skcipher_instance_ctx(inst);
+	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
+				CRYPTO_ALG_TYPE_MASK);
+	crypto_mod_put(alg);
+	if (err)
+		goto err_free_inst;
+
+	err = crypto_inst_setname(skcipher_crypto_instance(inst), "cfb", alg);
+	if (err)
+		goto err_drop_spawn;
+
+	inst->alg.base.cra_priority = alg->cra_priority;
+	/* we're a stream cipher independend of the crypto cra_blocksize */
+	inst->alg.base.cra_blocksize = 1;
+	inst->alg.base.cra_alignmask = alg->cra_alignmask;
+
+	inst->alg.ivsize = alg->cra_blocksize;
+	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
+	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;
+
+	inst->alg.base.cra_ctxsize = sizeof(struct crypto_cfb_ctx);
+
+	inst->alg.init = crypto_cfb_init_tfm;
+	inst->alg.exit = crypto_cfb_exit_tfm;
+
+	inst->alg.setkey = crypto_cfb_setkey;
+	inst->alg.encrypt = crypto_cfb_encrypt;
+	inst->alg.decrypt = crypto_cfb_decrypt;
+
+	inst->free = crypto_cfb_free;
+
+	err = skcipher_register_instance(tmpl, inst);
+	if (err)
+		goto err_drop_spawn;
+
+out:
+	return err;
+
+err_drop_spawn:
+	crypto_drop_spawn(spawn);
+err_free_inst:
+	kfree(inst);
+	goto out;
+}
+
+static struct crypto_template crypto_cfb_tmpl = {
+	.name = "cfb",
+	.create = crypto_cfb_create,
+	.module = THIS_MODULE,
+};
+
+static int __init crypto_cfb_module_init(void)
+{
+	return crypto_register_template(&crypto_cfb_tmpl);
+}
+
+static void __exit crypto_cfb_module_exit(void)
+{
+	crypto_unregister_template(&crypto_cfb_tmpl);
+}
+
+module_init(crypto_cfb_module_init);
+module_exit(crypto_cfb_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("CFB block cipher algorithm");
+MODULE_ALIAS_CRYPTO("cfb");
diff --git a/crypto/crypto_engine.c b/crypto/crypto_engine.c
index 61e7c4e02fd2..992e8d8dcdd9 100644
--- a/crypto/crypto_engine.c
+++ b/crypto/crypto_engine.c
@@ -15,13 +15,50 @@
 #include <linux/err.h>
 #include <linux/delay.h>
 #include <crypto/engine.h>
-#include <crypto/internal/hash.h>
 #include <uapi/linux/sched/types.h>
 #include "internal.h"
 
 #define CRYPTO_ENGINE_MAX_QLEN 10
 
 /**
+ * crypto_finalize_request - finalize one request if the request is done
+ * @engine: the hardware engine
+ * @req: the request need to be finalized
+ * @err: error number
+ */
+static void crypto_finalize_request(struct crypto_engine *engine,
+				    struct crypto_async_request *req, int err)
+{
+	unsigned long flags;
+	bool finalize_cur_req = false;
+	int ret;
+	struct crypto_engine_ctx *enginectx;
+
+	spin_lock_irqsave(&engine->queue_lock, flags);
+	if (engine->cur_req == req)
+		finalize_cur_req = true;
+	spin_unlock_irqrestore(&engine->queue_lock, flags);
+
+	if (finalize_cur_req) {
+		enginectx = crypto_tfm_ctx(req->tfm);
+		if (engine->cur_req_prepared &&
+		    enginectx->op.unprepare_request) {
+			ret = enginectx->op.unprepare_request(engine, req);
+			if (ret)
+				dev_err(engine->dev, "failed to unprepare request\n");
+		}
+		spin_lock_irqsave(&engine->queue_lock, flags);
+		engine->cur_req = NULL;
+		engine->cur_req_prepared = false;
+		spin_unlock_irqrestore(&engine->queue_lock, flags);
+	}
+
+	req->complete(req, err);
+
+	kthread_queue_work(engine->kworker, &engine->pump_requests);
+}
+
+/**
  * crypto_pump_requests - dequeue one request from engine queue to process
  * @engine: the hardware engine
  * @in_kthread: true if we are in the context of the request pump thread
@@ -34,11 +71,10 @@ static void crypto_pump_requests(struct crypto_engine *engine,
 				 bool in_kthread)
 {
 	struct crypto_async_request *async_req, *backlog;
-	struct ahash_request *hreq;
-	struct ablkcipher_request *breq;
 	unsigned long flags;
 	bool was_busy = false;
-	int ret, rtype;
+	int ret;
+	struct crypto_engine_ctx *enginectx;
 
 	spin_lock_irqsave(&engine->queue_lock, flags);
 
@@ -94,7 +130,6 @@ static void crypto_pump_requests(struct crypto_engine *engine,
 
 	spin_unlock_irqrestore(&engine->queue_lock, flags);
 
-	rtype = crypto_tfm_alg_type(engine->cur_req->tfm);
 	/* Until here we get the request need to be encrypted successfully */
 	if (!was_busy && engine->prepare_crypt_hardware) {
 		ret = engine->prepare_crypt_hardware(engine);
@@ -104,57 +139,31 @@ static void crypto_pump_requests(struct crypto_engine *engine,
 		}
 	}
 
-	switch (rtype) {
-	case CRYPTO_ALG_TYPE_AHASH:
-		hreq = ahash_request_cast(engine->cur_req);
-		if (engine->prepare_hash_request) {
-			ret = engine->prepare_hash_request(engine, hreq);
-			if (ret) {
-				dev_err(engine->dev, "failed to prepare request: %d\n",
-					ret);
-				goto req_err;
-			}
-			engine->cur_req_prepared = true;
-		}
-		ret = engine->hash_one_request(engine, hreq);
-		if (ret) {
-			dev_err(engine->dev, "failed to hash one request from queue\n");
-			goto req_err;
-		}
-		return;
-	case CRYPTO_ALG_TYPE_ABLKCIPHER:
-		breq = ablkcipher_request_cast(engine->cur_req);
-		if (engine->prepare_cipher_request) {
-			ret = engine->prepare_cipher_request(engine, breq);
-			if (ret) {
-				dev_err(engine->dev, "failed to prepare request: %d\n",
-					ret);
-				goto req_err;
-			}
-			engine->cur_req_prepared = true;
-		}
-		ret = engine->cipher_one_request(engine, breq);
+	enginectx = crypto_tfm_ctx(async_req->tfm);
+
+	if (enginectx->op.prepare_request) {
+		ret = enginectx->op.prepare_request(engine, async_req);
 		if (ret) {
-			dev_err(engine->dev, "failed to cipher one request from queue\n");
+			dev_err(engine->dev, "failed to prepare request: %d\n",
+				ret);
 			goto req_err;
 		}
-		return;
-	default:
-		dev_err(engine->dev, "failed to prepare request of unknown type\n");
-		return;
+		engine->cur_req_prepared = true;
+	}
+	if (!enginectx->op.do_one_request) {
+		dev_err(engine->dev, "failed to do request\n");
+		ret = -EINVAL;
+		goto req_err;
 	}
+	ret = enginectx->op.do_one_request(engine, async_req);
+	if (ret) {
+		dev_err(engine->dev, "Failed to do one request from queue: %d\n", ret);
+		goto req_err;
+	}
+	return;
 
 req_err:
-	switch (rtype) {
-	case CRYPTO_ALG_TYPE_AHASH:
-		hreq = ahash_request_cast(engine->cur_req);
-		crypto_finalize_hash_request(engine, hreq, ret);
-		break;
-	case CRYPTO_ALG_TYPE_ABLKCIPHER:
-		breq = ablkcipher_request_cast(engine->cur_req);
-		crypto_finalize_cipher_request(engine, breq, ret);
-		break;
-	}
+	crypto_finalize_request(engine, async_req, ret);
 	return;
 
 out:
@@ -170,13 +179,12 @@ static void crypto_pump_work(struct kthread_work *work)
 }
 
 /**
- * crypto_transfer_cipher_request - transfer the new request into the
- * enginequeue
+ * crypto_transfer_request - transfer the new request into the engine queue
  * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
-int crypto_transfer_cipher_request(struct crypto_engine *engine,
-				   struct ablkcipher_request *req,
+static int crypto_transfer_request(struct crypto_engine *engine,
+				   struct crypto_async_request *req,
 				   bool need_pump)
 {
 	unsigned long flags;
@@ -189,7 +197,7 @@ int crypto_transfer_cipher_request(struct crypto_engine *engine,
 		return -ESHUTDOWN;
 	}
 
-	ret = ablkcipher_enqueue_request(&engine->queue, req);
+	ret = crypto_enqueue_request(&engine->queue, req);
 
 	if (!engine->busy && need_pump)
 		kthread_queue_work(engine->kworker, &engine->pump_requests);
@@ -197,102 +205,131 @@ int crypto_transfer_cipher_request(struct crypto_engine *engine,
 	spin_unlock_irqrestore(&engine->queue_lock, flags);
 	return ret;
 }
-EXPORT_SYMBOL_GPL(crypto_transfer_cipher_request);
 
 /**
- * crypto_transfer_cipher_request_to_engine - transfer one request to list
+ * crypto_transfer_request_to_engine - transfer one request to list
  * into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
-int crypto_transfer_cipher_request_to_engine(struct crypto_engine *engine,
-					     struct ablkcipher_request *req)
+static int crypto_transfer_request_to_engine(struct crypto_engine *engine,
+					     struct crypto_async_request *req)
 {
-	return crypto_transfer_cipher_request(engine, req, true);
+	return crypto_transfer_request(engine, req, true);
 }
-EXPORT_SYMBOL_GPL(crypto_transfer_cipher_request_to_engine);
 
 /**
- * crypto_transfer_hash_request - transfer the new request into the
- * enginequeue
+ * crypto_transfer_ablkcipher_request_to_engine - transfer one ablkcipher_request
+ * to list into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
+ * TODO: Remove this function when skcipher conversion is finished
 */
-int crypto_transfer_hash_request(struct crypto_engine *engine,
-				 struct ahash_request *req, bool need_pump)
+int crypto_transfer_ablkcipher_request_to_engine(struct crypto_engine *engine,
+						 struct ablkcipher_request *req)
 {
-	unsigned long flags;
-	int ret;
-
-	spin_lock_irqsave(&engine->queue_lock, flags);
-
-	if (!engine->running) {
-		spin_unlock_irqrestore(&engine->queue_lock, flags);
-		return -ESHUTDOWN;
-	}
-
-	ret = ahash_enqueue_request(&engine->queue, req);
+	return crypto_transfer_request_to_engine(engine, &req->base);
+}
+EXPORT_SYMBOL_GPL(crypto_transfer_ablkcipher_request_to_engine);
 
-	if (!engine->busy && need_pump)
-		kthread_queue_work(engine->kworker, &engine->pump_requests);
+/**
+ * crypto_transfer_aead_request_to_engine - transfer one aead_request
+ * to list into the engine queue
+ * @engine: the hardware engine
+ * @req: the request need to be listed into the engine queue
+ */
+int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
+					   struct aead_request *req)
+{
+	return crypto_transfer_request_to_engine(engine, &req->base);
+}
+EXPORT_SYMBOL_GPL(crypto_transfer_aead_request_to_engine);
 
-	spin_unlock_irqrestore(&engine->queue_lock, flags);
-	return ret;
+/**
+ * crypto_transfer_akcipher_request_to_engine - transfer one akcipher_request
+ * to list into the engine queue
+ * @engine: the hardware engine
+ * @req: the request need to be listed into the engine queue
+ */
+int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
+					       struct akcipher_request *req)
+{
+	return crypto_transfer_request_to_engine(engine, &req->base);
 }
-EXPORT_SYMBOL_GPL(crypto_transfer_hash_request);
+EXPORT_SYMBOL_GPL(crypto_transfer_akcipher_request_to_engine);
 
 /**
- * crypto_transfer_hash_request_to_engine - transfer one request to list
- * into the engine queue
+ * crypto_transfer_hash_request_to_engine - transfer one ahash_request
+ * to list into the engine queue
 * @engine: the hardware engine
 * @req: the request need to be listed into the engine queue
 */
 int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
 					   struct ahash_request *req)
 {
-	return crypto_transfer_hash_request(engine, req, true);
+	return crypto_transfer_request_to_engine(engine, &req->base);
 }
 EXPORT_SYMBOL_GPL(crypto_transfer_hash_request_to_engine);
 
 /**
- * crypto_finalize_cipher_request - finalize one request if the request is done
+ * crypto_transfer_skcipher_request_to_engine - transfer one skcipher_request
+ * to list into the engine queue
+ * @engine: the hardware engine
+ * @req: the request need to be listed into the engine queue
+ */
+int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
+					       struct skcipher_request *req)
+{
+	return crypto_transfer_request_to_engine(engine, &req->base);
+}
+EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine); + +/** + * crypto_finalize_ablkcipher_request - finalize one ablkcipher_request if + * the request is done * @engine: the hardware engine * @req: the request need to be finalized * @err: error number + * TODO: Remove this function when skcipher conversion is finished */ -void crypto_finalize_cipher_request(struct crypto_engine *engine, - struct ablkcipher_request *req, int err) +void crypto_finalize_ablkcipher_request(struct crypto_engine *engine, + struct ablkcipher_request *req, int err) { - unsigned long flags; - bool finalize_cur_req = false; - int ret; - - spin_lock_irqsave(&engine->queue_lock, flags); - if (engine->cur_req == &req->base) - finalize_cur_req = true; - spin_unlock_irqrestore(&engine->queue_lock, flags); - - if (finalize_cur_req) { - if (engine->cur_req_prepared && - engine->unprepare_cipher_request) { - ret = engine->unprepare_cipher_request(engine, req); - if (ret) - dev_err(engine->dev, "failed to unprepare request\n"); - } - spin_lock_irqsave(&engine->queue_lock, flags); - engine->cur_req = NULL; - engine->cur_req_prepared = false; - spin_unlock_irqrestore(&engine->queue_lock, flags); - } + return crypto_finalize_request(engine, &req->base, err); +} +EXPORT_SYMBOL_GPL(crypto_finalize_ablkcipher_request); - req->base.complete(&req->base, err); +/** + * crypto_finalize_aead_request - finalize one aead_request if + * the request is done + * @engine: the hardware engine + * @req: the request need to be finalized + * @err: error number + */ +void crypto_finalize_aead_request(struct crypto_engine *engine, + struct aead_request *req, int err) +{ + return crypto_finalize_request(engine, &req->base, err); +} +EXPORT_SYMBOL_GPL(crypto_finalize_aead_request); - kthread_queue_work(engine->kworker, &engine->pump_requests); +/** + * crypto_finalize_akcipher_request - finalize one akcipher_request if + * the request is done + * @engine: the hardware engine + * @req: the request need to be finalized + * @err: error number + */ +void crypto_finalize_akcipher_request(struct crypto_engine *engine, + struct akcipher_request *req, int err) +{ + return crypto_finalize_request(engine, &req->base, err); } -EXPORT_SYMBOL_GPL(crypto_finalize_cipher_request); +EXPORT_SYMBOL_GPL(crypto_finalize_akcipher_request); /** - * crypto_finalize_hash_request - finalize one request if the request is done + * crypto_finalize_hash_request - finalize one ahash_request if + * the request is done * @engine: the hardware engine * @req: the request need to be finalized * @err: error number @@ -300,35 +337,25 @@ EXPORT_SYMBOL_GPL(crypto_finalize_cipher_request); void crypto_finalize_hash_request(struct crypto_engine *engine, struct ahash_request *req, int err) { - unsigned long flags; - bool finalize_cur_req = false; - int ret; - - spin_lock_irqsave(&engine->queue_lock, flags); - if (engine->cur_req == &req->base) - finalize_cur_req = true; - spin_unlock_irqrestore(&engine->queue_lock, flags); - - if (finalize_cur_req) { - if (engine->cur_req_prepared && - engine->unprepare_hash_request) { - ret = engine->unprepare_hash_request(engine, req); - if (ret) - dev_err(engine->dev, "failed to unprepare request\n"); - } - spin_lock_irqsave(&engine->queue_lock, flags); - engine->cur_req = NULL; - engine->cur_req_prepared = false; - spin_unlock_irqrestore(&engine->queue_lock, flags); - } - - req->base.complete(&req->base, err); - - kthread_queue_work(engine->kworker, &engine->pump_requests); + return crypto_finalize_request(engine, &req->base, err); } 
EXPORT_SYMBOL_GPL(crypto_finalize_hash_request); /** + * crypto_finalize_skcipher_request - finalize one skcipher_request if + * the request is done + * @engine: the hardware engine + * @req: the request need to be finalized + * @err: error number + */ +void crypto_finalize_skcipher_request(struct crypto_engine *engine, + struct skcipher_request *req, int err) +{ + return crypto_finalize_request(engine, &req->base, err); +} +EXPORT_SYMBOL_GPL(crypto_finalize_skcipher_request); + +/** * crypto_engine_start - start the hardware engine * @engine: the hardware engine need to be started * diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c index 5c291eedaa70..0e89b5457cab 100644 --- a/crypto/crypto_user.c +++ b/crypto/crypto_user.c @@ -271,7 +271,7 @@ static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh, return -ENOENT; err = -ENOMEM; - skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC); + skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_KERNEL); if (!skb) goto drop_alg; diff --git a/crypto/ecc.c b/crypto/ecc.c index 18f32f2a5e1c..9c066b5ac12d 100644 --- a/crypto/ecc.c +++ b/crypto/ecc.c @@ -1025,9 +1025,7 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits, { int ret = 0; struct ecc_point *product, *pk; - u64 priv[ndigits]; - u64 rand_z[ndigits]; - unsigned int nbytes; + u64 *priv, *rand_z; const struct ecc_curve *curve = ecc_get_curve(curve_id); if (!private_key || !public_key || !curve) { @@ -1035,14 +1033,22 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits, goto out; } - nbytes = ndigits << ECC_DIGITS_TO_BYTES_SHIFT; + priv = kmalloc_array(ndigits, sizeof(*priv), GFP_KERNEL); + if (!priv) { + ret = -ENOMEM; + goto out; + } - get_random_bytes(rand_z, nbytes); + rand_z = kmalloc_array(ndigits, sizeof(*rand_z), GFP_KERNEL); + if (!rand_z) { + ret = -ENOMEM; + goto kfree_out; + } pk = ecc_alloc_point(ndigits); if (!pk) { ret = -ENOMEM; - goto out; + goto kfree_out; } product = ecc_alloc_point(ndigits); @@ -1051,6 +1057,8 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits, goto err_alloc_product; } + get_random_bytes(rand_z, ndigits << ECC_DIGITS_TO_BYTES_SHIFT); + ecc_swap_digits(public_key, pk->x, ndigits); ecc_swap_digits(&public_key[ndigits], pk->y, ndigits); ecc_swap_digits(private_key, priv, ndigits); @@ -1065,6 +1073,9 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits, ecc_free_point(product); err_alloc_product: ecc_free_point(pk); +kfree_out: + kzfree(priv); + kzfree(rand_z); out: return ret; } diff --git a/crypto/ecdh.c b/crypto/ecdh.c index 3aca0933ec44..d2ec33f0e098 100644 --- a/crypto/ecdh.c +++ b/crypto/ecdh.c @@ -89,12 +89,19 @@ static int ecdh_compute_value(struct kpp_request *req) if (!shared_secret) goto free_pubkey; - copied = sg_copy_to_buffer(req->src, 1, public_key, - public_key_sz); - if (copied != public_key_sz) { - ret = -EINVAL; + /* from here on it's invalid parameters */ + ret = -EINVAL; + + /* must have exactly two points to be on the curve */ + if (public_key_sz != req->src_len) + goto free_all; + + copied = sg_copy_to_buffer(req->src, + sg_nents_for_len(req->src, + public_key_sz), + public_key, public_key_sz); + if (copied != public_key_sz) goto free_all; - } ret = crypto_ecdh_shared_secret(ctx->curve_id, ctx->ndigits, ctx->private_key, public_key, @@ -111,7 +118,11 @@ static int ecdh_compute_value(struct kpp_request *req) if (ret < 0) goto free_all; - copied = sg_copy_from_buffer(req->dst, 1, buf, nbytes); + /* might want less than we've got */ + 
nbytes = min_t(size_t, nbytes, req->dst_len); + copied = sg_copy_from_buffer(req->dst, sg_nents_for_len(req->dst, + nbytes), + buf, nbytes); if (copied != nbytes) ret = -EINVAL; diff --git a/crypto/internal.h b/crypto/internal.h index 5ac27fba10e8..9a3f39939fba 100644 --- a/crypto/internal.h +++ b/crypto/internal.h @@ -67,7 +67,6 @@ static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg) } struct crypto_alg *crypto_mod_get(struct crypto_alg *alg); -struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask); struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask); int crypto_init_cipher_ops(struct crypto_tfm *tfm); diff --git a/crypto/lrw.c b/crypto/lrw.c index cbbd7c50ad19..954a7064a179 100644 --- a/crypto/lrw.c +++ b/crypto/lrw.c @@ -28,13 +28,31 @@ #include <crypto/b128ops.h> #include <crypto/gf128mul.h> -#include <crypto/lrw.h> #define LRW_BUFFER_SIZE 128u +#define LRW_BLOCK_SIZE 16 + struct priv { struct crypto_skcipher *child; - struct lrw_table_ctx table; + + /* + * optimizes multiplying a random (non incrementing, as at the + * start of a new sector) value with key2, we could also have + * used 4k optimization tables or no optimization at all. In the + * latter case we would have to store key2 here + */ + struct gf128mul_64k *table; + + /* + * stores: + * key2*{ 0,0,...0,0,0,0,1 }, key2*{ 0,0,...0,0,0,1,1 }, + * key2*{ 0,0,...0,0,1,1,1 }, key2*{ 0,0,...0,1,1,1,1 } + * key2*{ 0,0,...1,1,1,1,1 }, etc + * needed for optimized multiplication of incrementing values + * with key2 + */ + be128 mulinc[128]; }; struct rctx { @@ -65,11 +83,25 @@ static inline void setbit128_bbe(void *b, int bit) ), b); } -int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak) +static int setkey(struct crypto_skcipher *parent, const u8 *key, + unsigned int keylen) { + struct priv *ctx = crypto_skcipher_ctx(parent); + struct crypto_skcipher *child = ctx->child; + int err, bsize = LRW_BLOCK_SIZE; + const u8 *tweak = key + keylen - bsize; be128 tmp = { 0 }; int i; + crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_skcipher_setkey(child, key, keylen - bsize); + crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + if (err) + return err; + if (ctx->table) gf128mul_free_64k(ctx->table); @@ -87,34 +119,6 @@ int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak) return 0; } -EXPORT_SYMBOL_GPL(lrw_init_table); - -void lrw_free_table(struct lrw_table_ctx *ctx) -{ - if (ctx->table) - gf128mul_free_64k(ctx->table); -} -EXPORT_SYMBOL_GPL(lrw_free_table); - -static int setkey(struct crypto_skcipher *parent, const u8 *key, - unsigned int keylen) -{ - struct priv *ctx = crypto_skcipher_ctx(parent); - struct crypto_skcipher *child = ctx->child; - int err, bsize = LRW_BLOCK_SIZE; - const u8 *tweak = key + keylen - bsize; - - crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); - crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) & - CRYPTO_TFM_REQ_MASK); - err = crypto_skcipher_setkey(child, key, keylen - bsize); - crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) & - CRYPTO_TFM_RES_MASK); - if (err) - return err; - - return lrw_init_table(&ctx->table, tweak); -} static inline void inc(be128 *iv) { @@ -238,7 +242,7 @@ static int pre_crypt(struct skcipher_request *req) /* T <- I*Key2, using the optimization * discussed in the specification */ be128_xor(&rctx->t, 
&rctx->t, - &ctx->table.mulinc[get_index128(iv)]); + &ctx->mulinc[get_index128(iv)]); inc(iv); } while ((avail -= bs) >= bs); @@ -301,7 +305,7 @@ static int init_crypt(struct skcipher_request *req, crypto_completion_t done) memcpy(&rctx->t, req->iv, sizeof(rctx->t)); /* T <- I*Key2 */ - gf128mul_64k_bbe(&rctx->t, ctx->table.table); + gf128mul_64k_bbe(&rctx->t, ctx->table); return 0; } @@ -313,7 +317,7 @@ static void exit_crypt(struct skcipher_request *req) rctx->left = 0; if (rctx->ext) - kfree(rctx->ext); + kzfree(rctx->ext); } static int do_encrypt(struct skcipher_request *req, int err) @@ -416,85 +420,6 @@ static int decrypt(struct skcipher_request *req) return do_decrypt(req, init_crypt(req, decrypt_done)); } -int lrw_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst, - struct scatterlist *ssrc, unsigned int nbytes, - struct lrw_crypt_req *req) -{ - const unsigned int bsize = LRW_BLOCK_SIZE; - const unsigned int max_blks = req->tbuflen / bsize; - struct lrw_table_ctx *ctx = req->table_ctx; - struct blkcipher_walk walk; - unsigned int nblocks; - be128 *iv, *src, *dst, *t; - be128 *t_buf = req->tbuf; - int err, i; - - BUG_ON(max_blks < 1); - - blkcipher_walk_init(&walk, sdst, ssrc, nbytes); - - err = blkcipher_walk_virt(desc, &walk); - nbytes = walk.nbytes; - if (!nbytes) - return err; - - nblocks = min(walk.nbytes / bsize, max_blks); - src = (be128 *)walk.src.virt.addr; - dst = (be128 *)walk.dst.virt.addr; - - /* calculate first value of T */ - iv = (be128 *)walk.iv; - t_buf[0] = *iv; - - /* T <- I*Key2 */ - gf128mul_64k_bbe(&t_buf[0], ctx->table); - - i = 0; - goto first; - - for (;;) { - do { - for (i = 0; i < nblocks; i++) { - /* T <- I*Key2, using the optimization - * discussed in the specification */ - be128_xor(&t_buf[i], t, - &ctx->mulinc[get_index128(iv)]); - inc(iv); -first: - t = &t_buf[i]; - - /* PP <- T xor P */ - be128_xor(dst + i, t, src + i); - } - - /* CC <- E(Key2,PP) */ - req->crypt_fn(req->crypt_ctx, (u8 *)dst, - nblocks * bsize); - - /* C <- T xor CC */ - for (i = 0; i < nblocks; i++) - be128_xor(dst + i, dst + i, &t_buf[i]); - - src += nblocks; - dst += nblocks; - nbytes -= nblocks * bsize; - nblocks = min(nbytes / bsize, max_blks); - } while (nblocks > 0); - - err = blkcipher_walk_done(desc, &walk, nbytes); - nbytes = walk.nbytes; - if (!nbytes) - break; - - nblocks = min(nbytes / bsize, max_blks); - src = (be128 *)walk.src.virt.addr; - dst = (be128 *)walk.dst.virt.addr; - } - - return err; -} -EXPORT_SYMBOL_GPL(lrw_crypt); - static int init_tfm(struct crypto_skcipher *tfm) { struct skcipher_instance *inst = skcipher_alg_instance(tfm); @@ -518,7 +443,8 @@ static void exit_tfm(struct crypto_skcipher *tfm) { struct priv *ctx = crypto_skcipher_ctx(tfm); - lrw_free_table(&ctx->table); + if (ctx->table) + gf128mul_free_64k(ctx->table); crypto_free_skcipher(ctx->child); } diff --git a/crypto/mcryptd.c b/crypto/mcryptd.c index fe5129d6ff4e..f14152147ce8 100644 --- a/crypto/mcryptd.c +++ b/crypto/mcryptd.c @@ -367,7 +367,7 @@ static void mcryptd_hash_update(struct crypto_async_request *req_async, int err) goto out; rctx->out = req->result; - err = ahash_mcryptd_update(&rctx->areq); + err = crypto_ahash_update(&rctx->areq); if (err) { req->base.complete = rctx->complete; goto out; @@ -394,7 +394,7 @@ static void mcryptd_hash_final(struct crypto_async_request *req_async, int err) goto out; rctx->out = req->result; - err = ahash_mcryptd_final(&rctx->areq); + err = crypto_ahash_final(&rctx->areq); if (err) { req->base.complete = rctx->complete; goto out; @@ -420,7 
+420,7 @@ static void mcryptd_hash_finup(struct crypto_async_request *req_async, int err) if (unlikely(err == -EINPROGRESS)) goto out; rctx->out = req->result; - err = ahash_mcryptd_finup(&rctx->areq); + err = crypto_ahash_finup(&rctx->areq); if (err) { req->base.complete = rctx->complete; @@ -455,7 +455,7 @@ static void mcryptd_hash_digest(struct crypto_async_request *req_async, int err) rctx->complete, req_async); rctx->out = req->result; - err = ahash_mcryptd_digest(desc); + err = crypto_ahash_init(desc) ?: crypto_ahash_finup(desc); out: local_bh_disable(); @@ -612,32 +612,6 @@ struct mcryptd_ahash *mcryptd_alloc_ahash(const char *alg_name, } EXPORT_SYMBOL_GPL(mcryptd_alloc_ahash); -int ahash_mcryptd_digest(struct ahash_request *desc) -{ - return crypto_ahash_init(desc) ?: ahash_mcryptd_finup(desc); -} - -int ahash_mcryptd_update(struct ahash_request *desc) -{ - /* alignment is to be done by multi-buffer crypto algorithm if needed */ - - return crypto_ahash_update(desc); -} - -int ahash_mcryptd_finup(struct ahash_request *desc) -{ - /* alignment is to be done by multi-buffer crypto algorithm if needed */ - - return crypto_ahash_finup(desc); -} - -int ahash_mcryptd_final(struct ahash_request *desc) -{ - /* alignment is to be done by multi-buffer crypto algorithm if needed */ - - return crypto_ahash_final(desc); -} - struct crypto_ahash *mcryptd_ahash_child(struct mcryptd_ahash *tfm) { struct mcryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); diff --git a/crypto/md4.c b/crypto/md4.c index 3515af425cc9..810fefb0a007 100644 --- a/crypto/md4.c +++ b/crypto/md4.c @@ -64,23 +64,6 @@ static inline u32 H(u32 x, u32 y, u32 z) #define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s)) #define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s)) -/* XXX: this stuff can be optimized */ -static inline void le32_to_cpu_array(u32 *buf, unsigned int words) -{ - while (words--) { - __le32_to_cpus(buf); - buf++; - } -} - -static inline void cpu_to_le32_array(u32 *buf, unsigned int words) -{ - while (words--) { - __cpu_to_le32s(buf); - buf++; - } -} - static void md4_transform(u32 *hash, u32 const *in) { u32 a, b, c, d; diff --git a/crypto/md5.c b/crypto/md5.c index f7ae1a48225b..f776ef43d621 100644 --- a/crypto/md5.c +++ b/crypto/md5.c @@ -32,23 +32,6 @@ const u8 md5_zero_message_hash[MD5_DIGEST_SIZE] = { }; EXPORT_SYMBOL_GPL(md5_zero_message_hash); -/* XXX: this stuff can be optimized */ -static inline void le32_to_cpu_array(u32 *buf, unsigned int words) -{ - while (words--) { - __le32_to_cpus(buf); - buf++; - } -} - -static inline void cpu_to_le32_array(u32 *buf, unsigned int words) -{ - while (words--) { - __cpu_to_le32s(buf); - buf++; - } -} - #define F1(x, y, z) (z ^ (x & (y ^ z))) #define F2(x, y, z) F1(z, x, y) #define F3(x, y, z) (x ^ y ^ z) diff --git a/crypto/rsa-pkcs1pad.c b/crypto/rsa-pkcs1pad.c index 2908f93c3e55..9893dbfc1af4 100644 --- a/crypto/rsa-pkcs1pad.c +++ b/crypto/rsa-pkcs1pad.c @@ -192,7 +192,7 @@ static int pkcs1pad_encrypt_sign_complete(struct akcipher_request *req, int err) if (likely(!pad_len)) goto out; - out_buf = kzalloc(ctx->key_size, GFP_ATOMIC); + out_buf = kzalloc(ctx->key_size, GFP_KERNEL); err = -ENOMEM; if (!out_buf) goto out; diff --git a/crypto/simd.c b/crypto/simd.c index 208226d7f908..ea7240be3001 100644 --- a/crypto/simd.c +++ b/crypto/simd.c @@ -221,4 +221,54 @@ void simd_skcipher_free(struct simd_skcipher_alg *salg) } EXPORT_SYMBOL_GPL(simd_skcipher_free); +int simd_register_skciphers_compat(struct skcipher_alg *algs, 
int count, + struct simd_skcipher_alg **simd_algs) +{ + int err; + int i; + const char *algname; + const char *drvname; + const char *basename; + struct simd_skcipher_alg *simd; + + err = crypto_register_skciphers(algs, count); + if (err) + return err; + + for (i = 0; i < count; i++) { + WARN_ON(strncmp(algs[i].base.cra_name, "__", 2)); + WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2)); + algname = algs[i].base.cra_name + 2; + drvname = algs[i].base.cra_driver_name + 2; + basename = algs[i].base.cra_driver_name; + simd = simd_skcipher_create_compat(algname, drvname, basename); + err = PTR_ERR(simd); + if (IS_ERR(simd)) + goto err_unregister; + simd_algs[i] = simd; + } + return 0; + +err_unregister: + simd_unregister_skciphers(algs, count, simd_algs); + return err; +} +EXPORT_SYMBOL_GPL(simd_register_skciphers_compat); + +void simd_unregister_skciphers(struct skcipher_alg *algs, int count, + struct simd_skcipher_alg **simd_algs) +{ + int i; + + crypto_unregister_skciphers(algs, count); + + for (i = 0; i < count; i++) { + if (simd_algs[i]) { + simd_skcipher_free(simd_algs[i]); + simd_algs[i] = NULL; + } + } +} +EXPORT_SYMBOL_GPL(simd_unregister_skciphers); + MODULE_LICENSE("GPL"); diff --git a/crypto/sm4_generic.c b/crypto/sm4_generic.c new file mode 100644 index 000000000000..f537a2766c55 --- /dev/null +++ b/crypto/sm4_generic.c @@ -0,0 +1,244 @@ +// SPDX-License-Identifier: GPL-2.0 + +/* + * SM4 Cipher Algorithm. + * + * Copyright (C) 2018 ARM Limited or its affiliates. + * All rights reserved. + */ + +#include <crypto/sm4.h> +#include <linux/module.h> +#include <linux/init.h> +#include <linux/types.h> +#include <linux/errno.h> +#include <linux/crypto.h> +#include <asm/byteorder.h> +#include <asm/unaligned.h> + +static const u32 fk[4] = { + 0xa3b1bac6, 0x56aa3350, 0x677d9197, 0xb27022dc +}; + +static const u8 sbox[256] = { + 0xd6, 0x90, 0xe9, 0xfe, 0xcc, 0xe1, 0x3d, 0xb7, + 0x16, 0xb6, 0x14, 0xc2, 0x28, 0xfb, 0x2c, 0x05, + 0x2b, 0x67, 0x9a, 0x76, 0x2a, 0xbe, 0x04, 0xc3, + 0xaa, 0x44, 0x13, 0x26, 0x49, 0x86, 0x06, 0x99, + 0x9c, 0x42, 0x50, 0xf4, 0x91, 0xef, 0x98, 0x7a, + 0x33, 0x54, 0x0b, 0x43, 0xed, 0xcf, 0xac, 0x62, + 0xe4, 0xb3, 0x1c, 0xa9, 0xc9, 0x08, 0xe8, 0x95, + 0x80, 0xdf, 0x94, 0xfa, 0x75, 0x8f, 0x3f, 0xa6, + 0x47, 0x07, 0xa7, 0xfc, 0xf3, 0x73, 0x17, 0xba, + 0x83, 0x59, 0x3c, 0x19, 0xe6, 0x85, 0x4f, 0xa8, + 0x68, 0x6b, 0x81, 0xb2, 0x71, 0x64, 0xda, 0x8b, + 0xf8, 0xeb, 0x0f, 0x4b, 0x70, 0x56, 0x9d, 0x35, + 0x1e, 0x24, 0x0e, 0x5e, 0x63, 0x58, 0xd1, 0xa2, + 0x25, 0x22, 0x7c, 0x3b, 0x01, 0x21, 0x78, 0x87, + 0xd4, 0x00, 0x46, 0x57, 0x9f, 0xd3, 0x27, 0x52, + 0x4c, 0x36, 0x02, 0xe7, 0xa0, 0xc4, 0xc8, 0x9e, + 0xea, 0xbf, 0x8a, 0xd2, 0x40, 0xc7, 0x38, 0xb5, + 0xa3, 0xf7, 0xf2, 0xce, 0xf9, 0x61, 0x15, 0xa1, + 0xe0, 0xae, 0x5d, 0xa4, 0x9b, 0x34, 0x1a, 0x55, + 0xad, 0x93, 0x32, 0x30, 0xf5, 0x8c, 0xb1, 0xe3, + 0x1d, 0xf6, 0xe2, 0x2e, 0x82, 0x66, 0xca, 0x60, + 0xc0, 0x29, 0x23, 0xab, 0x0d, 0x53, 0x4e, 0x6f, + 0xd5, 0xdb, 0x37, 0x45, 0xde, 0xfd, 0x8e, 0x2f, + 0x03, 0xff, 0x6a, 0x72, 0x6d, 0x6c, 0x5b, 0x51, + 0x8d, 0x1b, 0xaf, 0x92, 0xbb, 0xdd, 0xbc, 0x7f, + 0x11, 0xd9, 0x5c, 0x41, 0x1f, 0x10, 0x5a, 0xd8, + 0x0a, 0xc1, 0x31, 0x88, 0xa5, 0xcd, 0x7b, 0xbd, + 0x2d, 0x74, 0xd0, 0x12, 0xb8, 0xe5, 0xb4, 0xb0, + 0x89, 0x69, 0x97, 0x4a, 0x0c, 0x96, 0x77, 0x7e, + 0x65, 0xb9, 0xf1, 0x09, 0xc5, 0x6e, 0xc6, 0x84, + 0x18, 0xf0, 0x7d, 0xec, 0x3a, 0xdc, 0x4d, 0x20, + 0x79, 0xee, 0x5f, 0x3e, 0xd7, 0xcb, 0x39, 0x48 +}; + +static const u32 ck[] = { + 0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269, + 
0x70777e85, 0x8c939aa1, 0xa8afb6bd, 0xc4cbd2d9,
+	0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249,
+	0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9,
+	0xc0c7ced5, 0xdce3eaf1, 0xf8ff060d, 0x141b2229,
+	0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299,
+	0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209,
+	0x10171e25, 0x2c333a41, 0x484f565d, 0x646b7279
+};
+
+static u32 sm4_t_non_lin_sub(u32 x)
+{
+	int i;
+	u8 *b = (u8 *)&x;
+
+	for (i = 0; i < 4; ++i)
+		b[i] = sbox[b[i]];
+
+	return x;
+}
+
+static u32 sm4_key_lin_sub(u32 x)
+{
+	return x ^ rol32(x, 13) ^ rol32(x, 23);
+}
+
+static u32 sm4_enc_lin_sub(u32 x)
+{
+	return x ^ rol32(x, 2) ^ rol32(x, 10) ^ rol32(x, 18) ^ rol32(x, 24);
+}
+
+static u32 sm4_key_sub(u32 x)
+{
+	return sm4_key_lin_sub(sm4_t_non_lin_sub(x));
+}
+
+static u32 sm4_enc_sub(u32 x)
+{
+	return sm4_enc_lin_sub(sm4_t_non_lin_sub(x));
+}
+
+static u32 sm4_round(const u32 *x, const u32 rk)
+{
+	return x[0] ^ sm4_enc_sub(x[1] ^ x[2] ^ x[3] ^ rk);
+}
+
+/**
+ * crypto_sm4_expand_key - Expands the SM4 key as described in GB/T 32907-2016
+ * @ctx: The location where the computed key will be stored.
+ * @in_key: The supplied key.
+ * @key_len: The length of the supplied key.
+ *
+ * Returns 0 on success. The function fails only if an invalid key size (or
+ * pointer) is supplied.
+ */
+int crypto_sm4_expand_key(struct crypto_sm4_ctx *ctx, const u8 *in_key,
+			  unsigned int key_len)
+{
+	u32 rk[4], t;
+	const u32 *key = (u32 *)in_key;
+	int i;
+
+	if (key_len != SM4_KEY_SIZE)
+		return -EINVAL;
+
+	for (i = 0; i < 4; ++i)
+		rk[i] = get_unaligned_be32(&key[i]) ^ fk[i];
+
+	for (i = 0; i < 32; ++i) {
+		t = rk[0] ^ sm4_key_sub(rk[1] ^ rk[2] ^ rk[3] ^ ck[i]);
+		ctx->rkey_enc[i] = t;
+		rk[0] = rk[1];
+		rk[1] = rk[2];
+		rk[2] = rk[3];
+		rk[3] = t;
+	}
+
+	for (i = 0; i < 32; ++i)
+		ctx->rkey_dec[i] = ctx->rkey_enc[31 - i];
+
+	return 0;
+}
+EXPORT_SYMBOL_GPL(crypto_sm4_expand_key);
+
+/**
+ * crypto_sm4_set_key - Set the SM4 key.
+ * @tfm: The %crypto_tfm that is used in the context.
+ * @in_key: The input key.
+ * @key_len: The size of the key.
+ *
+ * Returns 0 on success, on failure the %CRYPTO_TFM_RES_BAD_KEY_LEN flag in tfm
+ * is set. The function uses crypto_sm4_expand_key() to expand the key.
+ * &crypto_sm4_ctx _must_ be the private data embedded in @tfm which is
+ * retrieved with crypto_tfm_ctx().
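+ *
+ * A minimal usage sketch through the single-block cipher API, which ends up
+ * in this setkey (illustrative only; error checking omitted, and key, in and
+ * out are caller-provided buffers):
+ *
+ *	struct crypto_cipher *cip = crypto_alloc_cipher("sm4", 0, 0);
+ *
+ *	crypto_cipher_setkey(cip, key, SM4_KEY_SIZE);
+ *	crypto_cipher_encrypt_one(cip, out, in);
+ *	crypto_free_cipher(cip);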
+ */ +int crypto_sm4_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm); + u32 *flags = &tfm->crt_flags; + int ret; + + ret = crypto_sm4_expand_key(ctx, in_key, key_len); + if (!ret) + return 0; + + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; +} +EXPORT_SYMBOL_GPL(crypto_sm4_set_key); + +static void sm4_do_crypt(const u32 *rk, u32 *out, const u32 *in) +{ + u32 x[4], i, t; + + for (i = 0; i < 4; ++i) + x[i] = get_unaligned_be32(&in[i]); + + for (i = 0; i < 32; ++i) { + t = sm4_round(x, rk[i]); + x[0] = x[1]; + x[1] = x[2]; + x[2] = x[3]; + x[3] = t; + } + + for (i = 0; i < 4; ++i) + put_unaligned_be32(x[3 - i], &out[i]); +} + +/* encrypt a block of text */ + +static void sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm); + + sm4_do_crypt(ctx->rkey_enc, (u32 *)out, (u32 *)in); +} + +/* decrypt a block of text */ + +static void sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm); + + sm4_do_crypt(ctx->rkey_dec, (u32 *)out, (u32 *)in); +} + +static struct crypto_alg sm4_alg = { + .cra_name = "sm4", + .cra_driver_name = "sm4-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = SM4_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct crypto_sm4_ctx), + .cra_module = THIS_MODULE, + .cra_u = { + .cipher = { + .cia_min_keysize = SM4_KEY_SIZE, + .cia_max_keysize = SM4_KEY_SIZE, + .cia_setkey = crypto_sm4_set_key, + .cia_encrypt = sm4_encrypt, + .cia_decrypt = sm4_decrypt + } + } +}; + +static int __init sm4_init(void) +{ + return crypto_register_alg(&sm4_alg); +} + +static void __exit sm4_fini(void) +{ + crypto_unregister_alg(&sm4_alg); +} + +module_init(sm4_init); +module_exit(sm4_fini); + +MODULE_DESCRIPTION("SM4 Cipher Algorithm"); +MODULE_LICENSE("GPL v2"); +MODULE_ALIAS_CRYPTO("sm4"); +MODULE_ALIAS_CRYPTO("sm4-generic"); diff --git a/crypto/speck.c b/crypto/speck.c new file mode 100644 index 000000000000..58aa9f7f91f7 --- /dev/null +++ b/crypto/speck.c @@ -0,0 +1,307 @@ +// SPDX-License-Identifier: GPL-2.0 +/* + * Speck: a lightweight block cipher + * + * Copyright (c) 2018 Google, Inc + * + * Speck has 10 variants, including 5 block sizes. For now we only implement + * the variants Speck128/128, Speck128/192, Speck128/256, Speck64/96, and + * Speck64/128. Speck${B}/${K} denotes the variant with a block size of B bits + * and a key size of K bits. The Speck128 variants are believed to be the most + * secure variants, and they use the same block size and key sizes as AES. The + * Speck64 variants are less secure, but on 32-bit processors are usually + * faster. The remaining variants (Speck32, Speck48, and Speck96) are even less + * secure and/or not as well suited for implementation on either 32-bit or + * 64-bit processors, so are omitted. + * + * Reference: "The Simon and Speck Families of Lightweight Block Ciphers" + * https://eprint.iacr.org/2013/404.pdf + * + * In a correspondence, the Speck designers have also clarified that the words + * should be interpreted in little-endian format, and the words should be + * ordered such that the first word of each block is 'y' rather than 'x', and + * the first key word (rather than the last) becomes the first round key. 
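+ *
+ * As a concrete check of these conventions, the following standalone sketch
+ * (not part of this file; ror64()/rol64() as in <linux/bitops.h>) reproduces
+ * the paper's Speck128/128 test vector with the same round and key-schedule
+ * structure used by crypto_speck128_setkey() and crypto_speck128_encrypt()
+ * below:
+ *
+ *	u64 k = 0x0706050403020100;	// key bytes 0..7, little endian
+ *	u64 l = 0x0f0e0d0c0b0a0908;	// key bytes 8..15, little endian
+ *	u64 y = 0x7469206564616d20;	// first plaintext word is 'y'
+ *	u64 x = 0x6c61766975716520;	// second plaintext word is 'x'
+ *	u64 rk[32];
+ *	int i;
+ *
+ *	for (i = 0; i < 32; i++) {	// key schedule
+ *		rk[i] = k;
+ *		l = (ror64(l, 8) + k) ^ i;
+ *		k = rol64(k, 3) ^ l;
+ *	}
+ *	for (i = 0; i < 32; i++) {	// encryption
+ *		x = (ror64(x, 8) + y) ^ rk[i];
+ *		y = rol64(y, 3) ^ x;
+ *	}
+ *
+ * which leaves y == 0x7860fedf5c570d18 and x == 0xa65d985179783265, i.e. the
+ * ciphertext bytes 18 0d 57 5c df fe 60 78 65 32 78 79 51 98 5d a6 in memory
+ * order.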
+ */ + +#include <asm/unaligned.h> +#include <crypto/speck.h> +#include <linux/bitops.h> +#include <linux/crypto.h> +#include <linux/init.h> +#include <linux/module.h> + +/* Speck128 */ + +static __always_inline void speck128_round(u64 *x, u64 *y, u64 k) +{ + *x = ror64(*x, 8); + *x += *y; + *x ^= k; + *y = rol64(*y, 3); + *y ^= *x; +} + +static __always_inline void speck128_unround(u64 *x, u64 *y, u64 k) +{ + *y ^= *x; + *y = ror64(*y, 3); + *x ^= k; + *x -= *y; + *x = rol64(*x, 8); +} + +void crypto_speck128_encrypt(const struct speck128_tfm_ctx *ctx, + u8 *out, const u8 *in) +{ + u64 y = get_unaligned_le64(in); + u64 x = get_unaligned_le64(in + 8); + int i; + + for (i = 0; i < ctx->nrounds; i++) + speck128_round(&x, &y, ctx->round_keys[i]); + + put_unaligned_le64(y, out); + put_unaligned_le64(x, out + 8); +} +EXPORT_SYMBOL_GPL(crypto_speck128_encrypt); + +static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + crypto_speck128_encrypt(crypto_tfm_ctx(tfm), out, in); +} + +void crypto_speck128_decrypt(const struct speck128_tfm_ctx *ctx, + u8 *out, const u8 *in) +{ + u64 y = get_unaligned_le64(in); + u64 x = get_unaligned_le64(in + 8); + int i; + + for (i = ctx->nrounds - 1; i >= 0; i--) + speck128_unround(&x, &y, ctx->round_keys[i]); + + put_unaligned_le64(y, out); + put_unaligned_le64(x, out + 8); +} +EXPORT_SYMBOL_GPL(crypto_speck128_decrypt); + +static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + crypto_speck128_decrypt(crypto_tfm_ctx(tfm), out, in); +} + +int crypto_speck128_setkey(struct speck128_tfm_ctx *ctx, const u8 *key, + unsigned int keylen) +{ + u64 l[3]; + u64 k; + int i; + + switch (keylen) { + case SPECK128_128_KEY_SIZE: + k = get_unaligned_le64(key); + l[0] = get_unaligned_le64(key + 8); + ctx->nrounds = SPECK128_128_NROUNDS; + for (i = 0; i < ctx->nrounds; i++) { + ctx->round_keys[i] = k; + speck128_round(&l[0], &k, i); + } + break; + case SPECK128_192_KEY_SIZE: + k = get_unaligned_le64(key); + l[0] = get_unaligned_le64(key + 8); + l[1] = get_unaligned_le64(key + 16); + ctx->nrounds = SPECK128_192_NROUNDS; + for (i = 0; i < ctx->nrounds; i++) { + ctx->round_keys[i] = k; + speck128_round(&l[i % 2], &k, i); + } + break; + case SPECK128_256_KEY_SIZE: + k = get_unaligned_le64(key); + l[0] = get_unaligned_le64(key + 8); + l[1] = get_unaligned_le64(key + 16); + l[2] = get_unaligned_le64(key + 24); + ctx->nrounds = SPECK128_256_NROUNDS; + for (i = 0; i < ctx->nrounds; i++) { + ctx->round_keys[i] = k; + speck128_round(&l[i % 3], &k, i); + } + break; + default: + return -EINVAL; + } + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_speck128_setkey); + +static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + return crypto_speck128_setkey(crypto_tfm_ctx(tfm), key, keylen); +} + +/* Speck64 */ + +static __always_inline void speck64_round(u32 *x, u32 *y, u32 k) +{ + *x = ror32(*x, 8); + *x += *y; + *x ^= k; + *y = rol32(*y, 3); + *y ^= *x; +} + +static __always_inline void speck64_unround(u32 *x, u32 *y, u32 k) +{ + *y ^= *x; + *y = ror32(*y, 3); + *x ^= k; + *x -= *y; + *x = rol32(*x, 8); +} + +void crypto_speck64_encrypt(const struct speck64_tfm_ctx *ctx, + u8 *out, const u8 *in) +{ + u32 y = get_unaligned_le32(in); + u32 x = get_unaligned_le32(in + 4); + int i; + + for (i = 0; i < ctx->nrounds; i++) + speck64_round(&x, &y, ctx->round_keys[i]); + + put_unaligned_le32(y, out); + put_unaligned_le32(x, out + 4); +} +EXPORT_SYMBOL_GPL(crypto_speck64_encrypt); + +static void speck64_encrypt(struct 
crypto_tfm *tfm, u8 *out, const u8 *in) +{ + crypto_speck64_encrypt(crypto_tfm_ctx(tfm), out, in); +} + +void crypto_speck64_decrypt(const struct speck64_tfm_ctx *ctx, + u8 *out, const u8 *in) +{ + u32 y = get_unaligned_le32(in); + u32 x = get_unaligned_le32(in + 4); + int i; + + for (i = ctx->nrounds - 1; i >= 0; i--) + speck64_unround(&x, &y, ctx->round_keys[i]); + + put_unaligned_le32(y, out); + put_unaligned_le32(x, out + 4); +} +EXPORT_SYMBOL_GPL(crypto_speck64_decrypt); + +static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + crypto_speck64_decrypt(crypto_tfm_ctx(tfm), out, in); +} + +int crypto_speck64_setkey(struct speck64_tfm_ctx *ctx, const u8 *key, + unsigned int keylen) +{ + u32 l[3]; + u32 k; + int i; + + switch (keylen) { + case SPECK64_96_KEY_SIZE: + k = get_unaligned_le32(key); + l[0] = get_unaligned_le32(key + 4); + l[1] = get_unaligned_le32(key + 8); + ctx->nrounds = SPECK64_96_NROUNDS; + for (i = 0; i < ctx->nrounds; i++) { + ctx->round_keys[i] = k; + speck64_round(&l[i % 2], &k, i); + } + break; + case SPECK64_128_KEY_SIZE: + k = get_unaligned_le32(key); + l[0] = get_unaligned_le32(key + 4); + l[1] = get_unaligned_le32(key + 8); + l[2] = get_unaligned_le32(key + 12); + ctx->nrounds = SPECK64_128_NROUNDS; + for (i = 0; i < ctx->nrounds; i++) { + ctx->round_keys[i] = k; + speck64_round(&l[i % 3], &k, i); + } + break; + default: + return -EINVAL; + } + + return 0; +} +EXPORT_SYMBOL_GPL(crypto_speck64_setkey); + +static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keylen) +{ + return crypto_speck64_setkey(crypto_tfm_ctx(tfm), key, keylen); +} + +/* Algorithm definitions */ + +static struct crypto_alg speck_algs[] = { + { + .cra_name = "speck128", + .cra_driver_name = "speck128-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = SPECK128_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct speck128_tfm_ctx), + .cra_module = THIS_MODULE, + .cra_u = { + .cipher = { + .cia_min_keysize = SPECK128_128_KEY_SIZE, + .cia_max_keysize = SPECK128_256_KEY_SIZE, + .cia_setkey = speck128_setkey, + .cia_encrypt = speck128_encrypt, + .cia_decrypt = speck128_decrypt + } + } + }, { + .cra_name = "speck64", + .cra_driver_name = "speck64-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = SPECK64_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct speck64_tfm_ctx), + .cra_module = THIS_MODULE, + .cra_u = { + .cipher = { + .cia_min_keysize = SPECK64_96_KEY_SIZE, + .cia_max_keysize = SPECK64_128_KEY_SIZE, + .cia_setkey = speck64_setkey, + .cia_encrypt = speck64_encrypt, + .cia_decrypt = speck64_decrypt + } + } + } +}; + +static int __init speck_module_init(void) +{ + return crypto_register_algs(speck_algs, ARRAY_SIZE(speck_algs)); +} + +static void __exit speck_module_exit(void) +{ + crypto_unregister_algs(speck_algs, ARRAY_SIZE(speck_algs)); +} + +module_init(speck_module_init); +module_exit(speck_module_exit); + +MODULE_DESCRIPTION("Speck block cipher (generic)"); +MODULE_LICENSE("GPL"); +MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>"); +MODULE_ALIAS_CRYPTO("speck128"); +MODULE_ALIAS_CRYPTO("speck128-generic"); +MODULE_ALIAS_CRYPTO("speck64"); +MODULE_ALIAS_CRYPTO("speck64-generic"); diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 14213a096fd2..51fe7c8744ae 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -1983,6 +1983,9 @@ static int do_test(const char *alg, u32 type, u32 mask, int m) case 190: ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))"); break; + case 
191: + ret += tcrypt_test("ecb(sm4)"); + break; case 200: test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0, speed_template_16_24_32); diff --git a/crypto/testmgr.c b/crypto/testmgr.c index d5e23a142a04..af4a01c5037b 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -3001,6 +3001,33 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "ecb(sm4)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = __VECS(sm4_enc_tv_template), + .dec = __VECS(sm4_dec_tv_template) + } + } + }, { + .alg = "ecb(speck128)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = __VECS(speck128_enc_tv_template), + .dec = __VECS(speck128_dec_tv_template) + } + } + }, { + .alg = "ecb(speck64)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = __VECS(speck64_enc_tv_template), + .dec = __VECS(speck64_dec_tv_template) + } + } + }, { .alg = "ecb(tea)", .test = alg_test_skcipher, .suite = { @@ -3558,6 +3585,24 @@ static const struct alg_test_desc alg_test_descs[] = { } } }, { + .alg = "xts(speck128)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = __VECS(speck128_xts_enc_tv_template), + .dec = __VECS(speck128_xts_dec_tv_template) + } + } + }, { + .alg = "xts(speck64)", + .test = alg_test_skcipher, + .suite = { + .cipher = { + .enc = __VECS(speck64_xts_enc_tv_template), + .dec = __VECS(speck64_xts_dec_tv_template) + } + } + }, { .alg = "xts(twofish)", .test = alg_test_skcipher, .suite = { diff --git a/crypto/testmgr.h b/crypto/testmgr.h index 6044f6906bd6..004c0a0f8004 100644 --- a/crypto/testmgr.h +++ b/crypto/testmgr.h @@ -548,7 +548,7 @@ static const struct akcipher_testvec rsa_tv_template[] = { static const struct akcipher_testvec pkcs1pad_rsa_tv_template[] = { { .key = - "\x30\x82\x03\x1f\x02\x01\x10\x02\x82\x01\x01\x00\xd7\x1e\x77\x82" + "\x30\x82\x03\x1f\x02\x01\x00\x02\x82\x01\x01\x00\xd7\x1e\x77\x82" "\x8c\x92\x31\xe7\x69\x02\xa2\xd5\x5c\x78\xde\xa2\x0c\x8f\xfe\x28" "\x59\x31\xdf\x40\x9c\x60\x61\x06\xb9\x2f\x62\x40\x80\x76\xcb\x67" "\x4a\xb5\x59\x56\x69\x17\x07\xfa\xf9\x4c\xbd\x6c\x37\x7a\x46\x7d" @@ -597,8 +597,8 @@ static const struct akcipher_testvec pkcs1pad_rsa_tv_template[] = { "\xfe\xf8\x27\x1b\xd6\x55\x60\x5e\x48\xb7\x6d\x9a\xa8\x37\xf9\x7a" "\xde\x1b\xcd\x5d\x1a\x30\xd4\xe9\x9e\x5b\x3c\x15\xf8\x9c\x1f\xda" "\xd1\x86\x48\x55\xce\x83\xee\x8e\x51\xc7\xde\x32\x12\x47\x7d\x46" - "\xb8\x35\xdf\x41\x02\x01\x30\x02\x01\x30\x02\x01\x30\x02\x01\x30" - "\x02\x01\x30", + "\xb8\x35\xdf\x41\x02\x01\x00\x02\x01\x00\x02\x01\x00\x02\x01\x00" + "\x02\x01\x00", .key_len = 804, /* * m is SHA256 hash of following message: @@ -2044,6 +2044,265 @@ static const struct hash_testvec crct10dif_tv_template[] = { .digest = (u8 *)(u16 []){ 0x44c6 }, .np = 4, .tap = { 1, 255, 57, 6 }, + }, { + .plaintext = "\x6e\x05\x79\x10\xa7\x1b\xb2\x49" + "\xe0\x54\xeb\x82\x19\x8d\x24\xbb" + "\x2f\xc6\x5d\xf4\x68\xff\x96\x0a" + "\xa1\x38\xcf\x43\xda\x71\x08\x7c" + "\x13\xaa\x1e\xb5\x4c\xe3\x57\xee" + "\x85\x1c\x90\x27\xbe\x32\xc9\x60" + "\xf7\x6b\x02\x99\x0d\xa4\x3b\xd2" + "\x46\xdd\x74\x0b\x7f\x16\xad\x21" + "\xb8\x4f\xe6\x5a\xf1\x88\x1f\x93" + "\x2a\xc1\x35\xcc\x63\xfa\x6e\x05" + "\x9c\x10\xa7\x3e\xd5\x49\xe0\x77" + "\x0e\x82\x19\xb0\x24\xbb\x52\xe9" + "\x5d\xf4\x8b\x22\x96\x2d\xc4\x38" + "\xcf\x66\xfd\x71\x08\x9f\x13\xaa" + "\x41\xd8\x4c\xe3\x7a\x11\x85\x1c" + "\xb3\x27\xbe\x55\xec\x60\xf7\x8e" + "\x02\x99\x30\xc7\x3b\xd2\x69\x00" + "\x74\x0b\xa2\x16\xad\x44\xdb\x4f" + "\xe6\x7d\x14\x88\x1f\xb6\x2a\xc1" + "\x58\xef\x63\xfa\x91\x05\x9c\x33" + 
"\xca\x3e\xd5\x6c\x03\x77\x0e\xa5" + "\x19\xb0\x47\xde\x52\xe9\x80\x17" + "\x8b\x22\xb9\x2d\xc4\x5b\xf2\x66" + "\xfd\x94\x08\x9f\x36\xcd\x41\xd8" + "\x6f\x06\x7a\x11\xa8\x1c\xb3\x4a" + "\xe1\x55\xec\x83\x1a\x8e\x25\xbc" + "\x30\xc7\x5e\xf5\x69\x00\x97\x0b" + "\xa2\x39\xd0\x44\xdb\x72\x09\x7d" + "\x14\xab\x1f\xb6\x4d\xe4\x58\xef" + "\x86\x1d\x91\x28\xbf\x33\xca\x61" + "\xf8\x6c\x03\x9a\x0e\xa5\x3c\xd3" + "\x47\xde\x75\x0c\x80\x17\xae\x22" + "\xb9\x50\xe7\x5b\xf2\x89\x20\x94" + "\x2b\xc2\x36\xcd\x64\xfb\x6f\x06" + "\x9d\x11\xa8\x3f\xd6\x4a\xe1\x78" + "\x0f\x83\x1a\xb1\x25\xbc\x53\xea" + "\x5e\xf5\x8c\x00\x97\x2e\xc5\x39" + "\xd0\x67\xfe\x72\x09\xa0\x14\xab" + "\x42\xd9\x4d\xe4\x7b\x12\x86\x1d" + "\xb4\x28\xbf\x56\xed\x61\xf8\x8f" + "\x03\x9a\x31\xc8\x3c\xd3\x6a\x01" + "\x75\x0c\xa3\x17\xae\x45\xdc\x50" + "\xe7\x7e\x15\x89\x20\xb7\x2b\xc2" + "\x59\xf0\x64\xfb\x92\x06\x9d\x34" + "\xcb\x3f\xd6\x6d\x04\x78\x0f\xa6" + "\x1a\xb1\x48\xdf\x53\xea\x81\x18" + "\x8c\x23\xba\x2e\xc5\x5c\xf3\x67" + "\xfe\x95\x09\xa0\x37\xce\x42\xd9" + "\x70\x07\x7b\x12\xa9\x1d\xb4\x4b" + "\xe2\x56\xed\x84\x1b\x8f\x26\xbd" + "\x31\xc8\x5f\xf6\x6a\x01\x98\x0c" + "\xa3\x3a\xd1\x45\xdc\x73\x0a\x7e" + "\x15\xac\x20\xb7\x4e\xe5\x59\xf0" + "\x87\x1e\x92\x29\xc0\x34\xcb\x62" + "\xf9\x6d\x04\x9b\x0f\xa6\x3d\xd4" + "\x48\xdf\x76\x0d\x81\x18\xaf\x23" + "\xba\x51\xe8\x5c\xf3\x8a\x21\x95" + "\x2c\xc3\x37\xce\x65\xfc\x70\x07" + "\x9e\x12\xa9\x40\xd7\x4b\xe2\x79" + "\x10\x84\x1b\xb2\x26\xbd\x54\xeb" + "\x5f\xf6\x8d\x01\x98\x2f\xc6\x3a" + "\xd1\x68\xff\x73\x0a\xa1\x15\xac" + "\x43\xda\x4e\xe5\x7c\x13\x87\x1e" + "\xb5\x29\xc0\x57\xee\x62\xf9\x90" + "\x04\x9b\x32\xc9\x3d\xd4\x6b\x02" + "\x76\x0d\xa4\x18\xaf\x46\xdd\x51" + "\xe8\x7f\x16\x8a\x21\xb8\x2c\xc3" + "\x5a\xf1\x65\xfc\x93\x07\x9e\x35" + "\xcc\x40\xd7\x6e\x05\x79\x10\xa7" + "\x1b\xb2\x49\xe0\x54\xeb\x82\x19" + "\x8d\x24\xbb\x2f\xc6\x5d\xf4\x68" + "\xff\x96\x0a\xa1\x38\xcf\x43\xda" + "\x71\x08\x7c\x13\xaa\x1e\xb5\x4c" + "\xe3\x57\xee\x85\x1c\x90\x27\xbe" + "\x32\xc9\x60\xf7\x6b\x02\x99\x0d" + "\xa4\x3b\xd2\x46\xdd\x74\x0b\x7f" + "\x16\xad\x21\xb8\x4f\xe6\x5a\xf1" + "\x88\x1f\x93\x2a\xc1\x35\xcc\x63" + "\xfa\x6e\x05\x9c\x10\xa7\x3e\xd5" + "\x49\xe0\x77\x0e\x82\x19\xb0\x24" + "\xbb\x52\xe9\x5d\xf4\x8b\x22\x96" + "\x2d\xc4\x38\xcf\x66\xfd\x71\x08" + "\x9f\x13\xaa\x41\xd8\x4c\xe3\x7a" + "\x11\x85\x1c\xb3\x27\xbe\x55\xec" + "\x60\xf7\x8e\x02\x99\x30\xc7\x3b" + "\xd2\x69\x00\x74\x0b\xa2\x16\xad" + "\x44\xdb\x4f\xe6\x7d\x14\x88\x1f" + "\xb6\x2a\xc1\x58\xef\x63\xfa\x91" + "\x05\x9c\x33\xca\x3e\xd5\x6c\x03" + "\x77\x0e\xa5\x19\xb0\x47\xde\x52" + "\xe9\x80\x17\x8b\x22\xb9\x2d\xc4" + "\x5b\xf2\x66\xfd\x94\x08\x9f\x36" + "\xcd\x41\xd8\x6f\x06\x7a\x11\xa8" + "\x1c\xb3\x4a\xe1\x55\xec\x83\x1a" + "\x8e\x25\xbc\x30\xc7\x5e\xf5\x69" + "\x00\x97\x0b\xa2\x39\xd0\x44\xdb" + "\x72\x09\x7d\x14\xab\x1f\xb6\x4d" + "\xe4\x58\xef\x86\x1d\x91\x28\xbf" + "\x33\xca\x61\xf8\x6c\x03\x9a\x0e" + "\xa5\x3c\xd3\x47\xde\x75\x0c\x80" + "\x17\xae\x22\xb9\x50\xe7\x5b\xf2" + "\x89\x20\x94\x2b\xc2\x36\xcd\x64" + "\xfb\x6f\x06\x9d\x11\xa8\x3f\xd6" + "\x4a\xe1\x78\x0f\x83\x1a\xb1\x25" + "\xbc\x53\xea\x5e\xf5\x8c\x00\x97" + "\x2e\xc5\x39\xd0\x67\xfe\x72\x09" + "\xa0\x14\xab\x42\xd9\x4d\xe4\x7b" + "\x12\x86\x1d\xb4\x28\xbf\x56\xed" + "\x61\xf8\x8f\x03\x9a\x31\xc8\x3c" + "\xd3\x6a\x01\x75\x0c\xa3\x17\xae" + "\x45\xdc\x50\xe7\x7e\x15\x89\x20" + "\xb7\x2b\xc2\x59\xf0\x64\xfb\x92" + "\x06\x9d\x34\xcb\x3f\xd6\x6d\x04" + "\x78\x0f\xa6\x1a\xb1\x48\xdf\x53" + "\xea\x81\x18\x8c\x23\xba\x2e\xc5" + "\x5c\xf3\x67\xfe\x95\x09\xa0\x37" + 
"\xce\x42\xd9\x70\x07\x7b\x12\xa9" + "\x1d\xb4\x4b\xe2\x56\xed\x84\x1b" + "\x8f\x26\xbd\x31\xc8\x5f\xf6\x6a" + "\x01\x98\x0c\xa3\x3a\xd1\x45\xdc" + "\x73\x0a\x7e\x15\xac\x20\xb7\x4e" + "\xe5\x59\xf0\x87\x1e\x92\x29\xc0" + "\x34\xcb\x62\xf9\x6d\x04\x9b\x0f" + "\xa6\x3d\xd4\x48\xdf\x76\x0d\x81" + "\x18\xaf\x23\xba\x51\xe8\x5c\xf3" + "\x8a\x21\x95\x2c\xc3\x37\xce\x65" + "\xfc\x70\x07\x9e\x12\xa9\x40\xd7" + "\x4b\xe2\x79\x10\x84\x1b\xb2\x26" + "\xbd\x54\xeb\x5f\xf6\x8d\x01\x98" + "\x2f\xc6\x3a\xd1\x68\xff\x73\x0a" + "\xa1\x15\xac\x43\xda\x4e\xe5\x7c" + "\x13\x87\x1e\xb5\x29\xc0\x57\xee" + "\x62\xf9\x90\x04\x9b\x32\xc9\x3d" + "\xd4\x6b\x02\x76\x0d\xa4\x18\xaf" + "\x46\xdd\x51\xe8\x7f\x16\x8a\x21" + "\xb8\x2c\xc3\x5a\xf1\x65\xfc\x93" + "\x07\x9e\x35\xcc\x40\xd7\x6e\x05" + "\x79\x10\xa7\x1b\xb2\x49\xe0\x54" + "\xeb\x82\x19\x8d\x24\xbb\x2f\xc6" + "\x5d\xf4\x68\xff\x96\x0a\xa1\x38" + "\xcf\x43\xda\x71\x08\x7c\x13\xaa" + "\x1e\xb5\x4c\xe3\x57\xee\x85\x1c" + "\x90\x27\xbe\x32\xc9\x60\xf7\x6b" + "\x02\x99\x0d\xa4\x3b\xd2\x46\xdd" + "\x74\x0b\x7f\x16\xad\x21\xb8\x4f" + "\xe6\x5a\xf1\x88\x1f\x93\x2a\xc1" + "\x35\xcc\x63\xfa\x6e\x05\x9c\x10" + "\xa7\x3e\xd5\x49\xe0\x77\x0e\x82" + "\x19\xb0\x24\xbb\x52\xe9\x5d\xf4" + "\x8b\x22\x96\x2d\xc4\x38\xcf\x66" + "\xfd\x71\x08\x9f\x13\xaa\x41\xd8" + "\x4c\xe3\x7a\x11\x85\x1c\xb3\x27" + "\xbe\x55\xec\x60\xf7\x8e\x02\x99" + "\x30\xc7\x3b\xd2\x69\x00\x74\x0b" + "\xa2\x16\xad\x44\xdb\x4f\xe6\x7d" + "\x14\x88\x1f\xb6\x2a\xc1\x58\xef" + "\x63\xfa\x91\x05\x9c\x33\xca\x3e" + "\xd5\x6c\x03\x77\x0e\xa5\x19\xb0" + "\x47\xde\x52\xe9\x80\x17\x8b\x22" + "\xb9\x2d\xc4\x5b\xf2\x66\xfd\x94" + "\x08\x9f\x36\xcd\x41\xd8\x6f\x06" + "\x7a\x11\xa8\x1c\xb3\x4a\xe1\x55" + "\xec\x83\x1a\x8e\x25\xbc\x30\xc7" + "\x5e\xf5\x69\x00\x97\x0b\xa2\x39" + "\xd0\x44\xdb\x72\x09\x7d\x14\xab" + "\x1f\xb6\x4d\xe4\x58\xef\x86\x1d" + "\x91\x28\xbf\x33\xca\x61\xf8\x6c" + "\x03\x9a\x0e\xa5\x3c\xd3\x47\xde" + "\x75\x0c\x80\x17\xae\x22\xb9\x50" + "\xe7\x5b\xf2\x89\x20\x94\x2b\xc2" + "\x36\xcd\x64\xfb\x6f\x06\x9d\x11" + "\xa8\x3f\xd6\x4a\xe1\x78\x0f\x83" + "\x1a\xb1\x25\xbc\x53\xea\x5e\xf5" + "\x8c\x00\x97\x2e\xc5\x39\xd0\x67" + "\xfe\x72\x09\xa0\x14\xab\x42\xd9" + "\x4d\xe4\x7b\x12\x86\x1d\xb4\x28" + "\xbf\x56\xed\x61\xf8\x8f\x03\x9a" + "\x31\xc8\x3c\xd3\x6a\x01\x75\x0c" + "\xa3\x17\xae\x45\xdc\x50\xe7\x7e" + "\x15\x89\x20\xb7\x2b\xc2\x59\xf0" + "\x64\xfb\x92\x06\x9d\x34\xcb\x3f" + "\xd6\x6d\x04\x78\x0f\xa6\x1a\xb1" + "\x48\xdf\x53\xea\x81\x18\x8c\x23" + "\xba\x2e\xc5\x5c\xf3\x67\xfe\x95" + "\x09\xa0\x37\xce\x42\xd9\x70\x07" + "\x7b\x12\xa9\x1d\xb4\x4b\xe2\x56" + "\xed\x84\x1b\x8f\x26\xbd\x31\xc8" + "\x5f\xf6\x6a\x01\x98\x0c\xa3\x3a" + "\xd1\x45\xdc\x73\x0a\x7e\x15\xac" + "\x20\xb7\x4e\xe5\x59\xf0\x87\x1e" + "\x92\x29\xc0\x34\xcb\x62\xf9\x6d" + "\x04\x9b\x0f\xa6\x3d\xd4\x48\xdf" + "\x76\x0d\x81\x18\xaf\x23\xba\x51" + "\xe8\x5c\xf3\x8a\x21\x95\x2c\xc3" + "\x37\xce\x65\xfc\x70\x07\x9e\x12" + "\xa9\x40\xd7\x4b\xe2\x79\x10\x84" + "\x1b\xb2\x26\xbd\x54\xeb\x5f\xf6" + "\x8d\x01\x98\x2f\xc6\x3a\xd1\x68" + "\xff\x73\x0a\xa1\x15\xac\x43\xda" + "\x4e\xe5\x7c\x13\x87\x1e\xb5\x29" + "\xc0\x57\xee\x62\xf9\x90\x04\x9b" + "\x32\xc9\x3d\xd4\x6b\x02\x76\x0d" + "\xa4\x18\xaf\x46\xdd\x51\xe8\x7f" + "\x16\x8a\x21\xb8\x2c\xc3\x5a\xf1" + "\x65\xfc\x93\x07\x9e\x35\xcc\x40" + "\xd7\x6e\x05\x79\x10\xa7\x1b\xb2" + "\x49\xe0\x54\xeb\x82\x19\x8d\x24" + "\xbb\x2f\xc6\x5d\xf4\x68\xff\x96" + "\x0a\xa1\x38\xcf\x43\xda\x71\x08" + "\x7c\x13\xaa\x1e\xb5\x4c\xe3\x57" + "\xee\x85\x1c\x90\x27\xbe\x32\xc9" + "\x60\xf7\x6b\x02\x99\x0d\xa4\x3b" + 
"\xd2\x46\xdd\x74\x0b\x7f\x16\xad" + "\x21\xb8\x4f\xe6\x5a\xf1\x88\x1f" + "\x93\x2a\xc1\x35\xcc\x63\xfa\x6e" + "\x05\x9c\x10\xa7\x3e\xd5\x49\xe0" + "\x77\x0e\x82\x19\xb0\x24\xbb\x52" + "\xe9\x5d\xf4\x8b\x22\x96\x2d\xc4" + "\x38\xcf\x66\xfd\x71\x08\x9f\x13" + "\xaa\x41\xd8\x4c\xe3\x7a\x11\x85" + "\x1c\xb3\x27\xbe\x55\xec\x60\xf7" + "\x8e\x02\x99\x30\xc7\x3b\xd2\x69" + "\x00\x74\x0b\xa2\x16\xad\x44\xdb" + "\x4f\xe6\x7d\x14\x88\x1f\xb6\x2a" + "\xc1\x58\xef\x63\xfa\x91\x05\x9c" + "\x33\xca\x3e\xd5\x6c\x03\x77\x0e" + "\xa5\x19\xb0\x47\xde\x52\xe9\x80" + "\x17\x8b\x22\xb9\x2d\xc4\x5b\xf2" + "\x66\xfd\x94\x08\x9f\x36\xcd\x41" + "\xd8\x6f\x06\x7a\x11\xa8\x1c\xb3" + "\x4a\xe1\x55\xec\x83\x1a\x8e\x25" + "\xbc\x30\xc7\x5e\xf5\x69\x00\x97" + "\x0b\xa2\x39\xd0\x44\xdb\x72\x09" + "\x7d\x14\xab\x1f\xb6\x4d\xe4\x58" + "\xef\x86\x1d\x91\x28\xbf\x33\xca" + "\x61\xf8\x6c\x03\x9a\x0e\xa5\x3c" + "\xd3\x47\xde\x75\x0c\x80\x17\xae" + "\x22\xb9\x50\xe7\x5b\xf2\x89\x20" + "\x94\x2b\xc2\x36\xcd\x64\xfb\x6f" + "\x06\x9d\x11\xa8\x3f\xd6\x4a\xe1" + "\x78\x0f\x83\x1a\xb1\x25\xbc\x53" + "\xea\x5e\xf5\x8c\x00\x97\x2e\xc5" + "\x39\xd0\x67\xfe\x72\x09\xa0\x14" + "\xab\x42\xd9\x4d\xe4\x7b\x12\x86" + "\x1d\xb4\x28\xbf\x56\xed\x61\xf8" + "\x8f\x03\x9a\x31\xc8\x3c\xd3\x6a" + "\x01\x75\x0c\xa3\x17\xae\x45\xdc" + "\x50\xe7\x7e\x15\x89\x20\xb7\x2b" + "\xc2\x59\xf0\x64\xfb\x92\x06\x9d" + "\x34\xcb\x3f\xd6\x6d\x04\x78\x0f" + "\xa6\x1a\xb1\x48\xdf\x53\xea\x81" + "\x18\x8c\x23\xba\x2e\xc5\x5c\xf3" + "\x67\xfe\x95\x09\xa0\x37\xce\x42" + "\xd9\x70\x07\x7b\x12\xa9\x1d\xb4" + "\x4b\xe2\x56\xed\x84\x1b\x8f\x26" + "\xbd\x31\xc8\x5f\xf6\x6a\x01\x98", + .psize = 2048, + .digest = (u8 *)(u16 []){ 0x23ca }, } }; @@ -14323,6 +14582,1623 @@ static const struct cipher_testvec serpent_xts_dec_tv_template[] = { }, }; +/* + * SM4 test vector taken from the draft RFC + * https://tools.ietf.org/html/draft-crypto-sm4-00#ref-GBT.32907-2016 + */ + +static const struct cipher_testvec sm4_enc_tv_template[] = { + { /* SM4 Appendix A: Example Calculations. Example 1. */ + .key = "\x01\x23\x45\x67\x89\xAB\xCD\xEF" + "\xFE\xDC\xBA\x98\x76\x54\x32\x10", + .klen = 16, + .input = "\x01\x23\x45\x67\x89\xAB\xCD\xEF" + "\xFE\xDC\xBA\x98\x76\x54\x32\x10", + .ilen = 16, + .result = "\x68\x1E\xDF\x34\xD2\x06\x96\x5E" + "\x86\xB3\xE9\x4F\x53\x6E\x42\x46", + .rlen = 16, + }, { /* + * SM4 Appendix A: Example Calculations. + * Last 10 iterations of Example 2. 
+	      */
+		.key	= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
+			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
+		.klen	= 16,
+		.input	= "\x99\x4a\xc3\xe7\xc3\x57\x89\x6a"
+			  "\x81\xfc\xa8\x0e\x38\x3e\xef\x80"
+			  "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
+			  "\xf0\xf1\x30\x4c\x01\x27\x5a\x8f"
+			  "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
+			  "\xad\x57\x15\xab\x31\x5d\x0c\xef"
+			  "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
+			  "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
+			  "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
+			  "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
+			  "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
+			  "\xa5\x37\x00\xbe\xe7\x7b\x48\xfb"
+			  "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
+			  "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
+			  "\x88\xa6\x6e\x06\x93\xca\x43\xa5"
+			  "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
+			  "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
+			  "\xed\xce\x00\x19\x0e\x16\x02\x6e"
+			  "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
+			  "\x31\x51\xec\x47\xc3\x51\x83\xc1",
+		.ilen	= 160,
+		.result	= "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
+			  "\xf0\xf1\x30\x4c\x01\x27\x5a\x8f"
+			  "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
+			  "\xad\x57\x15\xab\x31\x5d\x0c\xef"
+			  "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
+			  "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
+			  "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
+			  "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
+			  "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
+			  "\xa5\x37\x00\xbe\xe7\x7b\x48\xfb"
+			  "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
+			  "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
+			  "\x88\xa6\x6e\x06\x93\xca\x43\xa5"
+			  "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
+			  "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
+			  "\xed\xce\x00\x19\x0e\x16\x02\x6e"
+			  "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
+			  "\x31\x51\xec\x47\xc3\x51\x83\xc1"
+			  "\x59\x52\x98\xc7\xc6\xfd\x27\x1f"
+			  "\x04\x02\xf8\x04\xc3\x3d\x3f\x66",
+		.rlen	= 160
+	}
+};
+
+static const struct cipher_testvec sm4_dec_tv_template[] = {
+	{ /* SM4 Appendix A: Example Calculations. Example 1. */
+		.key	= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
+			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
+		.klen	= 16,
+		.input	= "\x68\x1E\xDF\x34\xD2\x06\x96\x5E"
+			  "\x86\xB3\xE9\x4F\x53\x6E\x42\x46",
+		.ilen	= 16,
+		.result	= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
+			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
+		.rlen	= 16,
+	}, { /*
+	      * SM4 Appendix A: Example Calculations.
+	      * Last 10 iterations of Example 2.
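+	      *
+	      * Example 2 encrypts one block 1,000,000 times iteratively
+	      * under the same key, so consecutive 16-byte blocks here are
+	      * successive iteration outputs; the last .input block,
+	      * 59 52 98 c7 c6 fd 27 1f 04 02 f8 04 c3 3d 3f 66, is the
+	      * published final result of Example 2. These are the same ten
+	      * blocks as in the encryption vector above, with .input and
+	      * .result exchanged.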
+	      */
+		.key	= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
+			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
+		.klen	= 16,
+		.input	= "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
+			  "\xf0\xf1\x30\x4c\x01\x27\x5a\x8f"
+			  "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
+			  "\xad\x57\x15\xab\x31\x5d\x0c\xef"
+			  "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
+			  "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
+			  "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
+			  "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
+			  "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
+			  "\xa5\x37\x00\xbe\xe7\x7b\x48\xfb"
+			  "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
+			  "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
+			  "\x88\xa6\x6e\x06\x93\xca\x43\xa5"
+			  "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
+			  "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
+			  "\xed\xce\x00\x19\x0e\x16\x02\x6e"
+			  "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
+			  "\x31\x51\xec\x47\xc3\x51\x83\xc1"
+			  "\x59\x52\x98\xc7\xc6\xfd\x27\x1f"
+			  "\x04\x02\xf8\x04\xc3\x3d\x3f\x66",
+		.ilen	= 160,
+		.result	= "\x99\x4a\xc3\xe7\xc3\x57\x89\x6a"
+			  "\x81\xfc\xa8\x0e\x38\x3e\xef\x80"
+			  "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
+			  "\xf0\xf1\x30\x4c\x01\x27\x5a\x8f"
+			  "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
+			  "\xad\x57\x15\xab\x31\x5d\x0c\xef"
+			  "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
+			  "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
+			  "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
+			  "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
+			  "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
+			  "\xa5\x37\x00\xbe\xe7\x7b\x48\xfb"
+			  "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
+			  "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
+			  "\x88\xa6\x6e\x06\x93\xca\x43\xa5"
+			  "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
+			  "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
+			  "\xed\xce\x00\x19\x0e\x16\x02\x6e"
+			  "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
+			  "\x31\x51\xec\x47\xc3\x51\x83\xc1",
+		.rlen	= 160
+	}
+};
+
+/*
+ * Speck test vectors taken from the original paper:
+ * "The Simon and Speck Families of Lightweight Block Ciphers"
+ * https://eprint.iacr.org/2013/404.pdf
+ *
+ * Note that the paper does not make byte and word order clear. But it was
+ * confirmed with the authors that the intended orders are little endian byte
+ * order and (y, x) word order. Equivalently, the printed test vectors, when
+ * looking at only the bytes (ignoring the whitespace that divides them into
+ * words), are backwards: the left-most byte is actually the one with the
+ * highest memory address, while the right-most byte is actually the one with
+ * the lowest memory address.
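+ *
+ * As a worked example, the paper prints the Speck128/128 plaintext as
+ *
+ *	6c61766975716520 7469206564616d20
+ *
+ * which, per the rules above ('y' word first, little-endian bytes within
+ * each word), is laid out in memory as the .input bytes used below:
+ *
+ *	20 6d 61 64 65 20 69 74 20 65 71 75 69 76 61 6c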
+ */ + +static const struct cipher_testvec speck128_enc_tv_template[] = { + { /* Speck128/128 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\x20\x6d\x61\x64\x65\x20\x69\x74" + "\x20\x65\x71\x75\x69\x76\x61\x6c", + .ilen = 16, + .result = "\x18\x0d\x57\x5c\xdf\xfe\x60\x78" + "\x65\x32\x78\x79\x51\x98\x5d\xa6", + .rlen = 16, + }, { /* Speck128/192 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17", + .klen = 24, + .input = "\x65\x6e\x74\x20\x74\x6f\x20\x43" + "\x68\x69\x65\x66\x20\x48\x61\x72", + .ilen = 16, + .result = "\x86\x18\x3c\xe0\x5d\x18\xbc\xf9" + "\x66\x55\x13\x13\x3a\xcf\xe4\x1b", + .rlen = 16, + }, { /* Speck128/256 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .input = "\x70\x6f\x6f\x6e\x65\x72\x2e\x20" + "\x49\x6e\x20\x74\x68\x6f\x73\x65", + .ilen = 16, + .result = "\x43\x8f\x18\x9c\x8d\xb4\xee\x4e" + "\x3e\xf5\xc0\x05\x04\x01\x09\x41", + .rlen = 16, + }, +}; + +static const struct cipher_testvec speck128_dec_tv_template[] = { + { /* Speck128/128 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + .klen = 16, + .input = "\x18\x0d\x57\x5c\xdf\xfe\x60\x78" + "\x65\x32\x78\x79\x51\x98\x5d\xa6", + .ilen = 16, + .result = "\x20\x6d\x61\x64\x65\x20\x69\x74" + "\x20\x65\x71\x75\x69\x76\x61\x6c", + .rlen = 16, + }, { /* Speck128/192 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17", + .klen = 24, + .input = "\x86\x18\x3c\xe0\x5d\x18\xbc\xf9" + "\x66\x55\x13\x13\x3a\xcf\xe4\x1b", + .ilen = 16, + .result = "\x65\x6e\x74\x20\x74\x6f\x20\x43" + "\x68\x69\x65\x66\x20\x48\x61\x72", + .rlen = 16, + }, { /* Speck128/256 */ + .key = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + .klen = 32, + .input = "\x43\x8f\x18\x9c\x8d\xb4\xee\x4e" + "\x3e\xf5\xc0\x05\x04\x01\x09\x41", + .ilen = 16, + .result = "\x70\x6f\x6f\x6e\x65\x72\x2e\x20" + "\x49\x6e\x20\x74\x68\x6f\x73\x65", + .rlen = 16, + }, +}; + +/* + * Speck128-XTS test vectors, taken from the AES-XTS test vectors with the + * result recomputed with Speck128 as the cipher + */ + +static const struct cipher_testvec speck128_xts_enc_tv_template[] = { + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 32, + .result = "\xbe\xa0\xe7\x03\xd7\xfe\xab\x62" + "\x3b\x99\x4a\x64\x74\x77\xac\xed" + "\xd8\xf4\xa6\xcf\xae\xb9\x07\x42" + "\x51\xd9\xb6\x1d\xe0\x5e\xbc\x54", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = 
"\xfb\x53\x81\x75\x6f\x9f\x34\xad" + "\x7e\x01\xed\x7b\xcc\xda\x4e\x4a" + "\xd4\x84\xa4\x53\xd5\x88\x73\x1b" + "\xfd\xcb\xae\x0d\xf3\x04\xee\xe6", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\x21\x52\x84\x15\xd1\xf7\x21\x55" + "\xd9\x75\x4a\xd3\xc5\xdb\x9f\x7d" + "\xda\x63\xb2\xf1\x82\xb0\x89\x59" + "\x86\xd4\xaa\xaa\xdd\xff\x4f\x92", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\x57\xb5\xf8\x71\x6e\x6d\xdd\x82" + "\x53\xd0\xed\x2d\x30\xc1\x20\xef" + "\x70\x67\x5e\xff\x09\x70\xbb\xc1" + 
"\x3a\x7b\x48\x26\xd9\x0b\xf4\x48" + "\xbe\xce\xb1\xc7\xb2\x67\xc4\xa7" + "\x76\xf8\x36\x30\xb7\xb4\x9a\xd9" + "\xf5\x9d\xd0\x7b\xc1\x06\x96\x44" + "\x19\xc5\x58\x84\x63\xb9\x12\x68" + "\x68\xc7\xaa\x18\x98\xf2\x1f\x5c" + "\x39\xa6\xd8\x32\x2b\xc3\x51\xfd" + "\x74\x79\x2e\xb4\x44\xd7\x69\xc4" + "\xfc\x29\xe6\xed\x26\x1e\xa6\x9d" + "\x1c\xbe\x00\x0e\x7f\x3a\xca\xfb" + "\x6d\x13\x65\xa0\xf9\x31\x12\xe2" + "\x26\xd1\xec\x2b\x0a\x8b\x59\x99" + "\xa7\x49\xa0\x0e\x09\x33\x85\x50" + "\xc3\x23\xca\x7a\xdd\x13\x45\x5f" + "\xde\x4c\xa7\xcb\x00\x8a\x66\x6f" + "\xa2\xb6\xb1\x2e\xe1\xa0\x18\xf6" + "\xad\xf3\xbd\xeb\xc7\xef\x55\x4f" + "\x79\x91\x8d\x36\x13\x7b\xd0\x4a" + "\x6c\x39\xfb\x53\xb8\x6f\x02\x51" + "\xa5\x20\xac\x24\x1c\x73\x59\x73" + "\x58\x61\x3a\x87\x58\xb3\x20\x56" + "\x39\x06\x2b\x4d\xd3\x20\x2b\x89" + "\x3f\xa2\xf0\x96\xeb\x7f\xa4\xcd" + "\x11\xae\xbd\xcb\x3a\xb4\xd9\x91" + "\x09\x35\x71\x50\x65\xac\x92\xe3" + "\x7b\x32\xc0\x7a\xdd\xd4\xc3\x92" + "\x6f\xeb\x79\xde\x6f\xd3\x25\xc9" + "\xcd\x63\xf5\x1e\x7a\x3b\x26\x9d" + "\x77\x04\x80\xa9\xbf\x38\xb5\xbd" + "\xb8\x05\x07\xbd\xfd\xab\x7b\xf8" + "\x2a\x26\xcc\x49\x14\x6d\x55\x01" + "\x06\x94\xd8\xb2\x2d\x53\x83\x1b" + "\x8f\xd4\xdd\x57\x12\x7e\x18\xba" + "\x8e\xe2\x4d\x80\xef\x7e\x6b\x9d" + "\x24\xa9\x60\xa4\x97\x85\x86\x2a" + "\x01\x00\x09\xf1\xcb\x4a\x24\x1c" + "\xd8\xf6\xe6\x5b\xe7\x5d\xf2\xc4" + "\x97\x1c\x10\xc6\x4d\x66\x4f\x98" + "\x87\x30\xac\xd5\xea\x73\x49\x10" + "\x80\xea\xe5\x5f\x4d\x5f\x03\x33" + "\x66\x02\x35\x3d\x60\x06\x36\x4f" + "\x14\x1c\xd8\x07\x1f\x78\xd0\xf8" + "\x4f\x6c\x62\x7c\x15\xa5\x7c\x28" + "\x7c\xcc\xeb\x1f\xd1\x07\x90\x93" + "\x7e\xc2\xa8\x3a\x80\xc0\xf5\x30" + "\xcc\x75\xcf\x16\x26\xa9\x26\x3b" + "\xe7\x68\x2f\x15\x21\x5b\xe4\x00" + "\xbd\x48\x50\xcd\x75\x70\xc4\x62" + "\xbb\x41\xfb\x89\x4a\x88\x3b\x3b" + "\x51\x66\x02\x69\x04\x97\x36\xd4" + "\x75\xae\x0b\xa3\x42\xf8\xca\x79" + "\x8f\x93\xe9\xcc\x38\xbd\xd6\xd2" + "\xf9\x70\x4e\xc3\x6a\x8e\x25\xbd" + "\xea\x15\x5a\xa0\x85\x7e\x81\x0d" + "\x03\xe7\x05\x39\xf5\x05\x26\xee" + "\xec\xaa\x1f\x3d\xc9\x98\x76\x01" + "\x2c\xf4\xfc\xa3\x88\x77\x38\xc4" + "\x50\x65\x50\x6d\x04\x1f\xdf\x5a" + "\xaa\xf2\x01\xa9\xc1\x8d\xee\xca" + "\x47\x26\xef\x39\xb8\xb4\xf2\xd1" + "\xd6\xbb\x1b\x2a\xc1\x34\x14\xcf", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + 
"\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\xc5\x85\x2a\x4b\x73\xe4\xf6\xf1" + "\x7e\xf9\xf6\xe9\xa3\x73\x36\xcb" + "\xaa\xb6\x22\xb0\x24\x6e\x3d\x73" + "\x92\x99\xde\xd3\x76\xed\xcd\x63" + "\x64\x3a\x22\x57\xc1\x43\x49\xd4" + "\x79\x36\x31\x19\x62\xae\x10\x7e" + "\x7d\xcf\x7a\xe2\x6b\xce\x27\xfa" + "\xdc\x3d\xd9\x83\xd3\x42\x4c\xe0" + "\x1b\xd6\x1d\x1a\x6f\xd2\x03\x00" + "\xfc\x81\x99\x8a\x14\x62\xf5\x7e" + "\x0d\xe7\x12\xe8\x17\x9d\x0b\xec" + "\xe2\xf7\xc9\xa7\x63\xd1\x79\xb6" + "\x62\x62\x37\xfe\x0a\x4c\x4a\x37" + "\x70\xc7\x5e\x96\x5f\xbc\x8e\x9e" + "\x85\x3c\x4f\x26\x64\x85\xbc\x68" + "\xb0\xe0\x86\x5e\x26\x41\xce\x11" + "\x50\xda\x97\x14\xe9\x9e\xc7\x6d" + "\x3b\xdc\x43\xde\x2b\x27\x69\x7d" + "\xfc\xb0\x28\xbd\x8f\xb1\xc6\x31" + "\x14\x4d\xf0\x74\x37\xfd\x07\x25" + "\x96\x55\xe5\xfc\x9e\x27\x2a\x74" + "\x1b\x83\x4d\x15\x83\xac\x57\xa0" + "\xac\xa5\xd0\x38\xef\x19\x56\x53" + "\x25\x4b\xfc\xce\x04\x23\xe5\x6b" + "\xf6\xc6\x6c\x32\x0b\xb3\x12\xc5" + "\xed\x22\x34\x1c\x5d\xed\x17\x06" + "\x36\xa3\xe6\x77\xb9\x97\x46\xb8" + "\xe9\x3f\x7e\xc7\xbc\x13\x5c\xdc" + "\x6e\x3f\x04\x5e\xd1\x59\xa5\x82" + "\x35\x91\x3d\x1b\xe4\x97\x9f\x92" + "\x1c\x5e\x5f\x6f\x41\xd4\x62\xa1" + "\x8d\x39\xfc\x42\xfb\x38\x80\xb9" + "\x0a\xe3\xcc\x6a\x93\xd9\x7a\xb1" + "\xe9\x69\xaf\x0a\x6b\x75\x38\xa7" + "\xa1\xbf\xf7\xda\x95\x93\x4b\x78" + "\x19\xf5\x94\xf9\xd2\x00\x33\x37" + "\xcf\xf5\x9e\x9c\xf3\xcc\xa6\xee" + "\x42\xb2\x9e\x2c\x5f\x48\x23\x26" + "\x15\x25\x17\x03\x3d\xfe\x2c\xfc" + "\xeb\xba\xda\xe0\x00\x05\xb6\xa6" + "\x07\xb3\xe8\x36\x5b\xec\x5b\xbf" + "\xd6\x5b\x00\x74\xc6\x97\xf1\x6a" + "\x49\xa1\xc3\xfa\x10\x52\xb9\x14" + "\xad\xb7\x73\xf8\x78\x12\xc8\x59" + "\x17\x80\x4c\x57\x39\xf1\x6d\x80" + "\x25\x77\x0f\x5e\x7d\xf0\xaf\x21" + "\xec\xce\xb7\xc8\x02\x8a\xed\x53" + "\x2c\x25\x68\x2e\x1f\x85\x5e\x67" + "\xd1\x07\x7a\x3a\x89\x08\xe0\x34" + "\xdc\xdb\x26\xb4\x6b\x77\xfc\x40" + "\x31\x15\x72\xa0\xf0\x73\xd9\x3b" + "\xd5\xdb\xfe\xfc\x8f\xa9\x44\xa2" + "\x09\x9f\xc6\x33\xe5\xe2\x88\xe8" + "\xf3\xf0\x1a\xf4\xce\x12\x0f\xd6" + 
"\xf7\x36\xe6\xa4\xf4\x7a\x10\x58" + "\xcc\x1f\x48\x49\x65\x47\x75\xe9" + "\x28\xe1\x65\x7b\xf2\xc4\xb5\x07" + "\xf2\xec\x76\xd8\x8f\x09\xf3\x16" + "\xa1\x51\x89\x3b\xeb\x96\x42\xac" + "\x65\xe0\x67\x63\x29\xdc\xb4\x7d" + "\xf2\x41\x51\x6a\xcb\xde\x3c\xfb" + "\x66\x8d\x13\xca\xe0\x59\x2a\x00" + "\xc9\x53\x4c\xe6\x9e\xe2\x73\xd5" + "\x67\x19\xb2\xbd\x9a\x63\xd7\x5c", + .rlen = 512, + .also_non_np = 1, + .np = 3, + .tap = { 512 - 20, 4, 16 }, + } +}; + +static const struct cipher_testvec speck128_xts_dec_tv_template[] = { + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xbe\xa0\xe7\x03\xd7\xfe\xab\x62" + "\x3b\x99\x4a\x64\x74\x77\xac\xed" + "\xd8\xf4\xa6\xcf\xae\xb9\x07\x42" + "\x51\xd9\xb6\x1d\xe0\x5e\xbc\x54", + .ilen = 32, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xfb\x53\x81\x75\x6f\x9f\x34\xad" + "\x7e\x01\xed\x7b\xcc\xda\x4e\x4a" + "\xd4\x84\xa4\x53\xd5\x88\x73\x1b" + "\xfd\xcb\xae\x0d\xf3\x04\xee\xe6", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 32, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x21\x52\x84\x15\xd1\xf7\x21\x55" + "\xd9\x75\x4a\xd3\xc5\xdb\x9f\x7d" + "\xda\x63\xb2\xf1\x82\xb0\x89\x59" + "\x86\xd4\xaa\xaa\xdd\xff\x4f\x92", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95", + .klen = 32, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x57\xb5\xf8\x71\x6e\x6d\xdd\x82" + "\x53\xd0\xed\x2d\x30\xc1\x20\xef" + "\x70\x67\x5e\xff\x09\x70\xbb\xc1" + "\x3a\x7b\x48\x26\xd9\x0b\xf4\x48" + "\xbe\xce\xb1\xc7\xb2\x67\xc4\xa7" + "\x76\xf8\x36\x30\xb7\xb4\x9a\xd9" + "\xf5\x9d\xd0\x7b\xc1\x06\x96\x44" + "\x19\xc5\x58\x84\x63\xb9\x12\x68" + "\x68\xc7\xaa\x18\x98\xf2\x1f\x5c" + "\x39\xa6\xd8\x32\x2b\xc3\x51\xfd" + "\x74\x79\x2e\xb4\x44\xd7\x69\xc4" + "\xfc\x29\xe6\xed\x26\x1e\xa6\x9d" + "\x1c\xbe\x00\x0e\x7f\x3a\xca\xfb" + "\x6d\x13\x65\xa0\xf9\x31\x12\xe2" + "\x26\xd1\xec\x2b\x0a\x8b\x59\x99" + "\xa7\x49\xa0\x0e\x09\x33\x85\x50" + "\xc3\x23\xca\x7a\xdd\x13\x45\x5f" + "\xde\x4c\xa7\xcb\x00\x8a\x66\x6f" + "\xa2\xb6\xb1\x2e\xe1\xa0\x18\xf6" + "\xad\xf3\xbd\xeb\xc7\xef\x55\x4f" + "\x79\x91\x8d\x36\x13\x7b\xd0\x4a" + "\x6c\x39\xfb\x53\xb8\x6f\x02\x51" + "\xa5\x20\xac\x24\x1c\x73\x59\x73" + "\x58\x61\x3a\x87\x58\xb3\x20\x56" + "\x39\x06\x2b\x4d\xd3\x20\x2b\x89" + 
"\x3f\xa2\xf0\x96\xeb\x7f\xa4\xcd" + "\x11\xae\xbd\xcb\x3a\xb4\xd9\x91" + "\x09\x35\x71\x50\x65\xac\x92\xe3" + "\x7b\x32\xc0\x7a\xdd\xd4\xc3\x92" + "\x6f\xeb\x79\xde\x6f\xd3\x25\xc9" + "\xcd\x63\xf5\x1e\x7a\x3b\x26\x9d" + "\x77\x04\x80\xa9\xbf\x38\xb5\xbd" + "\xb8\x05\x07\xbd\xfd\xab\x7b\xf8" + "\x2a\x26\xcc\x49\x14\x6d\x55\x01" + "\x06\x94\xd8\xb2\x2d\x53\x83\x1b" + "\x8f\xd4\xdd\x57\x12\x7e\x18\xba" + "\x8e\xe2\x4d\x80\xef\x7e\x6b\x9d" + "\x24\xa9\x60\xa4\x97\x85\x86\x2a" + "\x01\x00\x09\xf1\xcb\x4a\x24\x1c" + "\xd8\xf6\xe6\x5b\xe7\x5d\xf2\xc4" + "\x97\x1c\x10\xc6\x4d\x66\x4f\x98" + "\x87\x30\xac\xd5\xea\x73\x49\x10" + "\x80\xea\xe5\x5f\x4d\x5f\x03\x33" + "\x66\x02\x35\x3d\x60\x06\x36\x4f" + "\x14\x1c\xd8\x07\x1f\x78\xd0\xf8" + "\x4f\x6c\x62\x7c\x15\xa5\x7c\x28" + "\x7c\xcc\xeb\x1f\xd1\x07\x90\x93" + "\x7e\xc2\xa8\x3a\x80\xc0\xf5\x30" + "\xcc\x75\xcf\x16\x26\xa9\x26\x3b" + "\xe7\x68\x2f\x15\x21\x5b\xe4\x00" + "\xbd\x48\x50\xcd\x75\x70\xc4\x62" + "\xbb\x41\xfb\x89\x4a\x88\x3b\x3b" + "\x51\x66\x02\x69\x04\x97\x36\xd4" + "\x75\xae\x0b\xa3\x42\xf8\xca\x79" + "\x8f\x93\xe9\xcc\x38\xbd\xd6\xd2" + "\xf9\x70\x4e\xc3\x6a\x8e\x25\xbd" + "\xea\x15\x5a\xa0\x85\x7e\x81\x0d" + "\x03\xe7\x05\x39\xf5\x05\x26\xee" + "\xec\xaa\x1f\x3d\xc9\x98\x76\x01" + "\x2c\xf4\xfc\xa3\x88\x77\x38\xc4" + "\x50\x65\x50\x6d\x04\x1f\xdf\x5a" + "\xaa\xf2\x01\xa9\xc1\x8d\xee\xca" + "\x47\x26\xef\x39\xb8\xb4\xf2\xd1" + "\xd6\xbb\x1b\x2a\xc1\x34\x14\xcf", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + 
"\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27" + "\x31\x41\x59\x26\x53\x58\x97\x93" + "\x23\x84\x62\x64\x33\x83\x27\x95" + "\x02\x88\x41\x97\x16\x93\x99\x37" + "\x51\x05\x82\x09\x74\x94\x45\x92", + .klen = 64, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xc5\x85\x2a\x4b\x73\xe4\xf6\xf1" + "\x7e\xf9\xf6\xe9\xa3\x73\x36\xcb" + "\xaa\xb6\x22\xb0\x24\x6e\x3d\x73" + "\x92\x99\xde\xd3\x76\xed\xcd\x63" + "\x64\x3a\x22\x57\xc1\x43\x49\xd4" + "\x79\x36\x31\x19\x62\xae\x10\x7e" + "\x7d\xcf\x7a\xe2\x6b\xce\x27\xfa" + "\xdc\x3d\xd9\x83\xd3\x42\x4c\xe0" + "\x1b\xd6\x1d\x1a\x6f\xd2\x03\x00" + "\xfc\x81\x99\x8a\x14\x62\xf5\x7e" + "\x0d\xe7\x12\xe8\x17\x9d\x0b\xec" + "\xe2\xf7\xc9\xa7\x63\xd1\x79\xb6" + "\x62\x62\x37\xfe\x0a\x4c\x4a\x37" + "\x70\xc7\x5e\x96\x5f\xbc\x8e\x9e" + "\x85\x3c\x4f\x26\x64\x85\xbc\x68" + "\xb0\xe0\x86\x5e\x26\x41\xce\x11" + "\x50\xda\x97\x14\xe9\x9e\xc7\x6d" + "\x3b\xdc\x43\xde\x2b\x27\x69\x7d" + "\xfc\xb0\x28\xbd\x8f\xb1\xc6\x31" + "\x14\x4d\xf0\x74\x37\xfd\x07\x25" + "\x96\x55\xe5\xfc\x9e\x27\x2a\x74" + "\x1b\x83\x4d\x15\x83\xac\x57\xa0" + "\xac\xa5\xd0\x38\xef\x19\x56\x53" + "\x25\x4b\xfc\xce\x04\x23\xe5\x6b" + "\xf6\xc6\x6c\x32\x0b\xb3\x12\xc5" + "\xed\x22\x34\x1c\x5d\xed\x17\x06" + "\x36\xa3\xe6\x77\xb9\x97\x46\xb8" + "\xe9\x3f\x7e\xc7\xbc\x13\x5c\xdc" + "\x6e\x3f\x04\x5e\xd1\x59\xa5\x82" + "\x35\x91\x3d\x1b\xe4\x97\x9f\x92" + "\x1c\x5e\x5f\x6f\x41\xd4\x62\xa1" + "\x8d\x39\xfc\x42\xfb\x38\x80\xb9" + "\x0a\xe3\xcc\x6a\x93\xd9\x7a\xb1" + "\xe9\x69\xaf\x0a\x6b\x75\x38\xa7" + "\xa1\xbf\xf7\xda\x95\x93\x4b\x78" + "\x19\xf5\x94\xf9\xd2\x00\x33\x37" + "\xcf\xf5\x9e\x9c\xf3\xcc\xa6\xee" + "\x42\xb2\x9e\x2c\x5f\x48\x23\x26" + "\x15\x25\x17\x03\x3d\xfe\x2c\xfc" + "\xeb\xba\xda\xe0\x00\x05\xb6\xa6" + "\x07\xb3\xe8\x36\x5b\xec\x5b\xbf" + "\xd6\x5b\x00\x74\xc6\x97\xf1\x6a" + "\x49\xa1\xc3\xfa\x10\x52\xb9\x14" + "\xad\xb7\x73\xf8\x78\x12\xc8\x59" + "\x17\x80\x4c\x57\x39\xf1\x6d\x80" + "\x25\x77\x0f\x5e\x7d\xf0\xaf\x21" + "\xec\xce\xb7\xc8\x02\x8a\xed\x53" + "\x2c\x25\x68\x2e\x1f\x85\x5e\x67" + "\xd1\x07\x7a\x3a\x89\x08\xe0\x34" + "\xdc\xdb\x26\xb4\x6b\x77\xfc\x40" + "\x31\x15\x72\xa0\xf0\x73\xd9\x3b" + "\xd5\xdb\xfe\xfc\x8f\xa9\x44\xa2" + "\x09\x9f\xc6\x33\xe5\xe2\x88\xe8" + "\xf3\xf0\x1a\xf4\xce\x12\x0f\xd6" + "\xf7\x36\xe6\xa4\xf4\x7a\x10\x58" + "\xcc\x1f\x48\x49\x65\x47\x75\xe9" + "\x28\xe1\x65\x7b\xf2\xc4\xb5\x07" + "\xf2\xec\x76\xd8\x8f\x09\xf3\x16" + "\xa1\x51\x89\x3b\xeb\x96\x42\xac" + "\x65\xe0\x67\x63\x29\xdc\xb4\x7d" + "\xf2\x41\x51\x6a\xcb\xde\x3c\xfb" + "\x66\x8d\x13\xca\xe0\x59\x2a\x00" + "\xc9\x53\x4c\xe6\x9e\xe2\x73\xd5" + "\x67\x19\xb2\xbd\x9a\x63\xd7\x5c", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + 
"\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + .also_non_np = 1, + .np = 3, + .tap = { 512 - 20, 4, 16 }, + } +}; + +static const struct cipher_testvec speck64_enc_tv_template[] = { + { /* Speck64/96 */ + .key = "\x00\x01\x02\x03\x08\x09\x0a\x0b" + "\x10\x11\x12\x13", + .klen = 12, + .input = "\x65\x61\x6e\x73\x20\x46\x61\x74", + .ilen = 8, + .result = "\x6c\x94\x75\x41\xec\x52\x79\x9f", + .rlen = 8, + }, { /* Speck64/128 */ + .key = "\x00\x01\x02\x03\x08\x09\x0a\x0b" + "\x10\x11\x12\x13\x18\x19\x1a\x1b", + .klen = 16, + .input = "\x2d\x43\x75\x74\x74\x65\x72\x3b", + .ilen = 8, + .result = "\x8b\x02\x4e\x45\x48\xa5\x6f\x8c", + .rlen = 8, + }, +}; + +static const struct cipher_testvec speck64_dec_tv_template[] = { + { /* Speck64/96 */ + .key = "\x00\x01\x02\x03\x08\x09\x0a\x0b" + "\x10\x11\x12\x13", + .klen = 12, + .input = "\x6c\x94\x75\x41\xec\x52\x79\x9f", + .ilen = 8, + .result = "\x65\x61\x6e\x73\x20\x46\x61\x74", + .rlen = 8, + }, { /* Speck64/128 */ + .key = "\x00\x01\x02\x03\x08\x09\x0a\x0b" + "\x10\x11\x12\x13\x18\x19\x1a\x1b", + .klen = 16, + .input = "\x8b\x02\x4e\x45\x48\xa5\x6f\x8c", + .ilen = 8, + .result = "\x2d\x43\x75\x74\x74\x65\x72\x3b", + .rlen = 8, + }, +}; + +/* + * Speck64-XTS test vectors, taken from the AES-XTS test vectors with the result + * recomputed with Speck64 as the cipher, and key lengths adjusted + */ + +static const struct cipher_testvec speck64_xts_enc_tv_template[] = { + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 24, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .ilen = 32, + .result = "\x84\xaf\x54\x07\x19\xd4\x7c\xa6" + "\xe4\xfe\xdf\xc4\x1f\x34\xc3\xc2" + "\x80\xf5\x72\xe7\xcd\xf0\x99\x22" + "\x35\xa7\x2f\x06\xef\xdc\x51\xaa", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 24, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\x12\x56\x73\xcd\x15\x87\xa8\x59" + "\xcf\x84\xae\xd9\x1c\x66\xd6\x9f" + "\xb3\x12\x69\x7e\x36\xeb\x52\xff" + "\x62\xdd\xba\x90\xb3\xe1\xee\x99", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 24, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .ilen = 32, + .result = "\x15\x1b\xe4\x2c\xa2\x5a\x2d\x2c" + "\x27\x36\xc0\xbf\x5d\xea\x36\x37" + "\x2d\x1a\x88\xbc\x66\xb5\xd0\x0b" + "\xa1\xbc\x19\xb2\x0f\x3b\x75\x34", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93", + .klen = 24, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + 
"\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\xaf\xa1\x81\xa6\x32\xbb\x15\x8e" + "\xf8\x95\x2e\xd3\xe6\xee\x7e\x09" + "\x0c\x1a\xf5\x02\x97\x8b\xe3\xb3" + "\x11\xc7\x39\x96\xd0\x95\xf4\x56" + "\xf4\xdd\x03\x38\x01\x44\x2c\xcf" + "\x88\xae\x8e\x3c\xcd\xe7\xaa\x66" + "\xfe\x3d\xc6\xfb\x01\x23\x51\x43" + "\xd5\xd2\x13\x86\x94\x34\xe9\x62" + "\xf9\x89\xe3\xd1\x7b\xbe\xf8\xef" + "\x76\x35\x04\x3f\xdb\x23\x9d\x0b" + "\x85\x42\xb9\x02\xd6\xcc\xdb\x96" + "\xa7\x6b\x27\xb6\xd4\x45\x8f\x7d" + "\xae\xd2\x04\xd5\xda\xc1\x7e\x24" + "\x8c\x73\xbe\x48\x7e\xcf\x65\x28" + "\x29\xe5\xbe\x54\x30\xcb\x46\x95" + "\x4f\x2e\x8a\x36\xc8\x27\xc5\xbe" + "\xd0\x1a\xaf\xab\x26\xcd\x9e\x69" + "\xa1\x09\x95\x71\x26\xe9\xc4\xdf" + "\xe6\x31\xc3\x46\xda\xaf\x0b\x41" + "\x1f\xab\xb1\x8e\xd6\xfc\x0b\xb3" + "\x82\xc0\x37\x27\xfc\x91\xa7\x05" + "\xfb\xc5\xdc\x2b\x74\x96\x48\x43" + "\x5d\x9c\x19\x0f\x60\x63\x3a\x1f" + "\x6f\xf0\x03\xbe\x4d\xfd\xc8\x4a" + "\xc6\xa4\x81\x6d\xc3\x12\x2a\x5c" + "\x07\xff\xf3\x72\x74\x48\xb5\x40" + "\x50\xb5\xdd\x90\x43\x31\x18\x15" + "\x7b\xf2\xa6\xdb\x83\xc8\x4b\x4a" + "\x29\x93\x90\x8b\xda\x07\xf0\x35" + "\x6d\x90\x88\x09\x4e\x83\xf5\x5b" + "\x94\x12\xbb\x33\x27\x1d\x3f\x23" + "\x51\xa8\x7c\x07\xa2\xae\x77\xa6" + "\x50\xfd\xcc\xc0\x4f\x80\x7a\x9f" + "\x66\xdd\xcd\x75\x24\x8b\x33\xf7" + "\x20\xdb\x83\x9b\x4f\x11\x63\x6e" + "\xcf\x37\xef\xc9\x11\x01\x5c\x45" + "\x32\x99\x7c\x3c\x9e\x42\x89\xe3" + "\x70\x6d\x15\x9f\xb1\xe6\xb6\x05" + "\xfe\x0c\xb9\x49\x2d\x90\x6d\xcc" + "\x5d\x3f\xc1\xfe\x89\x0a\x2e\x2d" + "\xa0\xa8\x89\x3b\x73\x39\xa5\x94" + "\x4c\xa4\xa6\xbb\xa7\x14\x46\x89" + "\x10\xff\xaf\xef\xca\xdd\x4f\x80" + "\xb3\xdf\x3b\xab\xd4\xe5\x5a\xc7" + "\x33\xca\x00\x8b\x8b\x3f\xea\xec" + "\x68\x8a\xc2\x6d\xfd\xd4\x67\x0f" + "\x22\x31\xe1\x0e\xfe\x5a\x04\xd5" + "\x64\xa3\xf1\x1a\x76\x28\xcc\x35" + "\x36\xa7\x0a\x74\xf7\x1c\x44\x9b" + "\xc7\x1b\x53\x17\x02\xea\xd1\xad" + "\x13\x51\x73\xc0\xa0\xb2\x05\x32" + "\xa8\xa2\x37\x2e\xe1\x7a\x3a\x19" + "\x26\xb4\x6c\x62\x5d\xb3\x1a\x1d" + "\x59\xda\xee\x1a\x22\x18\xda\x0d" + "\x88\x0f\x55\x8b\x72\x62\xfd\xc1" + "\x69\x13\xcd\x0d\x5f\xc1\x09\x52" + "\xee\xd6\xe3\x84\x4d\xee\xf6\x88" + "\xaf\x83\xdc\x76\xf4\xc0\x93\x3f" + "\x4a\x75\x2f\xb0\x0b\x3e\xc4\x54" + "\x7d\x69\x8d\x00\x62\x77\x0d\x14" + "\xbe\x7c\xa6\x7d\xc5\x24\x4f\xf3" + "\x50\xf7\x5f\xf4\xc2\xca\x41\x97" + "\x37\xbe\x75\x74\xcd\xf0\x75\x6e" + "\x25\x23\x94\xbd\xda\x8d\xb0\xd4", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27", + .klen = 32, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + 
"\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .ilen = 512, + .result = "\x55\xed\x71\xd3\x02\x8e\x15\x3b" + "\xc6\x71\x29\x2d\x3e\x89\x9f\x59" + "\x68\x6a\xcc\x8a\x56\x97\xf3\x95" + "\x4e\x51\x08\xda\x2a\xf8\x6f\x3c" + "\x78\x16\xea\x80\xdb\x33\x75\x94" + "\xf9\x29\xc4\x2b\x76\x75\x97\xc7" + "\xf2\x98\x2c\xf9\xff\xc8\xd5\x2b" + "\x18\xf1\xaf\xcf\x7c\xc5\x0b\xee" + "\xad\x3c\x76\x7c\xe6\x27\xa2\x2a" + "\xe4\x66\xe1\xab\xa2\x39\xfc\x7c" + "\xf5\xec\x32\x74\xa3\xb8\x03\x88" + "\x52\xfc\x2e\x56\x3f\xa1\xf0\x9f" + "\x84\x5e\x46\xed\x20\x89\xb6\x44" + "\x8d\xd0\xed\x54\x47\x16\xbe\x95" + "\x8a\xb3\x6b\x72\xc4\x32\x52\x13" + "\x1b\xb0\x82\xbe\xac\xf9\x70\xa6" + "\x44\x18\xdd\x8c\x6e\xca\x6e\x45" + "\x8f\x1e\x10\x07\x57\x25\x98\x7b" + "\x17\x8c\x78\xdd\x80\xa7\xd9\xd8" + "\x63\xaf\xb9\x67\x57\xfd\xbc\xdb" + "\x44\xe9\xc5\x65\xd1\xc7\x3b\xff" + "\x20\xa0\x80\x1a\xc3\x9a\xad\x5e" + "\x5d\x3b\xd3\x07\xd9\xf5\xfd\x3d" + "\x4a\x8b\xa8\xd2\x6e\x7a\x51\x65" + "\x6c\x8e\x95\xe0\x45\xc9\x5f\x4a" + "\x09\x3c\x3d\x71\x7f\x0c\x84\x2a" + "\xc8\x48\x52\x1a\xc2\xd5\xd6\x78" + "\x92\x1e\xa0\x90\x2e\xea\xf0\xf3" + "\xdc\x0f\xb1\xaf\x0d\x9b\x06\x2e" + "\x35\x10\x30\x82\x0d\xe7\xc5\x9b" + "\xde\x44\x18\xbd\x9f\xd1\x45\xa9" + "\x7b\x7a\x4a\xad\x35\x65\x27\xca" + "\xb2\xc3\xd4\x9b\x71\x86\x70\xee" + "\xf1\x89\x3b\x85\x4b\x5b\xaa\xaf" + "\xfc\x42\xc8\x31\x59\xbe\x16\x60" + "\x4f\xf9\xfa\x12\xea\xd0\xa7\x14" + "\xf0\x7a\xf3\xd5\x8d\xbd\x81\xef" + "\x52\x7f\x29\x51\x94\x20\x67\x3c" + 
"\xd1\xaf\x77\x9f\x22\x5a\x4e\x63" + "\xe7\xff\x73\x25\xd1\xdd\x96\x8a" + "\x98\x52\x6d\xf3\xac\x3e\xf2\x18" + "\x6d\xf6\x0a\x29\xa6\x34\x3d\xed" + "\xe3\x27\x0d\x9d\x0a\x02\x44\x7e" + "\x5a\x7e\x67\x0f\x0a\x9e\xd6\xad" + "\x91\xe6\x4d\x81\x8c\x5c\x59\xaa" + "\xfb\xeb\x56\x53\xd2\x7d\x4c\x81" + "\x65\x53\x0f\x41\x11\xbd\x98\x99" + "\xf9\xc6\xfa\x51\x2e\xa3\xdd\x8d" + "\x84\x98\xf9\x34\xed\x33\x2a\x1f" + "\x82\xed\xc1\x73\x98\xd3\x02\xdc" + "\xe6\xc2\x33\x1d\xa2\xb4\xca\x76" + "\x63\x51\x34\x9d\x96\x12\xae\xce" + "\x83\xc9\x76\x5e\xa4\x1b\x53\x37" + "\x17\xd5\xc0\x80\x1d\x62\xf8\x3d" + "\x54\x27\x74\xbb\x10\x86\x57\x46" + "\x68\xe1\xed\x14\xe7\x9d\xfc\x84" + "\x47\xbc\xc2\xf8\x19\x4b\x99\xcf" + "\x7a\xe9\xc4\xb8\x8c\x82\x72\x4d" + "\x7b\x4f\x38\x55\x36\x71\x64\xc1" + "\xfc\x5c\x75\x52\x33\x02\x18\xf8" + "\x17\xe1\x2b\xc2\x43\x39\xbd\x76" + "\x9b\x63\x76\x32\x2f\x19\x72\x10" + "\x9f\x21\x0c\xf1\x66\x50\x7f\xa5" + "\x0d\x1f\x46\xe0\xba\xd3\x2f\x3c", + .rlen = 512, + .also_non_np = 1, + .np = 3, + .tap = { 512 - 20, 4, 16 }, + } +}; + +static const struct cipher_testvec speck64_xts_dec_tv_template[] = { + { + .key = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .klen = 24, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x84\xaf\x54\x07\x19\xd4\x7c\xa6" + "\xe4\xfe\xdf\xc4\x1f\x34\xc3\xc2" + "\x80\xf5\x72\xe7\xcd\xf0\x99\x22" + "\x35\xa7\x2f\x06\xef\xdc\x51\xaa", + .ilen = 32, + .result = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .rlen = 32, + }, { + .key = "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x11\x11\x11\x11\x11\x11\x11\x11" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 24, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x12\x56\x73\xcd\x15\x87\xa8\x59" + "\xcf\x84\xae\xd9\x1c\x66\xd6\x9f" + "\xb3\x12\x69\x7e\x36\xeb\x52\xff" + "\x62\xdd\xba\x90\xb3\xe1\xee\x99", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8" + "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\x22\x22\x22\x22\x22\x22\x22\x22", + .klen = 24, + .iv = "\x33\x33\x33\x33\x33\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x15\x1b\xe4\x2c\xa2\x5a\x2d\x2c" + "\x27\x36\xc0\xbf\x5d\xea\x36\x37" + "\x2d\x1a\x88\xbc\x66\xb5\xd0\x0b" + "\xa1\xbc\x19\xb2\x0f\x3b\x75\x34", + .ilen = 32, + .result = "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44" + "\x44\x44\x44\x44\x44\x44\x44\x44", + .rlen = 32, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x31\x41\x59\x26\x53\x58\x97\x93", + .klen = 24, + .iv = "\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\xaf\xa1\x81\xa6\x32\xbb\x15\x8e" + "\xf8\x95\x2e\xd3\xe6\xee\x7e\x09" + "\x0c\x1a\xf5\x02\x97\x8b\xe3\xb3" + "\x11\xc7\x39\x96\xd0\x95\xf4\x56" + "\xf4\xdd\x03\x38\x01\x44\x2c\xcf" + "\x88\xae\x8e\x3c\xcd\xe7\xaa\x66" + "\xfe\x3d\xc6\xfb\x01\x23\x51\x43" + "\xd5\xd2\x13\x86\x94\x34\xe9\x62" + "\xf9\x89\xe3\xd1\x7b\xbe\xf8\xef" + "\x76\x35\x04\x3f\xdb\x23\x9d\x0b" + "\x85\x42\xb9\x02\xd6\xcc\xdb\x96" + "\xa7\x6b\x27\xb6\xd4\x45\x8f\x7d" + "\xae\xd2\x04\xd5\xda\xc1\x7e\x24" + 
"\x8c\x73\xbe\x48\x7e\xcf\x65\x28" + "\x29\xe5\xbe\x54\x30\xcb\x46\x95" + "\x4f\x2e\x8a\x36\xc8\x27\xc5\xbe" + "\xd0\x1a\xaf\xab\x26\xcd\x9e\x69" + "\xa1\x09\x95\x71\x26\xe9\xc4\xdf" + "\xe6\x31\xc3\x46\xda\xaf\x0b\x41" + "\x1f\xab\xb1\x8e\xd6\xfc\x0b\xb3" + "\x82\xc0\x37\x27\xfc\x91\xa7\x05" + "\xfb\xc5\xdc\x2b\x74\x96\x48\x43" + "\x5d\x9c\x19\x0f\x60\x63\x3a\x1f" + "\x6f\xf0\x03\xbe\x4d\xfd\xc8\x4a" + "\xc6\xa4\x81\x6d\xc3\x12\x2a\x5c" + "\x07\xff\xf3\x72\x74\x48\xb5\x40" + "\x50\xb5\xdd\x90\x43\x31\x18\x15" + "\x7b\xf2\xa6\xdb\x83\xc8\x4b\x4a" + "\x29\x93\x90\x8b\xda\x07\xf0\x35" + "\x6d\x90\x88\x09\x4e\x83\xf5\x5b" + "\x94\x12\xbb\x33\x27\x1d\x3f\x23" + "\x51\xa8\x7c\x07\xa2\xae\x77\xa6" + "\x50\xfd\xcc\xc0\x4f\x80\x7a\x9f" + "\x66\xdd\xcd\x75\x24\x8b\x33\xf7" + "\x20\xdb\x83\x9b\x4f\x11\x63\x6e" + "\xcf\x37\xef\xc9\x11\x01\x5c\x45" + "\x32\x99\x7c\x3c\x9e\x42\x89\xe3" + "\x70\x6d\x15\x9f\xb1\xe6\xb6\x05" + "\xfe\x0c\xb9\x49\x2d\x90\x6d\xcc" + "\x5d\x3f\xc1\xfe\x89\x0a\x2e\x2d" + "\xa0\xa8\x89\x3b\x73\x39\xa5\x94" + "\x4c\xa4\xa6\xbb\xa7\x14\x46\x89" + "\x10\xff\xaf\xef\xca\xdd\x4f\x80" + "\xb3\xdf\x3b\xab\xd4\xe5\x5a\xc7" + "\x33\xca\x00\x8b\x8b\x3f\xea\xec" + "\x68\x8a\xc2\x6d\xfd\xd4\x67\x0f" + "\x22\x31\xe1\x0e\xfe\x5a\x04\xd5" + "\x64\xa3\xf1\x1a\x76\x28\xcc\x35" + "\x36\xa7\x0a\x74\xf7\x1c\x44\x9b" + "\xc7\x1b\x53\x17\x02\xea\xd1\xad" + "\x13\x51\x73\xc0\xa0\xb2\x05\x32" + "\xa8\xa2\x37\x2e\xe1\x7a\x3a\x19" + "\x26\xb4\x6c\x62\x5d\xb3\x1a\x1d" + "\x59\xda\xee\x1a\x22\x18\xda\x0d" + "\x88\x0f\x55\x8b\x72\x62\xfd\xc1" + "\x69\x13\xcd\x0d\x5f\xc1\x09\x52" + "\xee\xd6\xe3\x84\x4d\xee\xf6\x88" + "\xaf\x83\xdc\x76\xf4\xc0\x93\x3f" + "\x4a\x75\x2f\xb0\x0b\x3e\xc4\x54" + "\x7d\x69\x8d\x00\x62\x77\x0d\x14" + "\xbe\x7c\xa6\x7d\xc5\x24\x4f\xf3" + "\x50\xf7\x5f\xf4\xc2\xca\x41\x97" + "\x37\xbe\x75\x74\xcd\xf0\x75\x6e" + "\x25\x23\x94\xbd\xda\x8d\xb0\xd4", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + 
"\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + }, { + .key = "\x27\x18\x28\x18\x28\x45\x90\x45" + "\x23\x53\x60\x28\x74\x71\x35\x26" + "\x62\x49\x77\x57\x24\x70\x93\x69" + "\x99\x59\x57\x49\x66\x96\x76\x27", + .klen = 32, + .iv = "\xff\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00", + .input = "\x55\xed\x71\xd3\x02\x8e\x15\x3b" + "\xc6\x71\x29\x2d\x3e\x89\x9f\x59" + "\x68\x6a\xcc\x8a\x56\x97\xf3\x95" + "\x4e\x51\x08\xda\x2a\xf8\x6f\x3c" + "\x78\x16\xea\x80\xdb\x33\x75\x94" + "\xf9\x29\xc4\x2b\x76\x75\x97\xc7" + "\xf2\x98\x2c\xf9\xff\xc8\xd5\x2b" + "\x18\xf1\xaf\xcf\x7c\xc5\x0b\xee" + "\xad\x3c\x76\x7c\xe6\x27\xa2\x2a" + "\xe4\x66\xe1\xab\xa2\x39\xfc\x7c" + "\xf5\xec\x32\x74\xa3\xb8\x03\x88" + "\x52\xfc\x2e\x56\x3f\xa1\xf0\x9f" + "\x84\x5e\x46\xed\x20\x89\xb6\x44" + "\x8d\xd0\xed\x54\x47\x16\xbe\x95" + "\x8a\xb3\x6b\x72\xc4\x32\x52\x13" + "\x1b\xb0\x82\xbe\xac\xf9\x70\xa6" + "\x44\x18\xdd\x8c\x6e\xca\x6e\x45" + "\x8f\x1e\x10\x07\x57\x25\x98\x7b" + "\x17\x8c\x78\xdd\x80\xa7\xd9\xd8" + "\x63\xaf\xb9\x67\x57\xfd\xbc\xdb" + "\x44\xe9\xc5\x65\xd1\xc7\x3b\xff" + "\x20\xa0\x80\x1a\xc3\x9a\xad\x5e" + "\x5d\x3b\xd3\x07\xd9\xf5\xfd\x3d" + "\x4a\x8b\xa8\xd2\x6e\x7a\x51\x65" + "\x6c\x8e\x95\xe0\x45\xc9\x5f\x4a" + "\x09\x3c\x3d\x71\x7f\x0c\x84\x2a" + "\xc8\x48\x52\x1a\xc2\xd5\xd6\x78" + "\x92\x1e\xa0\x90\x2e\xea\xf0\xf3" + "\xdc\x0f\xb1\xaf\x0d\x9b\x06\x2e" + "\x35\x10\x30\x82\x0d\xe7\xc5\x9b" + "\xde\x44\x18\xbd\x9f\xd1\x45\xa9" + "\x7b\x7a\x4a\xad\x35\x65\x27\xca" + "\xb2\xc3\xd4\x9b\x71\x86\x70\xee" + "\xf1\x89\x3b\x85\x4b\x5b\xaa\xaf" + "\xfc\x42\xc8\x31\x59\xbe\x16\x60" + "\x4f\xf9\xfa\x12\xea\xd0\xa7\x14" + "\xf0\x7a\xf3\xd5\x8d\xbd\x81\xef" + "\x52\x7f\x29\x51\x94\x20\x67\x3c" + "\xd1\xaf\x77\x9f\x22\x5a\x4e\x63" + "\xe7\xff\x73\x25\xd1\xdd\x96\x8a" + "\x98\x52\x6d\xf3\xac\x3e\xf2\x18" + "\x6d\xf6\x0a\x29\xa6\x34\x3d\xed" + "\xe3\x27\x0d\x9d\x0a\x02\x44\x7e" + "\x5a\x7e\x67\x0f\x0a\x9e\xd6\xad" + "\x91\xe6\x4d\x81\x8c\x5c\x59\xaa" + "\xfb\xeb\x56\x53\xd2\x7d\x4c\x81" + "\x65\x53\x0f\x41\x11\xbd\x98\x99" + "\xf9\xc6\xfa\x51\x2e\xa3\xdd\x8d" + "\x84\x98\xf9\x34\xed\x33\x2a\x1f" + "\x82\xed\xc1\x73\x98\xd3\x02\xdc" + "\xe6\xc2\x33\x1d\xa2\xb4\xca\x76" + "\x63\x51\x34\x9d\x96\x12\xae\xce" + "\x83\xc9\x76\x5e\xa4\x1b\x53\x37" + "\x17\xd5\xc0\x80\x1d\x62\xf8\x3d" + "\x54\x27\x74\xbb\x10\x86\x57\x46" + "\x68\xe1\xed\x14\xe7\x9d\xfc\x84" + "\x47\xbc\xc2\xf8\x19\x4b\x99\xcf" + "\x7a\xe9\xc4\xb8\x8c\x82\x72\x4d" + "\x7b\x4f\x38\x55\x36\x71\x64\xc1" + "\xfc\x5c\x75\x52\x33\x02\x18\xf8" + "\x17\xe1\x2b\xc2\x43\x39\xbd\x76" + "\x9b\x63\x76\x32\x2f\x19\x72\x10" + "\x9f\x21\x0c\xf1\x66\x50\x7f\xa5" + "\x0d\x1f\x46\xe0\xba\xd3\x2f\x3c", + .ilen = 512, + .result = "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + 
"\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\x00\x01\x02\x03\x04\x05\x06\x07" + "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17" + "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27" + "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37" + "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47" + "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57" + "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67" + "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77" + "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97" + "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" + "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7" + "\xa8\xa9\xaa\xab\xac\xad\xae\xaf" + "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7" + "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf" + "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7" + "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf" + "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7" + "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf" + "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7" + "\xe8\xe9\xea\xeb\xec\xed\xee\xef" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7" + "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + .rlen = 512, + .also_non_np = 1, + .np = 3, + .tap = { 512 - 20, 4, 16 }, + } +}; + /* Cast6 test vectors from RFC 2612 */ static const struct cipher_testvec cast6_enc_tv_template[] = { { diff --git a/crypto/xts.c b/crypto/xts.c index f317c48b5e43..12284183bd20 100644 --- a/crypto/xts.c +++ b/crypto/xts.c @@ -357,78 +357,6 @@ static int decrypt(struct skcipher_request *req) return do_decrypt(req, init_crypt(req, decrypt_done)); } -int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst, - struct scatterlist *ssrc, unsigned int nbytes, - struct xts_crypt_req *req) -{ - const unsigned int bsize = XTS_BLOCK_SIZE; - const unsigned int max_blks = req->tbuflen / bsize; - struct blkcipher_walk walk; - unsigned int nblocks; - le128 *src, *dst, *t; - le128 *t_buf = req->tbuf; - int err, i; - - BUG_ON(max_blks < 1); - - blkcipher_walk_init(&walk, sdst, ssrc, nbytes); - - err = blkcipher_walk_virt(desc, &walk); - nbytes = walk.nbytes; - if (!nbytes) - return err; - - nblocks = min(nbytes / bsize, max_blks); - src = (le128 *)walk.src.virt.addr; - dst = (le128 *)walk.dst.virt.addr; - - /* calculate first value of T */ - req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv); - - i = 0; - goto first; - - for (;;) { - do { - for (i = 0; i < nblocks; i++) { - 
-				gf128mul_x_ble(&t_buf[i], t);
-first:
-				t = &t_buf[i];
-
-				/* PP <- T xor P */
-				le128_xor(dst + i, t, src + i);
-			}
-
-			/* CC <- E(Key2,PP) */
-			req->crypt_fn(req->crypt_ctx, (u8 *)dst,
-				      nblocks * bsize);
-
-			/* C <- T xor CC */
-			for (i = 0; i < nblocks; i++)
-				le128_xor(dst + i, dst + i, &t_buf[i]);
-
-			src += nblocks;
-			dst += nblocks;
-			nbytes -= nblocks * bsize;
-			nblocks = min(nbytes / bsize, max_blks);
-		} while (nblocks > 0);
-
-		*(le128 *)walk.iv = *t;
-
-		err = blkcipher_walk_done(desc, &walk, nbytes);
-		nbytes = walk.nbytes;
-		if (!nbytes)
-			break;
-
-		nblocks = min(nbytes / bsize, max_blks);
-		src = (le128 *)walk.src.virt.addr;
-		dst = (le128 *)walk.dst.virt.addr;
-	}
-
-	return err;
-}
-EXPORT_SYMBOL_GPL(xts_crypt);
-
 static int init_tfm(struct crypto_skcipher *tfm)
 {
 	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
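
The single-block Speck64 vectors added above can be cross-checked against the cipher as specified in the SPECK paper (Beaulieu et al., "The SIMON and SPECK Families of Lightweight Block Ciphers"). Below is a minimal Python 3 sketch, independent of the kernel code; the helper names are ours, and the only assumption is the little-endian word order used by the kernel's generic implementation, which the first test vector confirms:

```python
import struct

MASK32 = 0xffffffff

def ror32(x, r):
    return ((x >> r) | (x << (32 - r))) & MASK32

def rol32(x, r):
    return ((x << r) | (x >> (32 - r))) & MASK32

def speck64_round_keys(key):
    # Key bytes are read as little-endian 32-bit words, lowest address
    # first; the first word seeds k[], the remaining words seed l[].
    words = list(struct.unpack('<%dI' % (len(key) // 4), key))
    k, l = [words[0]], words[1:]
    nrounds = 26 if len(words) == 3 else 27   # Speck64/96 vs. Speck64/128
    for i in range(nrounds - 1):
        l.append(((k[i] + ror32(l[i], 8)) & MASK32) ^ i)
        k.append(rol32(k[i], 3) ^ l[-1])
    return k

def speck64_encrypt(block, round_keys):
    # One Speck round: x = (x >>> 8) + y ^ k;  y = (y <<< 3) ^ x.
    y, x = struct.unpack('<2I', block)
    for rk in round_keys:
        x = ((ror32(x, 8) + y) & MASK32) ^ rk
        y = rol32(y, 3) ^ x
    return struct.pack('<2I', y, x)

# First entry of speck64_enc_tv_template above (Speck64/96).
key = b"\x00\x01\x02\x03\x08\x09\x0a\x0b\x10\x11\x12\x13"
pt  = b"\x65\x61\x6e\x73\x20\x46\x61\x74"
assert speck64_encrypt(pt, speck64_round_keys(key)) == \
       b"\x6c\x94\x75\x41\xec\x52\x79\x9f"
```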
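The xts_crypt() helper removed above spells out the standard XTS data path in its comments: PP <- T xor P, CC <- E(Key2, PP), C <- T xor CC, with the tweak T advanced between blocks by multiplying by x in GF(2^128) (gf128mul_x_ble), and the first T produced by encrypting the IV under the tweak key. A rough Python sketch of that per-block loop follows; it is illustrative only, not kernel API: the caller is assumed to supply the first tweak value and an ECB-encrypt callable.

```python
def gf128mul_x_ble(t: bytes) -> bytes:
    # Multiply the tweak by x in GF(2^128), little-endian ("ble")
    # convention: shift the 128-bit value left by one bit and, on
    # carry-out, fold in the XTS reduction constant 0x87.
    n = int.from_bytes(t, 'little')
    carry = n >> 127
    n = ((n << 1) & ((1 << 128) - 1)) ^ (0x87 * carry)
    return n.to_bytes(16, 'little')

def xts_crypt_blocks(ecb_crypt, t, data):
    # One pass of the removed loop: PP = T ^ P, CC = E(PP), C = T ^ CC,
    # then step the tweak.  't' is the first tweak value, i.e. the IV
    # already encrypted under the second (tweak) key.
    assert len(data) % 16 == 0
    out = bytearray()
    for off in range(0, len(data), 16):
        pp = bytes(a ^ b for a, b in zip(data[off:off + 16], t))
        cc = ecb_crypt(pp)
        out += bytes(a ^ b for a, b in zip(cc, t))
        t = gf128mul_x_ble(t)
    return bytes(out)
```

Decryption is the same flow with the block cipher's decrypt in place of encrypt; the tweak schedule is identical, which is why the old helper served both directions through req->crypt_fn.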