From fb469840b8c34b2f95b40a64b271f245cc1075b7 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sun, 10 Dec 2006 10:45:28 +1100 Subject: [CRYPTO] all: Check for usage in hard IRQ context Using blkcipher/hash crypto operations in hard IRQ context can lead to random memory corruption due to the reuse of kmap_atomic slots. Since crypto operations were never meant to be used in hard IRQ contexts, this patch checks for such usage and returns an error before kmap_atomic is performed. Signed-off-by: Herbert Xu --- crypto/blkcipher.c | 4 ++++ crypto/digest.c | 19 ++++++++++++++++--- crypto/xcbc.c | 21 +++++++++++++++++---- 3 files changed, 37 insertions(+), 7 deletions(-) (limited to 'crypto') diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c index 6e93004f2181..cbb4c4e5c229 100644 --- a/crypto/blkcipher.c +++ b/crypto/blkcipher.c @@ -16,6 +16,7 @@ #include #include +#include #include #include #include @@ -313,6 +314,9 @@ static int blkcipher_walk_first(struct blkcipher_desc *desc, struct crypto_blkcipher *tfm = desc->tfm; unsigned int alignmask = crypto_blkcipher_alignmask(tfm); + if (WARN_ON_ONCE(in_irq())) + return -EDEADLK; + walk->nbytes = walk->total; if (unlikely(!walk->total)) return 0; diff --git a/crypto/digest.c b/crypto/digest.c index 8f4593268ce0..bc47af648cb1 100644 --- a/crypto/digest.c +++ b/crypto/digest.c @@ -14,7 +14,9 @@ #include #include +#include #include +#include #include #include @@ -29,8 +31,8 @@ static int init(struct hash_desc *desc) return 0; } -static int update(struct hash_desc *desc, - struct scatterlist *sg, unsigned int nbytes) +static int update2(struct hash_desc *desc, + struct scatterlist *sg, unsigned int nbytes) { struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm); unsigned int alignmask = crypto_tfm_alg_alignmask(tfm); @@ -81,6 +83,14 @@ static int update(struct hash_desc *desc, return 0; } +static int update(struct hash_desc *desc, + struct scatterlist *sg, unsigned int nbytes) +{ + if (WARN_ON_ONCE(in_irq())) + return -EDEADLK; + return update2(desc, sg, nbytes); +} + static int final(struct hash_desc *desc, u8 *out) { struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm); @@ -118,8 +128,11 @@ static int setkey(struct crypto_hash *hash, const u8 *key, unsigned int keylen) static int digest(struct hash_desc *desc, struct scatterlist *sg, unsigned int nbytes, u8 *out) { + if (WARN_ON_ONCE(in_irq())) + return -EDEADLK; + init(desc); - update(desc, sg, nbytes); + update2(desc, sg, nbytes); return final(desc, out); } diff --git a/crypto/xcbc.c b/crypto/xcbc.c index 9347eb6bcf69..317e9f08fc04 100644 --- a/crypto/xcbc.c +++ b/crypto/xcbc.c @@ -21,6 +21,7 @@ #include #include +#include #include #include #include @@ -108,9 +109,9 @@ static int crypto_xcbc_digest_init(struct hash_desc *pdesc) return 0; } -static int crypto_xcbc_digest_update(struct hash_desc *pdesc, - struct scatterlist *sg, - unsigned int nbytes) +static int crypto_xcbc_digest_update2(struct hash_desc *pdesc, + struct scatterlist *sg, + unsigned int nbytes) { struct crypto_hash *parent = pdesc->tfm; struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent); @@ -183,6 +184,15 @@ static int crypto_xcbc_digest_update(struct hash_desc *pdesc, return 0; } +static int crypto_xcbc_digest_update(struct hash_desc *pdesc, + struct scatterlist *sg, + unsigned int nbytes) +{ + if (WARN_ON_ONCE(in_irq())) + return -EDEADLK; + return crypto_xcbc_digest_update2(pdesc, sg, nbytes); +} + static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out) { struct crypto_hash *parent = pdesc->tfm; @@ -234,8 
+244,11 @@ static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out) static int crypto_xcbc_digest(struct hash_desc *pdesc, struct scatterlist *sg, unsigned int nbytes, u8 *out) { + if (WARN_ON_ONCE(in_irq())) + return -EDEADLK; + crypto_xcbc_digest_init(pdesc); - crypto_xcbc_digest_update(pdesc, sg, nbytes); + crypto_xcbc_digest_update2(pdesc, sg, nbytes); return crypto_xcbc_digest_final(pdesc, out); } -- cgit v1.2.3 From a28091ae170cd06695bf461905c5b97a165633ba Mon Sep 17 00:00:00 2001 From: Andrew Donofrio Date: Sun, 10 Dec 2006 12:10:20 +1100 Subject: [CRYPTO] tcrypt: Added test vectors for sha384/sha512 This patch adds tests for SHA384 HMAC and SHA512 HMAC to the tcrypt module. Test data was taken from RFC4231. This patch is a follow-up to the discovery (bug 7646) that the kernel SHA384 HMAC implementation was not generating proper SHA384 HMACs. Signed-off-by: Andrew Donofrio Signed-off-by: Herbert Xu --- crypto/tcrypt.c | 15 ++++ crypto/tcrypt.h | 245 +++++++++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 259 insertions(+), 1 deletion(-) (limited to 'crypto') diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index d671e8942b1f..ff489423b2cf 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -12,6 +12,7 @@ * Software Foundation; either version 2 of the License, or (at your option) * any later version. * + * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests * 2004-08-09 Added cipher speed tests (Reyk Floeter ) * 2003-09-14 Rewritten by Kartikey Mahendra Bhatt * @@ -980,6 +981,10 @@ static void do_test(void) HMAC_SHA1_TEST_VECTORS); test_hash("hmac(sha256)", hmac_sha256_tv_template, HMAC_SHA256_TEST_VECTORS); + test_hash("hmac(sha384)", hmac_sha384_tv_template, + HMAC_SHA384_TEST_VECTORS); + test_hash("hmac(sha512)", hmac_sha512_tv_template, + HMAC_SHA512_TEST_VECTORS); test_hash("xcbc(aes)", aes_xcbc128_tv_template, XCBC_AES_TEST_VECTORS); @@ -1192,6 +1197,16 @@ static void do_test(void) HMAC_SHA256_TEST_VECTORS); break; + case 103: + test_hash("hmac(sha384)", hmac_sha384_tv_template, + HMAC_SHA384_TEST_VECTORS); + break; + + case 104: + test_hash("hmac(sha512)", hmac_sha512_tv_template, + HMAC_SHA512_TEST_VECTORS); + break; + case 200: test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0, diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index 48a81362cb85..6c7b7a1e73f4 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -12,6 +12,7 @@ * Software Foundation; either version 2 of the License, or (at your option) * any later version. 
* + * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests * 2004-08-09 Cipher speed tests by Reyk Floeter * 2003-09-14 Changes by Kartikey Mahendra Bhatt * @@ -27,7 +28,7 @@ struct hash_testvec { /* only used with keyed hash algorithms */ - char key[128] __attribute__ ((__aligned__(4))); + char key[132] __attribute__ ((__aligned__(4))); char plaintext[240]; char digest[MAX_DIGEST_SIZE]; unsigned char tap[MAX_TAP]; @@ -1001,6 +1002,248 @@ static struct hash_testvec aes_xcbc128_tv_template[] = { } }; +/* + * SHA384 HMAC test vectors from RFC4231 + */ + +#define HMAC_SHA384_TEST_VECTORS 4 + +static struct hash_testvec hmac_sha384_tv_template[] = { + { + .key = { 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b }, // (20 bytes) + .ksize = 20, + .plaintext = { 0x48, 0x69, 0x20, 0x54, 0x68, 0x65, 0x72, 0x65 }, // ("Hi There") + .psize = 8, + .digest = { 0xaf, 0xd0, 0x39, 0x44, 0xd8, 0x48, 0x95, 0x62, + 0x6b, 0x08, 0x25, 0xf4, 0xab, 0x46, 0x90, 0x7f, + 0x15, 0xf9, 0xda, 0xdb, 0xe4, 0x10, 0x1e, 0xc6, + 0x82, 0xaa, 0x03, 0x4c, 0x7c, 0xeb, 0xc5, 0x9c, + 0xfa, 0xea, 0x9e, 0xa9, 0x07, 0x6e, 0xde, 0x7f, + 0x4a, 0xf1, 0x52, 0xe8, 0xb2, 0xfa, 0x9c, 0xb6 }, + }, { + .key = { 0x4a, 0x65, 0x66, 0x65 }, // ("Jefe") + .ksize = 4, + .plaintext = { 0x77, 0x68, 0x61, 0x74, 0x20, 0x64, 0x6f, 0x20, + 0x79, 0x61, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, // ("what do ya want ") + 0x66, 0x6f, 0x72, 0x20, 0x6e, 0x6f, 0x74, 0x68, + 0x69, 0x6e, 0x67, 0x3f }, // ("for nothing?") + .psize = 28, + .digest = { 0xaf, 0x45, 0xd2, 0xe3, 0x76, 0x48, 0x40, 0x31, + 0x61, 0x7f, 0x78, 0xd2, 0xb5, 0x8a, 0x6b, 0x1b, + 0x9c, 0x7e, 0xf4, 0x64, 0xf5, 0xa0, 0x1b, 0x47, + 0xe4, 0x2e, 0xc3, 0x73, 0x63, 0x22, 0x44, 0x5e, + 0x8e, 0x22, 0x40, 0xca, 0x5e, 0x69, 0xe2, 0xc7, + 0x8b, 0x32, 0x39, 0xec, 0xfa, 0xb2, 0x16, 0x49 }, + .np = 4, + .tap = { 7, 7, 7, 7 } + }, { + .key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa }, // (131 bytes) + .ksize = 131, + .plaintext = { 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x69, + 0x6e, 0x67, 0x20, 0x4c, 0x61, 0x72, 0x67, 0x65, // ("Test Using Large") + 0x72, 0x20, 0x54, 0x68, 0x61, 0x6e, 0x20, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x2d, 0x53, 0x69, 0x7a, // ("r Than Block-Siz") + 0x65, 0x20, 0x4b, 0x65, 0x79, 0x20, 0x2d, 0x20, + 0x48, 0x61, 0x73, 0x68, 0x20, 0x4b, 0x65, 0x79, // ("e Key - Hash Key") + 0x20, 0x46, 0x69, 0x72, 0x73, 0x74 }, // (" First") + .psize = 54, + .digest = { 0x4e, 0xce, 0x08, 0x44, 0x85, 0x81, 0x3e, 0x90, + 0x88, 0xd2, 0xc6, 0x3a, 0x04, 0x1b, 0xc5, 0xb4, + 0x4f, 0x9e, 0xf1, 0x01, 0x2a, 0x2b, 0x58, 0x8f, + 0x3c, 0xd1, 0x1f, 0x05, 0x03, 0x3a, 0xc4, 0xc6, + 0x0c, 0x2e, 0xf6, 0xab, 0x40, 0x30, 0xfe, 0x82, + 0x96, 0x24, 0x8d, 0xf1, 0x63, 0xf4, 0x49, 0x52 }, + }, { + 
.key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa }, // (131 bytes) + .ksize = 131, + .plaintext = { 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, + 0x61, 0x20, 0x74, 0x65, 0x73, 0x74, 0x20, 0x75, // ("This is a test u") + 0x73, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x6c, + 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, 0x68, // ("sing a larger th") + 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x6b, 0x65, // ("an block-size ke") + 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x20, + 0x6c, 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, // ("y and a larger t") + 0x68, 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x64, // ("han block-size d") + 0x61, 0x74, 0x61, 0x2e, 0x20, 0x54, 0x68, 0x65, + 0x20, 0x6b, 0x65, 0x79, 0x20, 0x6e, 0x65, 0x65, // ("ata. The key nee") + 0x64, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65, + 0x20, 0x68, 0x61, 0x73, 0x68, 0x65, 0x64, 0x20, // ("ds to be hashed ") + 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x20, 0x62, + 0x65, 0x69, 0x6e, 0x67, 0x20, 0x75, 0x73, 0x65, // ("before being use") + 0x64, 0x20, 0x62, 0x79, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x48, 0x4d, 0x41, 0x43, 0x20, 0x61, 0x6c, // ("d by the HMAC al") + 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x2e }, // ("gorithm.") + .psize = 152, + .digest = { 0x66, 0x17, 0x17, 0x8e, 0x94, 0x1f, 0x02, 0x0d, + 0x35, 0x1e, 0x2f, 0x25, 0x4e, 0x8f, 0xd3, 0x2c, + 0x60, 0x24, 0x20, 0xfe, 0xb0, 0xb8, 0xfb, 0x9a, + 0xdc, 0xce, 0xbb, 0x82, 0x46, 0x1e, 0x99, 0xc5, + 0xa6, 0x78, 0xcc, 0x31, 0xe7, 0x99, 0x17, 0x6d, + 0x38, 0x60, 0xe6, 0x11, 0x0c, 0x46, 0x52, 0x3e }, + }, +}; + +/* + * SHA512 HMAC test vectors from RFC4231 + */ + +#define HMAC_SHA512_TEST_VECTORS 4 + +static struct hash_testvec hmac_sha512_tv_template[] = { + { + .key = { 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b }, // (20 bytes) + .ksize = 20, + .plaintext = { 0x48, 0x69, 0x20, 0x54, 0x68, 0x65, 0x72, 0x65 }, // ("Hi There") + .psize = 8, + .digest = { 0x87, 0xaa, 0x7c, 0xde, 0xa5, 0xef, 0x61, 0x9d, + 0x4f, 0xf0, 0xb4, 0x24, 0x1a, 0x1d, 0x6c, 0xb0, + 0x23, 0x79, 0xf4, 0xe2, 0xce, 0x4e, 0xc2, 0x78, + 0x7a, 0xd0, 0xb3, 0x05, 0x45, 0xe1, 0x7c, 0xde, + 0xda, 0xa8, 0x33, 0xb7, 0xd6, 0xb8, 0xa7, 0x02, + 0x03, 0x8b, 0x27, 0x4e, 0xae, 0xa3, 0xf4, 0xe4, + 0xbe, 0x9d, 0x91, 0x4e, 0xeb, 0x61, 0xf1, 0x70, + 0x2e, 0x69, 0x6c, 0x20, 0x3a, 0x12, 0x68, 0x54 }, + }, { + .key = { 0x4a, 0x65, 0x66, 0x65 }, // ("Jefe") + .ksize = 4, + .plaintext = { 0x77, 0x68, 0x61, 0x74, 0x20, 0x64, 0x6f, 0x20, + 0x79, 0x61, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, // ("what do ya want ") + 0x66, 0x6f, 0x72, 0x20, 0x6e, 0x6f, 0x74, 0x68, + 0x69, 0x6e, 0x67, 0x3f }, // ("for nothing?") + .psize = 28, + 
.digest = { 0x16, 0x4b, 0x7a, 0x7b, 0xfc, 0xf8, 0x19, 0xe2, + 0xe3, 0x95, 0xfb, 0xe7, 0x3b, 0x56, 0xe0, 0xa3, + 0x87, 0xbd, 0x64, 0x22, 0x2e, 0x83, 0x1f, 0xd6, + 0x10, 0x27, 0x0c, 0xd7, 0xea, 0x25, 0x05, 0x54, + 0x97, 0x58, 0xbf, 0x75, 0xc0, 0x5a, 0x99, 0x4a, + 0x6d, 0x03, 0x4f, 0x65, 0xf8, 0xf0, 0xe6, 0xfd, + 0xca, 0xea, 0xb1, 0xa3, 0x4d, 0x4a, 0x6b, 0x4b, + 0x63, 0x6e, 0x07, 0x0a, 0x38, 0xbc, 0xe7, 0x37 }, + .np = 4, + .tap = { 7, 7, 7, 7 } + }, { + .key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa }, // (131 bytes) + .ksize = 131, + .plaintext = { 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x69, + 0x6e, 0x67, 0x20, 0x4c, 0x61, 0x72, 0x67, 0x65, // ("Test Using Large") + 0x72, 0x20, 0x54, 0x68, 0x61, 0x6e, 0x20, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x2d, 0x53, 0x69, 0x7a, // ("r Than Block-Siz") + 0x65, 0x20, 0x4b, 0x65, 0x79, 0x20, 0x2d, 0x20, + 0x48, 0x61, 0x73, 0x68, 0x20, 0x4b, 0x65, 0x79, // ("e Key - Hash Key") + 0x20, 0x46, 0x69, 0x72, 0x73, 0x74 }, // (" First") + .psize = 54, + .digest = { 0x80, 0xb2, 0x42, 0x63, 0xc7, 0xc1, 0xa3, 0xeb, + 0xb7, 0x14, 0x93, 0xc1, 0xdd, 0x7b, 0xe8, 0xb4, + 0x9b, 0x46, 0xd1, 0xf4, 0x1b, 0x4a, 0xee, 0xc1, + 0x12, 0x1b, 0x01, 0x37, 0x83, 0xf8, 0xf3, 0x52, + 0x6b, 0x56, 0xd0, 0x37, 0xe0, 0x5f, 0x25, 0x98, + 0xbd, 0x0f, 0xd2, 0x21, 0x5d, 0x6a, 0x1e, 0x52, + 0x95, 0xe6, 0x4f, 0x73, 0xf6, 0x3f, 0x0a, 0xec, + 0x8b, 0x91, 0x5a, 0x98, 0x5d, 0x78, 0x65, 0x98 }, + }, { + .key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa }, // (131 bytes) + .ksize = 131, + .plaintext = { 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, + 0x61, 0x20, 0x74, 0x65, 0x73, 0x74, 0x20, 0x75, // ("This is a test u") + 0x73, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x6c, + 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, 0x68, // ("sing a larger th") + 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x6b, 0x65, // ("an block-size ke") + 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x20, + 0x6c, 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, // ("y and a 
larger t") + 0x68, 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x64, // ("han block-size d") + 0x61, 0x74, 0x61, 0x2e, 0x20, 0x54, 0x68, 0x65, + 0x20, 0x6b, 0x65, 0x79, 0x20, 0x6e, 0x65, 0x65, // ("ata. The key nee") + 0x64, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65, + 0x20, 0x68, 0x61, 0x73, 0x68, 0x65, 0x64, 0x20, // ("ds to be hashed ") + 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x20, 0x62, + 0x65, 0x69, 0x6e, 0x67, 0x20, 0x75, 0x73, 0x65, // ("before being use") + 0x64, 0x20, 0x62, 0x79, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x48, 0x4d, 0x41, 0x43, 0x20, 0x61, 0x6c, // ("d by the HMAC al") + 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x2e }, // ("gorithm.") + .psize = 152, + .digest = { 0xe3, 0x7b, 0x6a, 0x77, 0x5d, 0xc8, 0x7d, 0xba, + 0xa4, 0xdf, 0xa9, 0xf9, 0x6e, 0x5e, 0x3f, 0xfd, + 0xde, 0xbd, 0x71, 0xf8, 0x86, 0x72, 0x89, 0x86, + 0x5d, 0xf5, 0xa3, 0x2d, 0x20, 0xcd, 0xc9, 0x44, + 0xb6, 0x02, 0x2c, 0xac, 0x3c, 0x49, 0x82, 0xb1, + 0x0d, 0x5e, 0xeb, 0x55, 0xc3, 0xe4, 0xde, 0x15, + 0x13, 0x46, 0x76, 0xfb, 0x6d, 0xe0, 0x44, 0x60, + 0x65, 0xc9, 0x74, 0x40, 0xfa, 0x8c, 0x6a, 0x58 }, + }, +}; + /* * DES test vectors. */ -- cgit v1.2.3 From 91652be5d1b901673a8e926455f0ed146cfaa588 Mon Sep 17 00:00:00 2001 From: David Howells Date: Sat, 16 Dec 2006 12:09:02 +1100 Subject: [CRYPTO] pcbc: Add Propagated CBC template Add PCBC crypto template support as used by RxRPC. Signed-Off-By: David Howells Signed-off-by: Herbert Xu --- crypto/Kconfig | 9 ++ crypto/Makefile | 1 + crypto/pcbc.c | 348 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 358 insertions(+) create mode 100644 crypto/pcbc.c (limited to 'crypto') diff --git a/crypto/Kconfig b/crypto/Kconfig index 92ba249f3a5b..9d3a44cf8637 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -168,6 +168,15 @@ config CRYPTO_CBC CBC: Cipher Block Chaining mode This block cipher algorithm is required for IPSec. +config CRYPTO_PCBC + tristate "PCBC support" + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + default m + help + PCBC: Propagating Cipher Block Chaining mode + This block cipher algorithm is required for RxRPC. + config CRYPTO_LRW tristate "LRW support (EXPERIMENTAL)" depends on EXPERIMENTAL diff --git a/crypto/Makefile b/crypto/Makefile index 60e3d24f61f5..9ef048dbb788 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -27,6 +27,7 @@ obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o obj-$(CONFIG_CRYPTO_ECB) += ecb.o obj-$(CONFIG_CRYPTO_CBC) += cbc.o +obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o obj-$(CONFIG_CRYPTO_LRW) += lrw.o obj-$(CONFIG_CRYPTO_DES) += des.o obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish.o diff --git a/crypto/pcbc.c b/crypto/pcbc.c new file mode 100644 index 000000000000..0ffb46eb259f --- /dev/null +++ b/crypto/pcbc.c @@ -0,0 +1,348 @@ +/* + * PCBC: Propagating Cipher Block Chaining mode + * + * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved. + * Written by David Howells (dhowells@redhat.com) + * + * Derived from cbc.c + * - Copyright (c) 2006 Herbert Xu + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include +#include +#include +#include +#include +#include +#include + +struct crypto_pcbc_ctx { + struct crypto_cipher *child; + void (*xor)(u8 *dst, const u8 *src, unsigned int bs); +}; + +static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(parent); + struct crypto_cipher *child = ctx->child; + int err; + + crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(child, key, keylen); + crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) & + CRYPTO_TFM_RES_MASK); + return err; +} + +static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm, + void (*xor)(u8 *, const u8 *, + unsigned int)) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + u8 *iv = walk->iv; + + do { + xor(iv, src, bsize); + fn(crypto_cipher_tfm(tfm), dst, iv); + memcpy(iv, dst, bsize); + xor(iv, src, bsize); + + src += bsize; + dst += bsize; + } while ((nbytes -= bsize) >= bsize); + + return nbytes; +} + +static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm, + void (*xor)(u8 *, const u8 *, + unsigned int)) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_encrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *iv = walk->iv; + u8 tmpbuf[bsize]; + + do { + memcpy(tmpbuf, src, bsize); + xor(iv, tmpbuf, bsize); + fn(crypto_cipher_tfm(tfm), src, iv); + memcpy(iv, src, bsize); + xor(iv, tmpbuf, bsize); + + src += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + +static int crypto_pcbc_encrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + while ((nbytes = walk.nbytes)) { + if (walk.src.virt.addr == walk.dst.virt.addr) + nbytes = crypto_pcbc_encrypt_inplace(desc, &walk, child, + xor); + else + nbytes = crypto_pcbc_encrypt_segment(desc, &walk, child, + xor); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + return err; +} + +static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm, + void (*xor)(u8 *, const u8 *, + unsigned int)) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_decrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *dst = walk->dst.virt.addr; + u8 *iv = walk->iv; + + do { + fn(crypto_cipher_tfm(tfm), dst, src); + xor(dst, iv, bsize); + memcpy(iv, src, bsize); + xor(iv, dst, bsize); + + src += bsize; + dst += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + 
+static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc, + struct blkcipher_walk *walk, + struct crypto_cipher *tfm, + void (*xor)(u8 *, const u8 *, + unsigned int)) +{ + void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = + crypto_cipher_alg(tfm)->cia_decrypt; + int bsize = crypto_cipher_blocksize(tfm); + unsigned int nbytes = walk->nbytes; + u8 *src = walk->src.virt.addr; + u8 *iv = walk->iv; + u8 tmpbuf[bsize]; + + do { + memcpy(tmpbuf, src, bsize); + fn(crypto_cipher_tfm(tfm), src, src); + xor(src, iv, bsize); + memcpy(iv, tmpbuf, bsize); + xor(iv, src, bsize); + + src += bsize; + } while ((nbytes -= bsize) >= bsize); + + memcpy(walk->iv, iv, bsize); + + return nbytes; +} + +static int crypto_pcbc_decrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm); + struct crypto_cipher *child = ctx->child; + void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor; + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + while ((nbytes = walk.nbytes)) { + if (walk.src.virt.addr == walk.dst.virt.addr) + nbytes = crypto_pcbc_decrypt_inplace(desc, &walk, child, + xor); + else + nbytes = crypto_pcbc_decrypt_segment(desc, &walk, child, + xor); + err = blkcipher_walk_done(desc, &walk, nbytes); + } + + return err; +} + +static void xor_byte(u8 *a, const u8 *b, unsigned int bs) +{ + do { + *a++ ^= *b++; + } while (--bs); +} + +static void xor_quad(u8 *dst, const u8 *src, unsigned int bs) +{ + u32 *a = (u32 *)dst; + u32 *b = (u32 *)src; + + do { + *a++ ^= *b++; + } while ((bs -= 4)); +} + +static void xor_64(u8 *a, const u8 *b, unsigned int bs) +{ + ((u32 *)a)[0] ^= ((u32 *)b)[0]; + ((u32 *)a)[1] ^= ((u32 *)b)[1]; +} + +static void xor_128(u8 *a, const u8 *b, unsigned int bs) +{ + ((u32 *)a)[0] ^= ((u32 *)b)[0]; + ((u32 *)a)[1] ^= ((u32 *)b)[1]; + ((u32 *)a)[2] ^= ((u32 *)b)[2]; + ((u32 *)a)[3] ^= ((u32 *)b)[3]; +} + +static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm); + + switch (crypto_tfm_alg_blocksize(tfm)) { + case 8: + ctx->xor = xor_64; + break; + + case 16: + ctx->xor = xor_128; + break; + + default: + if (crypto_tfm_alg_blocksize(tfm) % 4) + ctx->xor = xor_byte; + else + ctx->xor = xor_quad; + } + + tfm = crypto_spawn_tfm(spawn); + if (IS_ERR(tfm)) + return PTR_ERR(tfm); + + ctx->child = crypto_cipher_cast(tfm); + return 0; +} + +static void crypto_pcbc_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm); + crypto_free_cipher(ctx->child); +} + +static struct crypto_instance *crypto_pcbc_alloc(void *param, unsigned int len) +{ + struct crypto_instance *inst; + struct crypto_alg *alg; + + alg = crypto_get_attr_alg(param, len, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); + if (IS_ERR(alg)) + return ERR_PTR(PTR_ERR(alg)); + + inst = crypto_alloc_instance("pcbc", alg); + if (IS_ERR(inst)) + goto out_put_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = alg->cra_blocksize; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_blkcipher_type; + + if (!(alg->cra_blocksize % 4)) + inst->alg.cra_alignmask |= 3; + 
inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; + inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; + inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize; + + inst->alg.cra_ctxsize = sizeof(struct crypto_pcbc_ctx); + + inst->alg.cra_init = crypto_pcbc_init_tfm; + inst->alg.cra_exit = crypto_pcbc_exit_tfm; + + inst->alg.cra_blkcipher.setkey = crypto_pcbc_setkey; + inst->alg.cra_blkcipher.encrypt = crypto_pcbc_encrypt; + inst->alg.cra_blkcipher.decrypt = crypto_pcbc_decrypt; + +out_put_alg: + crypto_mod_put(alg); + return inst; +} + +static void crypto_pcbc_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_pcbc_tmpl = { + .name = "pcbc", + .alloc = crypto_pcbc_alloc, + .free = crypto_pcbc_free, + .module = THIS_MODULE, +}; + +static int __init crypto_pcbc_module_init(void) +{ + return crypto_register_template(&crypto_pcbc_tmpl); +} + +static void __exit crypto_pcbc_module_exit(void) +{ + crypto_unregister_template(&crypto_pcbc_tmpl); +} + +module_init(crypto_pcbc_module_init); +module_exit(crypto_pcbc_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("PCBC block cipher algorithm"); -- cgit v1.2.3 From 90831639a65592d6d3dc888dc3341f54ebf932e6 Mon Sep 17 00:00:00 2001 From: David Howells Date: Sat, 16 Dec 2006 12:13:14 +1100 Subject: [CRYPTO] fcrypt: Add FCrypt from RxRPC Add a crypto module to provide FCrypt encryption as used by RxRPC. Signed-Off-By: David Howells Signed-off-by: Herbert Xu --- crypto/Kconfig | 7 + crypto/Makefile | 1 + crypto/fcrypt.c | 423 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ crypto/tcrypt.c | 16 ++- crypto/tcrypt.h | 128 +++++++++++++++++ 5 files changed, 574 insertions(+), 1 deletion(-) create mode 100644 crypto/fcrypt.c (limited to 'crypto') diff --git a/crypto/Kconfig b/crypto/Kconfig index 9d3a44cf8637..e2e1eb13827e 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -204,6 +204,13 @@ config CRYPTO_DES_S390 help DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). +config CRYPTO_FCRYPT + tristate "FCrypt cipher algorithm" + select CRYPTO_ALGAPI + select CRYPTO_BLKCIPHER + help + FCrypt algorithm used by RxRPC. + config CRYPTO_BLOWFISH tristate "Blowfish cipher algorithm" select CRYPTO_ALGAPI diff --git a/crypto/Makefile b/crypto/Makefile index 9ef048dbb788..a3e1915ef01a 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -30,6 +30,7 @@ obj-$(CONFIG_CRYPTO_CBC) += cbc.o obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o obj-$(CONFIG_CRYPTO_LRW) += lrw.o obj-$(CONFIG_CRYPTO_DES) += des.o +obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish.o obj-$(CONFIG_CRYPTO_TWOFISH) += twofish.o obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o diff --git a/crypto/fcrypt.c b/crypto/fcrypt.c new file mode 100644 index 000000000000..9c2bb535b09a --- /dev/null +++ b/crypto/fcrypt.c @@ -0,0 +1,423 @@ +/* FCrypt encryption algorithm + * + * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved. + * Written by David Howells (dhowells@redhat.com) + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version + * 2 of the License, or (at your option) any later version. + * + * Based on code: + * + * Copyright (c) 1995 - 2000 Kungliga Tekniska Högskolan + * (Royal Institute of Technology, Stockholm, Sweden). + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * 3. Neither the name of the Institute nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + */ + +#include +#include +#include +#include +#include + +#define ROUNDS 16 + +struct fcrypt_ctx { + u32 sched[ROUNDS]; +}; + +/* Rotate right two 32 bit numbers as a 56 bit number */ +#define ror56(hi, lo, n) \ +do { \ + u32 t = lo & ((1 << n) - 1); \ + lo = (lo >> n) | ((hi & ((1 << n) - 1)) << (32 - n)); \ + hi = (hi >> n) | (t << (24-n)); \ +} while(0) + +/* Rotate right one 64 bit number as a 56 bit number */ +#define ror56_64(k, n) \ +do { \ + k = (k >> n) | ((k & ((1 << n) - 1)) << (56 - n)); \ +} while(0) + +/* + * Sboxes for Feistel network derived from + * /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h + */ +#undef Z +#define Z(x) __constant_be32_to_cpu(x << 3) +static const u32 sbox0[256] = { + Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11), + Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06), + Z(0x0e), Z(0x06), Z(0xd2), Z(0x65), Z(0x73), Z(0xc5), Z(0x28), Z(0x60), + Z(0xf2), Z(0x20), Z(0xb5), Z(0x38), Z(0x7e), Z(0xda), Z(0x9f), Z(0xe3), + Z(0xd2), Z(0xcf), Z(0xc4), Z(0x3c), Z(0x61), Z(0xff), Z(0x4a), Z(0x4a), + Z(0x35), Z(0xac), Z(0xaa), Z(0x5f), Z(0x2b), Z(0xbb), Z(0xbc), Z(0x53), + Z(0x4e), Z(0x9d), Z(0x78), Z(0xa3), Z(0xdc), Z(0x09), Z(0x32), Z(0x10), + Z(0xc6), Z(0x6f), Z(0x66), Z(0xd6), Z(0xab), Z(0xa9), Z(0xaf), Z(0xfd), + Z(0x3b), Z(0x95), Z(0xe8), Z(0x34), Z(0x9a), Z(0x81), Z(0x72), Z(0x80), + Z(0x9c), Z(0xf3), Z(0xec), Z(0xda), Z(0x9f), Z(0x26), Z(0x76), Z(0x15), + Z(0x3e), Z(0x55), Z(0x4d), Z(0xde), Z(0x84), Z(0xee), Z(0xad), Z(0xc7), + Z(0xf1), Z(0x6b), Z(0x3d), Z(0xd3), Z(0x04), Z(0x49), Z(0xaa), Z(0x24), + Z(0x0b), Z(0x8a), Z(0x83), Z(0xba), Z(0xfa), Z(0x85), Z(0xa0), Z(0xa8), + Z(0xb1), Z(0xd4), Z(0x01), Z(0xd8), Z(0x70), Z(0x64), Z(0xf0), Z(0x51), + Z(0xd2), Z(0xc3), Z(0xa7), Z(0x75), Z(0x8c), Z(0xa5), Z(0x64), Z(0xef), + Z(0x10), Z(0x4e), Z(0xb7), Z(0xc6), Z(0x61), Z(0x03), Z(0xeb), Z(0x44), + Z(0x3d), Z(0xe5), Z(0xb3), Z(0x5b), Z(0xae), Z(0xd5), Z(0xad), Z(0x1d), + Z(0xfa), Z(0x5a), Z(0x1e), Z(0x33), Z(0xab), 
Z(0x93), Z(0xa2), Z(0xb7), + Z(0xe7), Z(0xa8), Z(0x45), Z(0xa4), Z(0xcd), Z(0x29), Z(0x63), Z(0x44), + Z(0xb6), Z(0x69), Z(0x7e), Z(0x2e), Z(0x62), Z(0x03), Z(0xc8), Z(0xe0), + Z(0x17), Z(0xbb), Z(0xc7), Z(0xf3), Z(0x3f), Z(0x36), Z(0xba), Z(0x71), + Z(0x8e), Z(0x97), Z(0x65), Z(0x60), Z(0x69), Z(0xb6), Z(0xf6), Z(0xe6), + Z(0x6e), Z(0xe0), Z(0x81), Z(0x59), Z(0xe8), Z(0xaf), Z(0xdd), Z(0x95), + Z(0x22), Z(0x99), Z(0xfd), Z(0x63), Z(0x19), Z(0x74), Z(0x61), Z(0xb1), + Z(0xb6), Z(0x5b), Z(0xae), Z(0x54), Z(0xb3), Z(0x70), Z(0xff), Z(0xc6), + Z(0x3b), Z(0x3e), Z(0xc1), Z(0xd7), Z(0xe1), Z(0x0e), Z(0x76), Z(0xe5), + Z(0x36), Z(0x4f), Z(0x59), Z(0xc7), Z(0x08), Z(0x6e), Z(0x82), Z(0xa6), + Z(0x93), Z(0xc4), Z(0xaa), Z(0x26), Z(0x49), Z(0xe0), Z(0x21), Z(0x64), + Z(0x07), Z(0x9f), Z(0x64), Z(0x81), Z(0x9c), Z(0xbf), Z(0xf9), Z(0xd1), + Z(0x43), Z(0xf8), Z(0xb6), Z(0xb9), Z(0xf1), Z(0x24), Z(0x75), Z(0x03), + Z(0xe4), Z(0xb0), Z(0x99), Z(0x46), Z(0x3d), Z(0xf5), Z(0xd1), Z(0x39), + Z(0x72), Z(0x12), Z(0xf6), Z(0xba), Z(0x0c), Z(0x0d), Z(0x42), Z(0x2e) +}; + +#undef Z +#define Z(x) __constant_be32_to_cpu((x << 27) | (x >> 5)) +static const u32 sbox1[256] = { + Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e), + Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85), + Z(0x6c), Z(0x7b), Z(0x67), Z(0xc6), Z(0x23), Z(0xe3), Z(0xf2), Z(0x89), + Z(0x50), Z(0x9c), Z(0x03), Z(0xb7), Z(0x73), Z(0xe6), Z(0xe1), Z(0x39), + Z(0x31), Z(0x2c), Z(0x27), Z(0x9f), Z(0xa5), Z(0x69), Z(0x44), Z(0xd6), + Z(0x23), Z(0x83), Z(0x98), Z(0x7d), Z(0x3c), Z(0xb4), Z(0x2d), Z(0x99), + Z(0x1c), Z(0x1f), Z(0x8c), Z(0x20), Z(0x03), Z(0x7c), Z(0x5f), Z(0xad), + Z(0xf4), Z(0xfa), Z(0x95), Z(0xca), Z(0x76), Z(0x44), Z(0xcd), Z(0xb6), + Z(0xb8), Z(0xa1), Z(0xa1), Z(0xbe), Z(0x9e), Z(0x54), Z(0x8f), Z(0x0b), + Z(0x16), Z(0x74), Z(0x31), Z(0x8a), Z(0x23), Z(0x17), Z(0x04), Z(0xfa), + Z(0x79), Z(0x84), Z(0xb1), Z(0xf5), Z(0x13), Z(0xab), Z(0xb5), Z(0x2e), + Z(0xaa), Z(0x0c), Z(0x60), Z(0x6b), Z(0x5b), Z(0xc4), Z(0x4b), Z(0xbc), + Z(0xe2), Z(0xaf), Z(0x45), Z(0x73), Z(0xfa), Z(0xc9), Z(0x49), Z(0xcd), + Z(0x00), Z(0x92), Z(0x7d), Z(0x97), Z(0x7a), Z(0x18), Z(0x60), Z(0x3d), + Z(0xcf), Z(0x5b), Z(0xde), Z(0xc6), Z(0xe2), Z(0xe6), Z(0xbb), Z(0x8b), + Z(0x06), Z(0xda), Z(0x08), Z(0x15), Z(0x1b), Z(0x88), Z(0x6a), Z(0x17), + Z(0x89), Z(0xd0), Z(0xa9), Z(0xc1), Z(0xc9), Z(0x70), Z(0x6b), Z(0xe5), + Z(0x43), Z(0xf4), Z(0x68), Z(0xc8), Z(0xd3), Z(0x84), Z(0x28), Z(0x0a), + Z(0x52), Z(0x66), Z(0xa3), Z(0xca), Z(0xf2), Z(0xe3), Z(0x7f), Z(0x7a), + Z(0x31), Z(0xf7), Z(0x88), Z(0x94), Z(0x5e), Z(0x9c), Z(0x63), Z(0xd5), + Z(0x24), Z(0x66), Z(0xfc), Z(0xb3), Z(0x57), Z(0x25), Z(0xbe), Z(0x89), + Z(0x44), Z(0xc4), Z(0xe0), Z(0x8f), Z(0x23), Z(0x3c), Z(0x12), Z(0x52), + Z(0xf5), Z(0x1e), Z(0xf4), Z(0xcb), Z(0x18), Z(0x33), Z(0x1f), Z(0xf8), + Z(0x69), Z(0x10), Z(0x9d), Z(0xd3), Z(0xf7), Z(0x28), Z(0xf8), Z(0x30), + Z(0x05), Z(0x5e), Z(0x32), Z(0xc0), Z(0xd5), Z(0x19), Z(0xbd), Z(0x45), + Z(0x8b), Z(0x5b), Z(0xfd), Z(0xbc), Z(0xe2), Z(0x5c), Z(0xa9), Z(0x96), + Z(0xef), Z(0x70), Z(0xcf), Z(0xc2), Z(0x2a), Z(0xb3), Z(0x61), Z(0xad), + Z(0x80), Z(0x48), Z(0x81), Z(0xb7), Z(0x1d), Z(0x43), Z(0xd9), Z(0xd7), + Z(0x45), Z(0xf0), Z(0xd8), Z(0x8a), Z(0x59), Z(0x7c), Z(0x57), Z(0xc1), + Z(0x79), Z(0xc7), Z(0x34), Z(0xd6), Z(0x43), Z(0xdf), Z(0xe4), Z(0x78), + Z(0x16), Z(0x06), Z(0xda), Z(0x92), Z(0x76), Z(0x51), Z(0xe1), Z(0xd4), + Z(0x70), Z(0x03), Z(0xe0), Z(0x2f), Z(0x96), Z(0x91), Z(0x82), Z(0x80) +}; + +#undef Z 
+#define Z(x) __constant_be32_to_cpu(x << 11) +static const u32 sbox2[256] = { + Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86), + Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d), + Z(0xbf), Z(0x80), Z(0x87), Z(0x27), Z(0x95), Z(0xe2), Z(0xc5), Z(0x5d), + Z(0xf9), Z(0x6f), Z(0xdb), Z(0xb4), Z(0x65), Z(0x6e), Z(0xe7), Z(0x24), + Z(0xc8), Z(0x1a), Z(0xbb), Z(0x49), Z(0xb5), Z(0x0a), Z(0x7d), Z(0xb9), + Z(0xe8), Z(0xdc), Z(0xb7), Z(0xd9), Z(0x45), Z(0x20), Z(0x1b), Z(0xce), + Z(0x59), Z(0x9d), Z(0x6b), Z(0xbd), Z(0x0e), Z(0x8f), Z(0xa3), Z(0xa9), + Z(0xbc), Z(0x74), Z(0xa6), Z(0xf6), Z(0x7f), Z(0x5f), Z(0xb1), Z(0x68), + Z(0x84), Z(0xbc), Z(0xa9), Z(0xfd), Z(0x55), Z(0x50), Z(0xe9), Z(0xb6), + Z(0x13), Z(0x5e), Z(0x07), Z(0xb8), Z(0x95), Z(0x02), Z(0xc0), Z(0xd0), + Z(0x6a), Z(0x1a), Z(0x85), Z(0xbd), Z(0xb6), Z(0xfd), Z(0xfe), Z(0x17), + Z(0x3f), Z(0x09), Z(0xa3), Z(0x8d), Z(0xfb), Z(0xed), Z(0xda), Z(0x1d), + Z(0x6d), Z(0x1c), Z(0x6c), Z(0x01), Z(0x5a), Z(0xe5), Z(0x71), Z(0x3e), + Z(0x8b), Z(0x6b), Z(0xbe), Z(0x29), Z(0xeb), Z(0x12), Z(0x19), Z(0x34), + Z(0xcd), Z(0xb3), Z(0xbd), Z(0x35), Z(0xea), Z(0x4b), Z(0xd5), Z(0xae), + Z(0x2a), Z(0x79), Z(0x5a), Z(0xa5), Z(0x32), Z(0x12), Z(0x7b), Z(0xdc), + Z(0x2c), Z(0xd0), Z(0x22), Z(0x4b), Z(0xb1), Z(0x85), Z(0x59), Z(0x80), + Z(0xc0), Z(0x30), Z(0x9f), Z(0x73), Z(0xd3), Z(0x14), Z(0x48), Z(0x40), + Z(0x07), Z(0x2d), Z(0x8f), Z(0x80), Z(0x0f), Z(0xce), Z(0x0b), Z(0x5e), + Z(0xb7), Z(0x5e), Z(0xac), Z(0x24), Z(0x94), Z(0x4a), Z(0x18), Z(0x15), + Z(0x05), Z(0xe8), Z(0x02), Z(0x77), Z(0xa9), Z(0xc7), Z(0x40), Z(0x45), + Z(0x89), Z(0xd1), Z(0xea), Z(0xde), Z(0x0c), Z(0x79), Z(0x2a), Z(0x99), + Z(0x6c), Z(0x3e), Z(0x95), Z(0xdd), Z(0x8c), Z(0x7d), Z(0xad), Z(0x6f), + Z(0xdc), Z(0xff), Z(0xfd), Z(0x62), Z(0x47), Z(0xb3), Z(0x21), Z(0x8a), + Z(0xec), Z(0x8e), Z(0x19), Z(0x18), Z(0xb4), Z(0x6e), Z(0x3d), Z(0xfd), + Z(0x74), Z(0x54), Z(0x1e), Z(0x04), Z(0x85), Z(0xd8), Z(0xbc), Z(0x1f), + Z(0x56), Z(0xe7), Z(0x3a), Z(0x56), Z(0x67), Z(0xd6), Z(0xc8), Z(0xa5), + Z(0xf3), Z(0x8e), Z(0xde), Z(0xae), Z(0x37), Z(0x49), Z(0xb7), Z(0xfa), + Z(0xc8), Z(0xf4), Z(0x1f), Z(0xe0), Z(0x2a), Z(0x9b), Z(0x15), Z(0xd1), + Z(0x34), Z(0x0e), Z(0xb5), Z(0xe0), Z(0x44), Z(0x78), Z(0x84), Z(0x59), + Z(0x56), Z(0x68), Z(0x77), Z(0xa5), Z(0x14), Z(0x06), Z(0xf5), Z(0x2f), + Z(0x8c), Z(0x8a), Z(0x73), Z(0x80), Z(0x76), Z(0xb4), Z(0x10), Z(0x86) +}; + +#undef Z +#define Z(x) __constant_be32_to_cpu(x << 19) +static const u32 sbox3[256] = { + Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2), + Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12), + Z(0x44), Z(0x48), Z(0x6d), Z(0x28), Z(0xaa), Z(0x20), Z(0x6d), Z(0x57), + Z(0xd6), Z(0x6b), Z(0x5d), Z(0x72), Z(0xf0), Z(0x92), Z(0x5a), Z(0x1b), + Z(0x53), Z(0x80), Z(0x24), Z(0x70), Z(0x9a), Z(0xcc), Z(0xa7), Z(0x66), + Z(0xa1), Z(0x01), Z(0xa5), Z(0x41), Z(0x97), Z(0x41), Z(0x31), Z(0x82), + Z(0xf1), Z(0x14), Z(0xcf), Z(0x53), Z(0x0d), Z(0xa0), Z(0x10), Z(0xcc), + Z(0x2a), Z(0x7d), Z(0xd2), Z(0xbf), Z(0x4b), Z(0x1a), Z(0xdb), Z(0x16), + Z(0x47), Z(0xf6), Z(0x51), Z(0x36), Z(0xed), Z(0xf3), Z(0xb9), Z(0x1a), + Z(0xa7), Z(0xdf), Z(0x29), Z(0x43), Z(0x01), Z(0x54), Z(0x70), Z(0xa4), + Z(0xbf), Z(0xd4), Z(0x0b), Z(0x53), Z(0x44), Z(0x60), Z(0x9e), Z(0x23), + Z(0xa1), Z(0x18), Z(0x68), Z(0x4f), Z(0xf0), Z(0x2f), Z(0x82), Z(0xc2), + Z(0x2a), Z(0x41), Z(0xb2), Z(0x42), Z(0x0c), Z(0xed), Z(0x0c), Z(0x1d), + Z(0x13), Z(0x3a), Z(0x3c), Z(0x6e), Z(0x35), 
Z(0xdc), Z(0x60), Z(0x65), + Z(0x85), Z(0xe9), Z(0x64), Z(0x02), Z(0x9a), Z(0x3f), Z(0x9f), Z(0x87), + Z(0x96), Z(0xdf), Z(0xbe), Z(0xf2), Z(0xcb), Z(0xe5), Z(0x6c), Z(0xd4), + Z(0x5a), Z(0x83), Z(0xbf), Z(0x92), Z(0x1b), Z(0x94), Z(0x00), Z(0x42), + Z(0xcf), Z(0x4b), Z(0x00), Z(0x75), Z(0xba), Z(0x8f), Z(0x76), Z(0x5f), + Z(0x5d), Z(0x3a), Z(0x4d), Z(0x09), Z(0x12), Z(0x08), Z(0x38), Z(0x95), + Z(0x17), Z(0xe4), Z(0x01), Z(0x1d), Z(0x4c), Z(0xa9), Z(0xcc), Z(0x85), + Z(0x82), Z(0x4c), Z(0x9d), Z(0x2f), Z(0x3b), Z(0x66), Z(0xa1), Z(0x34), + Z(0x10), Z(0xcd), Z(0x59), Z(0x89), Z(0xa5), Z(0x31), Z(0xcf), Z(0x05), + Z(0xc8), Z(0x84), Z(0xfa), Z(0xc7), Z(0xba), Z(0x4e), Z(0x8b), Z(0x1a), + Z(0x19), Z(0xf1), Z(0xa1), Z(0x3b), Z(0x18), Z(0x12), Z(0x17), Z(0xb0), + Z(0x98), Z(0x8d), Z(0x0b), Z(0x23), Z(0xc3), Z(0x3a), Z(0x2d), Z(0x20), + Z(0xdf), Z(0x13), Z(0xa0), Z(0xa8), Z(0x4c), Z(0x0d), Z(0x6c), Z(0x2f), + Z(0x47), Z(0x13), Z(0x13), Z(0x52), Z(0x1f), Z(0x2d), Z(0xf5), Z(0x79), + Z(0x3d), Z(0xa2), Z(0x54), Z(0xbd), Z(0x69), Z(0xc8), Z(0x6b), Z(0xf3), + Z(0x05), Z(0x28), Z(0xf1), Z(0x16), Z(0x46), Z(0x40), Z(0xb0), Z(0x11), + Z(0xd3), Z(0xb7), Z(0x95), Z(0x49), Z(0xcf), Z(0xc3), Z(0x1d), Z(0x8f), + Z(0xd8), Z(0xe1), Z(0x73), Z(0xdb), Z(0xad), Z(0xc8), Z(0xc9), Z(0xa9), + Z(0xa1), Z(0xc2), Z(0xc5), Z(0xe3), Z(0xba), Z(0xfc), Z(0x0e), Z(0x25) +}; + +/* + * This is a 16 round Feistel network with permutation F_ENCRYPT + */ +#define F_ENCRYPT(R, L, sched) \ +do { \ + union lc4 { u32 l; u8 c[4]; } u; \ + u.l = sched ^ R; \ + L ^= sbox0[u.c[0]] ^ sbox1[u.c[1]] ^ sbox2[u.c[2]] ^ sbox3[u.c[3]]; \ +} while(0) + +/* + * encryptor + */ +static void fcrypt_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm); + struct { + u32 l, r; + } X; + + memcpy(&X, src, sizeof(X)); + + F_ENCRYPT(X.r, X.l, ctx->sched[0x0]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x1]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x2]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x3]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x4]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x5]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x6]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x7]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x8]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x9]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xa]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xb]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xc]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xd]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xe]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xf]); + + memcpy(dst, &X, sizeof(X)); +} + +/* + * decryptor + */ +static void fcrypt_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) +{ + const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm); + struct { + u32 l, r; + } X; + + memcpy(&X, src, sizeof(X)); + + F_ENCRYPT(X.l, X.r, ctx->sched[0xf]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xe]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xd]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xc]); + F_ENCRYPT(X.l, X.r, ctx->sched[0xb]); + F_ENCRYPT(X.r, X.l, ctx->sched[0xa]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x9]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x8]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x7]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x6]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x5]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x4]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x3]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x2]); + F_ENCRYPT(X.l, X.r, ctx->sched[0x1]); + F_ENCRYPT(X.r, X.l, ctx->sched[0x0]); + + memcpy(dst, &X, sizeof(X)); +} + +/* + * Generate a key schedule from key, the least significant bit in each key byte + * is parity and shall be ignored. 
This leaves 56 significant bits in the key + * to scatter over the 16 key schedules. For each schedule extract the low + * order 32 bits and use as schedule, then rotate right by 11 bits. + */ +static int fcrypt_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) +{ + struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm); + +#if BITS_PER_LONG == 64 /* the 64-bit version can also be used for 32-bit + * kernels - it seems to be faster but the code is + * larger */ + + u64 k; /* k holds all 56 non-parity bits */ + + /* discard the parity bits */ + k = (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key++) >> 1; + k <<= 7; + k |= (*key) >> 1; + + /* Use lower 32 bits for schedule, rotate by 11 each round (16 times) */ + ctx->sched[0x0] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x1] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x2] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x3] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x4] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x5] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x6] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x7] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x8] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0x9] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0xa] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0xb] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0xc] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0xd] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0xe] = be32_to_cpu(k); ror56_64(k, 11); + ctx->sched[0xf] = be32_to_cpu(k); + + return 0; +#else + u32 hi, lo; /* hi is upper 24 bits and lo lower 32, total 56 */ + + /* discard the parity bits */ + lo = (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + hi = lo >> 4; + lo &= 0xf; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key++) >> 1; + lo <<= 7; + lo |= (*key) >> 1; + + /* Use lower 32 bits for schedule, rotate by 11 each round (16 times) */ + ctx->sched[0x0] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x1] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x2] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x3] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x4] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x5] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x6] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x7] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x8] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0x9] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0xa] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0xb] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0xc] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0xd] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0xe] = be32_to_cpu(lo); ror56(hi, lo, 11); + ctx->sched[0xf] = be32_to_cpu(lo); + return 0; +#endif +} + +static struct crypto_alg fcrypt_alg = { + .cra_name = "fcrypt", + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = 8, + .cra_ctxsize = sizeof(struct fcrypt_ctx), + .cra_module = THIS_MODULE, + .cra_alignmask = 3, + .cra_list = LIST_HEAD_INIT(fcrypt_alg.cra_list), + .cra_u = { .cipher = { + .cia_min_keysize = 8, + .cia_max_keysize = 8, + .cia_setkey = fcrypt_setkey, + .cia_encrypt = fcrypt_encrypt, + .cia_decrypt = fcrypt_decrypt } } 
+}; + +static int __init init(void) +{ + return crypto_register_alg(&fcrypt_alg); +} + +static void __exit fini(void) +{ + crypto_unregister_alg(&fcrypt_alg); +} + +module_init(init); +module_exit(fini); + +MODULE_LICENSE("Dual BSD/GPL"); +MODULE_DESCRIPTION("FCrypt Cipher Algorithm"); +MODULE_AUTHOR("David Howells "); diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index ff489423b2cf..57882c2c5f71 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -72,7 +72,8 @@ static char *check[] = { "des", "md5", "des3_ede", "rot13", "sha1", "sha256", "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes", "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", - "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", NULL + "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt", + NULL }; static void hexdump(unsigned char *buf, unsigned int len) @@ -965,6 +966,12 @@ static void do_test(void) test_cipher("ecb(xeta)", DECRYPT, xeta_dec_tv_template, XETA_DEC_TEST_VECTORS); + //FCrypt + test_cipher("pcbc(fcrypt)", ENCRYPT, fcrypt_pcbc_enc_tv_template, + FCRYPT_ENC_TEST_VECTORS); + test_cipher("pcbc(fcrypt)", DECRYPT, fcrypt_pcbc_dec_tv_template, + FCRYPT_DEC_TEST_VECTORS); + test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS); test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS); test_hash("wp512", wp512_tv_template, WP512_TEST_VECTORS); @@ -1182,6 +1189,13 @@ static void do_test(void) XETA_DEC_TEST_VECTORS); break; + case 31: + test_cipher("pcbc(fcrypt)", ENCRYPT, fcrypt_pcbc_enc_tv_template, + FCRYPT_ENC_TEST_VECTORS); + test_cipher("pcbc(fcrypt)", DECRYPT, fcrypt_pcbc_dec_tv_template, + FCRYPT_DEC_TEST_VECTORS); + break; + case 100: test_hash("hmac(md5)", hmac_md5_tv_template, HMAC_MD5_TEST_VECTORS); diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index 6c7b7a1e73f4..ec77814d104e 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -3559,6 +3559,134 @@ static struct cipher_testvec xeta_dec_tv_template[] = { } }; +/* + * FCrypt test vectors + */ +#define FCRYPT_ENC_TEST_VECTORS ARRAY_SIZE(fcrypt_pcbc_enc_tv_template) +#define FCRYPT_DEC_TEST_VECTORS ARRAY_SIZE(fcrypt_pcbc_dec_tv_template) + +static struct cipher_testvec fcrypt_pcbc_enc_tv_template[] = { + { /* http://www.openafs.org/pipermail/openafs-devel/2000-December/005320.html */ + .key = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .klen = 8, + .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .input = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .ilen = 8, + .result = { 0x0E, 0x09, 0x00, 0xC7, 0x3E, 0xF7, 0xED, 0x41 }, + .rlen = 8, + }, { + .key = { 0x11, 0x44, 0x77, 0xAA, 0xDD, 0x00, 0x33, 0x66 }, + .klen = 8, + .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .input = { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }, + .ilen = 8, + .result = { 0xD8, 0xED, 0x78, 0x74, 0x77, 0xEC, 0x06, 0x80 }, + .rlen = 8, + }, { /* From Arla */ + .key = { 0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87 }, + .klen = 8, + .iv = { 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .input = "The quick brown fox jumps over the lazy dogs.\0\0", + .ilen = 48, + .result = { 0x00, 0xf0, 0xe, 0x11, 0x75, 0xe6, 0x23, 0x82, + 0xee, 0xac, 0x98, 0x62, 0x44, 0x51, 0xe4, 0x84, + 0xc3, 0x59, 0xd8, 0xaa, 0x64, 0x60, 0xae, 0xf7, + 0xd2, 0xd9, 0x13, 0x79, 0x72, 0xa3, 0x45, 0x03, + 0x23, 0xb5, 0x62, 0xd7, 0x0c, 0xf5, 0x27, 0xd1, + 0xf8, 0x91, 0x3c, 0xac, 0x44, 0x22, 0x92, 0xef }, + .rlen = 48, + }, { + .key = { 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .klen = 8, + .iv = { 0xf0, 
0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87 }, + .input = "The quick brown fox jumps over the lazy dogs.\0\0", + .ilen = 48, + .result = { 0xca, 0x90, 0xf5, 0x9d, 0xcb, 0xd4, 0xd2, 0x3c, + 0x01, 0x88, 0x7f, 0x3e, 0x31, 0x6e, 0x62, 0x9d, + 0xd8, 0xe0, 0x57, 0xa3, 0x06, 0x3a, 0x42, 0x58, + 0x2a, 0x28, 0xfe, 0x72, 0x52, 0x2f, 0xdd, 0xe0, + 0x19, 0x89, 0x09, 0x1c, 0x2a, 0x8e, 0x8c, 0x94, + 0xfc, 0xc7, 0x68, 0xe4, 0x88, 0xaa, 0xde, 0x0f }, + .rlen = 48, + }, { /* split-page version */ + .key = { 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .klen = 8, + .iv = { 0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87 }, + .input = "The quick brown fox jumps over the lazy dogs.\0\0", + .ilen = 48, + .result = { 0xca, 0x90, 0xf5, 0x9d, 0xcb, 0xd4, 0xd2, 0x3c, + 0x01, 0x88, 0x7f, 0x3e, 0x31, 0x6e, 0x62, 0x9d, + 0xd8, 0xe0, 0x57, 0xa3, 0x06, 0x3a, 0x42, 0x58, + 0x2a, 0x28, 0xfe, 0x72, 0x52, 0x2f, 0xdd, 0xe0, + 0x19, 0x89, 0x09, 0x1c, 0x2a, 0x8e, 0x8c, 0x94, + 0xfc, 0xc7, 0x68, 0xe4, 0x88, 0xaa, 0xde, 0x0f }, + .rlen = 48, + .np = 2, + .tap = { 20, 28 }, + } +}; + +static struct cipher_testvec fcrypt_pcbc_dec_tv_template[] = { + { /* http://www.openafs.org/pipermail/openafs-devel/2000-December/005320.html */ + .key = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .klen = 8, + .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .input = { 0x0E, 0x09, 0x00, 0xC7, 0x3E, 0xF7, 0xED, 0x41 }, + .ilen = 8, + .result = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .rlen = 8, + }, { + .key = { 0x11, 0x44, 0x77, 0xAA, 0xDD, 0x00, 0x33, 0x66 }, + .klen = 8, + .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .input = { 0xD8, 0xED, 0x78, 0x74, 0x77, 0xEC, 0x06, 0x80 }, + .ilen = 8, + .result = { 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0 }, + .rlen = 8, + }, { /* From Arla */ + .key = { 0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87 }, + .klen = 8, + .iv = { 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .input = { 0x00, 0xf0, 0xe, 0x11, 0x75, 0xe6, 0x23, 0x82, + 0xee, 0xac, 0x98, 0x62, 0x44, 0x51, 0xe4, 0x84, + 0xc3, 0x59, 0xd8, 0xaa, 0x64, 0x60, 0xae, 0xf7, + 0xd2, 0xd9, 0x13, 0x79, 0x72, 0xa3, 0x45, 0x03, + 0x23, 0xb5, 0x62, 0xd7, 0x0c, 0xf5, 0x27, 0xd1, + 0xf8, 0x91, 0x3c, 0xac, 0x44, 0x22, 0x92, 0xef }, + .ilen = 48, + .result = "The quick brown fox jumps over the lazy dogs.\0\0", + .rlen = 48, + }, { + .key = { 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .klen = 8, + .iv = { 0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87 }, + .input = { 0xca, 0x90, 0xf5, 0x9d, 0xcb, 0xd4, 0xd2, 0x3c, + 0x01, 0x88, 0x7f, 0x3e, 0x31, 0x6e, 0x62, 0x9d, + 0xd8, 0xe0, 0x57, 0xa3, 0x06, 0x3a, 0x42, 0x58, + 0x2a, 0x28, 0xfe, 0x72, 0x52, 0x2f, 0xdd, 0xe0, + 0x19, 0x89, 0x09, 0x1c, 0x2a, 0x8e, 0x8c, 0x94, + 0xfc, 0xc7, 0x68, 0xe4, 0x88, 0xaa, 0xde, 0x0f }, + .ilen = 48, + .result = "The quick brown fox jumps over the lazy dogs.\0\0", + .rlen = 48, + }, { /* split-page version */ + .key = { 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .klen = 8, + .iv = { 0xf0, 0xe1, 0xd2, 0xc3, 0xb4, 0xa5, 0x96, 0x87 }, + .input = { 0xca, 0x90, 0xf5, 0x9d, 0xcb, 0xd4, 0xd2, 0x3c, + 0x01, 0x88, 0x7f, 0x3e, 0x31, 0x6e, 0x62, 0x9d, + 0xd8, 0xe0, 0x57, 0xa3, 0x06, 0x3a, 0x42, 0x58, + 0x2a, 0x28, 0xfe, 0x72, 0x52, 0x2f, 0xdd, 0xe0, + 0x19, 0x89, 0x09, 0x1c, 0x2a, 0x8e, 0x8c, 0x94, + 0xfc, 0xc7, 0x68, 0xe4, 0x88, 0xaa, 0xde, 0x0f }, + .ilen = 48, + .result = "The quick brown fox jumps over the lazy dogs.\0\0", + .rlen = 48, + .np = 2, + .tap = { 20, 28 }, + } +}; + /* * Compression stuff. 
*/ -- cgit v1.2.3 From ba8da2a9485f22455dcb06dd17e2f6d94b81ba89 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sun, 17 Dec 2006 08:57:38 +1100 Subject: [CRYPTO] tcrypt: Removed vestigial crypto_alloc_tfm call The crypto_comp conversion missed the last remaining crypto_alloc_tfm call. This patch replaces it with crypto_alloc_comp. Signed-off-by: Herbert Xu --- crypto/tcrypt.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'crypto') diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 57882c2c5f71..8c8e2f95dc7e 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -767,7 +767,7 @@ static void test_deflate(void) memcpy(tvmem, deflate_comp_tv_template, tsize); tv = (void *)tvmem; - tfm = crypto_alloc_tfm("deflate", 0); + tfm = crypto_alloc_comp("deflate", 0, CRYPTO_ALG_ASYNC); if (tfm == NULL) { printk("failed to load transform for deflate\n"); return; -- cgit v1.2.3 From f1ddcaf3393b7a3871809b97fae90fac841a1f39 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sat, 27 Jan 2007 10:05:15 +1100 Subject: [CRYPTO] api: Remove deprecated interface This patch removes the old cipher interface and related code. Signed-off-by: Herbert Xu --- crypto/algapi.c | 2 +- crypto/api.c | 63 +------- crypto/cipher.c | 447 +----------------------------------------------------- crypto/compress.c | 5 - crypto/digest.c | 5 - crypto/internal.h | 26 +--- 6 files changed, 16 insertions(+), 532 deletions(-) (limited to 'crypto') diff --git a/crypto/algapi.c b/crypto/algapi.c index c91530021e9c..69eb504721a4 100644 --- a/crypto/algapi.c +++ b/crypto/algapi.c @@ -396,7 +396,7 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn) return ERR_PTR(-EAGAIN); } - tfm = __crypto_alloc_tfm(alg, 0); + tfm = __crypto_alloc_tfm(alg); if (IS_ERR(tfm)) crypto_mod_put(alg); diff --git a/crypto/api.c b/crypto/api.c index 8c446871cd5b..8b80baec853a 100644 --- a/crypto/api.c +++ b/crypto/api.c @@ -212,25 +212,6 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) } EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup); -static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags) -{ - tfm->crt_flags = flags & CRYPTO_TFM_REQ_MASK; - flags &= ~CRYPTO_TFM_REQ_MASK; - - switch (crypto_tfm_alg_type(tfm)) { - case CRYPTO_ALG_TYPE_CIPHER: - return crypto_init_cipher_flags(tfm, flags); - - case CRYPTO_ALG_TYPE_DIGEST: - return crypto_init_digest_flags(tfm, flags); - - case CRYPTO_ALG_TYPE_COMPRESS: - return crypto_init_compress_flags(tfm, flags); - } - - return 0; -} - static int crypto_init_ops(struct crypto_tfm *tfm) { const struct crypto_type *type = tfm->__crt_alg->cra_type; @@ -285,7 +266,7 @@ static void crypto_exit_ops(struct crypto_tfm *tfm) } } -static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags) +static unsigned int crypto_ctxsize(struct crypto_alg *alg) { const struct crypto_type *type = alg->cra_type; unsigned int len; @@ -299,15 +280,15 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags) BUG(); case CRYPTO_ALG_TYPE_CIPHER: - len += crypto_cipher_ctxsize(alg, flags); + len += crypto_cipher_ctxsize(alg); break; case CRYPTO_ALG_TYPE_DIGEST: - len += crypto_digest_ctxsize(alg, flags); + len += crypto_digest_ctxsize(alg); break; case CRYPTO_ALG_TYPE_COMPRESS: - len += crypto_compress_ctxsize(alg, flags); + len += crypto_compress_ctxsize(alg); break; } @@ -322,23 +303,19 @@ void crypto_shoot_alg(struct crypto_alg *alg) } EXPORT_SYMBOL_GPL(crypto_shoot_alg); -struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags) +struct crypto_tfm 
*__crypto_alloc_tfm(struct crypto_alg *alg) { struct crypto_tfm *tfm = NULL; unsigned int tfm_size; int err = -ENOMEM; - tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags); + tfm_size = sizeof(*tfm) + crypto_ctxsize(alg); tfm = kzalloc(tfm_size, GFP_KERNEL); if (tfm == NULL) goto out_err; tfm->__crt_alg = alg; - err = crypto_init_flags(tfm, flags); - if (err) - goto out_free_tfm; - err = crypto_init_ops(tfm); if (err) goto out_free_tfm; @@ -362,31 +339,6 @@ out: } EXPORT_SYMBOL_GPL(__crypto_alloc_tfm); -struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags) -{ - struct crypto_tfm *tfm = NULL; - int err; - - do { - struct crypto_alg *alg; - - alg = crypto_alg_mod_lookup(name, 0, CRYPTO_ALG_ASYNC); - err = PTR_ERR(alg); - if (IS_ERR(alg)) - continue; - - tfm = __crypto_alloc_tfm(alg, flags); - err = 0; - if (IS_ERR(tfm)) { - crypto_mod_put(alg); - err = PTR_ERR(tfm); - tfm = NULL; - } - } while (err == -EAGAIN && !signal_pending(current)); - - return tfm; -} - /* * crypto_alloc_base - Locate algorithm and allocate transform * @alg_name: Name of algorithm @@ -420,7 +372,7 @@ struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask) goto err; } - tfm = __crypto_alloc_tfm(alg, 0); + tfm = __crypto_alloc_tfm(alg); if (!IS_ERR(tfm)) return tfm; @@ -466,7 +418,6 @@ void crypto_free_tfm(struct crypto_tfm *tfm) kfree(tfm); } -EXPORT_SYMBOL_GPL(crypto_alloc_tfm); EXPORT_SYMBOL_GPL(crypto_free_tfm); int crypto_has_alg(const char *name, u32 type, u32 mask) diff --git a/crypto/cipher.c b/crypto/cipher.c index 9e03701cfdcc..333aab2f0277 100644 --- a/crypto/cipher.c +++ b/crypto/cipher.c @@ -12,274 +12,13 @@ * any later version. * */ -#include + #include #include #include -#include -#include +#include #include -#include #include "internal.h" -#include "scatterwalk.h" - -struct cipher_alg_compat { - unsigned int cia_min_keysize; - unsigned int cia_max_keysize; - int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key, - unsigned int keylen); - void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); - void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); - - unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc, - u8 *dst, const u8 *src, - unsigned int nbytes); - unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc, - u8 *dst, const u8 *src, - unsigned int nbytes); - unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc, - u8 *dst, const u8 *src, - unsigned int nbytes); - unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc, - u8 *dst, const u8 *src, - unsigned int nbytes); -}; - -static inline void xor_64(u8 *a, const u8 *b) -{ - ((u32 *)a)[0] ^= ((u32 *)b)[0]; - ((u32 *)a)[1] ^= ((u32 *)b)[1]; -} - -static inline void xor_128(u8 *a, const u8 *b) -{ - ((u32 *)a)[0] ^= ((u32 *)b)[0]; - ((u32 *)a)[1] ^= ((u32 *)b)[1]; - ((u32 *)a)[2] ^= ((u32 *)b)[2]; - ((u32 *)a)[3] ^= ((u32 *)b)[3]; -} - -static unsigned int crypt_slow(const struct cipher_desc *desc, - struct scatter_walk *in, - struct scatter_walk *out, unsigned int bsize) -{ - unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm); - u8 buffer[bsize * 2 + alignmask]; - u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); - u8 *dst = src + bsize; - - scatterwalk_copychunks(src, in, bsize, 0); - desc->prfn(desc, dst, src, bsize); - scatterwalk_copychunks(dst, out, bsize, 1); - - return bsize; -} - -static inline unsigned int crypt_fast(const struct cipher_desc *desc, - struct scatter_walk *in, - struct scatter_walk *out, - unsigned int nbytes, 
u8 *tmp) -{ - u8 *src, *dst; - u8 *real_src, *real_dst; - - real_src = scatterwalk_map(in, 0); - real_dst = scatterwalk_map(out, 1); - - src = real_src; - dst = scatterwalk_samebuf(in, out) ? src : real_dst; - - if (tmp) { - memcpy(tmp, src, nbytes); - src = tmp; - dst = tmp; - } - - nbytes = desc->prfn(desc, dst, src, nbytes); - - if (tmp) - memcpy(real_dst, tmp, nbytes); - - scatterwalk_unmap(real_src, 0); - scatterwalk_unmap(real_dst, 1); - - scatterwalk_advance(in, nbytes); - scatterwalk_advance(out, nbytes); - - return nbytes; -} - -/* - * Generic encrypt/decrypt wrapper for ciphers, handles operations across - * multiple page boundaries by using temporary blocks. In user context, - * the kernel is given a chance to schedule us once per page. - */ -static int crypt(const struct cipher_desc *desc, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes) -{ - struct scatter_walk walk_in, walk_out; - struct crypto_tfm *tfm = desc->tfm; - const unsigned int bsize = crypto_tfm_alg_blocksize(tfm); - unsigned int alignmask = crypto_tfm_alg_alignmask(tfm); - unsigned long buffer = 0; - - if (!nbytes) - return 0; - - if (nbytes % bsize) { - tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; - return -EINVAL; - } - - scatterwalk_start(&walk_in, src); - scatterwalk_start(&walk_out, dst); - - for(;;) { - unsigned int n = nbytes; - u8 *tmp = NULL; - - if (!scatterwalk_aligned(&walk_in, alignmask) || - !scatterwalk_aligned(&walk_out, alignmask)) { - if (!buffer) { - buffer = __get_free_page(GFP_ATOMIC); - if (!buffer) - n = 0; - } - tmp = (u8 *)buffer; - } - - n = scatterwalk_clamp(&walk_in, n); - n = scatterwalk_clamp(&walk_out, n); - - if (likely(n >= bsize)) - n = crypt_fast(desc, &walk_in, &walk_out, n, tmp); - else - n = crypt_slow(desc, &walk_in, &walk_out, bsize); - - nbytes -= n; - - scatterwalk_done(&walk_in, 0, nbytes); - scatterwalk_done(&walk_out, 1, nbytes); - - if (!nbytes) - break; - - crypto_yield(tfm->crt_flags); - } - - if (buffer) - free_page(buffer); - - return 0; -} - -static int crypt_iv_unaligned(struct cipher_desc *desc, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes) -{ - struct crypto_tfm *tfm = desc->tfm; - unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); - u8 *iv = desc->info; - - if (unlikely(((unsigned long)iv & alignmask))) { - unsigned int ivsize = tfm->crt_cipher.cit_ivsize; - u8 buffer[ivsize + alignmask]; - u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); - int err; - - desc->info = memcpy(tmp, iv, ivsize); - err = crypt(desc, dst, src, nbytes); - memcpy(iv, tmp, ivsize); - - return err; - } - - return crypt(desc, dst, src, nbytes); -} - -static unsigned int cbc_process_encrypt(const struct cipher_desc *desc, - u8 *dst, const u8 *src, - unsigned int nbytes) -{ - struct crypto_tfm *tfm = desc->tfm; - void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block; - int bsize = crypto_tfm_alg_blocksize(tfm); - - void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn; - u8 *iv = desc->info; - unsigned int done = 0; - - nbytes -= bsize; - - do { - xor(iv, src); - fn(tfm, dst, iv); - memcpy(iv, dst, bsize); - - src += bsize; - dst += bsize; - } while ((done += bsize) <= nbytes); - - return done; -} - -static unsigned int cbc_process_decrypt(const struct cipher_desc *desc, - u8 *dst, const u8 *src, - unsigned int nbytes) -{ - struct crypto_tfm *tfm = desc->tfm; - void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block; - int bsize = crypto_tfm_alg_blocksize(tfm); - unsigned long alignmask 
= crypto_tfm_alg_alignmask(desc->tfm); - - u8 stack[src == dst ? bsize + alignmask : 0]; - u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1); - u8 **dst_p = src == dst ? &buf : &dst; - - void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn; - u8 *iv = desc->info; - unsigned int done = 0; - - nbytes -= bsize; - - do { - u8 *tmp_dst = *dst_p; - - fn(tfm, tmp_dst, src); - xor(tmp_dst, iv); - memcpy(iv, src, bsize); - if (tmp_dst != dst) - memcpy(dst, tmp_dst, bsize); - - src += bsize; - dst += bsize; - } while ((done += bsize) <= nbytes); - - return done; -} - -static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst, - const u8 *src, unsigned int nbytes) -{ - struct crypto_tfm *tfm = desc->tfm; - int bsize = crypto_tfm_alg_blocksize(tfm); - void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn; - unsigned int done = 0; - - nbytes -= bsize; - - do { - fn(tfm, dst, src); - - src += bsize; - dst += bsize; - } while ((done += bsize) <= nbytes); - - return done; -} static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) { @@ -293,122 +32,6 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) return cia->cia_setkey(tfm, key, keylen); } -static int ecb_encrypt(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, unsigned int nbytes) -{ - struct cipher_desc desc; - struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher; - - desc.tfm = tfm; - desc.crfn = cipher->cia_encrypt; - desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process; - - return crypt(&desc, dst, src, nbytes); -} - -static int ecb_decrypt(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes) -{ - struct cipher_desc desc; - struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher; - - desc.tfm = tfm; - desc.crfn = cipher->cia_decrypt; - desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process; - - return crypt(&desc, dst, src, nbytes); -} - -static int cbc_encrypt(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes) -{ - struct cipher_desc desc; - struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher; - - desc.tfm = tfm; - desc.crfn = cipher->cia_encrypt; - desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt; - desc.info = tfm->crt_cipher.cit_iv; - - return crypt(&desc, dst, src, nbytes); -} - -static int cbc_encrypt_iv(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes, u8 *iv) -{ - struct cipher_desc desc; - struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher; - - desc.tfm = tfm; - desc.crfn = cipher->cia_encrypt; - desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt; - desc.info = iv; - - return crypt_iv_unaligned(&desc, dst, src, nbytes); -} - -static int cbc_decrypt(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes) -{ - struct cipher_desc desc; - struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher; - - desc.tfm = tfm; - desc.crfn = cipher->cia_decrypt; - desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt; - desc.info = tfm->crt_cipher.cit_iv; - - return crypt(&desc, dst, src, nbytes); -} - -static int cbc_decrypt_iv(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes, u8 *iv) -{ - struct cipher_desc desc; - struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher; - - 
desc.tfm = tfm; - desc.crfn = cipher->cia_decrypt; - desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt; - desc.info = iv; - - return crypt_iv_unaligned(&desc, dst, src, nbytes); -} - -static int nocrypt(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes) -{ - return -ENOSYS; -} - -static int nocrypt_iv(struct crypto_tfm *tfm, - struct scatterlist *dst, - struct scatterlist *src, - unsigned int nbytes, u8 *iv) -{ - return -ENOSYS; -} - -int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags) -{ - u32 mode = flags & CRYPTO_TFM_MODE_MASK; - tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB; - return 0; -} - static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *, const u8 *), struct crypto_tfm *tfm, @@ -454,7 +77,6 @@ static void cipher_decrypt_unaligned(struct crypto_tfm *tfm, int crypto_init_cipher_ops(struct crypto_tfm *tfm) { - int ret = 0; struct cipher_tfm *ops = &tfm->crt_cipher; struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; @@ -464,70 +86,7 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm) ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ? cipher_decrypt_unaligned : cipher->cia_decrypt; - switch (tfm->crt_cipher.cit_mode) { - case CRYPTO_TFM_MODE_ECB: - ops->cit_encrypt = ecb_encrypt; - ops->cit_decrypt = ecb_decrypt; - ops->cit_encrypt_iv = nocrypt_iv; - ops->cit_decrypt_iv = nocrypt_iv; - break; - - case CRYPTO_TFM_MODE_CBC: - ops->cit_encrypt = cbc_encrypt; - ops->cit_decrypt = cbc_decrypt; - ops->cit_encrypt_iv = cbc_encrypt_iv; - ops->cit_decrypt_iv = cbc_decrypt_iv; - break; - - case CRYPTO_TFM_MODE_CFB: - ops->cit_encrypt = nocrypt; - ops->cit_decrypt = nocrypt; - ops->cit_encrypt_iv = nocrypt_iv; - ops->cit_decrypt_iv = nocrypt_iv; - break; - - case CRYPTO_TFM_MODE_CTR: - ops->cit_encrypt = nocrypt; - ops->cit_decrypt = nocrypt; - ops->cit_encrypt_iv = nocrypt_iv; - ops->cit_decrypt_iv = nocrypt_iv; - break; - - default: - BUG(); - } - - if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) { - unsigned long align; - unsigned long addr; - - switch (crypto_tfm_alg_blocksize(tfm)) { - case 8: - ops->cit_xor_block = xor_64; - break; - - case 16: - ops->cit_xor_block = xor_128; - break; - - default: - printk(KERN_WARNING "%s: block size %u not supported\n", - crypto_tfm_alg_name(tfm), - crypto_tfm_alg_blocksize(tfm)); - ret = -EINVAL; - goto out; - } - - ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm); - align = crypto_tfm_alg_alignmask(tfm) + 1; - addr = (unsigned long)crypto_tfm_ctx(tfm); - addr = ALIGN(addr, align); - addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align); - ops->cit_iv = (void *)addr; - } - -out: - return ret; + return 0; } void crypto_exit_cipher_ops(struct crypto_tfm *tfm) diff --git a/crypto/compress.c b/crypto/compress.c index eca182aa3380..0a6570048c1e 100644 --- a/crypto/compress.c +++ b/crypto/compress.c @@ -34,11 +34,6 @@ static int crypto_decompress(struct crypto_tfm *tfm, dlen); } -int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags) -{ - return flags ? -EINVAL : 0; -} - int crypto_init_compress_ops(struct crypto_tfm *tfm) { struct compress_tfm *ops = &tfm->crt_compress; diff --git a/crypto/digest.c b/crypto/digest.c index bc47af648cb1..1bf7414aeb9e 100644 --- a/crypto/digest.c +++ b/crypto/digest.c @@ -136,11 +136,6 @@ static int digest(struct hash_desc *desc, return final(desc, out); } -int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags) -{ - return flags ? 
-EINVAL : 0; -} - int crypto_init_digest_ops(struct crypto_tfm *tfm) { struct hash_tfm *ops = &tfm->crt_hash; diff --git a/crypto/internal.h b/crypto/internal.h index 2da6ad4f3593..784a7745315f 100644 --- a/crypto/internal.h +++ b/crypto/internal.h @@ -83,8 +83,7 @@ static inline void crypto_exit_proc(void) { } #endif -static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg, - int flags) +static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg) { unsigned int len = alg->cra_ctxsize; @@ -96,23 +95,12 @@ static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg, return len; } -static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg, - int flags) +static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg) { - unsigned int len = alg->cra_ctxsize; - - switch (flags & CRYPTO_TFM_MODE_MASK) { - case CRYPTO_TFM_MODE_CBC: - len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1); - len += alg->cra_blocksize; - break; - } - - return len; + return alg->cra_ctxsize; } -static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg, - int flags) +static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg) { return alg->cra_ctxsize; } @@ -121,10 +109,6 @@ struct crypto_alg *crypto_mod_get(struct crypto_alg *alg); struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask); struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask); -int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags); -int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags); -int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags); - int crypto_init_digest_ops(struct crypto_tfm *tfm); int crypto_init_cipher_ops(struct crypto_tfm *tfm); int crypto_init_compress_ops(struct crypto_tfm *tfm); @@ -136,7 +120,7 @@ void crypto_exit_compress_ops(struct crypto_tfm *tfm); void crypto_larval_error(const char *name, u32 type, u32 mask); void crypto_shoot_alg(struct crypto_alg *alg); -struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags); +struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg); int crypto_register_instance(struct crypto_template *tmpl, struct crypto_instance *inst); -- cgit v1.2.3 From 2e306ee016fd4750289e65c3b1856db569f1f3f2 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sun, 17 Dec 2006 10:05:58 +1100 Subject: [CRYPTO] api: Add type-safe spawns This patch allows spawns of specific types (e.g., cipher) to be allocated. 
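As a sketch of the resulting template pattern (mirroring the ecb/cbc hunks below; spawn and ctx are assumed from the usual template init context), an init function now asks the spawn for a child of the required type instead of allocating a generic tfm and casting it:

	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);	/* type/mask checked inside crypto_spawn_tfm() */
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);
	ctx->child = cipher;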
Signed-off-by: Herbert Xu --- crypto/algapi.c | 13 +++++++++++-- crypto/cbc.c | 9 +++++---- crypto/ecb.c | 9 +++++---- crypto/hmac.c | 9 +++++---- crypto/lrw.c | 11 ++++++----- crypto/pcbc.c | 9 +++++---- crypto/xcbc.c | 9 +++++---- 7 files changed, 42 insertions(+), 27 deletions(-) (limited to 'crypto') diff --git a/crypto/algapi.c b/crypto/algapi.c index 69eb504721a4..0f1abca1b98c 100644 --- a/crypto/algapi.c +++ b/crypto/algapi.c @@ -377,7 +377,8 @@ void crypto_drop_spawn(struct crypto_spawn *spawn) } EXPORT_SYMBOL_GPL(crypto_drop_spawn); -struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn) +struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, + u32 mask) { struct crypto_alg *alg; struct crypto_alg *alg2; @@ -396,10 +397,18 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn) return ERR_PTR(-EAGAIN); } + tfm = ERR_PTR(-EINVAL); + if (unlikely((alg->cra_flags ^ type) & mask)) + goto out_put_alg; + tfm = __crypto_alloc_tfm(alg); if (IS_ERR(tfm)) - crypto_mod_put(alg); + goto out_put_alg; + + return tfm; +out_put_alg: + crypto_mod_put(alg); return tfm; } EXPORT_SYMBOL_GPL(crypto_spawn_tfm); diff --git a/crypto/cbc.c b/crypto/cbc.c index f5542b4db387..136fea7e7000 100644 --- a/crypto/cbc.c +++ b/crypto/cbc.c @@ -243,6 +243,7 @@ static int crypto_cbc_init_tfm(struct crypto_tfm *tfm) struct crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_spawn *spawn = crypto_instance_ctx(inst); struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; switch (crypto_tfm_alg_blocksize(tfm)) { case 8: @@ -260,11 +261,11 @@ static int crypto_cbc_init_tfm(struct crypto_tfm *tfm) ctx->xor = xor_quad; } - tfm = crypto_spawn_tfm(spawn); - if (IS_ERR(tfm)) - return PTR_ERR(tfm); + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); - ctx->child = crypto_cipher_cast(tfm); + ctx->child = cipher; return 0; } diff --git a/crypto/ecb.c b/crypto/ecb.c index f239aa9c4017..839a0aed8c22 100644 --- a/crypto/ecb.c +++ b/crypto/ecb.c @@ -99,12 +99,13 @@ static int crypto_ecb_init_tfm(struct crypto_tfm *tfm) struct crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_spawn *spawn = crypto_instance_ctx(inst); struct crypto_ecb_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; - tfm = crypto_spawn_tfm(spawn); - if (IS_ERR(tfm)) - return PTR_ERR(tfm); + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); - ctx->child = crypto_cipher_cast(tfm); + ctx->child = cipher; return 0; } diff --git a/crypto/hmac.c b/crypto/hmac.c index b521bcd2b2c6..44187c5ee593 100644 --- a/crypto/hmac.c +++ b/crypto/hmac.c @@ -172,15 +172,16 @@ static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg, static int hmac_init_tfm(struct crypto_tfm *tfm) { + struct crypto_hash *hash; struct crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_spawn *spawn = crypto_instance_ctx(inst); struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm)); - tfm = crypto_spawn_tfm(spawn); - if (IS_ERR(tfm)) - return PTR_ERR(tfm); + hash = crypto_spawn_hash(spawn); + if (IS_ERR(hash)) + return PTR_ERR(hash); - ctx->child = crypto_hash_cast(tfm); + ctx->child = hash; return 0; } diff --git a/crypto/lrw.c b/crypto/lrw.c index 56642586d84f..b4105080ac7a 100644 --- a/crypto/lrw.c +++ b/crypto/lrw.c @@ -201,21 +201,22 @@ static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, static int init_tfm(struct crypto_tfm *tfm) { + struct crypto_cipher *cipher; struct 
crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_spawn *spawn = crypto_instance_ctx(inst); struct priv *ctx = crypto_tfm_ctx(tfm); u32 *flags = &tfm->crt_flags; - tfm = crypto_spawn_tfm(spawn); - if (IS_ERR(tfm)) - return PTR_ERR(tfm); + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); - if (crypto_tfm_alg_blocksize(tfm) != 16) { + if (crypto_cipher_blocksize(cipher) != 16) { *flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN; return -EINVAL; } - ctx->child = crypto_cipher_cast(tfm); + ctx->child = cipher; return 0; } diff --git a/crypto/pcbc.c b/crypto/pcbc.c index 0ffb46eb259f..5174d7fdad6e 100644 --- a/crypto/pcbc.c +++ b/crypto/pcbc.c @@ -247,6 +247,7 @@ static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm) struct crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_spawn *spawn = crypto_instance_ctx(inst); struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; switch (crypto_tfm_alg_blocksize(tfm)) { case 8: @@ -264,11 +265,11 @@ static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm) ctx->xor = xor_quad; } - tfm = crypto_spawn_tfm(spawn); - if (IS_ERR(tfm)) - return PTR_ERR(tfm); + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); - ctx->child = crypto_cipher_cast(tfm); + ctx->child = cipher; return 0; } diff --git a/crypto/xcbc.c b/crypto/xcbc.c index 317e9f08fc04..d7b4be0a057d 100644 --- a/crypto/xcbc.c +++ b/crypto/xcbc.c @@ -254,14 +254,15 @@ static int crypto_xcbc_digest(struct hash_desc *pdesc, static int xcbc_init_tfm(struct crypto_tfm *tfm) { + struct crypto_cipher *cipher; struct crypto_instance *inst = (void *)tfm->__crt_alg; struct crypto_spawn *spawn = crypto_instance_ctx(inst); struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm)); int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm)); - tfm = crypto_spawn_tfm(spawn); - if (IS_ERR(tfm)) - return PTR_ERR(tfm); + cipher = crypto_spawn_cipher(spawn); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); switch(bs) { case 16: @@ -271,7 +272,7 @@ static int xcbc_init_tfm(struct crypto_tfm *tfm) return -EINVAL; } - ctx->child = crypto_cipher_cast(tfm); + ctx->child = cipher; ctx->odds = (u8*)(ctx+1); ctx->prev = ctx->odds + bs; ctx->key = ctx->prev + bs; -- cgit v1.2.3 From 27d2a3300755387d2fec231d37944907ff992ce8 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Wed, 24 Jan 2007 20:50:26 +1100 Subject: [CRYPTO] api: Allow multiple frontends per backend This patch adds support for multiple frontend types for each backend algorithm by passing the type and mask through to the backend type init function. 
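Concretely, the crypto_type callbacks now receive the requested type and mask, so one backend algorithm can decide which frontend ops to expose. A minimal sketch with hypothetical names (example_*); the real signatures are in the blkcipher and hash hunks below:

	static unsigned int example_ctxsize(struct crypto_alg *alg,
					    u32 type, u32 mask)
	{
		return alg->cra_ctxsize;
	}

	static int example_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
	{
		/* choose which frontend ops to install based on (type, mask) */
		return 0;
	}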
Signed-off-by: Herbert Xu --- crypto/algapi.c | 2 +- crypto/api.c | 25 +++++++++++++------------ crypto/blkcipher.c | 5 +++-- crypto/hash.c | 5 +++-- crypto/internal.h | 3 ++- 5 files changed, 22 insertions(+), 18 deletions(-) (limited to 'crypto') diff --git a/crypto/algapi.c b/crypto/algapi.c index 0f1abca1b98c..f7d2185b2c8f 100644 --- a/crypto/algapi.c +++ b/crypto/algapi.c @@ -401,7 +401,7 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, if (unlikely((alg->cra_flags ^ type) & mask)) goto out_put_alg; - tfm = __crypto_alloc_tfm(alg); + tfm = __crypto_alloc_tfm(alg, type, mask); if (IS_ERR(tfm)) goto out_put_alg; diff --git a/crypto/api.c b/crypto/api.c index 8b80baec853a..55af8bb0f050 100644 --- a/crypto/api.c +++ b/crypto/api.c @@ -212,12 +212,12 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) } EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup); -static int crypto_init_ops(struct crypto_tfm *tfm) +static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask) { - const struct crypto_type *type = tfm->__crt_alg->cra_type; + const struct crypto_type *type_obj = tfm->__crt_alg->cra_type; - if (type) - return type->init(tfm); + if (type_obj) + return type_obj->init(tfm, type, mask); switch (crypto_tfm_alg_type(tfm)) { case CRYPTO_ALG_TYPE_CIPHER: @@ -266,14 +266,14 @@ static void crypto_exit_ops(struct crypto_tfm *tfm) } } -static unsigned int crypto_ctxsize(struct crypto_alg *alg) +static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask) { - const struct crypto_type *type = alg->cra_type; + const struct crypto_type *type_obj = alg->cra_type; unsigned int len; len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1); - if (type) - return len + type->ctxsize(alg); + if (type_obj) + return len + type_obj->ctxsize(alg, type, mask); switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { default: @@ -303,20 +303,21 @@ void crypto_shoot_alg(struct crypto_alg *alg) } EXPORT_SYMBOL_GPL(crypto_shoot_alg); -struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg) +struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, + u32 mask) { struct crypto_tfm *tfm = NULL; unsigned int tfm_size; int err = -ENOMEM; - tfm_size = sizeof(*tfm) + crypto_ctxsize(alg); + tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask); tfm = kzalloc(tfm_size, GFP_KERNEL); if (tfm == NULL) goto out_err; tfm->__crt_alg = alg; - err = crypto_init_ops(tfm); + err = crypto_init_ops(tfm, type, mask); if (err) goto out_free_tfm; @@ -372,7 +373,7 @@ struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask) goto err; } - tfm = __crypto_alloc_tfm(alg); + tfm = __crypto_alloc_tfm(alg, type, mask); if (!IS_ERR(tfm)) return tfm; diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c index cbb4c4e5c229..b5befe8c3a96 100644 --- a/crypto/blkcipher.c +++ b/crypto/blkcipher.c @@ -349,7 +349,8 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, return cipher->setkey(tfm, key, keylen); } -static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg) +static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg, u32 type, + u32 mask) { struct blkcipher_alg *cipher = &alg->cra_blkcipher; unsigned int len = alg->cra_ctxsize; @@ -362,7 +363,7 @@ static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg) return len; } -static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm) +static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask) { struct blkcipher_tfm *crt = 
&tfm->crt_blkcipher; struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; diff --git a/crypto/hash.c b/crypto/hash.c index cdec23d885fe..12c4514f3478 100644 --- a/crypto/hash.c +++ b/crypto/hash.c @@ -16,12 +16,13 @@ #include "internal.h" -static unsigned int crypto_hash_ctxsize(struct crypto_alg *alg) +static unsigned int crypto_hash_ctxsize(struct crypto_alg *alg, u32 type, + u32 mask) { return alg->cra_ctxsize; } -static int crypto_init_hash_ops(struct crypto_tfm *tfm) +static int crypto_init_hash_ops(struct crypto_tfm *tfm, u32 type, u32 mask) { struct hash_tfm *crt = &tfm->crt_hash; struct hash_alg *alg = &tfm->__crt_alg->cra_hash; diff --git a/crypto/internal.h b/crypto/internal.h index 784a7745315f..60acad9788c5 100644 --- a/crypto/internal.h +++ b/crypto/internal.h @@ -120,7 +120,8 @@ void crypto_exit_compress_ops(struct crypto_tfm *tfm); void crypto_larval_error(const char *name, u32 type, u32 mask); void crypto_shoot_alg(struct crypto_alg *alg); -struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg); +struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, + u32 mask); int crypto_register_instance(struct crypto_template *tmpl, struct crypto_instance *inst); -- cgit v1.2.3 From 6b701dde8e0584f3bf0b6857d0e92f7ed15ed6f9 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Sun, 24 Dec 2006 09:59:42 +1100 Subject: [CRYPTO] xcbc: Use new cipher interface This patch changes xcbc to use the new cipher encryt_one interface. Signed-off-by: Herbert Xu --- crypto/xcbc.c | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) (limited to 'crypto') diff --git a/crypto/xcbc.c b/crypto/xcbc.c index d7b4be0a057d..53e8ccbf0f5f 100644 --- a/crypto/xcbc.c +++ b/crypto/xcbc.c @@ -48,7 +48,7 @@ static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101, * +------------------------ */ struct crypto_xcbc_ctx { - struct crypto_tfm *child; + struct crypto_cipher *child; u8 *odds; u8 *prev; u8 *key; @@ -76,8 +76,7 @@ static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent, if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen))) return err; - ctx->child->__crt_alg->cra_cipher.cia_encrypt(ctx->child, key1, - ctx->consts); + crypto_cipher_encrypt_one(ctx->child, key1, ctx->consts); return crypto_cipher_setkey(ctx->child, key1, bs); } @@ -87,7 +86,7 @@ static int crypto_xcbc_digest_setkey(struct crypto_hash *parent, { struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent); - if (keylen != crypto_tfm_alg_blocksize(ctx->child)) + if (keylen != crypto_cipher_blocksize(ctx->child)) return -EINVAL; ctx->keylen = keylen; @@ -115,7 +114,7 @@ static int crypto_xcbc_digest_update2(struct hash_desc *pdesc, { struct crypto_hash *parent = pdesc->tfm; struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent); - struct crypto_tfm *tfm = ctx->child; + struct crypto_cipher *tfm = ctx->child; int bs = crypto_hash_blocksize(parent); unsigned int i = 0; @@ -143,7 +142,7 @@ static int crypto_xcbc_digest_update2(struct hash_desc *pdesc, offset += len; crypto_kunmap(p, 0); - crypto_yield(tfm->crt_flags); + crypto_yield(pdesc->flags); continue; } @@ -153,7 +152,7 @@ static int crypto_xcbc_digest_update2(struct hash_desc *pdesc, p += bs - ctx->len; ctx->xor(ctx->prev, ctx->odds, bs); - tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev); + crypto_cipher_encrypt_one(tfm, ctx->prev, ctx->prev); /* clearing the length */ ctx->len = 0; @@ -161,7 +160,8 @@ static int crypto_xcbc_digest_update2(struct hash_desc *pdesc, /* 
encrypting the rest of data */ while (len > bs) { ctx->xor(ctx->prev, p, bs); - tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev); + crypto_cipher_encrypt_one(tfm, ctx->prev, + ctx->prev); p += bs; len -= bs; } @@ -172,7 +172,7 @@ static int crypto_xcbc_digest_update2(struct hash_desc *pdesc, ctx->len = len; } crypto_kunmap(p, 0); - crypto_yield(tfm->crt_flags); + crypto_yield(pdesc->flags); slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset); offset = 0; pg++; @@ -197,7 +197,7 @@ static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out) { struct crypto_hash *parent = pdesc->tfm; struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent); - struct crypto_tfm *tfm = ctx->child; + struct crypto_cipher *tfm = ctx->child; int bs = crypto_hash_blocksize(parent); int err = 0; @@ -207,13 +207,14 @@ static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out) if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0) return err; - tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key2, (const u8*)(ctx->consts+bs)); + crypto_cipher_encrypt_one(tfm, key2, + (u8 *)(ctx->consts + bs)); ctx->xor(ctx->prev, ctx->odds, bs); ctx->xor(ctx->prev, key2, bs); _crypto_xcbc_digest_setkey(parent, ctx); - tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev); + crypto_cipher_encrypt_one(tfm, out, ctx->prev); } else { u8 key3[bs]; unsigned int rlen; @@ -228,14 +229,15 @@ static int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out) if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0) return err; - tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key3, (const u8*)(ctx->consts+bs*2)); + crypto_cipher_encrypt_one(tfm, key3, + (u8 *)(ctx->consts + bs * 2)); ctx->xor(ctx->prev, ctx->odds, bs); ctx->xor(ctx->prev, key3, bs); _crypto_xcbc_digest_setkey(parent, ctx); - tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev); + crypto_cipher_encrypt_one(tfm, out, ctx->prev); } return 0; -- cgit v1.2.3 From 04ac7db3f23d98abe5d3c91d21b0e45fc09e74ea Mon Sep 17 00:00:00 2001 From: Noriaki TAKAMIYA Date: Sun, 22 Oct 2006 14:49:17 +1000 Subject: [CRYPTO] camellia: Add Kconfig entry. This patch adds the Kconfig entry for Camellia. Signed-off-by: Noriaki TAKAMIYA Signed-off-by: Herbert Xu --- crypto/Kconfig | 15 +++++++++++++++ 1 file changed, 15 insertions(+) (limited to 'crypto') diff --git a/crypto/Kconfig b/crypto/Kconfig index e2e1eb13827e..c2a85bd922b9 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -474,6 +474,21 @@ config CRYPTO_CRC32C See Castagnoli93. This implementation uses lib/libcrc32c. Module will be crc32c. +config CRYPTO_CAMELLIA + tristate "Camellia cipher algorithms" + depends on CRYPTO + select CRYPTO_ALGAPI + help + Camellia cipher algorithms module. + + Camellia is a symmetric key block cipher developed jointly + at NTT and Mitsubishi Electric Corporation. + + The Camellia specifies three key sizes: 128, 192 and 256 bits. + + See also: + + config CRYPTO_TEST tristate "Testing module" depends on m -- cgit v1.2.3 From d64beac050914de6fe6565741b39905ecd5994b7 Mon Sep 17 00:00:00 2001 From: Noriaki TAKAMIYA Date: Wed, 24 Jan 2007 21:47:48 +1100 Subject: [CRYPTO] camellia: added the code of Camellia cipher algorithm. This patch adds the main code of Camellia cipher algorithm. 
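For reference, a minimal, hypothetical caller sketch of reaching the registered cipher through the standard single-block API (16-byte blocks, 128/192/256-bit keys); setkey error handling is omitted here:

	struct crypto_cipher *tfm;
	u8 key[16], in[16], out[16];	/* block size is 16 bytes */

	tfm = crypto_alloc_cipher("camellia", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	crypto_cipher_setkey(tfm, key, sizeof(key));	/* check the return value in real code */
	crypto_cipher_encrypt_one(tfm, out, in);
	crypto_free_cipher(tfm);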
Signed-off-by: Noriaki TAKAMIYA Signed-off-by: Herbert Xu --- crypto/Makefile | 1 + crypto/camellia.c | 1801 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1802 insertions(+) create mode 100644 crypto/camellia.c (limited to 'crypto') diff --git a/crypto/Makefile b/crypto/Makefile index a3e1915ef01a..12f93f578171 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -36,6 +36,7 @@ obj-$(CONFIG_CRYPTO_TWOFISH) += twofish.o obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o obj-$(CONFIG_CRYPTO_SERPENT) += serpent.o obj-$(CONFIG_CRYPTO_AES) += aes.o +obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia.o obj-$(CONFIG_CRYPTO_CAST5) += cast5.o obj-$(CONFIG_CRYPTO_CAST6) += cast6.o obj-$(CONFIG_CRYPTO_ARC4) += arc4.o diff --git a/crypto/camellia.c b/crypto/camellia.c new file mode 100644 index 000000000000..6877ecfd90bb --- /dev/null +++ b/crypto/camellia.c @@ -0,0 +1,1801 @@ +/* + * Copyright (C) 2006 + * NTT (Nippon Telegraph and Telephone Corporation). + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. + */ + +/* + * Algorithm Specification + * http://info.isl.ntt.co.jp/crypt/eng/camellia/specifications.html + */ + +/* + * + * NOTE --- NOTE --- NOTE --- NOTE + * This implementation assumes that all memory addresses passed + * as parameters are four-byte aligned. 
+ * + */ + +#include +#include +#include +#include +#include + + +#define CAMELLIA_MIN_KEY_SIZE 16 +#define CAMELLIA_MAX_KEY_SIZE 32 +#define CAMELLIA_BLOCK_SIZE 16 +#define CAMELLIA_TABLE_BYTE_LEN 272 +#define CAMELLIA_TABLE_WORD_LEN (CAMELLIA_TABLE_BYTE_LEN / 4) + +typedef u32 KEY_TABLE_TYPE[CAMELLIA_TABLE_WORD_LEN]; + + +/* key constants */ + +#define CAMELLIA_SIGMA1L (0xA09E667FL) +#define CAMELLIA_SIGMA1R (0x3BCC908BL) +#define CAMELLIA_SIGMA2L (0xB67AE858L) +#define CAMELLIA_SIGMA2R (0x4CAA73B2L) +#define CAMELLIA_SIGMA3L (0xC6EF372FL) +#define CAMELLIA_SIGMA3R (0xE94F82BEL) +#define CAMELLIA_SIGMA4L (0x54FF53A5L) +#define CAMELLIA_SIGMA4R (0xF1D36F1CL) +#define CAMELLIA_SIGMA5L (0x10E527FAL) +#define CAMELLIA_SIGMA5R (0xDE682D1DL) +#define CAMELLIA_SIGMA6L (0xB05688C2L) +#define CAMELLIA_SIGMA6R (0xB3E6C1FDL) + +struct camellia_ctx { + int key_length; + KEY_TABLE_TYPE key_table; +}; + + +/* + * macros + */ + + +# define GETU32(pt) (((u32)(pt)[0] << 24) \ + ^ ((u32)(pt)[1] << 16) \ + ^ ((u32)(pt)[2] << 8) \ + ^ ((u32)(pt)[3])) + +#define COPY4WORD(dst, src) \ + do { \ + (dst)[0]=(src)[0]; \ + (dst)[1]=(src)[1]; \ + (dst)[2]=(src)[2]; \ + (dst)[3]=(src)[3]; \ + }while(0) + +#define SWAP4WORD(word) \ + do { \ + CAMELLIA_SWAP4((word)[0]); \ + CAMELLIA_SWAP4((word)[1]); \ + CAMELLIA_SWAP4((word)[2]); \ + CAMELLIA_SWAP4((word)[3]); \ + }while(0) + +#define XOR4WORD(a, b)/* a = a ^ b */ \ + do { \ + (a)[0]^=(b)[0]; \ + (a)[1]^=(b)[1]; \ + (a)[2]^=(b)[2]; \ + (a)[3]^=(b)[3]; \ + }while(0) + +#define XOR4WORD2(a, b, c)/* a = b ^ c */ \ + do { \ + (a)[0]=(b)[0]^(c)[0]; \ + (a)[1]=(b)[1]^(c)[1]; \ + (a)[2]=(b)[2]^(c)[2]; \ + (a)[3]=(b)[3]^(c)[3]; \ + }while(0) + +#define CAMELLIA_SUBKEY_L(INDEX) (subkey[(INDEX)*2]) +#define CAMELLIA_SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1]) + +/* rotation right shift 1byte */ +#define CAMELLIA_RR8(x) (((x) >> 8) + ((x) << 24)) +/* rotation left shift 1bit */ +#define CAMELLIA_RL1(x) (((x) << 1) + ((x) >> 31)) +/* rotation left shift 1byte */ +#define CAMELLIA_RL8(x) (((x) << 8) + ((x) >> 24)) + +#define CAMELLIA_ROLDQ(ll, lr, rl, rr, w0, w1, bits) \ + do { \ + w0 = ll; \ + ll = (ll << bits) + (lr >> (32 - bits)); \ + lr = (lr << bits) + (rl >> (32 - bits)); \ + rl = (rl << bits) + (rr >> (32 - bits)); \ + rr = (rr << bits) + (w0 >> (32 - bits)); \ + } while(0) + +#define CAMELLIA_ROLDQo32(ll, lr, rl, rr, w0, w1, bits) \ + do { \ + w0 = ll; \ + w1 = lr; \ + ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \ + lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \ + rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \ + rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \ + } while(0) + +#define CAMELLIA_SP1110(INDEX) (camellia_sp1110[(INDEX)]) +#define CAMELLIA_SP0222(INDEX) (camellia_sp0222[(INDEX)]) +#define CAMELLIA_SP3033(INDEX) (camellia_sp3033[(INDEX)]) +#define CAMELLIA_SP4404(INDEX) (camellia_sp4404[(INDEX)]) + +#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \ + do { \ + il = xl ^ kl; \ + ir = xr ^ kr; \ + t0 = il >> 16; \ + t1 = ir >> 16; \ + yl = CAMELLIA_SP1110(ir & 0xff) \ + ^ CAMELLIA_SP0222((t1 >> 8) & 0xff) \ + ^ CAMELLIA_SP3033(t1 & 0xff) \ + ^ CAMELLIA_SP4404((ir >> 8) & 0xff); \ + yr = CAMELLIA_SP1110((t0 >> 8) & 0xff) \ + ^ CAMELLIA_SP0222(t0 & 0xff) \ + ^ CAMELLIA_SP3033((il >> 8) & 0xff) \ + ^ CAMELLIA_SP4404(il & 0xff); \ + yl ^= yr; \ + yr = CAMELLIA_RR8(yr); \ + yr ^= yl; \ + } while(0) + + +/* + * for speed up + * + */ +#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) \ + do { \ + t0 = kll; \ + t2 = krr; \ + t0 &= 
ll; \ + t2 |= rr; \ + rl ^= t2; \ + lr ^= CAMELLIA_RL1(t0); \ + t3 = krl; \ + t1 = klr; \ + t3 &= rl; \ + t1 |= lr; \ + ll ^= t1; \ + rr ^= CAMELLIA_RL1(t3); \ + } while(0) + +#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \ + do { \ + ir = CAMELLIA_SP1110(xr & 0xff); \ + il = CAMELLIA_SP1110((xl>>24) & 0xff); \ + ir ^= CAMELLIA_SP0222((xr>>24) & 0xff); \ + il ^= CAMELLIA_SP0222((xl>>16) & 0xff); \ + ir ^= CAMELLIA_SP3033((xr>>16) & 0xff); \ + il ^= CAMELLIA_SP3033((xl>>8) & 0xff); \ + ir ^= CAMELLIA_SP4404((xr>>8) & 0xff); \ + il ^= CAMELLIA_SP4404(xl & 0xff); \ + il ^= kl; \ + ir ^= il ^ kr; \ + yl ^= ir; \ + yr ^= CAMELLIA_RR8(il) ^ ir; \ + } while(0) + +/** + * Stuff related to the Camellia key schedule + */ +#define SUBL(x) subL[(x)] +#define SUBR(x) subR[(x)] + + +static const u32 camellia_sp1110[256] = { + 0x70707000,0x82828200,0x2c2c2c00,0xececec00, + 0xb3b3b300,0x27272700,0xc0c0c000,0xe5e5e500, + 0xe4e4e400,0x85858500,0x57575700,0x35353500, + 0xeaeaea00,0x0c0c0c00,0xaeaeae00,0x41414100, + 0x23232300,0xefefef00,0x6b6b6b00,0x93939300, + 0x45454500,0x19191900,0xa5a5a500,0x21212100, + 0xededed00,0x0e0e0e00,0x4f4f4f00,0x4e4e4e00, + 0x1d1d1d00,0x65656500,0x92929200,0xbdbdbd00, + 0x86868600,0xb8b8b800,0xafafaf00,0x8f8f8f00, + 0x7c7c7c00,0xebebeb00,0x1f1f1f00,0xcecece00, + 0x3e3e3e00,0x30303000,0xdcdcdc00,0x5f5f5f00, + 0x5e5e5e00,0xc5c5c500,0x0b0b0b00,0x1a1a1a00, + 0xa6a6a600,0xe1e1e100,0x39393900,0xcacaca00, + 0xd5d5d500,0x47474700,0x5d5d5d00,0x3d3d3d00, + 0xd9d9d900,0x01010100,0x5a5a5a00,0xd6d6d600, + 0x51515100,0x56565600,0x6c6c6c00,0x4d4d4d00, + 0x8b8b8b00,0x0d0d0d00,0x9a9a9a00,0x66666600, + 0xfbfbfb00,0xcccccc00,0xb0b0b000,0x2d2d2d00, + 0x74747400,0x12121200,0x2b2b2b00,0x20202000, + 0xf0f0f000,0xb1b1b100,0x84848400,0x99999900, + 0xdfdfdf00,0x4c4c4c00,0xcbcbcb00,0xc2c2c200, + 0x34343400,0x7e7e7e00,0x76767600,0x05050500, + 0x6d6d6d00,0xb7b7b700,0xa9a9a900,0x31313100, + 0xd1d1d100,0x17171700,0x04040400,0xd7d7d700, + 0x14141400,0x58585800,0x3a3a3a00,0x61616100, + 0xdedede00,0x1b1b1b00,0x11111100,0x1c1c1c00, + 0x32323200,0x0f0f0f00,0x9c9c9c00,0x16161600, + 0x53535300,0x18181800,0xf2f2f200,0x22222200, + 0xfefefe00,0x44444400,0xcfcfcf00,0xb2b2b200, + 0xc3c3c300,0xb5b5b500,0x7a7a7a00,0x91919100, + 0x24242400,0x08080800,0xe8e8e800,0xa8a8a800, + 0x60606000,0xfcfcfc00,0x69696900,0x50505000, + 0xaaaaaa00,0xd0d0d000,0xa0a0a000,0x7d7d7d00, + 0xa1a1a100,0x89898900,0x62626200,0x97979700, + 0x54545400,0x5b5b5b00,0x1e1e1e00,0x95959500, + 0xe0e0e000,0xffffff00,0x64646400,0xd2d2d200, + 0x10101000,0xc4c4c400,0x00000000,0x48484800, + 0xa3a3a300,0xf7f7f700,0x75757500,0xdbdbdb00, + 0x8a8a8a00,0x03030300,0xe6e6e600,0xdadada00, + 0x09090900,0x3f3f3f00,0xdddddd00,0x94949400, + 0x87878700,0x5c5c5c00,0x83838300,0x02020200, + 0xcdcdcd00,0x4a4a4a00,0x90909000,0x33333300, + 0x73737300,0x67676700,0xf6f6f600,0xf3f3f300, + 0x9d9d9d00,0x7f7f7f00,0xbfbfbf00,0xe2e2e200, + 0x52525200,0x9b9b9b00,0xd8d8d800,0x26262600, + 0xc8c8c800,0x37373700,0xc6c6c600,0x3b3b3b00, + 0x81818100,0x96969600,0x6f6f6f00,0x4b4b4b00, + 0x13131300,0xbebebe00,0x63636300,0x2e2e2e00, + 0xe9e9e900,0x79797900,0xa7a7a700,0x8c8c8c00, + 0x9f9f9f00,0x6e6e6e00,0xbcbcbc00,0x8e8e8e00, + 0x29292900,0xf5f5f500,0xf9f9f900,0xb6b6b600, + 0x2f2f2f00,0xfdfdfd00,0xb4b4b400,0x59595900, + 0x78787800,0x98989800,0x06060600,0x6a6a6a00, + 0xe7e7e700,0x46464600,0x71717100,0xbababa00, + 0xd4d4d400,0x25252500,0xababab00,0x42424200, + 0x88888800,0xa2a2a200,0x8d8d8d00,0xfafafa00, + 0x72727200,0x07070700,0xb9b9b900,0x55555500, + 
0xf8f8f800,0xeeeeee00,0xacacac00,0x0a0a0a00, + 0x36363600,0x49494900,0x2a2a2a00,0x68686800, + 0x3c3c3c00,0x38383800,0xf1f1f100,0xa4a4a400, + 0x40404000,0x28282800,0xd3d3d300,0x7b7b7b00, + 0xbbbbbb00,0xc9c9c900,0x43434300,0xc1c1c100, + 0x15151500,0xe3e3e300,0xadadad00,0xf4f4f400, + 0x77777700,0xc7c7c700,0x80808000,0x9e9e9e00, +}; + +static const u32 camellia_sp0222[256] = { + 0x00e0e0e0,0x00050505,0x00585858,0x00d9d9d9, + 0x00676767,0x004e4e4e,0x00818181,0x00cbcbcb, + 0x00c9c9c9,0x000b0b0b,0x00aeaeae,0x006a6a6a, + 0x00d5d5d5,0x00181818,0x005d5d5d,0x00828282, + 0x00464646,0x00dfdfdf,0x00d6d6d6,0x00272727, + 0x008a8a8a,0x00323232,0x004b4b4b,0x00424242, + 0x00dbdbdb,0x001c1c1c,0x009e9e9e,0x009c9c9c, + 0x003a3a3a,0x00cacaca,0x00252525,0x007b7b7b, + 0x000d0d0d,0x00717171,0x005f5f5f,0x001f1f1f, + 0x00f8f8f8,0x00d7d7d7,0x003e3e3e,0x009d9d9d, + 0x007c7c7c,0x00606060,0x00b9b9b9,0x00bebebe, + 0x00bcbcbc,0x008b8b8b,0x00161616,0x00343434, + 0x004d4d4d,0x00c3c3c3,0x00727272,0x00959595, + 0x00ababab,0x008e8e8e,0x00bababa,0x007a7a7a, + 0x00b3b3b3,0x00020202,0x00b4b4b4,0x00adadad, + 0x00a2a2a2,0x00acacac,0x00d8d8d8,0x009a9a9a, + 0x00171717,0x001a1a1a,0x00353535,0x00cccccc, + 0x00f7f7f7,0x00999999,0x00616161,0x005a5a5a, + 0x00e8e8e8,0x00242424,0x00565656,0x00404040, + 0x00e1e1e1,0x00636363,0x00090909,0x00333333, + 0x00bfbfbf,0x00989898,0x00979797,0x00858585, + 0x00686868,0x00fcfcfc,0x00ececec,0x000a0a0a, + 0x00dadada,0x006f6f6f,0x00535353,0x00626262, + 0x00a3a3a3,0x002e2e2e,0x00080808,0x00afafaf, + 0x00282828,0x00b0b0b0,0x00747474,0x00c2c2c2, + 0x00bdbdbd,0x00363636,0x00222222,0x00383838, + 0x00646464,0x001e1e1e,0x00393939,0x002c2c2c, + 0x00a6a6a6,0x00303030,0x00e5e5e5,0x00444444, + 0x00fdfdfd,0x00888888,0x009f9f9f,0x00656565, + 0x00878787,0x006b6b6b,0x00f4f4f4,0x00232323, + 0x00484848,0x00101010,0x00d1d1d1,0x00515151, + 0x00c0c0c0,0x00f9f9f9,0x00d2d2d2,0x00a0a0a0, + 0x00555555,0x00a1a1a1,0x00414141,0x00fafafa, + 0x00434343,0x00131313,0x00c4c4c4,0x002f2f2f, + 0x00a8a8a8,0x00b6b6b6,0x003c3c3c,0x002b2b2b, + 0x00c1c1c1,0x00ffffff,0x00c8c8c8,0x00a5a5a5, + 0x00202020,0x00898989,0x00000000,0x00909090, + 0x00474747,0x00efefef,0x00eaeaea,0x00b7b7b7, + 0x00151515,0x00060606,0x00cdcdcd,0x00b5b5b5, + 0x00121212,0x007e7e7e,0x00bbbbbb,0x00292929, + 0x000f0f0f,0x00b8b8b8,0x00070707,0x00040404, + 0x009b9b9b,0x00949494,0x00212121,0x00666666, + 0x00e6e6e6,0x00cecece,0x00ededed,0x00e7e7e7, + 0x003b3b3b,0x00fefefe,0x007f7f7f,0x00c5c5c5, + 0x00a4a4a4,0x00373737,0x00b1b1b1,0x004c4c4c, + 0x00919191,0x006e6e6e,0x008d8d8d,0x00767676, + 0x00030303,0x002d2d2d,0x00dedede,0x00969696, + 0x00262626,0x007d7d7d,0x00c6c6c6,0x005c5c5c, + 0x00d3d3d3,0x00f2f2f2,0x004f4f4f,0x00191919, + 0x003f3f3f,0x00dcdcdc,0x00797979,0x001d1d1d, + 0x00525252,0x00ebebeb,0x00f3f3f3,0x006d6d6d, + 0x005e5e5e,0x00fbfbfb,0x00696969,0x00b2b2b2, + 0x00f0f0f0,0x00313131,0x000c0c0c,0x00d4d4d4, + 0x00cfcfcf,0x008c8c8c,0x00e2e2e2,0x00757575, + 0x00a9a9a9,0x004a4a4a,0x00575757,0x00848484, + 0x00111111,0x00454545,0x001b1b1b,0x00f5f5f5, + 0x00e4e4e4,0x000e0e0e,0x00737373,0x00aaaaaa, + 0x00f1f1f1,0x00dddddd,0x00595959,0x00141414, + 0x006c6c6c,0x00929292,0x00545454,0x00d0d0d0, + 0x00787878,0x00707070,0x00e3e3e3,0x00494949, + 0x00808080,0x00505050,0x00a7a7a7,0x00f6f6f6, + 0x00777777,0x00939393,0x00868686,0x00838383, + 0x002a2a2a,0x00c7c7c7,0x005b5b5b,0x00e9e9e9, + 0x00eeeeee,0x008f8f8f,0x00010101,0x003d3d3d, +}; + +static const u32 camellia_sp3033[256] = { + 0x38003838,0x41004141,0x16001616,0x76007676, + 0xd900d9d9,0x93009393,0x60006060,0xf200f2f2, + 
0x72007272,0xc200c2c2,0xab00abab,0x9a009a9a, + 0x75007575,0x06000606,0x57005757,0xa000a0a0, + 0x91009191,0xf700f7f7,0xb500b5b5,0xc900c9c9, + 0xa200a2a2,0x8c008c8c,0xd200d2d2,0x90009090, + 0xf600f6f6,0x07000707,0xa700a7a7,0x27002727, + 0x8e008e8e,0xb200b2b2,0x49004949,0xde00dede, + 0x43004343,0x5c005c5c,0xd700d7d7,0xc700c7c7, + 0x3e003e3e,0xf500f5f5,0x8f008f8f,0x67006767, + 0x1f001f1f,0x18001818,0x6e006e6e,0xaf00afaf, + 0x2f002f2f,0xe200e2e2,0x85008585,0x0d000d0d, + 0x53005353,0xf000f0f0,0x9c009c9c,0x65006565, + 0xea00eaea,0xa300a3a3,0xae00aeae,0x9e009e9e, + 0xec00ecec,0x80008080,0x2d002d2d,0x6b006b6b, + 0xa800a8a8,0x2b002b2b,0x36003636,0xa600a6a6, + 0xc500c5c5,0x86008686,0x4d004d4d,0x33003333, + 0xfd00fdfd,0x66006666,0x58005858,0x96009696, + 0x3a003a3a,0x09000909,0x95009595,0x10001010, + 0x78007878,0xd800d8d8,0x42004242,0xcc00cccc, + 0xef00efef,0x26002626,0xe500e5e5,0x61006161, + 0x1a001a1a,0x3f003f3f,0x3b003b3b,0x82008282, + 0xb600b6b6,0xdb00dbdb,0xd400d4d4,0x98009898, + 0xe800e8e8,0x8b008b8b,0x02000202,0xeb00ebeb, + 0x0a000a0a,0x2c002c2c,0x1d001d1d,0xb000b0b0, + 0x6f006f6f,0x8d008d8d,0x88008888,0x0e000e0e, + 0x19001919,0x87008787,0x4e004e4e,0x0b000b0b, + 0xa900a9a9,0x0c000c0c,0x79007979,0x11001111, + 0x7f007f7f,0x22002222,0xe700e7e7,0x59005959, + 0xe100e1e1,0xda00dada,0x3d003d3d,0xc800c8c8, + 0x12001212,0x04000404,0x74007474,0x54005454, + 0x30003030,0x7e007e7e,0xb400b4b4,0x28002828, + 0x55005555,0x68006868,0x50005050,0xbe00bebe, + 0xd000d0d0,0xc400c4c4,0x31003131,0xcb00cbcb, + 0x2a002a2a,0xad00adad,0x0f000f0f,0xca00caca, + 0x70007070,0xff00ffff,0x32003232,0x69006969, + 0x08000808,0x62006262,0x00000000,0x24002424, + 0xd100d1d1,0xfb00fbfb,0xba00baba,0xed00eded, + 0x45004545,0x81008181,0x73007373,0x6d006d6d, + 0x84008484,0x9f009f9f,0xee00eeee,0x4a004a4a, + 0xc300c3c3,0x2e002e2e,0xc100c1c1,0x01000101, + 0xe600e6e6,0x25002525,0x48004848,0x99009999, + 0xb900b9b9,0xb300b3b3,0x7b007b7b,0xf900f9f9, + 0xce00cece,0xbf00bfbf,0xdf00dfdf,0x71007171, + 0x29002929,0xcd00cdcd,0x6c006c6c,0x13001313, + 0x64006464,0x9b009b9b,0x63006363,0x9d009d9d, + 0xc000c0c0,0x4b004b4b,0xb700b7b7,0xa500a5a5, + 0x89008989,0x5f005f5f,0xb100b1b1,0x17001717, + 0xf400f4f4,0xbc00bcbc,0xd300d3d3,0x46004646, + 0xcf00cfcf,0x37003737,0x5e005e5e,0x47004747, + 0x94009494,0xfa00fafa,0xfc00fcfc,0x5b005b5b, + 0x97009797,0xfe00fefe,0x5a005a5a,0xac00acac, + 0x3c003c3c,0x4c004c4c,0x03000303,0x35003535, + 0xf300f3f3,0x23002323,0xb800b8b8,0x5d005d5d, + 0x6a006a6a,0x92009292,0xd500d5d5,0x21002121, + 0x44004444,0x51005151,0xc600c6c6,0x7d007d7d, + 0x39003939,0x83008383,0xdc00dcdc,0xaa00aaaa, + 0x7c007c7c,0x77007777,0x56005656,0x05000505, + 0x1b001b1b,0xa400a4a4,0x15001515,0x34003434, + 0x1e001e1e,0x1c001c1c,0xf800f8f8,0x52005252, + 0x20002020,0x14001414,0xe900e9e9,0xbd00bdbd, + 0xdd00dddd,0xe400e4e4,0xa100a1a1,0xe000e0e0, + 0x8a008a8a,0xf100f1f1,0xd600d6d6,0x7a007a7a, + 0xbb00bbbb,0xe300e3e3,0x40004040,0x4f004f4f, +}; + +static const u32 camellia_sp4404[256] = { + 0x70700070,0x2c2c002c,0xb3b300b3,0xc0c000c0, + 0xe4e400e4,0x57570057,0xeaea00ea,0xaeae00ae, + 0x23230023,0x6b6b006b,0x45450045,0xa5a500a5, + 0xeded00ed,0x4f4f004f,0x1d1d001d,0x92920092, + 0x86860086,0xafaf00af,0x7c7c007c,0x1f1f001f, + 0x3e3e003e,0xdcdc00dc,0x5e5e005e,0x0b0b000b, + 0xa6a600a6,0x39390039,0xd5d500d5,0x5d5d005d, + 0xd9d900d9,0x5a5a005a,0x51510051,0x6c6c006c, + 0x8b8b008b,0x9a9a009a,0xfbfb00fb,0xb0b000b0, + 0x74740074,0x2b2b002b,0xf0f000f0,0x84840084, + 0xdfdf00df,0xcbcb00cb,0x34340034,0x76760076, + 0x6d6d006d,0xa9a900a9,0xd1d100d1,0x04040004, + 
0x14140014,0x3a3a003a,0xdede00de,0x11110011, + 0x32320032,0x9c9c009c,0x53530053,0xf2f200f2, + 0xfefe00fe,0xcfcf00cf,0xc3c300c3,0x7a7a007a, + 0x24240024,0xe8e800e8,0x60600060,0x69690069, + 0xaaaa00aa,0xa0a000a0,0xa1a100a1,0x62620062, + 0x54540054,0x1e1e001e,0xe0e000e0,0x64640064, + 0x10100010,0x00000000,0xa3a300a3,0x75750075, + 0x8a8a008a,0xe6e600e6,0x09090009,0xdddd00dd, + 0x87870087,0x83830083,0xcdcd00cd,0x90900090, + 0x73730073,0xf6f600f6,0x9d9d009d,0xbfbf00bf, + 0x52520052,0xd8d800d8,0xc8c800c8,0xc6c600c6, + 0x81810081,0x6f6f006f,0x13130013,0x63630063, + 0xe9e900e9,0xa7a700a7,0x9f9f009f,0xbcbc00bc, + 0x29290029,0xf9f900f9,0x2f2f002f,0xb4b400b4, + 0x78780078,0x06060006,0xe7e700e7,0x71710071, + 0xd4d400d4,0xabab00ab,0x88880088,0x8d8d008d, + 0x72720072,0xb9b900b9,0xf8f800f8,0xacac00ac, + 0x36360036,0x2a2a002a,0x3c3c003c,0xf1f100f1, + 0x40400040,0xd3d300d3,0xbbbb00bb,0x43430043, + 0x15150015,0xadad00ad,0x77770077,0x80800080, + 0x82820082,0xecec00ec,0x27270027,0xe5e500e5, + 0x85850085,0x35350035,0x0c0c000c,0x41410041, + 0xefef00ef,0x93930093,0x19190019,0x21210021, + 0x0e0e000e,0x4e4e004e,0x65650065,0xbdbd00bd, + 0xb8b800b8,0x8f8f008f,0xebeb00eb,0xcece00ce, + 0x30300030,0x5f5f005f,0xc5c500c5,0x1a1a001a, + 0xe1e100e1,0xcaca00ca,0x47470047,0x3d3d003d, + 0x01010001,0xd6d600d6,0x56560056,0x4d4d004d, + 0x0d0d000d,0x66660066,0xcccc00cc,0x2d2d002d, + 0x12120012,0x20200020,0xb1b100b1,0x99990099, + 0x4c4c004c,0xc2c200c2,0x7e7e007e,0x05050005, + 0xb7b700b7,0x31310031,0x17170017,0xd7d700d7, + 0x58580058,0x61610061,0x1b1b001b,0x1c1c001c, + 0x0f0f000f,0x16160016,0x18180018,0x22220022, + 0x44440044,0xb2b200b2,0xb5b500b5,0x91910091, + 0x08080008,0xa8a800a8,0xfcfc00fc,0x50500050, + 0xd0d000d0,0x7d7d007d,0x89890089,0x97970097, + 0x5b5b005b,0x95950095,0xffff00ff,0xd2d200d2, + 0xc4c400c4,0x48480048,0xf7f700f7,0xdbdb00db, + 0x03030003,0xdada00da,0x3f3f003f,0x94940094, + 0x5c5c005c,0x02020002,0x4a4a004a,0x33330033, + 0x67670067,0xf3f300f3,0x7f7f007f,0xe2e200e2, + 0x9b9b009b,0x26260026,0x37370037,0x3b3b003b, + 0x96960096,0x4b4b004b,0xbebe00be,0x2e2e002e, + 0x79790079,0x8c8c008c,0x6e6e006e,0x8e8e008e, + 0xf5f500f5,0xb6b600b6,0xfdfd00fd,0x59590059, + 0x98980098,0x6a6a006a,0x46460046,0xbaba00ba, + 0x25250025,0x42420042,0xa2a200a2,0xfafa00fa, + 0x07070007,0x55550055,0xeeee00ee,0x0a0a000a, + 0x49490049,0x68680068,0x38380038,0xa4a400a4, + 0x28280028,0x7b7b007b,0xc9c900c9,0xc1c100c1, + 0xe3e300e3,0xf4f400f4,0xc7c700c7,0x9e9e009e, +}; + + + +static void camellia_setup128(const unsigned char *key, u32 *subkey) +{ + u32 kll, klr, krl, krr; + u32 il, ir, t0, t1, w0, w1; + u32 kw4l, kw4r, dw, tl, tr; + u32 subL[26]; + u32 subR[26]; + + /** + * k == kll || klr || krl || krr (|| is concatination) + */ + kll = GETU32(key ); + klr = GETU32(key + 4); + krl = GETU32(key + 8); + krr = GETU32(key + 12); + /** + * generate KL dependent subkeys + */ + /* kw1 */ + SUBL(0) = kll; SUBR(0) = klr; + /* kw2 */ + SUBL(1) = krl; SUBR(1) = krr; + /* rotation left shift 15bit */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k3 */ + SUBL(4) = kll; SUBR(4) = klr; + /* k4 */ + SUBL(5) = krl; SUBR(5) = krr; + /* rotation left shift 15+30bit */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 30); + /* k7 */ + SUBL(10) = kll; SUBR(10) = klr; + /* k8 */ + SUBL(11) = krl; SUBR(11) = krr; + /* rotation left shift 15+30+15bit */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k10 */ + SUBL(13) = krl; SUBR(13) = krr; + /* rotation left shift 15+30+15+17 bit */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* kl3 */ + SUBL(16) = kll; 
SUBR(16) = klr; + /* kl4 */ + SUBL(17) = krl; SUBR(17) = krr; + /* rotation left shift 15+30+15+17+17 bit */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* k13 */ + SUBL(18) = kll; SUBR(18) = klr; + /* k14 */ + SUBL(19) = krl; SUBR(19) = krr; + /* rotation left shift 15+30+15+17+17+17 bit */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* k17 */ + SUBL(22) = kll; SUBR(22) = klr; + /* k18 */ + SUBL(23) = krl; SUBR(23) = krr; + + /* generate KA */ + kll = SUBL(0); klr = SUBR(0); + krl = SUBL(1); krr = SUBR(1); + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, + w0, w1, il, ir, t0, t1); + krl ^= w0; krr ^= w1; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA2L, CAMELLIA_SIGMA2R, + kll, klr, il, ir, t0, t1); + /* current status == (kll, klr, w0, w1) */ + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA3L, CAMELLIA_SIGMA3R, + krl, krr, il, ir, t0, t1); + krl ^= w0; krr ^= w1; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA4L, CAMELLIA_SIGMA4R, + w0, w1, il, ir, t0, t1); + kll ^= w0; klr ^= w1; + + /* generate KA dependent subkeys */ + /* k1, k2 */ + SUBL(2) = kll; SUBR(2) = klr; + SUBL(3) = krl; SUBR(3) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k5,k6 */ + SUBL(6) = kll; SUBR(6) = klr; + SUBL(7) = krl; SUBR(7) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* kl1, kl2 */ + SUBL(8) = kll; SUBR(8) = klr; + SUBL(9) = krl; SUBR(9) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k9 */ + SUBL(12) = kll; SUBR(12) = klr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k11, k12 */ + SUBL(14) = kll; SUBR(14) = klr; + SUBL(15) = krl; SUBR(15) = krr; + CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 34); + /* k15, k16 */ + SUBL(20) = kll; SUBR(20) = klr; + SUBL(21) = krl; SUBR(21) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* kw3, kw4 */ + SUBL(24) = kll; SUBR(24) = klr; + SUBL(25) = krl; SUBR(25) = krr; + + + /* absorb kw2 to other subkeys */ + /* round 2 */ + SUBL(3) ^= SUBL(1); SUBR(3) ^= SUBR(1); + /* round 4 */ + SUBL(5) ^= SUBL(1); SUBR(5) ^= SUBR(1); + /* round 6 */ + SUBL(7) ^= SUBL(1); SUBR(7) ^= SUBR(1); + SUBL(1) ^= SUBR(1) & ~SUBR(9); + dw = SUBL(1) & SUBL(9), + SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl2) */ + /* round 8 */ + SUBL(11) ^= SUBL(1); SUBR(11) ^= SUBR(1); + /* round 10 */ + SUBL(13) ^= SUBL(1); SUBR(13) ^= SUBR(1); + /* round 12 */ + SUBL(15) ^= SUBL(1); SUBR(15) ^= SUBR(1); + SUBL(1) ^= SUBR(1) & ~SUBR(17); + dw = SUBL(1) & SUBL(17), + SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl4) */ + /* round 14 */ + SUBL(19) ^= SUBL(1); SUBR(19) ^= SUBR(1); + /* round 16 */ + SUBL(21) ^= SUBL(1); SUBR(21) ^= SUBR(1); + /* round 18 */ + SUBL(23) ^= SUBL(1); SUBR(23) ^= SUBR(1); + /* kw3 */ + SUBL(24) ^= SUBL(1); SUBR(24) ^= SUBR(1); + + /* absorb kw4 to other subkeys */ + kw4l = SUBL(25); kw4r = SUBR(25); + /* round 17 */ + SUBL(22) ^= kw4l; SUBR(22) ^= kw4r; + /* round 15 */ + SUBL(20) ^= kw4l; SUBR(20) ^= kw4r; + /* round 13 */ + SUBL(18) ^= kw4l; SUBR(18) ^= kw4r; + kw4l ^= kw4r & ~SUBR(16); + dw = kw4l & SUBL(16), + kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl3) */ + /* round 11 */ + SUBL(14) ^= kw4l; SUBR(14) ^= kw4r; + /* round 9 */ + SUBL(12) ^= kw4l; SUBR(12) ^= kw4r; + /* round 7 */ + SUBL(10) ^= kw4l; SUBR(10) ^= kw4r; + kw4l ^= kw4r & ~SUBR(8); + dw = kw4l & SUBL(8), + kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl1) */ + /* round 5 */ + SUBL(6) ^= kw4l; SUBR(6) ^= kw4r; + /* round 3 */ + SUBL(4) ^= kw4l; SUBR(4) ^= kw4r; + /* round 1 */ + SUBL(2) ^= kw4l; SUBR(2) ^= kw4r; + /* kw1 */ + SUBL(0) ^= 
kw4l; SUBR(0) ^= kw4r; + + + /* key XOR is end of F-function */ + CAMELLIA_SUBKEY_L(0) = SUBL(0) ^ SUBL(2);/* kw1 */ + CAMELLIA_SUBKEY_R(0) = SUBR(0) ^ SUBR(2); + CAMELLIA_SUBKEY_L(2) = SUBL(3); /* round 1 */ + CAMELLIA_SUBKEY_R(2) = SUBR(3); + CAMELLIA_SUBKEY_L(3) = SUBL(2) ^ SUBL(4); /* round 2 */ + CAMELLIA_SUBKEY_R(3) = SUBR(2) ^ SUBR(4); + CAMELLIA_SUBKEY_L(4) = SUBL(3) ^ SUBL(5); /* round 3 */ + CAMELLIA_SUBKEY_R(4) = SUBR(3) ^ SUBR(5); + CAMELLIA_SUBKEY_L(5) = SUBL(4) ^ SUBL(6); /* round 4 */ + CAMELLIA_SUBKEY_R(5) = SUBR(4) ^ SUBR(6); + CAMELLIA_SUBKEY_L(6) = SUBL(5) ^ SUBL(7); /* round 5 */ + CAMELLIA_SUBKEY_R(6) = SUBR(5) ^ SUBR(7); + tl = SUBL(10) ^ (SUBR(10) & ~SUBR(8)); + dw = tl & SUBL(8), /* FL(kl1) */ + tr = SUBR(10) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(7) = SUBL(6) ^ tl; /* round 6 */ + CAMELLIA_SUBKEY_R(7) = SUBR(6) ^ tr; + CAMELLIA_SUBKEY_L(8) = SUBL(8); /* FL(kl1) */ + CAMELLIA_SUBKEY_R(8) = SUBR(8); + CAMELLIA_SUBKEY_L(9) = SUBL(9); /* FLinv(kl2) */ + CAMELLIA_SUBKEY_R(9) = SUBR(9); + tl = SUBL(7) ^ (SUBR(7) & ~SUBR(9)); + dw = tl & SUBL(9), /* FLinv(kl2) */ + tr = SUBR(7) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(10) = tl ^ SUBL(11); /* round 7 */ + CAMELLIA_SUBKEY_R(10) = tr ^ SUBR(11); + CAMELLIA_SUBKEY_L(11) = SUBL(10) ^ SUBL(12); /* round 8 */ + CAMELLIA_SUBKEY_R(11) = SUBR(10) ^ SUBR(12); + CAMELLIA_SUBKEY_L(12) = SUBL(11) ^ SUBL(13); /* round 9 */ + CAMELLIA_SUBKEY_R(12) = SUBR(11) ^ SUBR(13); + CAMELLIA_SUBKEY_L(13) = SUBL(12) ^ SUBL(14); /* round 10 */ + CAMELLIA_SUBKEY_R(13) = SUBR(12) ^ SUBR(14); + CAMELLIA_SUBKEY_L(14) = SUBL(13) ^ SUBL(15); /* round 11 */ + CAMELLIA_SUBKEY_R(14) = SUBR(13) ^ SUBR(15); + tl = SUBL(18) ^ (SUBR(18) & ~SUBR(16)); + dw = tl & SUBL(16), /* FL(kl3) */ + tr = SUBR(18) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(15) = SUBL(14) ^ tl; /* round 12 */ + CAMELLIA_SUBKEY_R(15) = SUBR(14) ^ tr; + CAMELLIA_SUBKEY_L(16) = SUBL(16); /* FL(kl3) */ + CAMELLIA_SUBKEY_R(16) = SUBR(16); + CAMELLIA_SUBKEY_L(17) = SUBL(17); /* FLinv(kl4) */ + CAMELLIA_SUBKEY_R(17) = SUBR(17); + tl = SUBL(15) ^ (SUBR(15) & ~SUBR(17)); + dw = tl & SUBL(17), /* FLinv(kl4) */ + tr = SUBR(15) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(18) = tl ^ SUBL(19); /* round 13 */ + CAMELLIA_SUBKEY_R(18) = tr ^ SUBR(19); + CAMELLIA_SUBKEY_L(19) = SUBL(18) ^ SUBL(20); /* round 14 */ + CAMELLIA_SUBKEY_R(19) = SUBR(18) ^ SUBR(20); + CAMELLIA_SUBKEY_L(20) = SUBL(19) ^ SUBL(21); /* round 15 */ + CAMELLIA_SUBKEY_R(20) = SUBR(19) ^ SUBR(21); + CAMELLIA_SUBKEY_L(21) = SUBL(20) ^ SUBL(22); /* round 16 */ + CAMELLIA_SUBKEY_R(21) = SUBR(20) ^ SUBR(22); + CAMELLIA_SUBKEY_L(22) = SUBL(21) ^ SUBL(23); /* round 17 */ + CAMELLIA_SUBKEY_R(22) = SUBR(21) ^ SUBR(23); + CAMELLIA_SUBKEY_L(23) = SUBL(22); /* round 18 */ + CAMELLIA_SUBKEY_R(23) = SUBR(22); + CAMELLIA_SUBKEY_L(24) = SUBL(24) ^ SUBL(23); /* kw3 */ + CAMELLIA_SUBKEY_R(24) = SUBR(24) ^ SUBR(23); + + /* apply the inverse of the last half of P-function */ + dw = CAMELLIA_SUBKEY_L(2) ^ CAMELLIA_SUBKEY_R(2), + dw = CAMELLIA_RL8(dw);/* round 1 */ + CAMELLIA_SUBKEY_R(2) = CAMELLIA_SUBKEY_L(2) ^ dw, + CAMELLIA_SUBKEY_L(2) = dw; + dw = CAMELLIA_SUBKEY_L(3) ^ CAMELLIA_SUBKEY_R(3), + dw = CAMELLIA_RL8(dw);/* round 2 */ + CAMELLIA_SUBKEY_R(3) = CAMELLIA_SUBKEY_L(3) ^ dw, + CAMELLIA_SUBKEY_L(3) = dw; + dw = CAMELLIA_SUBKEY_L(4) ^ CAMELLIA_SUBKEY_R(4), + dw = CAMELLIA_RL8(dw);/* round 3 */ + CAMELLIA_SUBKEY_R(4) = CAMELLIA_SUBKEY_L(4) ^ dw, + CAMELLIA_SUBKEY_L(4) = dw; + dw = CAMELLIA_SUBKEY_L(5) ^ CAMELLIA_SUBKEY_R(5), + dw = CAMELLIA_RL8(dw);/* round 4 */ + 
CAMELLIA_SUBKEY_R(5) = CAMELLIA_SUBKEY_L(5) ^ dw, + CAMELLIA_SUBKEY_L(5) = dw; + dw = CAMELLIA_SUBKEY_L(6) ^ CAMELLIA_SUBKEY_R(6), + dw = CAMELLIA_RL8(dw);/* round 5 */ + CAMELLIA_SUBKEY_R(6) = CAMELLIA_SUBKEY_L(6) ^ dw, + CAMELLIA_SUBKEY_L(6) = dw; + dw = CAMELLIA_SUBKEY_L(7) ^ CAMELLIA_SUBKEY_R(7), + dw = CAMELLIA_RL8(dw);/* round 6 */ + CAMELLIA_SUBKEY_R(7) = CAMELLIA_SUBKEY_L(7) ^ dw, + CAMELLIA_SUBKEY_L(7) = dw; + dw = CAMELLIA_SUBKEY_L(10) ^ CAMELLIA_SUBKEY_R(10), + dw = CAMELLIA_RL8(dw);/* round 7 */ + CAMELLIA_SUBKEY_R(10) = CAMELLIA_SUBKEY_L(10) ^ dw, + CAMELLIA_SUBKEY_L(10) = dw; + dw = CAMELLIA_SUBKEY_L(11) ^ CAMELLIA_SUBKEY_R(11), + dw = CAMELLIA_RL8(dw);/* round 8 */ + CAMELLIA_SUBKEY_R(11) = CAMELLIA_SUBKEY_L(11) ^ dw, + CAMELLIA_SUBKEY_L(11) = dw; + dw = CAMELLIA_SUBKEY_L(12) ^ CAMELLIA_SUBKEY_R(12), + dw = CAMELLIA_RL8(dw);/* round 9 */ + CAMELLIA_SUBKEY_R(12) = CAMELLIA_SUBKEY_L(12) ^ dw, + CAMELLIA_SUBKEY_L(12) = dw; + dw = CAMELLIA_SUBKEY_L(13) ^ CAMELLIA_SUBKEY_R(13), + dw = CAMELLIA_RL8(dw);/* round 10 */ + CAMELLIA_SUBKEY_R(13) = CAMELLIA_SUBKEY_L(13) ^ dw, + CAMELLIA_SUBKEY_L(13) = dw; + dw = CAMELLIA_SUBKEY_L(14) ^ CAMELLIA_SUBKEY_R(14), + dw = CAMELLIA_RL8(dw);/* round 11 */ + CAMELLIA_SUBKEY_R(14) = CAMELLIA_SUBKEY_L(14) ^ dw, + CAMELLIA_SUBKEY_L(14) = dw; + dw = CAMELLIA_SUBKEY_L(15) ^ CAMELLIA_SUBKEY_R(15), + dw = CAMELLIA_RL8(dw);/* round 12 */ + CAMELLIA_SUBKEY_R(15) = CAMELLIA_SUBKEY_L(15) ^ dw, + CAMELLIA_SUBKEY_L(15) = dw; + dw = CAMELLIA_SUBKEY_L(18) ^ CAMELLIA_SUBKEY_R(18), + dw = CAMELLIA_RL8(dw);/* round 13 */ + CAMELLIA_SUBKEY_R(18) = CAMELLIA_SUBKEY_L(18) ^ dw, + CAMELLIA_SUBKEY_L(18) = dw; + dw = CAMELLIA_SUBKEY_L(19) ^ CAMELLIA_SUBKEY_R(19), + dw = CAMELLIA_RL8(dw);/* round 14 */ + CAMELLIA_SUBKEY_R(19) = CAMELLIA_SUBKEY_L(19) ^ dw, + CAMELLIA_SUBKEY_L(19) = dw; + dw = CAMELLIA_SUBKEY_L(20) ^ CAMELLIA_SUBKEY_R(20), + dw = CAMELLIA_RL8(dw);/* round 15 */ + CAMELLIA_SUBKEY_R(20) = CAMELLIA_SUBKEY_L(20) ^ dw, + CAMELLIA_SUBKEY_L(20) = dw; + dw = CAMELLIA_SUBKEY_L(21) ^ CAMELLIA_SUBKEY_R(21), + dw = CAMELLIA_RL8(dw);/* round 16 */ + CAMELLIA_SUBKEY_R(21) = CAMELLIA_SUBKEY_L(21) ^ dw, + CAMELLIA_SUBKEY_L(21) = dw; + dw = CAMELLIA_SUBKEY_L(22) ^ CAMELLIA_SUBKEY_R(22), + dw = CAMELLIA_RL8(dw);/* round 17 */ + CAMELLIA_SUBKEY_R(22) = CAMELLIA_SUBKEY_L(22) ^ dw, + CAMELLIA_SUBKEY_L(22) = dw; + dw = CAMELLIA_SUBKEY_L(23) ^ CAMELLIA_SUBKEY_R(23), + dw = CAMELLIA_RL8(dw);/* round 18 */ + CAMELLIA_SUBKEY_R(23) = CAMELLIA_SUBKEY_L(23) ^ dw, + CAMELLIA_SUBKEY_L(23) = dw; + + return; +} + + +static void camellia_setup256(const unsigned char *key, u32 *subkey) +{ + u32 kll,klr,krl,krr; /* left half of key */ + u32 krll,krlr,krrl,krrr; /* right half of key */ + u32 il, ir, t0, t1, w0, w1; /* temporary variables */ + u32 kw4l, kw4r, dw, tl, tr; + u32 subL[34]; + u32 subR[34]; + + /** + * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr) + * (|| is concatination) + */ + + kll = GETU32(key ); + klr = GETU32(key + 4); + krl = GETU32(key + 8); + krr = GETU32(key + 12); + krll = GETU32(key + 16); + krlr = GETU32(key + 20); + krrl = GETU32(key + 24); + krrr = GETU32(key + 28); + + /* generate KL dependent subkeys */ + /* kw1 */ + SUBL(0) = kll; SUBR(0) = klr; + /* kw2 */ + SUBL(1) = krl; SUBR(1) = krr; + CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 45); + /* k9 */ + SUBL(12) = kll; SUBR(12) = klr; + /* k10 */ + SUBL(13) = krl; SUBR(13) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* kl3 */ + SUBL(16) = kll; SUBR(16) = klr; + /* kl4 */ + 
SUBL(17) = krl; SUBR(17) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); + /* k17 */ + SUBL(22) = kll; SUBR(22) = klr; + /* k18 */ + SUBL(23) = krl; SUBR(23) = krr; + CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 34); + /* k23 */ + SUBL(30) = kll; SUBR(30) = klr; + /* k24 */ + SUBL(31) = krl; SUBR(31) = krr; + + /* generate KR dependent subkeys */ + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); + /* k3 */ + SUBL(4) = krll; SUBR(4) = krlr; + /* k4 */ + SUBL(5) = krrl; SUBR(5) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); + /* kl1 */ + SUBL(8) = krll; SUBR(8) = krlr; + /* kl2 */ + SUBL(9) = krrl; SUBR(9) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + /* k13 */ + SUBL(18) = krll; SUBR(18) = krlr; + /* k14 */ + SUBL(19) = krrl; SUBR(19) = krrr; + CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); + /* k19 */ + SUBL(26) = krll; SUBR(26) = krlr; + /* k20 */ + SUBL(27) = krrl; SUBR(27) = krrr; + CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); + + /* generate KA */ + kll = SUBL(0) ^ krll; klr = SUBR(0) ^ krlr; + krl = SUBL(1) ^ krrl; krr = SUBR(1) ^ krrr; + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, + w0, w1, il, ir, t0, t1); + krl ^= w0; krr ^= w1; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA2L, CAMELLIA_SIGMA2R, + kll, klr, il, ir, t0, t1); + kll ^= krll; klr ^= krlr; + CAMELLIA_F(kll, klr, + CAMELLIA_SIGMA3L, CAMELLIA_SIGMA3R, + krl, krr, il, ir, t0, t1); + krl ^= w0 ^ krrl; krr ^= w1 ^ krrr; + CAMELLIA_F(krl, krr, + CAMELLIA_SIGMA4L, CAMELLIA_SIGMA4R, + w0, w1, il, ir, t0, t1); + kll ^= w0; klr ^= w1; + + /* generate KB */ + krll ^= kll; krlr ^= klr; + krrl ^= krl; krrr ^= krr; + CAMELLIA_F(krll, krlr, + CAMELLIA_SIGMA5L, CAMELLIA_SIGMA5R, + w0, w1, il, ir, t0, t1); + krrl ^= w0; krrr ^= w1; + CAMELLIA_F(krrl, krrr, + CAMELLIA_SIGMA6L, CAMELLIA_SIGMA6R, + w0, w1, il, ir, t0, t1); + krll ^= w0; krlr ^= w1; + + /* generate KA dependent subkeys */ + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); + /* k5 */ + SUBL(6) = kll; SUBR(6) = klr; + /* k6 */ + SUBL(7) = krl; SUBR(7) = krr; + CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 30); + /* k11 */ + SUBL(14) = kll; SUBR(14) = klr; + /* k12 */ + SUBL(15) = krl; SUBR(15) = krr; + /* rotation left shift 32bit */ + /* kl5 */ + SUBL(24) = klr; SUBR(24) = krl; + /* kl6 */ + SUBL(25) = krr; SUBR(25) = kll; + /* rotation left shift 49 from k11,k12 -> k21,k22 */ + CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 49); + /* k21 */ + SUBL(28) = kll; SUBR(28) = klr; + /* k22 */ + SUBL(29) = krl; SUBR(29) = krr; + + /* generate KB dependent subkeys */ + /* k1 */ + SUBL(2) = krll; SUBR(2) = krlr; + /* k2 */ + SUBL(3) = krrl; SUBR(3) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + /* k7 */ + SUBL(10) = krll; SUBR(10) = krlr; + /* k8 */ + SUBL(11) = krrl; SUBR(11) = krrr; + CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); + /* k15 */ + SUBL(20) = krll; SUBR(20) = krlr; + /* k16 */ + SUBL(21) = krrl; SUBR(21) = krrr; + CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 51); + /* kw3 */ + SUBL(32) = krll; SUBR(32) = krlr; + /* kw4 */ + SUBL(33) = krrl; SUBR(33) = krrr; + + /* absorb kw2 to other subkeys */ + /* round 2 */ + SUBL(3) ^= SUBL(1); SUBR(3) ^= SUBR(1); + /* round 4 */ + SUBL(5) ^= SUBL(1); SUBR(5) ^= SUBR(1); + /* round 6 */ + SUBL(7) ^= SUBL(1); SUBR(7) ^= SUBR(1); + SUBL(1) ^= SUBR(1) & ~SUBR(9); + dw = SUBL(1) & SUBL(9), + SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl2) */ + /* round 8 */ + SUBL(11) ^= SUBL(1); SUBR(11) ^= SUBR(1); + /* round 10 */ + SUBL(13) ^= 
SUBL(1); SUBR(13) ^= SUBR(1); + /* round 12 */ + SUBL(15) ^= SUBL(1); SUBR(15) ^= SUBR(1); + SUBL(1) ^= SUBR(1) & ~SUBR(17); + dw = SUBL(1) & SUBL(17), + SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl4) */ + /* round 14 */ + SUBL(19) ^= SUBL(1); SUBR(19) ^= SUBR(1); + /* round 16 */ + SUBL(21) ^= SUBL(1); SUBR(21) ^= SUBR(1); + /* round 18 */ + SUBL(23) ^= SUBL(1); SUBR(23) ^= SUBR(1); + SUBL(1) ^= SUBR(1) & ~SUBR(25); + dw = SUBL(1) & SUBL(25), + SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl6) */ + /* round 20 */ + SUBL(27) ^= SUBL(1); SUBR(27) ^= SUBR(1); + /* round 22 */ + SUBL(29) ^= SUBL(1); SUBR(29) ^= SUBR(1); + /* round 24 */ + SUBL(31) ^= SUBL(1); SUBR(31) ^= SUBR(1); + /* kw3 */ + SUBL(32) ^= SUBL(1); SUBR(32) ^= SUBR(1); + + + /* absorb kw4 to other subkeys */ + kw4l = SUBL(33); kw4r = SUBR(33); + /* round 23 */ + SUBL(30) ^= kw4l; SUBR(30) ^= kw4r; + /* round 21 */ + SUBL(28) ^= kw4l; SUBR(28) ^= kw4r; + /* round 19 */ + SUBL(26) ^= kw4l; SUBR(26) ^= kw4r; + kw4l ^= kw4r & ~SUBR(24); + dw = kw4l & SUBL(24), + kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl5) */ + /* round 17 */ + SUBL(22) ^= kw4l; SUBR(22) ^= kw4r; + /* round 15 */ + SUBL(20) ^= kw4l; SUBR(20) ^= kw4r; + /* round 13 */ + SUBL(18) ^= kw4l; SUBR(18) ^= kw4r; + kw4l ^= kw4r & ~SUBR(16); + dw = kw4l & SUBL(16), + kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl3) */ + /* round 11 */ + SUBL(14) ^= kw4l; SUBR(14) ^= kw4r; + /* round 9 */ + SUBL(12) ^= kw4l; SUBR(12) ^= kw4r; + /* round 7 */ + SUBL(10) ^= kw4l; SUBR(10) ^= kw4r; + kw4l ^= kw4r & ~SUBR(8); + dw = kw4l & SUBL(8), + kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl1) */ + /* round 5 */ + SUBL(6) ^= kw4l; SUBR(6) ^= kw4r; + /* round 3 */ + SUBL(4) ^= kw4l; SUBR(4) ^= kw4r; + /* round 1 */ + SUBL(2) ^= kw4l; SUBR(2) ^= kw4r; + /* kw1 */ + SUBL(0) ^= kw4l; SUBR(0) ^= kw4r; + + /* key XOR is end of F-function */ + CAMELLIA_SUBKEY_L(0) = SUBL(0) ^ SUBL(2);/* kw1 */ + CAMELLIA_SUBKEY_R(0) = SUBR(0) ^ SUBR(2); + CAMELLIA_SUBKEY_L(2) = SUBL(3); /* round 1 */ + CAMELLIA_SUBKEY_R(2) = SUBR(3); + CAMELLIA_SUBKEY_L(3) = SUBL(2) ^ SUBL(4); /* round 2 */ + CAMELLIA_SUBKEY_R(3) = SUBR(2) ^ SUBR(4); + CAMELLIA_SUBKEY_L(4) = SUBL(3) ^ SUBL(5); /* round 3 */ + CAMELLIA_SUBKEY_R(4) = SUBR(3) ^ SUBR(5); + CAMELLIA_SUBKEY_L(5) = SUBL(4) ^ SUBL(6); /* round 4 */ + CAMELLIA_SUBKEY_R(5) = SUBR(4) ^ SUBR(6); + CAMELLIA_SUBKEY_L(6) = SUBL(5) ^ SUBL(7); /* round 5 */ + CAMELLIA_SUBKEY_R(6) = SUBR(5) ^ SUBR(7); + tl = SUBL(10) ^ (SUBR(10) & ~SUBR(8)); + dw = tl & SUBL(8), /* FL(kl1) */ + tr = SUBR(10) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(7) = SUBL(6) ^ tl; /* round 6 */ + CAMELLIA_SUBKEY_R(7) = SUBR(6) ^ tr; + CAMELLIA_SUBKEY_L(8) = SUBL(8); /* FL(kl1) */ + CAMELLIA_SUBKEY_R(8) = SUBR(8); + CAMELLIA_SUBKEY_L(9) = SUBL(9); /* FLinv(kl2) */ + CAMELLIA_SUBKEY_R(9) = SUBR(9); + tl = SUBL(7) ^ (SUBR(7) & ~SUBR(9)); + dw = tl & SUBL(9), /* FLinv(kl2) */ + tr = SUBR(7) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(10) = tl ^ SUBL(11); /* round 7 */ + CAMELLIA_SUBKEY_R(10) = tr ^ SUBR(11); + CAMELLIA_SUBKEY_L(11) = SUBL(10) ^ SUBL(12); /* round 8 */ + CAMELLIA_SUBKEY_R(11) = SUBR(10) ^ SUBR(12); + CAMELLIA_SUBKEY_L(12) = SUBL(11) ^ SUBL(13); /* round 9 */ + CAMELLIA_SUBKEY_R(12) = SUBR(11) ^ SUBR(13); + CAMELLIA_SUBKEY_L(13) = SUBL(12) ^ SUBL(14); /* round 10 */ + CAMELLIA_SUBKEY_R(13) = SUBR(12) ^ SUBR(14); + CAMELLIA_SUBKEY_L(14) = SUBL(13) ^ SUBL(15); /* round 11 */ + CAMELLIA_SUBKEY_R(14) = SUBR(13) ^ SUBR(15); + tl = SUBL(18) ^ (SUBR(18) & ~SUBR(16)); + dw = tl & SUBL(16), /* 
FL(kl3) */ + tr = SUBR(18) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(15) = SUBL(14) ^ tl; /* round 12 */ + CAMELLIA_SUBKEY_R(15) = SUBR(14) ^ tr; + CAMELLIA_SUBKEY_L(16) = SUBL(16); /* FL(kl3) */ + CAMELLIA_SUBKEY_R(16) = SUBR(16); + CAMELLIA_SUBKEY_L(17) = SUBL(17); /* FLinv(kl4) */ + CAMELLIA_SUBKEY_R(17) = SUBR(17); + tl = SUBL(15) ^ (SUBR(15) & ~SUBR(17)); + dw = tl & SUBL(17), /* FLinv(kl4) */ + tr = SUBR(15) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(18) = tl ^ SUBL(19); /* round 13 */ + CAMELLIA_SUBKEY_R(18) = tr ^ SUBR(19); + CAMELLIA_SUBKEY_L(19) = SUBL(18) ^ SUBL(20); /* round 14 */ + CAMELLIA_SUBKEY_R(19) = SUBR(18) ^ SUBR(20); + CAMELLIA_SUBKEY_L(20) = SUBL(19) ^ SUBL(21); /* round 15 */ + CAMELLIA_SUBKEY_R(20) = SUBR(19) ^ SUBR(21); + CAMELLIA_SUBKEY_L(21) = SUBL(20) ^ SUBL(22); /* round 16 */ + CAMELLIA_SUBKEY_R(21) = SUBR(20) ^ SUBR(22); + CAMELLIA_SUBKEY_L(22) = SUBL(21) ^ SUBL(23); /* round 17 */ + CAMELLIA_SUBKEY_R(22) = SUBR(21) ^ SUBR(23); + tl = SUBL(26) ^ (SUBR(26) + & ~SUBR(24)); + dw = tl & SUBL(24), /* FL(kl5) */ + tr = SUBR(26) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(23) = SUBL(22) ^ tl; /* round 18 */ + CAMELLIA_SUBKEY_R(23) = SUBR(22) ^ tr; + CAMELLIA_SUBKEY_L(24) = SUBL(24); /* FL(kl5) */ + CAMELLIA_SUBKEY_R(24) = SUBR(24); + CAMELLIA_SUBKEY_L(25) = SUBL(25); /* FLinv(kl6) */ + CAMELLIA_SUBKEY_R(25) = SUBR(25); + tl = SUBL(23) ^ (SUBR(23) & + ~SUBR(25)); + dw = tl & SUBL(25), /* FLinv(kl6) */ + tr = SUBR(23) ^ CAMELLIA_RL1(dw); + CAMELLIA_SUBKEY_L(26) = tl ^ SUBL(27); /* round 19 */ + CAMELLIA_SUBKEY_R(26) = tr ^ SUBR(27); + CAMELLIA_SUBKEY_L(27) = SUBL(26) ^ SUBL(28); /* round 20 */ + CAMELLIA_SUBKEY_R(27) = SUBR(26) ^ SUBR(28); + CAMELLIA_SUBKEY_L(28) = SUBL(27) ^ SUBL(29); /* round 21 */ + CAMELLIA_SUBKEY_R(28) = SUBR(27) ^ SUBR(29); + CAMELLIA_SUBKEY_L(29) = SUBL(28) ^ SUBL(30); /* round 22 */ + CAMELLIA_SUBKEY_R(29) = SUBR(28) ^ SUBR(30); + CAMELLIA_SUBKEY_L(30) = SUBL(29) ^ SUBL(31); /* round 23 */ + CAMELLIA_SUBKEY_R(30) = SUBR(29) ^ SUBR(31); + CAMELLIA_SUBKEY_L(31) = SUBL(30); /* round 24 */ + CAMELLIA_SUBKEY_R(31) = SUBR(30); + CAMELLIA_SUBKEY_L(32) = SUBL(32) ^ SUBL(31); /* kw3 */ + CAMELLIA_SUBKEY_R(32) = SUBR(32) ^ SUBR(31); + + /* apply the inverse of the last half of P-function */ + dw = CAMELLIA_SUBKEY_L(2) ^ CAMELLIA_SUBKEY_R(2), + dw = CAMELLIA_RL8(dw);/* round 1 */ + CAMELLIA_SUBKEY_R(2) = CAMELLIA_SUBKEY_L(2) ^ dw, + CAMELLIA_SUBKEY_L(2) = dw; + dw = CAMELLIA_SUBKEY_L(3) ^ CAMELLIA_SUBKEY_R(3), + dw = CAMELLIA_RL8(dw);/* round 2 */ + CAMELLIA_SUBKEY_R(3) = CAMELLIA_SUBKEY_L(3) ^ dw, + CAMELLIA_SUBKEY_L(3) = dw; + dw = CAMELLIA_SUBKEY_L(4) ^ CAMELLIA_SUBKEY_R(4), + dw = CAMELLIA_RL8(dw);/* round 3 */ + CAMELLIA_SUBKEY_R(4) = CAMELLIA_SUBKEY_L(4) ^ dw, + CAMELLIA_SUBKEY_L(4) = dw; + dw = CAMELLIA_SUBKEY_L(5) ^ CAMELLIA_SUBKEY_R(5), + dw = CAMELLIA_RL8(dw);/* round 4 */ + CAMELLIA_SUBKEY_R(5) = CAMELLIA_SUBKEY_L(5) ^ dw, + CAMELLIA_SUBKEY_L(5) = dw; + dw = CAMELLIA_SUBKEY_L(6) ^ CAMELLIA_SUBKEY_R(6), + dw = CAMELLIA_RL8(dw);/* round 5 */ + CAMELLIA_SUBKEY_R(6) = CAMELLIA_SUBKEY_L(6) ^ dw, + CAMELLIA_SUBKEY_L(6) = dw; + dw = CAMELLIA_SUBKEY_L(7) ^ CAMELLIA_SUBKEY_R(7), + dw = CAMELLIA_RL8(dw);/* round 6 */ + CAMELLIA_SUBKEY_R(7) = CAMELLIA_SUBKEY_L(7) ^ dw, + CAMELLIA_SUBKEY_L(7) = dw; + dw = CAMELLIA_SUBKEY_L(10) ^ CAMELLIA_SUBKEY_R(10), + dw = CAMELLIA_RL8(dw);/* round 7 */ + CAMELLIA_SUBKEY_R(10) = CAMELLIA_SUBKEY_L(10) ^ dw, + CAMELLIA_SUBKEY_L(10) = dw; + dw = CAMELLIA_SUBKEY_L(11) ^ CAMELLIA_SUBKEY_R(11), + dw = CAMELLIA_RL8(dw);/* round 8 */ + 
CAMELLIA_SUBKEY_R(11) = CAMELLIA_SUBKEY_L(11) ^ dw, + CAMELLIA_SUBKEY_L(11) = dw; + dw = CAMELLIA_SUBKEY_L(12) ^ CAMELLIA_SUBKEY_R(12), + dw = CAMELLIA_RL8(dw);/* round 9 */ + CAMELLIA_SUBKEY_R(12) = CAMELLIA_SUBKEY_L(12) ^ dw, + CAMELLIA_SUBKEY_L(12) = dw; + dw = CAMELLIA_SUBKEY_L(13) ^ CAMELLIA_SUBKEY_R(13), + dw = CAMELLIA_RL8(dw);/* round 10 */ + CAMELLIA_SUBKEY_R(13) = CAMELLIA_SUBKEY_L(13) ^ dw, + CAMELLIA_SUBKEY_L(13) = dw; + dw = CAMELLIA_SUBKEY_L(14) ^ CAMELLIA_SUBKEY_R(14), + dw = CAMELLIA_RL8(dw);/* round 11 */ + CAMELLIA_SUBKEY_R(14) = CAMELLIA_SUBKEY_L(14) ^ dw, + CAMELLIA_SUBKEY_L(14) = dw; + dw = CAMELLIA_SUBKEY_L(15) ^ CAMELLIA_SUBKEY_R(15), + dw = CAMELLIA_RL8(dw);/* round 12 */ + CAMELLIA_SUBKEY_R(15) = CAMELLIA_SUBKEY_L(15) ^ dw, + CAMELLIA_SUBKEY_L(15) = dw; + dw = CAMELLIA_SUBKEY_L(18) ^ CAMELLIA_SUBKEY_R(18), + dw = CAMELLIA_RL8(dw);/* round 13 */ + CAMELLIA_SUBKEY_R(18) = CAMELLIA_SUBKEY_L(18) ^ dw, + CAMELLIA_SUBKEY_L(18) = dw; + dw = CAMELLIA_SUBKEY_L(19) ^ CAMELLIA_SUBKEY_R(19), + dw = CAMELLIA_RL8(dw);/* round 14 */ + CAMELLIA_SUBKEY_R(19) = CAMELLIA_SUBKEY_L(19) ^ dw, + CAMELLIA_SUBKEY_L(19) = dw; + dw = CAMELLIA_SUBKEY_L(20) ^ CAMELLIA_SUBKEY_R(20), + dw = CAMELLIA_RL8(dw);/* round 15 */ + CAMELLIA_SUBKEY_R(20) = CAMELLIA_SUBKEY_L(20) ^ dw, + CAMELLIA_SUBKEY_L(20) = dw; + dw = CAMELLIA_SUBKEY_L(21) ^ CAMELLIA_SUBKEY_R(21), + dw = CAMELLIA_RL8(dw);/* round 16 */ + CAMELLIA_SUBKEY_R(21) = CAMELLIA_SUBKEY_L(21) ^ dw, + CAMELLIA_SUBKEY_L(21) = dw; + dw = CAMELLIA_SUBKEY_L(22) ^ CAMELLIA_SUBKEY_R(22), + dw = CAMELLIA_RL8(dw);/* round 17 */ + CAMELLIA_SUBKEY_R(22) = CAMELLIA_SUBKEY_L(22) ^ dw, + CAMELLIA_SUBKEY_L(22) = dw; + dw = CAMELLIA_SUBKEY_L(23) ^ CAMELLIA_SUBKEY_R(23), + dw = CAMELLIA_RL8(dw);/* round 18 */ + CAMELLIA_SUBKEY_R(23) = CAMELLIA_SUBKEY_L(23) ^ dw, + CAMELLIA_SUBKEY_L(23) = dw; + dw = CAMELLIA_SUBKEY_L(26) ^ CAMELLIA_SUBKEY_R(26), + dw = CAMELLIA_RL8(dw);/* round 19 */ + CAMELLIA_SUBKEY_R(26) = CAMELLIA_SUBKEY_L(26) ^ dw, + CAMELLIA_SUBKEY_L(26) = dw; + dw = CAMELLIA_SUBKEY_L(27) ^ CAMELLIA_SUBKEY_R(27), + dw = CAMELLIA_RL8(dw);/* round 20 */ + CAMELLIA_SUBKEY_R(27) = CAMELLIA_SUBKEY_L(27) ^ dw, + CAMELLIA_SUBKEY_L(27) = dw; + dw = CAMELLIA_SUBKEY_L(28) ^ CAMELLIA_SUBKEY_R(28), + dw = CAMELLIA_RL8(dw);/* round 21 */ + CAMELLIA_SUBKEY_R(28) = CAMELLIA_SUBKEY_L(28) ^ dw, + CAMELLIA_SUBKEY_L(28) = dw; + dw = CAMELLIA_SUBKEY_L(29) ^ CAMELLIA_SUBKEY_R(29), + dw = CAMELLIA_RL8(dw);/* round 22 */ + CAMELLIA_SUBKEY_R(29) = CAMELLIA_SUBKEY_L(29) ^ dw, + CAMELLIA_SUBKEY_L(29) = dw; + dw = CAMELLIA_SUBKEY_L(30) ^ CAMELLIA_SUBKEY_R(30), + dw = CAMELLIA_RL8(dw);/* round 23 */ + CAMELLIA_SUBKEY_R(30) = CAMELLIA_SUBKEY_L(30) ^ dw, + CAMELLIA_SUBKEY_L(30) = dw; + dw = CAMELLIA_SUBKEY_L(31) ^ CAMELLIA_SUBKEY_R(31), + dw = CAMELLIA_RL8(dw);/* round 24 */ + CAMELLIA_SUBKEY_R(31) = CAMELLIA_SUBKEY_L(31) ^ dw, + CAMELLIA_SUBKEY_L(31) = dw; + + return; +} + +static void camellia_setup192(const unsigned char *key, u32 *subkey) +{ + unsigned char kk[32]; + u32 krll, krlr, krrl,krrr; + + memcpy(kk, key, 24); + memcpy((unsigned char *)&krll, key+16,4); + memcpy((unsigned char *)&krlr, key+20,4); + krrl = ~krll; + krrr = ~krlr; + memcpy(kk+24, (unsigned char *)&krrl, 4); + memcpy(kk+28, (unsigned char *)&krrr, 4); + camellia_setup256(kk, subkey); + return; +} + + +/** + * Stuff related to camellia encryption/decryption + */ +static void camellia_encrypt128(const u32 *subkey, __be32 *io_text) +{ + u32 il,ir,t0,t1; /* temporary valiables */ + + u32 io[4]; + + io[0] = 
be32_to_cpu(io_text[0]); + io[1] = be32_to_cpu(io_text[1]); + io[2] = be32_to_cpu(io_text[2]); + io[3] = be32_to_cpu(io_text[3]); + + /* pre whitening but absorb kw2*/ + io[0] ^= CAMELLIA_SUBKEY_L(0); + io[1] ^= CAMELLIA_SUBKEY_R(0); + /* main iteration */ + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8), + CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16), + CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23), + io[0],io[1],il,ir,t0,t1); + + /* post whitening but kw4 */ + io[2] ^= CAMELLIA_SUBKEY_L(24); + io[3] ^= CAMELLIA_SUBKEY_R(24); + + t0 = io[0]; + t1 = io[1]; + io[0] = io[2]; + io[1] = io[3]; + io[2] = t0; + io[3] = t1; + + io_text[0] = cpu_to_be32(io[0]); + io_text[1] = cpu_to_be32(io[1]); + io_text[2] = cpu_to_be32(io[2]); + io_text[3] = cpu_to_be32(io[3]); + + return; +} + +static void camellia_decrypt128(const u32 *subkey, __be32 *io_text) +{ + u32 il,ir,t0,t1; /* temporary valiables */ + + u32 io[4]; + + io[0] = be32_to_cpu(io_text[0]); + io[1] = be32_to_cpu(io_text[1]); + io[2] = be32_to_cpu(io_text[2]); + io[3] = be32_to_cpu(io_text[3]); + + /* pre whitening but absorb kw2*/ + io[0] ^= CAMELLIA_SUBKEY_L(24); + io[1] ^= CAMELLIA_SUBKEY_R(24); + + /* main iteration */ + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21), + io[2],io[3],il,ir,t0,t1); 
+ CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17), + CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9), + CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2), + io[0],io[1],il,ir,t0,t1); + + /* post whitening but kw4 */ + io[2] ^= CAMELLIA_SUBKEY_L(0); + io[3] ^= CAMELLIA_SUBKEY_R(0); + + t0 = io[0]; + t1 = io[1]; + io[0] = io[2]; + io[1] = io[3]; + io[2] = t0; + io[3] = t1; + + io_text[0] = cpu_to_be32(io[0]); + io_text[1] = cpu_to_be32(io[1]); + io_text[2] = cpu_to_be32(io[2]); + io_text[3] = cpu_to_be32(io[3]); + + return; +} + + +/** + * stuff for 192 and 256bit encryption/decryption + */ +static void camellia_encrypt256(const u32 *subkey, __be32 *io_text) +{ + u32 il,ir,t0,t1; /* temporary valiables */ + + u32 io[4]; + + io[0] = be32_to_cpu(io_text[0]); + io[1] = be32_to_cpu(io_text[1]); + io[2] = be32_to_cpu(io_text[2]); + io[3] = be32_to_cpu(io_text[3]); + + /* pre whitening but absorb kw2*/ + io[0] ^= CAMELLIA_SUBKEY_L(0); + io[1] ^= CAMELLIA_SUBKEY_R(0); + + /* main iteration */ + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8), + CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + 
CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16), + CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(24),CAMELLIA_SUBKEY_R(24), + CAMELLIA_SUBKEY_L(25),CAMELLIA_SUBKEY_R(25), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(26),CAMELLIA_SUBKEY_R(26), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(27),CAMELLIA_SUBKEY_R(27), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(28),CAMELLIA_SUBKEY_R(28), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(29),CAMELLIA_SUBKEY_R(29), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(30),CAMELLIA_SUBKEY_R(30), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(31),CAMELLIA_SUBKEY_R(31), + io[0],io[1],il,ir,t0,t1); + + /* post whitening but kw4 */ + io[2] ^= CAMELLIA_SUBKEY_L(32); + io[3] ^= CAMELLIA_SUBKEY_R(32); + + t0 = io[0]; + t1 = io[1]; + io[0] = io[2]; + io[1] = io[3]; + io[2] = t0; + io[3] = t1; + + io_text[0] = cpu_to_be32(io[0]); + io_text[1] = cpu_to_be32(io[1]); + io_text[2] = cpu_to_be32(io[2]); + io_text[3] = cpu_to_be32(io[3]); + + return; +} + + +static void camellia_decrypt256(const u32 *subkey, __be32 *io_text) +{ + u32 il,ir,t0,t1; /* temporary valiables */ + + u32 io[4]; + + io[0] = be32_to_cpu(io_text[0]); + io[1] = be32_to_cpu(io_text[1]); + io[2] = be32_to_cpu(io_text[2]); + io[3] = be32_to_cpu(io_text[3]); + + /* pre whitening but absorb kw2*/ + io[0] ^= CAMELLIA_SUBKEY_L(32); + io[1] ^= CAMELLIA_SUBKEY_R(32); + + /* main iteration */ + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(31),CAMELLIA_SUBKEY_R(31), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(30),CAMELLIA_SUBKEY_R(30), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(29),CAMELLIA_SUBKEY_R(29), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(28),CAMELLIA_SUBKEY_R(28), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(27),CAMELLIA_SUBKEY_R(27), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + 
CAMELLIA_SUBKEY_L(26),CAMELLIA_SUBKEY_R(26), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(25),CAMELLIA_SUBKEY_R(25), + CAMELLIA_SUBKEY_L(24),CAMELLIA_SUBKEY_R(24), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17), + CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10), + io[0],io[1],il,ir,t0,t1); + + CAMELLIA_FLS(io[0],io[1],io[2],io[3], + CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9), + CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8), + t0,t1,il,ir); + + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4), + io[0],io[1],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[0],io[1], + CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3), + io[2],io[3],il,ir,t0,t1); + CAMELLIA_ROUNDSM(io[2],io[3], + CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2), + io[0],io[1],il,ir,t0,t1); + + /* post whitening but kw4 */ + io[2] ^= CAMELLIA_SUBKEY_L(0); + io[3] ^= CAMELLIA_SUBKEY_R(0); + + t0 = io[0]; + t1 = io[1]; + io[0] = io[2]; + io[1] = io[3]; + io[2] = t0; + io[3] = t1; + + io_text[0] = cpu_to_be32(io[0]); + io_text[1] = cpu_to_be32(io[1]); + io_text[2] = cpu_to_be32(io[2]); + io_text[3] = cpu_to_be32(io[3]); + + return; +} + + +static int +camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key, + unsigned int key_len) +{ + struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); + const unsigned char *key = (const unsigned char *)in_key; + u32 *flags = &tfm->crt_flags; + + if (key_len != 16 && key_len != 24 && key_len != 32) { + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; + return -EINVAL; + } + + cctx->key_length = key_len; + + switch(key_len) { + case 16: + camellia_setup128(key, cctx->key_table); + break; + case 24: + camellia_setup192(key, cctx->key_table); + break; + case 32: + camellia_setup256(key, cctx->key_table); + break; + default: + break; + } + + return 0; +} + + +static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct 
camellia_ctx *cctx = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)in; + __be32 *dst = (__be32 *)out; + + __be32 tmp[4]; + + memcpy(tmp, src, CAMELLIA_BLOCK_SIZE); + + switch (cctx->key_length) { + case 16: + camellia_encrypt128(cctx->key_table, tmp); + break; + case 24: + /* fall through */ + case 32: + camellia_encrypt256(cctx->key_table, tmp); + break; + default: + break; + } + + memcpy(dst, tmp, CAMELLIA_BLOCK_SIZE); +} + + +static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) +{ + const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); + const __be32 *src = (const __be32 *)in; + __be32 *dst = (__be32 *)out; + + __be32 tmp[4]; + + memcpy(tmp, src, CAMELLIA_BLOCK_SIZE); + + switch (cctx->key_length) { + case 16: + camellia_decrypt128(cctx->key_table, tmp); + break; + case 24: + /* fall through */ + case 32: + camellia_decrypt256(cctx->key_table, tmp); + break; + default: + break; + } + + memcpy(dst, tmp, CAMELLIA_BLOCK_SIZE); +} + + +static struct crypto_alg camellia_alg = { + .cra_name = "camellia", + .cra_driver_name = "camellia-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_CIPHER, + .cra_blocksize = CAMELLIA_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct camellia_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(camellia_alg.cra_list), + .cra_u = { + .cipher = { + .cia_min_keysize = CAMELLIA_MIN_KEY_SIZE, + .cia_max_keysize = CAMELLIA_MAX_KEY_SIZE, + .cia_setkey = camellia_set_key, + .cia_encrypt = camellia_encrypt, + .cia_decrypt = camellia_decrypt + } + } +}; + +static int __init camellia_init(void) +{ + return crypto_register_alg(&camellia_alg); +} + + +static void __exit camellia_fini(void) +{ + crypto_unregister_alg(&camellia_alg); +} + + +module_init(camellia_init); +module_exit(camellia_fini); + + +MODULE_DESCRIPTION("Camellia Cipher Algorithm"); +MODULE_LICENSE("GPL"); -- cgit v1.2.3 From 02ab5a7056bd8441ba6ae8ba8662d4296c202ecb Mon Sep 17 00:00:00 2001 From: Noriaki TAKAMIYA Date: Wed, 24 Jan 2007 21:48:19 +1100 Subject: [CRYPTO] camellia: added the testing code of Camellia cipher This patch adds the code of Camellia code for testing module. 
Signed-off-by: Noriaki TAKAMIYA Signed-off-by: Herbert Xu --- crypto/tcrypt.c | 42 ++++++++++++++- crypto/tcrypt.h | 165 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 206 insertions(+), 1 deletion(-) (limited to 'crypto') diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 8c8e2f95dc7e..f5e9da319ece 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -73,7 +73,7 @@ static char *check[] = { "twofish", "serpent", "sha384", "sha512", "md4", "aes", "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt", - NULL + "camellia", NULL }; static void hexdump(unsigned char *buf, unsigned int len) @@ -972,6 +972,20 @@ static void do_test(void) test_cipher("pcbc(fcrypt)", DECRYPT, fcrypt_pcbc_dec_tv_template, FCRYPT_DEC_TEST_VECTORS); + //CAMELLIA + test_cipher("ecb(camellia)", ENCRYPT, + camellia_enc_tv_template, + CAMELLIA_ENC_TEST_VECTORS); + test_cipher("ecb(camellia)", DECRYPT, + camellia_dec_tv_template, + CAMELLIA_DEC_TEST_VECTORS); + test_cipher("cbc(camellia)", ENCRYPT, + camellia_cbc_enc_tv_template, + CAMELLIA_CBC_ENC_TEST_VECTORS); + test_cipher("cbc(camellia)", DECRYPT, + camellia_cbc_dec_tv_template, + CAMELLIA_CBC_DEC_TEST_VECTORS); + test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS); test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS); test_hash("wp512", wp512_tv_template, WP512_TEST_VECTORS); @@ -1196,6 +1210,21 @@ static void do_test(void) FCRYPT_DEC_TEST_VECTORS); break; + case 32: + test_cipher("ecb(camellia)", ENCRYPT, + camellia_enc_tv_template, + CAMELLIA_ENC_TEST_VECTORS); + test_cipher("ecb(camellia)", DECRYPT, + camellia_dec_tv_template, + CAMELLIA_DEC_TEST_VECTORS); + test_cipher("cbc(camellia)", ENCRYPT, + camellia_cbc_enc_tv_template, + CAMELLIA_CBC_ENC_TEST_VECTORS); + test_cipher("cbc(camellia)", DECRYPT, + camellia_cbc_dec_tv_template, + CAMELLIA_CBC_DEC_TEST_VECTORS); + break; + case 100: test_hash("hmac(md5)", hmac_md5_tv_template, HMAC_MD5_TEST_VECTORS); @@ -1289,6 +1318,17 @@ static void do_test(void) des_speed_template); break; + case 205: + test_cipher_speed("ecb(camellia)", ENCRYPT, sec, NULL, 0, + camellia_speed_template); + test_cipher_speed("ecb(camellia)", DECRYPT, sec, NULL, 0, + camellia_speed_template); + test_cipher_speed("cbc(camellia)", ENCRYPT, sec, NULL, 0, + camellia_speed_template); + test_cipher_speed("cbc(camellia)", DECRYPT, sec, NULL, 0, + camellia_speed_template); + break; + case 300: /* fall through */ diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index ec77814d104e..887527bd5bc6 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -3687,6 +3687,150 @@ static struct cipher_testvec fcrypt_pcbc_dec_tv_template[] = { } }; +/* + * CAMELLIA test vectors. 
+ */ +#define CAMELLIA_ENC_TEST_VECTORS 3 +#define CAMELLIA_DEC_TEST_VECTORS 3 +#define CAMELLIA_CBC_ENC_TEST_VECTORS 2 +#define CAMELLIA_CBC_DEC_TEST_VECTORS 2 + +static struct cipher_testvec camellia_enc_tv_template[] = { + { + .key = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .klen = 16, + .input = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .ilen = 16, + .result = { 0x67, 0x67, 0x31, 0x38, 0x54, 0x96, 0x69, 0x73, + 0x08, 0x57, 0x06, 0x56, 0x48, 0xea, 0xbe, 0x43 }, + .rlen = 16, + }, { + .key = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10, + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77 }, + .klen = 24, + .input = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .ilen = 16, + .result = { 0xb4, 0x99, 0x34, 0x01, 0xb3, 0xe9, 0x96, 0xf8, + 0x4e, 0xe5, 0xce, 0xe7, 0xd7, 0x9b, 0x09, 0xb9 }, + .rlen = 16, + }, { + .key = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10, + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff }, + .klen = 32, + .input = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .ilen = 16, + .result = { 0x9a, 0xcc, 0x23, 0x7d, 0xff, 0x16, 0xd7, 0x6c, + 0x20, 0xef, 0x7c, 0x91, 0x9e, 0x3a, 0x75, 0x09 }, + .rlen = 16, + }, +}; + +static struct cipher_testvec camellia_dec_tv_template[] = { + { + .key = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .klen = 16, + .input = { 0x67, 0x67, 0x31, 0x38, 0x54, 0x96, 0x69, 0x73, + 0x08, 0x57, 0x06, 0x56, 0x48, 0xea, 0xbe, 0x43 }, + .ilen = 16, + .result = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .rlen = 16, + }, { + .key = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10, + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77 }, + .klen = 24, + .input = { 0xb4, 0x99, 0x34, 0x01, 0xb3, 0xe9, 0x96, 0xf8, + 0x4e, 0xe5, 0xce, 0xe7, 0xd7, 0x9b, 0x09, 0xb9 }, + .ilen = 16, + .result = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .rlen = 16, + }, { + .key = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10, + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff }, + .klen = 32, + .input = { 0x9a, 0xcc, 0x23, 0x7d, 0xff, 0x16, 0xd7, 0x6c, + 0x20, 0xef, 0x7c, 0x91, 0x9e, 0x3a, 0x75, 0x09 }, + .ilen = 16, + .result = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, + 0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10 }, + .rlen = 16, + }, +}; + +static struct cipher_testvec camellia_cbc_enc_tv_template[] = { + { + .key = { 0x06, 0xa9, 0x21, 0x40, 0x36, 0xb8, 0xa1, 0x5b, + 0x51, 0x2e, 0x03, 0xd5, 0x34, 0x12, 0x00, 0x06 }, + .klen = 16, + .iv = { 0x3d, 0xaf, 0xba, 0x42, 0x9d, 0x9e, 0xb4, 0x30, + 0xb4, 0x22, 0xda, 0x80, 0x2c, 0x9f, 0xac, 0x41 }, + .input = { "Single block msg" }, + .ilen = 16, + .result = { 0xea, 0x32, 0x12, 0x76, 0x3b, 0x50, 0x10, 0xe7, + 0x18, 0xf6, 0xfd, 0x5d, 0xf6, 0x8f, 0x13, 0x51 }, + .rlen = 16, + }, { + .key = { 0xc2, 0x86, 0x69, 0x6d, 0x88, 0x7c, 0x9a, 0xa0, + 0x61, 0x1b, 0xbb, 0x3e, 0x20, 0x25, 0xa4, 0x5a }, + .klen = 16, + .iv = { 0x56, 
0x2e, 0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, + 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58 }, + .input = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f }, + .ilen = 32, + .result = { 0xa5, 0xdf, 0x6e, 0x50, 0xda, 0x70, 0x6c, 0x01, + 0x4a, 0xab, 0xf3, 0xf2, 0xd6, 0xfc, 0x6c, 0xfd, + 0x19, 0xb4, 0x3e, 0x57, 0x1c, 0x02, 0x5e, 0xa0, + 0x15, 0x78, 0xe0, 0x5e, 0xf2, 0xcb, 0x87, 0x16 }, + .rlen = 32, + }, +}; + +static struct cipher_testvec camellia_cbc_dec_tv_template[] = { + { + .key = { 0x06, 0xa9, 0x21, 0x40, 0x36, 0xb8, 0xa1, 0x5b, + 0x51, 0x2e, 0x03, 0xd5, 0x34, 0x12, 0x00, 0x06 }, + .klen = 16, + .iv = { 0x3d, 0xaf, 0xba, 0x42, 0x9d, 0x9e, 0xb4, 0x30, + 0xb4, 0x22, 0xda, 0x80, 0x2c, 0x9f, 0xac, 0x41 }, + .input = { 0xea, 0x32, 0x12, 0x76, 0x3b, 0x50, 0x10, 0xe7, + 0x18, 0xf6, 0xfd, 0x5d, 0xf6, 0x8f, 0x13, 0x51 }, + .ilen = 16, + .result = { "Single block msg" }, + .rlen = 16, + }, { + .key = { 0xc2, 0x86, 0x69, 0x6d, 0x88, 0x7c, 0x9a, 0xa0, + 0x61, 0x1b, 0xbb, 0x3e, 0x20, 0x25, 0xa4, 0x5a }, + .klen = 16, + .iv = { 0x56, 0x2e, 0x17, 0x99, 0x6d, 0x09, 0x3d, 0x28, + 0xdd, 0xb3, 0xba, 0x69, 0x5a, 0x2e, 0x6f, 0x58 }, + .input = { 0xa5, 0xdf, 0x6e, 0x50, 0xda, 0x70, 0x6c, 0x01, + 0x4a, 0xab, 0xf3, 0xf2, 0xd6, 0xfc, 0x6c, 0xfd, + 0x19, 0xb4, 0x3e, 0x57, 0x1c, 0x02, 0x5e, 0xa0, + 0x15, 0x78, 0xe0, 0x5e, 0xf2, 0xcb, 0x87, 0x16 }, + .ilen = 32, + .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f }, + .rlen = 32, + }, +}; + /* * Compression stuff. */ @@ -4140,4 +4284,25 @@ static struct hash_speed generic_hash_speed_template[] = { { .blen = 0, .plen = 0, } }; +static struct cipher_speed camellia_speed_template[] = { + { .klen = 16, .blen = 16, }, + { .klen = 16, .blen = 64, }, + { .klen = 16, .blen = 256, }, + { .klen = 16, .blen = 1024, }, + { .klen = 16, .blen = 8192, }, + { .klen = 24, .blen = 16, }, + { .klen = 24, .blen = 64, }, + { .klen = 24, .blen = 256, }, + { .klen = 24, .blen = 1024, }, + { .klen = 24, .blen = 8192, }, + { .klen = 32, .blen = 16, }, + { .klen = 32, .blen = 64, }, + { .klen = 32, .blen = 256, }, + { .klen = 32, .blen = 1024, }, + { .klen = 32, .blen = 8192, }, + + /* End marker */ + { .klen = 0, .blen = 0, } +}; + #endif /* _CRYPTO_TCRYPT_H */ -- cgit v1.2.3
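
As a usage illustration (not part of either patch above), here is a minimal sketch of how the newly registered "camellia" algorithm could be driven from another kernel module through the single-block cipher interface of this kernel generation (crypto_alloc_cipher / crypto_cipher_encrypt_one). The module name camellia_demo and its printk output are hypothetical, error handling is abbreviated, and the expected ciphertext is simply the first 128-bit ECB vector from camellia_enc_tv_template above.

/* Illustrative sketch only: encrypt one block with whatever provider of
 * "camellia" is registered (the patch above registers camellia-generic)
 * and compare it against the first 128-bit ECB vector from tcrypt.h.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/err.h>
#include <linux/crypto.h>

static int __init camellia_demo_init(void)
{
	static const u8 key[16] = {
		0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef,
		0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10
	};
	static const u8 ptext[16] = {
		0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef,
		0xfe, 0xdc, 0xba, 0x98, 0x76, 0x54, 0x32, 0x10
	};
	static const u8 expected[16] = {
		0x67, 0x67, 0x31, 0x38, 0x54, 0x96, 0x69, 0x73,
		0x08, 0x57, 0x06, 0x56, 0x48, 0xea, 0xbe, 0x43
	};
	struct crypto_cipher *tfm;
	u8 ctext[16];
	int err;

	/* ask the crypto API for any cipher implementing "camellia" */
	tfm = crypto_alloc_cipher("camellia", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!err) {
		/* single-block encryption, i.e. raw ECB on one block */
		crypto_cipher_encrypt_one(tfm, ctext, ptext);
		if (memcmp(ctext, expected, sizeof(expected)))
			err = -EINVAL;
	}

	crypto_free_cipher(tfm);
	printk(KERN_INFO "camellia demo: %s\n", err ? "failed" : "ok");
	return err;
}

static void __exit camellia_demo_exit(void)
{
}

module_init(camellia_demo_init);
module_exit(camellia_demo_exit);
MODULE_LICENSE("GPL");

The same vectors can also be exercised without any extra code by loading the test module with its mode parameter, e.g. "modprobe tcrypt mode=32" for the Camellia correctness vectors added above, or "modprobe tcrypt mode=205" for the camellia_speed_template timings.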