From 902e7e2936a614ce2e72addcec388a3c77aebfd5 Mon Sep 17 00:00:00 2001 From: Joy Latten Date: Tue, 23 Oct 2007 08:50:32 +0800 Subject: [CRYPTO] ctr: Add CTR (Counter) block cipher mode This patch implements CTR mode for IPsec. It is based off of RFC 3686. Please note: 1. CTR turns a block cipher into a stream cipher. Encryption is done in blocks, however the last block may be a partial block. A "counter block" is encrypted, creating a keystream that is xor'ed with the plaintext. The counter portion of the counter block is incremented after each block of plaintext is encrypted. Decryption is performed in same manner. 2. The CTR counterblock is composed of, nonce + IV + counter The size of the counterblock is equivalent to the blocksize of the cipher. sizeof(nonce) + sizeof(IV) + sizeof(counter) = blocksize The CTR template requires the name of the cipher algorithm, the sizeof the nonce, and the sizeof the iv. ctr(cipher,sizeof_nonce,sizeof_iv) So for example, ctr(aes,4,8) specifies the counterblock will be composed of 4 bytes from a nonce, 8 bytes from the iv, and 4 bytes for counter since aes has a blocksize of 16 bytes. 3. The counter portion of the counter block is stored in big endian for conformance to rfc 3686. Signed-off-by: Joy Latten Signed-off-by: Herbert Xu --- crypto/Kconfig | 9 +++++++++ 1 file changed, 9 insertions(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 083d2e1d..1f32071a 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -195,6 +195,15 @@ config CRYPTO_XTS key size 256, 384 or 512 bits. This implementation currently can't handle a sectorsize which is not a multiple of 16 bytes. +config CRYPTO_CTR + tristate "CTR support" + select CRYPTO_BLKCIPHER + select CRYPTO_MANAGER + default m + help + CTR: Counter mode + This block cipher algorithm is required for IPSec. + config CRYPTO_CRYPTD tristate "Software async crypto daemon" select CRYPTO_ABLKCIPHER -- cgit v1.2.3 From 3ad61ba54bcd194c362a33649793c692dcb0e8ab Mon Sep 17 00:00:00 2001 From: Sebastian Siewior Date: Thu, 8 Nov 2007 21:25:04 +0800 Subject: [CRYPTO] aes-x86-64: Remove setkey The setkey() function can be shared with the generic algorithm. Signed-off-by: Sebastian Siewior Signed-off-by: Herbert Xu --- crypto/Kconfig | 1 + 1 file changed, 1 insertion(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 1f32071a..3f0bc0ed 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -350,6 +350,7 @@ config CRYPTO_AES_X86_64 tristate "AES cipher algorithms (x86_64)" depends on (X86 || UML_X86) && 64BIT select CRYPTO_ALGAPI + select CRYPTO_AES help AES cipher algorithms (FIPS-197). AES uses the Rijndael algorithm. -- cgit v1.2.3 From b7af911276da3efe838cc5505b6469fbe67e60c5 Mon Sep 17 00:00:00 2001 From: Sebastian Siewior Date: Thu, 8 Nov 2007 21:27:05 +0800 Subject: [CRYPTO] ctr: Remove default M NO other block mode is M by default. Signed-off-by: Sebastian Siewior Signed-off-by: Herbert Xu --- crypto/Kconfig | 1 - 1 file changed, 1 deletion(-) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 3f0bc0ed..d9666e33 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -199,7 +199,6 @@ config CRYPTO_CTR tristate "CTR support" select CRYPTO_BLKCIPHER select CRYPTO_MANAGER - default m help CTR: Counter mode This block cipher algorithm is required for IPSec. 
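The counter-block layout that the CTR commit message above describes can be sketched in plain C. This is only an illustration of the RFC 3686 layout for ctr(aes,4,8) -- it is not code from crypto/ctr.c, and the helper names (ctr_init_block, ctr_inc_block) are invented for the example:

/*
 * Illustration only, not taken from crypto/ctr.c.  For ctr(aes,4,8) the
 * 16-byte counter block is laid out as
 *   bytes  0..3   nonce
 *   bytes  4..11  per-packet IV
 *   bytes 12..15  big-endian block counter (per RFC 3686)
 * and the counter field is bumped once per 16-byte keystream block.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE	16
#define NONCE_SIZE	4	/* sizeof_nonce in ctr(aes,4,8) */
#define IV_SIZE		8	/* sizeof_iv    in ctr(aes,4,8) */

/* Build the initial counter block: nonce || IV || counter (big endian). */
static void ctr_init_block(uint8_t block[AES_BLOCK_SIZE],
			   const uint8_t nonce[NONCE_SIZE],
			   const uint8_t iv[IV_SIZE], uint32_t counter)
{
	memcpy(block, nonce, NONCE_SIZE);
	memcpy(block + NONCE_SIZE, iv, IV_SIZE);
	block[12] = counter >> 24;	/* stored big endian per RFC 3686 */
	block[13] = counter >> 16;
	block[14] = counter >> 8;
	block[15] = counter;
}

/* Increment the trailing 32-bit big-endian counter in place. */
static void ctr_inc_block(uint8_t block[AES_BLOCK_SIZE])
{
	int i;

	for (i = AES_BLOCK_SIZE - 1; i >= AES_BLOCK_SIZE - 4; i--)
		if (++block[i])
			break;
}

int main(void)
{
	const uint8_t nonce[NONCE_SIZE] = { 0x00, 0x00, 0x00, 0x30 };
	const uint8_t iv[IV_SIZE] = { 0, 0, 0, 0, 0, 0, 0, 1 };
	uint8_t block[AES_BLOCK_SIZE];
	int i;

	/* RFC 3686 starts the per-packet block counter at 1. */
	ctr_init_block(block, nonce, iv, 1);
	for (i = 0; i < AES_BLOCK_SIZE; i++)
		printf("%02x", block[i]);
	printf("\n");

	/*
	 * Each keystream block would be E_k(block) xor'ed with the
	 * plaintext; after every full block the counter is incremented.
	 */
	ctr_inc_block(block);
	for (i = 0; i < AES_BLOCK_SIZE; i++)
		printf("%02x", block[i]);
	printf("\n");
	return 0;
}

Note that only the final four bytes are incremented; the nonce and IV portions stay fixed for the lifetime of the packet, which is why the template takes their sizes as parameters.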
-- cgit v1.2.3 From 608aa58f3732b46b0dbcf043f33bb3a9b0711d80 Mon Sep 17 00:00:00 2001 From: Sebastian Siewior Date: Sat, 10 Nov 2007 19:07:16 +0800 Subject: [CRYPTO] aes-i586: Remove setkey The setkey() function can be shared with the generic algorithm. Signed-off-by: Sebastian Siewior Signed-off-by: Herbert Xu --- crypto/Kconfig | 1 + 1 file changed, 1 insertion(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index d9666e33..cf115b14 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -328,6 +328,7 @@ config CRYPTO_AES_586 tristate "AES cipher algorithms (i586)" depends on (X86 || UML_X86) && !64BIT select CRYPTO_ALGAPI + select CRYPTO_AES help AES cipher algorithms (FIPS-197). AES uses the Rijndael algorithm. -- cgit v1.2.3 From b8803214ef1c505b448dce93679c6a01b2ed66f6 Mon Sep 17 00:00:00 2001 From: Jonathan Lynch Date: Sat, 10 Nov 2007 20:08:25 +0800 Subject: [CRYPTO] sha256-generic: Extend sha256_generic.c to support SHA-224 Resubmitting this patch which extends sha256_generic.c to support SHA-224 as described in FIPS 180-2 and RFC 3874. HMAC-SHA-224 as described in RFC4231 is then supported through the hmac interface. Patch includes test vectors for SHA-224 and HMAC-SHA-224. SHA-224 chould be chosen as a hash algorithm when 112 bits of security strength is required. Patch generated against the 2.6.24-rc1 kernel and tested against 2.6.24-rc1-git14 which includes fix for scatter gather implementation for HMAC. Signed-off-by: Jonathan Lynch Signed-off-by: Herbert Xu --- crypto/Kconfig | 5 +- crypto/sha256_generic.c | 72 +++++++++++++++++++++--- crypto/tcrypt.c | 22 +++++++- crypto/tcrypt.h | 142 ++++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 229 insertions(+), 12 deletions(-) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index cf115b14..7758454b 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -91,7 +91,7 @@ config CRYPTO_SHA1 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). config CRYPTO_SHA256 - tristate "SHA256 digest algorithm" + tristate "SHA224 and SHA256 digest algorithm" select CRYPTO_ALGAPI help SHA256 secure hash standard (DFIPS 180-2). @@ -99,6 +99,9 @@ config CRYPTO_SHA256 This version of SHA implements a 256 bit hash with 128 bits of security against collision attacks. + This code also includes SHA-224, a 224 bit hash with 112 bits + of security against collision attacks. 
+ config CRYPTO_SHA512 tristate "SHA384 and SHA512 digest algorithms" select CRYPTO_ALGAPI diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c index fd3918be..3cc93fd6 100644 --- a/crypto/sha256_generic.c +++ b/crypto/sha256_generic.c @@ -9,6 +9,7 @@ * Copyright (c) Jean-Luc Cooke * Copyright (c) Andrew McDonald * Copyright (c) 2002 James Morris + * SHA224 Support Copyright 2007 Intel Corporation * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free @@ -218,6 +219,22 @@ static void sha256_transform(u32 *state, const u8 *input) memset(W, 0, 64 * sizeof(u32)); } + +static void sha224_init(struct crypto_tfm *tfm) +{ + struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); + sctx->state[0] = SHA224_H0; + sctx->state[1] = SHA224_H1; + sctx->state[2] = SHA224_H2; + sctx->state[3] = SHA224_H3; + sctx->state[4] = SHA224_H4; + sctx->state[5] = SHA224_H5; + sctx->state[6] = SHA224_H6; + sctx->state[7] = SHA224_H7; + sctx->count[0] = 0; + sctx->count[1] = 0; +} + static void sha256_init(struct crypto_tfm *tfm) { struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); @@ -294,8 +311,17 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out) memset(sctx, 0, sizeof(*sctx)); } +static void sha224_final(struct crypto_tfm *tfm, u8 *hash) +{ + u8 D[SHA256_DIGEST_SIZE]; + + sha256_final(tfm, D); + + memcpy(hash, D, SHA224_DIGEST_SIZE); + memset(D, 0, SHA256_DIGEST_SIZE); +} -static struct crypto_alg alg = { +static struct crypto_alg sha256 = { .cra_name = "sha256", .cra_driver_name= "sha256-generic", .cra_flags = CRYPTO_ALG_TYPE_DIGEST, @@ -303,28 +329,58 @@ static struct crypto_alg alg = { .cra_ctxsize = sizeof(struct sha256_ctx), .cra_module = THIS_MODULE, .cra_alignmask = 3, - .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_list = LIST_HEAD_INIT(sha256.cra_list), .cra_u = { .digest = { .dia_digestsize = SHA256_DIGEST_SIZE, - .dia_init = sha256_init, - .dia_update = sha256_update, - .dia_final = sha256_final } } + .dia_init = sha256_init, + .dia_update = sha256_update, + .dia_final = sha256_final } } +}; + +static struct crypto_alg sha224 = { + .cra_name = "sha224", + .cra_driver_name = "sha224-generic", + .cra_flags = CRYPTO_ALG_TYPE_DIGEST, + .cra_blocksize = SHA224_BLOCK_SIZE, + .cra_ctxsize = sizeof(struct sha256_ctx), + .cra_module = THIS_MODULE, + .cra_alignmask = 3, + .cra_list = LIST_HEAD_INIT(sha224.cra_list), + .cra_u = { .digest = { + .dia_digestsize = SHA224_DIGEST_SIZE, + .dia_init = sha224_init, + .dia_update = sha256_update, + .dia_final = sha224_final } } }; static int __init init(void) { - return crypto_register_alg(&alg); + int ret = 0; + + ret = crypto_register_alg(&sha224); + + if (ret < 0) + return ret; + + ret = crypto_register_alg(&sha256); + + if (ret < 0) + crypto_unregister_alg(&sha224); + + return ret; } static void __exit fini(void) { - crypto_unregister_alg(&alg); + crypto_unregister_alg(&sha224); + crypto_unregister_alg(&sha256); } module_init(init); module_exit(fini); MODULE_LICENSE("GPL"); -MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm"); +MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm"); +MODULE_ALIAS("sha224"); MODULE_ALIAS("sha256"); diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index aa84bc4f..4d364cca 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -12,6 +12,7 @@ * Software Foundation; either version 2 of the License, or (at your option) * any later version. 
* + * 2007-11-06 Added SHA-224 and SHA-224-HMAC tests * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests * 2004-08-09 Added cipher speed tests (Reyk Floeter ) * 2003-09-14 Rewritten by Kartikey Mahendra Bhatt @@ -74,8 +75,9 @@ static char *xbuf; static char *tvmem; static char *check[] = { - "des", "md5", "des3_ede", "rot13", "sha1", "sha256", "blowfish", - "twofish", "serpent", "sha384", "sha512", "md4", "aes", "cast6", + "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256", + "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes", + "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt", "camellia", "seed", NULL @@ -918,6 +920,8 @@ static void do_test(void) test_hash("md4", md4_tv_template, MD4_TEST_VECTORS); + test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS); + test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS); //BLOWFISH @@ -1067,6 +1071,8 @@ static void do_test(void) HMAC_MD5_TEST_VECTORS); test_hash("hmac(sha1)", hmac_sha1_tv_template, HMAC_SHA1_TEST_VECTORS); + test_hash("hmac(sha224)", hmac_sha224_tv_template, + HMAC_SHA224_TEST_VECTORS); test_hash("hmac(sha256)", hmac_sha256_tv_template, HMAC_SHA256_TEST_VECTORS); test_hash("hmac(sha384)", hmac_sha384_tv_template, @@ -1299,6 +1305,9 @@ static void do_test(void) camellia_cbc_dec_tv_template, CAMELLIA_CBC_DEC_TEST_VECTORS); break; + case 33: + test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS); + break; case 100: test_hash("hmac(md5)", hmac_md5_tv_template, @@ -1324,7 +1333,10 @@ static void do_test(void) test_hash("hmac(sha512)", hmac_sha512_tv_template, HMAC_SHA512_TEST_VECTORS); break; - + case 105: + test_hash("hmac(sha224)", hmac_sha224_tv_template, + HMAC_SHA224_TEST_VECTORS); + break; case 200: test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0, @@ -1459,6 +1471,10 @@ static void do_test(void) test_hash_speed("tgr192", sec, generic_hash_speed_template); if (mode > 300 && mode < 400) break; + case 313: + test_hash_speed("sha224", sec, generic_hash_speed_template); + if (mode > 300 && mode < 400) break; + case 399: break; diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index f7f9b237..b91585ea 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -173,6 +173,33 @@ static struct hash_testvec sha1_tv_template[] = { } }; + +/* + * SHA224 test vectors from from FIPS PUB 180-2 + */ +#define SHA224_TEST_VECTORS 2 + +static struct hash_testvec sha224_tv_template[] = { + { + .plaintext = "abc", + .psize = 3, + .digest = { 0x23, 0x09, 0x7D, 0x22, 0x34, 0x05, 0xD8, 0x22, + 0x86, 0x42, 0xA4, 0x77, 0xBD, 0xA2, 0x55, 0xB3, + 0x2A, 0xAD, 0xBC, 0xE4, 0xBD, 0xA0, 0xB3, 0xF7, + 0xE3, 0x6C, 0x9D, 0xA7}, + }, { + .plaintext = + "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", + .psize = 56, + .digest = { 0x75, 0x38, 0x8B, 0x16, 0x51, 0x27, 0x76, 0xCC, + 0x5D, 0xBA, 0x5D, 0xA1, 0xFD, 0x89, 0x01, 0x50, + 0xB0, 0xC6, 0x45, 0x5C, 0xB4, 0xF5, 0x8B, 0x19, + 0x52, 0x52, 0x25, 0x25 }, + .np = 2, + .tap = { 28, 28 } + } +}; + /* * SHA256 test vectors from from NIST */ @@ -817,6 +844,121 @@ static struct hash_testvec hmac_sha1_tv_template[] = { }, }; + +/* + * SHA224 HMAC test vectors from RFC4231 + */ +#define HMAC_SHA224_TEST_VECTORS 4 + +static struct hash_testvec hmac_sha224_tv_template[] = { + { + .key = { 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, + 0x0b, 0x0b, 0x0b, 0x0b }, + .ksize = 20, + /* 
("Hi There") */ + .plaintext = { 0x48, 0x69, 0x20, 0x54, 0x68, 0x65, 0x72, 0x65 }, + .psize = 8, + .digest = { 0x89, 0x6f, 0xb1, 0x12, 0x8a, 0xbb, 0xdf, 0x19, + 0x68, 0x32, 0x10, 0x7c, 0xd4, 0x9d, 0xf3, 0x3f, + 0x47, 0xb4, 0xb1, 0x16, 0x99, 0x12, 0xba, 0x4f, + 0x53, 0x68, 0x4b, 0x22}, + }, { + .key = { 0x4a, 0x65, 0x66, 0x65 }, /* ("Jefe") */ + .ksize = 4, + /* ("what do ya want for nothing?") */ + .plaintext = { 0x77, 0x68, 0x61, 0x74, 0x20, 0x64, 0x6f, 0x20, + 0x79, 0x61, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20, + 0x66, 0x6f, 0x72, 0x20, 0x6e, 0x6f, 0x74, 0x68, + 0x69, 0x6e, 0x67, 0x3f }, + .psize = 28, + .digest = { 0xa3, 0x0e, 0x01, 0x09, 0x8b, 0xc6, 0xdb, 0xbf, + 0x45, 0x69, 0x0f, 0x3a, 0x7e, 0x9e, 0x6d, 0x0f, + 0x8b, 0xbe, 0xa2, 0xa3, 0x9e, 0x61, 0x48, 0x00, + 0x8f, 0xd0, 0x5e, 0x44 }, + .np = 4, + .tap = { 7, 7, 7, 7 } + }, { + .key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa }, + .ksize = 131, + /* ("Test Using Larger Than Block-Size Key - Hash Key First") */ + .plaintext = { 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x69, + 0x6e, 0x67, 0x20, 0x4c, 0x61, 0x72, 0x67, 0x65, + 0x72, 0x20, 0x54, 0x68, 0x61, 0x6e, 0x20, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x2d, 0x53, 0x69, 0x7a, + 0x65, 0x20, 0x4b, 0x65, 0x79, 0x20, 0x2d, 0x20, + 0x48, 0x61, 0x73, 0x68, 0x20, 0x4b, 0x65, 0x79, + 0x20, 0x46, 0x69, 0x72, 0x73, 0x74 }, + .psize = 54, + .digest = { 0x95, 0xe9, 0xa0, 0xdb, 0x96, 0x20, 0x95, 0xad, + 0xae, 0xbe, 0x9b, 0x2d, 0x6f, 0x0d, 0xbc, 0xe2, + 0xd4, 0x99, 0xf1, 0x12, 0xf2, 0xd2, 0xb7, 0x27, + 0x3f, 0xa6, 0x87, 0x0e }, + }, { + .key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, + 0xaa, 0xaa, 0xaa }, + .ksize = 131, + /* ("This is a test using a larger than block-size key and a") + (" larger than block-size data. 
The key needs to be") + (" hashed before being used by the HMAC algorithm.") */ + .plaintext = { 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20, + 0x61, 0x20, 0x74, 0x65, 0x73, 0x74, 0x20, 0x75, + 0x73, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x6c, + 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, 0x68, + 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x6b, 0x65, + 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x20, + 0x6c, 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, + 0x68, 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x64, + 0x61, 0x74, 0x61, 0x2e, 0x20, 0x54, 0x68, 0x65, + 0x20, 0x6b, 0x65, 0x79, 0x20, 0x6e, 0x65, 0x65, + 0x64, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65, + 0x20, 0x68, 0x61, 0x73, 0x68, 0x65, 0x64, 0x20, + 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x20, 0x62, + 0x65, 0x69, 0x6e, 0x67, 0x20, 0x75, 0x73, 0x65, + 0x64, 0x20, 0x62, 0x79, 0x20, 0x74, 0x68, 0x65, + 0x20, 0x48, 0x4d, 0x41, 0x43, 0x20, 0x61, 0x6c, + 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x2e }, + .psize = 152, + .digest = { 0x3a, 0x85, 0x41, 0x66, 0xac, 0x5d, 0x9f, 0x02, + 0x3f, 0x54, 0xd5, 0x17, 0xd0, 0xb3, 0x9d, 0xbd, + 0x94, 0x67, 0x70, 0xdb, 0x9c, 0x2b, 0x95, 0xc9, + 0xf6, 0xf5, 0x65, 0xd1 }, + }, +}; + /* * HMAC-SHA256 test vectors from * draft-ietf-ipsec-ciph-sha-256-01.txt -- cgit v1.2.3 From 47b69df106ed3e8836f6963c1ad469d83a6b37e3 Mon Sep 17 00:00:00 2001 From: Tan Swee Heng Date: Fri, 23 Nov 2007 19:45:00 +0800 Subject: [CRYPTO] salsa20: Salsa20 stream cipher This patch implements the Salsa20 stream cipher using the blkcipher interface. The core cipher code comes from Daniel Bernstein's submission to eSTREAM: http://www.ecrypt.eu.org/stream/svn/viewcvs.cgi/ecrypt/trunk/submissions/salsa20/full/ref/ The test vectors comes from: http://www.ecrypt.eu.org/stream/svn/viewcvs.cgi/ecrypt/trunk/submissions/salsa20/full/ It has been tested successfully with "modprobe tcrypt mode=34" on an UML instance. Signed-off-by: Tan Swee Heng Signed-off-by: Herbert Xu --- crypto/Kconfig | 12 +++ crypto/Makefile | 1 + crypto/salsa20_generic.c | 243 +++++++++++++++++++++++++++++++++++++++++++++++ crypto/tcrypt.c | 8 +- crypto/tcrypt.h | 161 +++++++++++++++++++++++++++++++ 5 files changed, 424 insertions(+), 1 deletion(-) create mode 100644 crypto/salsa20_generic.c (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 7758454b..8d6cac97 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -454,6 +454,18 @@ config CRYPTO_SEED See also: +config CRYPTO_SALSA20 + tristate "Salsa20 stream cipher algorithm (EXPERIMENTAL)" + depends on EXPERIMENTAL + select CRYPTO_BLKCIPHER + help + Salsa20 stream cipher algorithm. + + Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT + Stream Cipher Project. See + + The Salsa20 stream cipher algorithm is designed by Daniel J. + Bernstein . 
See config CRYPTO_DEFLATE tristate "Deflate compression algorithm" diff --git a/crypto/Makefile b/crypto/Makefile index 1f87db2e..9b1476e5 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -49,6 +49,7 @@ obj-$(CONFIG_CRYPTO_TEA) += tea.o obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o obj-$(CONFIG_CRYPTO_SEED) += seed.o +obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c new file mode 100644 index 00000000..b49328af --- /dev/null +++ b/crypto/salsa20_generic.c @@ -0,0 +1,243 @@ +/* + * Salsa20: Salsa20 stream cipher algorithm + * + * Copyright (c) 2007 Tan Swee Heng + * + * Derived from: + * - salsa20.c: Public domain C code by Daniel J. Bernstein + * + * Salsa20 is a stream cipher candidate in eSTREAM, the ECRYPT Stream + * Cipher Project. It is designed by Daniel J. Bernstein . + * More information about eSTREAM and Salsa20 can be found here: + * http://www.ecrypt.eu.org/stream/ + * http://cr.yp.to/snuffle.html + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. + * + */ + +#include +#include +#include +#include +#include +#include +#include + +#define SALSA20_IV_SIZE 8U +#define SALSA20_MIN_KEY_SIZE 16U +#define SALSA20_MAX_KEY_SIZE 32U + +/* + * Start of code taken from D. J. Bernstein's reference implementation. + * With some modifications and optimizations made to suit our needs. + */ + +/* +salsa20-ref.c version 20051118 +D. J. Bernstein +Public domain. 
+*/ + +#define ROTATE(v,n) (((v) << (n)) | ((v) >> (32 - (n)))) +#define XOR(v,w) ((v) ^ (w)) +#define PLUS(v,w) (((v) + (w))) +#define PLUSONE(v) (PLUS((v),1)) +#define U32TO8_LITTLE(p, v) \ + { (p)[0] = (v >> 0) & 0xff; (p)[1] = (v >> 8) & 0xff; \ + (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; } +#define U8TO32_LITTLE(p) \ + (((u32)((p)[0]) ) | ((u32)((p)[1]) << 8) | \ + ((u32)((p)[2]) << 16) | ((u32)((p)[3]) << 24) ) + +struct salsa20_ctx +{ + u32 input[16]; +}; + +static void salsa20_wordtobyte(u8 output[64], const u32 input[16]) +{ + u32 x[16]; + int i; + + memcpy(x, input, sizeof(x)); + for (i = 20; i > 0; i -= 2) { + x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 0],x[12]), 7)); + x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[ 4],x[ 0]), 9)); + x[12] = XOR(x[12],ROTATE(PLUS(x[ 8],x[ 4]),13)); + x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[12],x[ 8]),18)); + x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 5],x[ 1]), 7)); + x[13] = XOR(x[13],ROTATE(PLUS(x[ 9],x[ 5]), 9)); + x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[13],x[ 9]),13)); + x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 1],x[13]),18)); + x[14] = XOR(x[14],ROTATE(PLUS(x[10],x[ 6]), 7)); + x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[14],x[10]), 9)); + x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 2],x[14]),13)); + x[10] = XOR(x[10],ROTATE(PLUS(x[ 6],x[ 2]),18)); + x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[15],x[11]), 7)); + x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 3],x[15]), 9)); + x[11] = XOR(x[11],ROTATE(PLUS(x[ 7],x[ 3]),13)); + x[15] = XOR(x[15],ROTATE(PLUS(x[11],x[ 7]),18)); + x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[ 0],x[ 3]), 7)); + x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[ 1],x[ 0]), 9)); + x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[ 2],x[ 1]),13)); + x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[ 3],x[ 2]),18)); + x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 5],x[ 4]), 7)); + x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 6],x[ 5]), 9)); + x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 7],x[ 6]),13)); + x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 4],x[ 7]),18)); + x[11] = XOR(x[11],ROTATE(PLUS(x[10],x[ 9]), 7)); + x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[11],x[10]), 9)); + x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 8],x[11]),13)); + x[10] = XOR(x[10],ROTATE(PLUS(x[ 9],x[ 8]),18)); + x[12] = XOR(x[12],ROTATE(PLUS(x[15],x[14]), 7)); + x[13] = XOR(x[13],ROTATE(PLUS(x[12],x[15]), 9)); + x[14] = XOR(x[14],ROTATE(PLUS(x[13],x[12]),13)); + x[15] = XOR(x[15],ROTATE(PLUS(x[14],x[13]),18)); + } + for (i = 0; i < 16; ++i) + x[i] = PLUS(x[i],input[i]); + for (i = 0; i < 16; ++i) + U32TO8_LITTLE(output + 4 * i,x[i]); +} + +static const char sigma[16] = "expand 32-byte k"; +static const char tau[16] = "expand 16-byte k"; + +static void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k, u32 kbytes) +{ + const char *constants; + + ctx->input[1] = U8TO32_LITTLE(k + 0); + ctx->input[2] = U8TO32_LITTLE(k + 4); + ctx->input[3] = U8TO32_LITTLE(k + 8); + ctx->input[4] = U8TO32_LITTLE(k + 12); + if (kbytes == 32) { /* recommended */ + k += 16; + constants = sigma; + } else { /* kbytes == 16 */ + constants = tau; + } + ctx->input[11] = U8TO32_LITTLE(k + 0); + ctx->input[12] = U8TO32_LITTLE(k + 4); + ctx->input[13] = U8TO32_LITTLE(k + 8); + ctx->input[14] = U8TO32_LITTLE(k + 12); + ctx->input[0] = U8TO32_LITTLE(constants + 0); + ctx->input[5] = U8TO32_LITTLE(constants + 4); + ctx->input[10] = U8TO32_LITTLE(constants + 8); + ctx->input[15] = U8TO32_LITTLE(constants + 12); +} + +static void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv) +{ + ctx->input[6] = U8TO32_LITTLE(iv + 0); + ctx->input[7] = U8TO32_LITTLE(iv + 4); + ctx->input[8] = 0; + ctx->input[9] = 0; +} + +static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst, + const u8 *src, 
unsigned int bytes) +{ + u8 buf[64]; + int i; + + if (dst != src) + memcpy(dst, src, bytes); + + while (bytes) { + salsa20_wordtobyte(buf, ctx->input); + + ctx->input[8] = PLUSONE(ctx->input[8]); + if (!ctx->input[8]) + ctx->input[9] = PLUSONE(ctx->input[9]); + + if (bytes <= 64) { + for (i = 0; i < bytes/4; ++i) + ((u32*)dst)[i] ^= ((u32*)buf)[i]; + for (i = bytes - bytes % 4; i < bytes; ++i) + dst[i] ^= buf[i]; + return; + } + + for (i = 0; i < 64/4; ++i) + ((u32*)dst)[i] ^= ((u32*)buf)[i]; + bytes -= 64; + dst += 64; + } +} + +/* + * End of code taken from D. J. Bernstein's reference implementation. + */ + +static int setkey(struct crypto_tfm *tfm, const u8 *key, + unsigned int keysize) +{ + struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm); + salsa20_keysetup(ctx, key, keysize); + return 0; +} + +static int encrypt(struct blkcipher_desc *desc, + struct scatterlist *dst, struct scatterlist *src, + unsigned int nbytes) +{ + struct blkcipher_walk walk; + struct crypto_blkcipher *tfm = desc->tfm; + struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm); + int err; + + blkcipher_walk_init(&walk, dst, src, nbytes); + err = blkcipher_walk_virt(desc, &walk); + + salsa20_ivsetup(ctx, walk.iv); + salsa20_encrypt_bytes(ctx, walk.dst.virt.addr, + walk.src.virt.addr, nbytes); + + err = blkcipher_walk_done(desc, &walk, 0); + return err; +} + +static struct crypto_alg alg = { + .cra_name = "salsa20", + .cra_driver_name = "salsa20-generic", + .cra_priority = 100, + .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, + .cra_type = &crypto_blkcipher_type, + .cra_blocksize = 1, + .cra_ctxsize = sizeof(struct salsa20_ctx), + .cra_alignmask = 3, + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_u = { + .blkcipher = { + .setkey = setkey, + .encrypt = encrypt, + .decrypt = encrypt, + .min_keysize = SALSA20_MIN_KEY_SIZE, + .max_keysize = SALSA20_MAX_KEY_SIZE, + .ivsize = SALSA20_IV_SIZE, + } + } +}; + +static int __init init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(init); +module_exit(fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION ("Salsa20 stream cipher algorithm"); +MODULE_ALIAS("salsa20"); diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 4d364cca..b8cb1d14 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -80,7 +80,7 @@ static char *check[] = { "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt", - "camellia", "seed", NULL + "camellia", "seed", "salsa20", NULL }; static void hexdump(unsigned char *buf, unsigned int len) @@ -1309,6 +1309,12 @@ static void do_test(void) test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS); break; + case 34: + test_cipher("salsa20", ENCRYPT, + salsa20_stream_enc_tv_template, + SALSA20_STREAM_ENC_TEST_VECTORS); + break; + case 100: test_hash("hmac(md5)", hmac_md5_tv_template, HMAC_MD5_TEST_VECTORS); diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index b91585ea..6ffc411b 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -4644,6 +4644,167 @@ static struct cipher_testvec seed_dec_tv_template[] = { } }; +#define SALSA20_STREAM_ENC_TEST_VECTORS 4 +static struct cipher_testvec salsa20_stream_enc_tv_template[] = { + /* + * Testvectors from verified.test-vectors submitted to ECRYPT. + * They are truncated to size 39, 64, 111, 129 to test a variety + * of input length. 
+ */ + { /* Set 3, vector 0 */ + .key = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F + }, + .klen = 16, + .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .input = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }, + .ilen = 39, + .result = { + 0x2D, 0xD5, 0xC3, 0xF7, 0xBA, 0x2B, 0x20, 0xF7, + 0x68, 0x02, 0x41, 0x0C, 0x68, 0x86, 0x88, 0x89, + 0x5A, 0xD8, 0xC1, 0xBD, 0x4E, 0xA6, 0xC9, 0xB1, + 0x40, 0xFB, 0x9B, 0x90, 0xE2, 0x10, 0x49, 0xBF, + 0x58, 0x3F, 0x52, 0x79, 0x70, 0xEB, 0xC1, + }, + .rlen = 39, + }, { /* Set 5, vector 0 */ + .key = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 + }, + .klen = 16, + .iv = { 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .input = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }, + .ilen = 64, + .result = { + 0xB6, 0x6C, 0x1E, 0x44, 0x46, 0xDD, 0x95, 0x57, + 0xE5, 0x78, 0xE2, 0x23, 0xB0, 0xB7, 0x68, 0x01, + 0x7B, 0x23, 0xB2, 0x67, 0xBB, 0x02, 0x34, 0xAE, + 0x46, 0x26, 0xBF, 0x44, 0x3F, 0x21, 0x97, 0x76, + 0x43, 0x6F, 0xB1, 0x9F, 0xD0, 0xE8, 0x86, 0x6F, + 0xCD, 0x0D, 0xE9, 0xA9, 0x53, 0x8F, 0x4A, 0x09, + 0xCA, 0x9A, 0xC0, 0x73, 0x2E, 0x30, 0xBC, 0xF9, + 0x8E, 0x4F, 0x13, 0xE4, 0xB9, 0xE2, 0x01, 0xD9, + }, + .rlen = 64, + }, { /* Set 3, vector 27 */ + .key = { + 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20, 0x21, 0x22, + 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, + 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, 0x30, 0x31, 0x32, + 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A + }, + .klen = 32, + .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + .input = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }, + .ilen = 111, + .result = { + 0xAE, 0x39, 0x50, 0x8E, 0xAC, 0x9A, 0xEC, 0xE7, + 0xBF, 0x97, 0xBB, 0x20, 0xB9, 0xDE, 0xE4, 0x1F, + 0x87, 0xD9, 0x47, 0xF8, 0x28, 0x91, 0x35, 0x98, + 0xDB, 0x72, 0xCC, 0x23, 0x29, 0x48, 0x56, 0x5E, + 0x83, 0x7E, 0x0B, 0xF3, 0x7D, 0x5D, 0x38, 0x7B, + 0x2D, 0x71, 0x02, 0xB4, 0x3B, 0xB5, 0xD8, 0x23, + 0xB0, 0x4A, 0xDF, 0x3C, 0xEC, 0xB6, 0xD9, 0x3B, + 0x9B, 0xA7, 0x52, 0xBE, 0xC5, 0xD4, 0x50, 0x59, + + 0x15, 0x14, 0xB4, 0x0E, 0x40, 0xE6, 0x53, 0xD1, + 0x83, 0x9C, 0x5B, 0xA0, 0x92, 0x29, 0x6B, 0x5E, + 0x96, 0x5B, 0x1E, 0x2F, 0xD3, 0xAC, 0xC1, 0x92, + 0xB1, 0x41, 0x3F, 0x19, 0x2F, 0xC4, 0x3B, 0xC6, + 0x95, 0x46, 0x45, 0x54, 0xE9, 
0x75, 0x03, 0x08, + 0x44, 0xAF, 0xE5, 0x8A, 0x81, 0x12, 0x09, + }, + .rlen = 111, + + }, { /* Set 5, vector 27 */ + .key = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 + }, + .klen = 32, + .iv = { 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00 }, + .input = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + + 0x00, + }, + .ilen = 129, + .result = { + 0xD2, 0xDB, 0x1A, 0x5C, 0xF1, 0xC1, 0xAC, 0xDB, + 0xE8, 0x1A, 0x7A, 0x43, 0x40, 0xEF, 0x53, 0x43, + 0x5E, 0x7F, 0x4B, 0x1A, 0x50, 0x52, 0x3F, 0x8D, + 0x28, 0x3D, 0xCF, 0x85, 0x1D, 0x69, 0x6E, 0x60, + 0xF2, 0xDE, 0x74, 0x56, 0x18, 0x1B, 0x84, 0x10, + 0xD4, 0x62, 0xBA, 0x60, 0x50, 0xF0, 0x61, 0xF2, + 0x1C, 0x78, 0x7F, 0xC1, 0x24, 0x34, 0xAF, 0x58, + 0xBF, 0x2C, 0x59, 0xCA, 0x90, 0x77, 0xF3, 0xB0, + + 0x5B, 0x4A, 0xDF, 0x89, 0xCE, 0x2C, 0x2F, 0xFC, + 0x67, 0xF0, 0xE3, 0x45, 0xE8, 0xB3, 0xB3, 0x75, + 0xA0, 0x95, 0x71, 0xA1, 0x29, 0x39, 0x94, 0xCA, + 0x45, 0x2F, 0xBD, 0xCB, 0x10, 0xB6, 0xBE, 0x9F, + 0x8E, 0xF9, 0xB2, 0x01, 0x0A, 0x5A, 0x0A, 0xB7, + 0x6B, 0x9D, 0x70, 0x8E, 0x4B, 0xD6, 0x2F, 0xCD, + 0x2E, 0x40, 0x48, 0x75, 0xE9, 0xE2, 0x21, 0x45, + 0x0B, 0xC9, 0xB6, 0xB5, 0x66, 0xBC, 0x9A, 0x59, + + 0x5A, + }, + .rlen = 129, + } +}; + /* * Compression stuff. */ -- cgit v1.2.3 From 8453222b078275dbb067c66b7ccf09e3a1c295b6 Mon Sep 17 00:00:00 2001 From: Mikko Herranen Date: Mon, 26 Nov 2007 22:24:11 +0800 Subject: [CRYPTO] gcm: New algorithm Add GCM/GMAC support to cryptoapi. GCM (Galois/Counter Mode) is an AEAD mode of operations for any block cipher with a block size of 16. The typical example is AES-GCM. Signed-off-by: Mikko Herranen Reviewed-by: Mika Kukkonen Signed-off-by: Herbert Xu --- crypto/Kconfig | 9 ++ crypto/Makefile | 1 + crypto/gcm.c | 465 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ crypto/tcrypt.c | 5 + crypto/tcrypt.h | 368 ++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 848 insertions(+) create mode 100644 crypto/gcm.c (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 8d6cac97..40ae92ca 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -206,6 +206,15 @@ config CRYPTO_CTR CTR: Counter mode This block cipher algorithm is required for IPSec. +config CRYPTO_GCM + tristate "GCM/GMAC support" + select CRYPTO_CTR + select CRYPTO_AEAD + select CRYPTO_GF128MUL + help + Support for Galois/Counter Mode (GCM) and Galois Message + Authentication Code (GMAC). Required for IPSec. 
+ config CRYPTO_CRYPTD tristate "Software async crypto daemon" select CRYPTO_ABLKCIPHER diff --git a/crypto/Makefile b/crypto/Makefile index 9b1476e5..957343cb 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -33,6 +33,7 @@ obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o obj-$(CONFIG_CRYPTO_LRW) += lrw.o obj-$(CONFIG_CRYPTO_XTS) += xts.o obj-$(CONFIG_CRYPTO_CTR) += ctr.o +obj-$(CONFIG_CRYPTO_GCM) += gcm.o obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o obj-$(CONFIG_CRYPTO_DES) += des_generic.o obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o diff --git a/crypto/gcm.c b/crypto/gcm.c new file mode 100644 index 00000000..ad8b8b9a --- /dev/null +++ b/crypto/gcm.c @@ -0,0 +1,465 @@ +/* + * GCM: Galois/Counter Mode. + * + * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 as published + * by the Free Software Foundation. + */ + +#include +#include +#include +#include +#include +#include +#include + +#include "scatterwalk.h" + +struct gcm_instance_ctx { + struct crypto_spawn ctr; +}; + +struct crypto_gcm_ctx { + struct crypto_ablkcipher *ctr; + struct gf128mul_4k *gf128; +}; + +struct crypto_gcm_ghash_ctx { + u32 bytes; + u32 flags; + struct gf128mul_4k *gf128; + u8 buffer[16]; +}; + +struct crypto_gcm_req_priv_ctx { + u8 auth_tag[16]; + u8 counter[16]; + struct crypto_gcm_ghash_ctx ghash; +}; + +static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags, + struct gf128mul_4k *gf128) +{ + ctx->bytes = 0; + ctx->flags = flags; + ctx->gf128 = gf128; + memset(ctx->buffer, 0, 16); +} + +static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx, + const u8 *src, unsigned int srclen) +{ + u8 *dst = ctx->buffer; + + if (ctx->bytes) { + int n = min(srclen, ctx->bytes); + u8 *pos = dst + (16 - ctx->bytes); + + ctx->bytes -= n; + srclen -= n; + + while (n--) + *pos++ ^= *src++; + + if (!ctx->bytes) + gf128mul_4k_lle((be128 *)dst, ctx->gf128); + } + + while (srclen >= 16) { + crypto_xor(dst, src, 16); + gf128mul_4k_lle((be128 *)dst, ctx->gf128); + src += 16; + srclen -= 16; + } + + if (srclen) { + ctx->bytes = 16 - srclen; + while (srclen--) + *dst++ ^= *src++; + } +} + +static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx, + struct scatterlist *sg, int len) +{ + struct scatter_walk walk; + u8 *src; + int n; + + scatterwalk_start(&walk, sg); + + while (len) { + n = scatterwalk_clamp(&walk, len); + + if (!n) { + scatterwalk_start(&walk, sg_next(walk.sg)); + n = scatterwalk_clamp(&walk, len); + } + + src = scatterwalk_map(&walk, 0); + + crypto_gcm_ghash_update(ctx, src, n); + len -= n; + + scatterwalk_unmap(src, 0); + scatterwalk_advance(&walk, n); + scatterwalk_done(&walk, 0, len); + if (len) + crypto_yield(ctx->flags); + } +} + +static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx) +{ + u8 *dst = ctx->buffer; + + if (ctx->bytes) { + u8 *tmp = dst + (16 - ctx->bytes); + + while (ctx->bytes--) + *tmp++ ^= 0; + + gf128mul_4k_lle((be128 *)dst, ctx->gf128); + } + + ctx->bytes = 0; +} + +static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx, + unsigned int authlen, + unsigned int cryptlen, u8 *dst) +{ + u8 *buf = ctx->buffer; + u128 lengths; + + lengths.a = cpu_to_be64(authlen * 8); + lengths.b = cpu_to_be64(cryptlen * 8); + + crypto_gcm_ghash_flush(ctx); + crypto_xor(buf, (u8 *)&lengths, 16); + gf128mul_4k_lle((be128 *)buf, ctx->gf128); + crypto_xor(dst, buf, 16); +} + +static inline void 
crypto_gcm_set_counter(u8 *counterblock, u32 value) +{ + *((u32 *)&counterblock[12]) = cpu_to_be32(value); +} + +static int crypto_gcm_encrypt_counter(struct crypto_aead *aead, u8 *block, + u32 value, const u8 *iv) +{ + struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ablkcipher *ctr = ctx->ctr; + struct ablkcipher_request req; + struct scatterlist sg; + u8 counterblock[16]; + + if (iv == NULL) + memset(counterblock, 0, 12); + else + memcpy(counterblock, iv, 12); + + crypto_gcm_set_counter(counterblock, value); + + sg_init_one(&sg, block, 16); + ablkcipher_request_set_tfm(&req, ctr); + ablkcipher_request_set_crypt(&req, &sg, &sg, 16, counterblock); + ablkcipher_request_set_callback(&req, 0, NULL, NULL); + memset(block, 0, 16); + return crypto_ablkcipher_encrypt(&req); +} + +static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key, + unsigned int keylen) +{ + struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ablkcipher *ctr = ctx->ctr; + int alignmask = crypto_ablkcipher_alignmask(ctr); + u8 alignbuf[16+alignmask]; + u8 *hash = (u8 *)ALIGN((unsigned long)alignbuf, alignmask+1); + int err = 0; + + crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK); + crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) & + CRYPTO_TFM_REQ_MASK); + + err = crypto_ablkcipher_setkey(ctr, key, keylen); + if (err) + goto out; + + crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) & + CRYPTO_TFM_RES_MASK); + + err = crypto_gcm_encrypt_counter(aead, hash, -1, NULL); + if (err) + goto out; + + if (ctx->gf128 != NULL) + gf128mul_free_4k(ctx->gf128); + + ctx->gf128 = gf128mul_init_4k_lle((be128 *)hash); + + if (ctx->gf128 == NULL) + err = -ENOMEM; + + out: + return err; +} + +static int crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req, + struct aead_request *req, + void (*done)(struct crypto_async_request *, + int)) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_gcm_req_priv_ctx *pctx = aead_request_ctx(req); + u32 flags = req->base.tfm->crt_flags; + u8 *auth_tag = pctx->auth_tag; + u8 *counter = pctx->counter; + struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash; + int err = 0; + + ablkcipher_request_set_tfm(ablk_req, ctx->ctr); + ablkcipher_request_set_callback(ablk_req, aead_request_flags(req), + done, req); + ablkcipher_request_set_crypt(ablk_req, req->src, req->dst, + req->cryptlen, counter); + + err = crypto_gcm_encrypt_counter(aead, auth_tag, 0, req->iv); + if (err) + goto out; + + memcpy(counter, req->iv, 12); + crypto_gcm_set_counter(counter, 1); + + crypto_gcm_ghash_init(ghash, flags, ctx->gf128); + + if (req->assoclen) { + crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen); + crypto_gcm_ghash_flush(ghash); + } + + out: + return err; +} + +static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + struct crypto_gcm_req_priv_ctx *pctx = aead_request_ctx(req); + u8 *auth_tag = pctx->auth_tag; + struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash; + + crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen); + crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen, + auth_tag); + + aead_request_complete(req, err); +} + +static int crypto_gcm_encrypt(struct aead_request *req) +{ + struct ablkcipher_request abreq; + struct crypto_gcm_req_priv_ctx *pctx = aead_request_ctx(req); + u8 *auth_tag = pctx->auth_tag; + struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash; + int err = 
0; + + err = crypto_gcm_init_crypt(&abreq, req, crypto_gcm_encrypt_done); + if (err) + return err; + + if (req->cryptlen) { + err = crypto_ablkcipher_encrypt(&abreq); + if (err) + return err; + + crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen); + } + + crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen, + auth_tag); + + return err; +} + +static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err) +{ + aead_request_complete(areq->data, err); +} + +static int crypto_gcm_decrypt(struct aead_request *req) +{ + struct ablkcipher_request abreq; + struct crypto_gcm_req_priv_ctx *pctx = aead_request_ctx(req); + u8 *auth_tag = pctx->auth_tag; + struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash; + u8 tag[16]; + int err; + + if (!req->cryptlen) + return -EINVAL; + + memcpy(tag, auth_tag, 16); + err = crypto_gcm_init_crypt(&abreq, req, crypto_gcm_decrypt_done); + if (err) + return err; + + crypto_gcm_ghash_update_sg(ghash, req->src, req->cryptlen); + crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen, + auth_tag); + + if (memcmp(tag, auth_tag, 16)) + return -EINVAL; + + return crypto_ablkcipher_decrypt(&abreq); +} + +static int crypto_gcm_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_ablkcipher *ctr; + unsigned long align; + int err; + + ctr = crypto_spawn_ablkcipher(&ictx->ctr); + err = PTR_ERR(ctr); + if (IS_ERR(ctr)) + return err; + + ctx->ctr = ctr; + ctx->gf128 = NULL; + + align = max_t(unsigned long, crypto_ablkcipher_alignmask(ctr), + __alignof__(u32) - 1); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = align + sizeof(struct crypto_gcm_req_priv_ctx); + + return 0; +} + +static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm); + + if (ctx->gf128 != NULL) + gf128mul_free_4k(ctx->gf128); + + crypto_free_ablkcipher(ctx->ctr); +} + +static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + struct crypto_alg *ctr; + struct crypto_alg *cipher; + struct gcm_instance_ctx *ctx; + int err; + char ctr_name[CRYPTO_MAX_ALG_NAME]; + + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD); + if (err) + return ERR_PTR(err); + + cipher = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + + inst = ERR_PTR(PTR_ERR(cipher)); + if (IS_ERR(cipher)) + return inst; + + inst = ERR_PTR(ENAMETOOLONG); + if (snprintf( + ctr_name, CRYPTO_MAX_ALG_NAME, + "ctr(%s,0,16,4)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME) + return inst; + + ctr = crypto_alg_mod_lookup(ctr_name, CRYPTO_ALG_TYPE_BLKCIPHER, + CRYPTO_ALG_TYPE_MASK); + + if (IS_ERR(ctr)) + return ERR_PTR(PTR_ERR(ctr)); + + if (cipher->cra_blocksize != 16) + goto out_put_ctr; + + inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); + err = -ENOMEM; + if (!inst) + goto out_put_ctr; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "gcm(%s)", cipher->cra_name) >= CRYPTO_MAX_ALG_NAME || + snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "gcm(%s)", cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto err_free_inst; + + + ctx = crypto_instance_ctx(inst); + err = crypto_init_spawn(&ctx->ctr, ctr, inst, CRYPTO_ALG_TYPE_MASK); + if (err) + goto err_free_inst; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = 
ctr->cra_priority; + inst->alg.cra_blocksize = 16; + inst->alg.cra_alignmask = __alignof__(u32) - 1; + inst->alg.cra_type = &crypto_aead_type; + inst->alg.cra_aead.ivsize = 12; + inst->alg.cra_aead.authsize = 16; + inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx); + inst->alg.cra_init = crypto_gcm_init_tfm; + inst->alg.cra_exit = crypto_gcm_exit_tfm; + inst->alg.cra_aead.setkey = crypto_gcm_setkey; + inst->alg.cra_aead.encrypt = crypto_gcm_encrypt; + inst->alg.cra_aead.decrypt = crypto_gcm_decrypt; + +out: + crypto_mod_put(ctr); + return inst; +err_free_inst: + kfree(inst); +out_put_ctr: + inst = ERR_PTR(err); + goto out; +} + +static void crypto_gcm_free(struct crypto_instance *inst) +{ + struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst); + + crypto_drop_spawn(&ctx->ctr); + kfree(inst); +} + +static struct crypto_template crypto_gcm_tmpl = { + .name = "gcm", + .alloc = crypto_gcm_alloc, + .free = crypto_gcm_free, + .module = THIS_MODULE, +}; + +static int __init crypto_gcm_module_init(void) +{ + return crypto_register_template(&crypto_gcm_tmpl); +} + +static void __exit crypto_gcm_module_exit(void) +{ + crypto_unregister_template(&crypto_gcm_tmpl); +} + +module_init(crypto_gcm_module_init); +module_exit(crypto_gcm_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Galois/Counter Mode"); +MODULE_AUTHOR("Mikko Herranen "); diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index b343d81d..1e12b86b 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -13,6 +13,7 @@ * Software Foundation; either version 2 of the License, or (at your option) * any later version. * + * 2007-11-13 Added GCM tests * 2007-11-13 Added AEAD support * 2007-11-06 Added SHA-224 and SHA-224-HMAC tests * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests @@ -1208,6 +1209,10 @@ static void do_test(void) AES_CTR_ENC_TEST_VECTORS); test_cipher("ctr(aes,4,8,4)", DECRYPT, aes_ctr_dec_tv_template, AES_CTR_DEC_TEST_VECTORS); + test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template, + AES_GCM_ENC_TEST_VECTORS); + test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template, + AES_GCM_DEC_TEST_VECTORS); //CAST5 test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template, diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index 865196a6..2384c41a 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -13,6 +13,7 @@ * Software Foundation; either version 2 of the License, or (at your option) * any later version. 
* + * 2007-11-13 Added GCM tests * 2007-11-13 Added AEAD support * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests * 2004-08-09 Cipher speed tests by Reyk Floeter @@ -2312,6 +2313,8 @@ static struct cipher_testvec cast6_dec_tv_template[] = { #define AES_XTS_DEC_TEST_VECTORS 4 #define AES_CTR_ENC_TEST_VECTORS 6 #define AES_CTR_DEC_TEST_VECTORS 6 +#define AES_GCM_ENC_TEST_VECTORS 9 +#define AES_GCM_DEC_TEST_VECTORS 8 static struct cipher_testvec aes_enc_tv_template[] = { { /* From FIPS-197 */ @@ -3529,6 +3532,371 @@ static struct cipher_testvec aes_ctr_dec_tv_template[] = { }, }; +static struct aead_testvec aes_gcm_enc_tv_template[] = { + { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */ + .klen = 16, + .tag = { 0x58, 0xe2, 0xfc, 0xce, 0xfa, 0x7e, 0x30, 0x61, + 0x36, 0x7f, 0x1d, 0x57, 0xa4, 0xe7, 0x45, 0x5a }, + .tlen = 16 + }, { + .klen = 16, + .ilen = 16, + .result = { 0x03, 0x88, 0xda, 0xce, 0x60, 0xb6, 0xa3, 0x92, + 0xf3, 0x28, 0xc2, 0xb9, 0x71, 0xb2, 0xfe, 0x78 }, + .rlen = 16, + .tag = { 0xab, 0x6e, 0x47, 0xd4, 0x2c, 0xec, 0x13, 0xbd, + 0xf5, 0x3a, 0x67, 0xb2, 0x12, 0x57, 0xbd, 0xdf }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 }, + .klen = 16, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 }, + .ilen = 64, + .result = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24, + 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c, + 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0, + 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e, + 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c, + 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05, + 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97, + 0x3d, 0x58, 0xe0, 0x91, 0x47, 0x3f, 0x59, 0x85 }, + .rlen = 64, + .tag = { 0x4d, 0x5c, 0x2a, 0xf3, 0x27, 0xcd, 0x64, 0xa6, + 0x2c, 0xf3, 0x5a, 0xbd, 0x2b, 0xa6, 0xfa, 0xb4 }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 }, + .klen = 16, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39 }, + .ilen = 60, + .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xab, 0xad, 0xda, 0xd2 }, + .alen = 20, + .result = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24, + 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c, + 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0, + 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e, + 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c, + 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05, + 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97, + 0x3d, 0x58, 0xe0, 0x91 }, + .rlen = 60, + .tag = { 0x5b, 0xc9, 0x4f, 0xbc, 
0x32, 0x21, 0xa5, 0xdb, + 0x94, 0xfa, 0xe9, 0x5a, 0xe7, 0x12, 0x1a, 0x47 }, + .tlen = 16 + }, { + .klen = 24, + .tag = { 0xcd, 0x33, 0xb2, 0x8a, 0xc7, 0x73, 0xf7, 0x4b, + 0xa0, 0x0e, 0xd1, 0xf3, 0x12, 0x57, 0x24, 0x35 }, + .tlen = 16 + }, { + .klen = 24, + .ilen = 16, + .result = { 0x98, 0xe7, 0x24, 0x7c, 0x07, 0xf0, 0xfe, 0x41, + 0x1c, 0x26, 0x7e, 0x43, 0x84, 0xb0, 0xf6, 0x00 }, + .rlen = 16, + .tag = { 0x2f, 0xf5, 0x8d, 0x80, 0x03, 0x39, 0x27, 0xab, + 0x8e, 0xf4, 0xd4, 0x58, 0x75, 0x14, 0xf0, 0xfb }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c }, + .klen = 24, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 }, + .ilen = 64, + .result = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41, + 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57, + 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84, + 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c, + 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25, + 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47, + 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9, + 0xcc, 0xda, 0x27, 0x10, 0xac, 0xad, 0xe2, 0x56 }, + .rlen = 64, + .tag = { 0x99, 0x24, 0xa7, 0xc8, 0x58, 0x73, 0x36, 0xbf, + 0xb1, 0x18, 0x02, 0x4d, 0xb8, 0x67, 0x4a, 0x14 }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c }, + .klen = 24, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39 }, + .ilen = 60, + .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xab, 0xad, 0xda, 0xd2 }, + .alen = 20, + .result = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41, + 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57, + 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84, + 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c, + 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25, + 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47, + 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9, + 0xcc, 0xda, 0x27, 0x10 }, + .rlen = 60, + .tag = { 0x25, 0x19, 0x49, 0x8e, 0x80, 0xf1, 0x47, 0x8f, + 0x37, 0xba, 0x55, 0xbd, 0x6d, 0x27, 0x61, 0x8c }, + .tlen = 16, + .np = 2, + .tap = { 32, 28 }, + .anp = 2, + .atap = { 8, 12 } + }, { + .klen = 32, + .tag = { 0x53, 0x0f, 0x8a, 0xfb, 0xc7, 0x45, 0x36, 0xb9, + 0xa9, 0x63, 0xb4, 0xf1, 0xc4, 0xcb, 0x73, 0x8b }, + .tlen = 16 + } +}; + +static struct aead_testvec aes_gcm_dec_tv_template[] = { + { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */ + .klen = 32, + .input = { 0xce, 0xa7, 0x40, 0x3d, 0x4d, 0x60, 0x6b, 0x6e, + 0x07, 
0x4e, 0xc5, 0xd3, 0xba, 0xf3, 0x9d, 0x18 }, + .ilen = 16, + .rlen = 16, + .tag = { 0xd0, 0xd1, 0xc8, 0xa7, 0x99, 0x99, 0x6b, 0xf0, + 0x26, 0x5b, 0x98, 0xb5, 0xd4, 0x8a, 0xb9, 0x19 }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 }, + .klen = 32, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0x52, 0x2d, 0xc1, 0xf0, 0x99, 0x56, 0x7d, 0x07, + 0xf4, 0x7f, 0x37, 0xa3, 0x2a, 0x84, 0x42, 0x7d, + 0x64, 0x3a, 0x8c, 0xdc, 0xbf, 0xe5, 0xc0, 0xc9, + 0x75, 0x98, 0xa2, 0xbd, 0x25, 0x55, 0xd1, 0xaa, + 0x8c, 0xb0, 0x8e, 0x48, 0x59, 0x0d, 0xbb, 0x3d, + 0xa7, 0xb0, 0x8b, 0x10, 0x56, 0x82, 0x88, 0x38, + 0xc5, 0xf6, 0x1e, 0x63, 0x93, 0xba, 0x7a, 0x0a, + 0xbc, 0xc9, 0xf6, 0x62, 0x89, 0x80, 0x15, 0xad }, + .ilen = 64, + .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 }, + .rlen = 64, + .tag = { 0xb0, 0x94, 0xda, 0xc5, 0xd9, 0x34, 0x71, 0xbd, + 0xec, 0x1a, 0x50, 0x22, 0x70, 0xe3, 0xcc, 0x6c }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 }, + .klen = 32, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0x52, 0x2d, 0xc1, 0xf0, 0x99, 0x56, 0x7d, 0x07, + 0xf4, 0x7f, 0x37, 0xa3, 0x2a, 0x84, 0x42, 0x7d, + 0x64, 0x3a, 0x8c, 0xdc, 0xbf, 0xe5, 0xc0, 0xc9, + 0x75, 0x98, 0xa2, 0xbd, 0x25, 0x55, 0xd1, 0xaa, + 0x8c, 0xb0, 0x8e, 0x48, 0x59, 0x0d, 0xbb, 0x3d, + 0xa7, 0xb0, 0x8b, 0x10, 0x56, 0x82, 0x88, 0x38, + 0xc5, 0xf6, 0x1e, 0x63, 0x93, 0xba, 0x7a, 0x0a, + 0xbc, 0xc9, 0xf6, 0x62 }, + .ilen = 60, + .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xab, 0xad, 0xda, 0xd2 }, + .alen = 20, + .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39 }, + .rlen = 60, + .tag = { 0x76, 0xfc, 0x6e, 0xce, 0x0f, 0x4e, 0x17, 0x68, + 0xcd, 0xdf, 0x88, 0x53, 0xbb, 0x2d, 0x55, 0x1b }, + .tlen = 16, + .np = 2, + .tap = { 48, 12 }, + .anp = 3, + .atap = { 8, 8, 4 } + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 }, + .klen = 16, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24, + 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c, + 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0, + 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e, + 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c, + 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05, + 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97, + 0x3d, 0x58, 
0xe0, 0x91, 0x47, 0x3f, 0x59, 0x85 }, + .ilen = 64, + .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 }, + .rlen = 64, + .tag = { 0x4d, 0x5c, 0x2a, 0xf3, 0x27, 0xcd, 0x64, 0xa6, + 0x2c, 0xf3, 0x5a, 0xbd, 0x2b, 0xa6, 0xfa, 0xb4 }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 }, + .klen = 16, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24, + 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c, + 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0, + 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e, + 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c, + 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05, + 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97, + 0x3d, 0x58, 0xe0, 0x91 }, + .ilen = 60, + .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xab, 0xad, 0xda, 0xd2 }, + .alen = 20, + .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39 }, + .rlen = 60, + .tag = { 0x5b, 0xc9, 0x4f, 0xbc, 0x32, 0x21, 0xa5, 0xdb, + 0x94, 0xfa, 0xe9, 0x5a, 0xe7, 0x12, 0x1a, 0x47 }, + .tlen = 16 + }, { + .klen = 24, + .input = { 0x98, 0xe7, 0x24, 0x7c, 0x07, 0xf0, 0xfe, 0x41, + 0x1c, 0x26, 0x7e, 0x43, 0x84, 0xb0, 0xf6, 0x00 }, + .ilen = 16, + .rlen = 16, + .tag = { 0x2f, 0xf5, 0x8d, 0x80, 0x03, 0x39, 0x27, 0xab, + 0x8e, 0xf4, 0xd4, 0x58, 0x75, 0x14, 0xf0, 0xfb }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c }, + .klen = 24, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41, + 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57, + 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84, + 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c, + 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25, + 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47, + 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9, + 0xcc, 0xda, 0x27, 0x10, 0xac, 0xad, 0xe2, 0x56 }, + .ilen = 64, + .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 }, + .rlen = 64, + .tag = { 0x99, 0x24, 0xa7, 0xc8, 0x58, 0x73, 0x36, 0xbf, + 0xb1, 0x18, 0x02, 0x4d, 0xb8, 0x67, 0x4a, 0x14 }, + .tlen = 16 + }, { + .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, 
+ 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c }, + .klen = 24, + .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 }, + .input = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41, + 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57, + 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84, + 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c, + 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25, + 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47, + 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9, + 0xcc, 0xda, 0x27, 0x10 }, + .ilen = 60, + .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xab, 0xad, 0xda, 0xd2 }, + .alen = 20, + .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39 }, + .rlen = 60, + .tag = { 0x25, 0x19, 0x49, 0x8e, 0x80, 0xf1, 0x47, 0x8f, + 0x37, 0xba, 0x55, 0xbd, 0x6d, 0x27, 0x61, 0x8c }, + .tlen = 16 + } +}; + /* Cast5 test vectors from RFC 2144 */ #define CAST5_ENC_TEST_VECTORS 3 #define CAST5_DEC_TEST_VECTORS 3 -- cgit v1.2.3 From 4a2616bfe8e4ca2008c52e8cfd7213e636d31e4a Mon Sep 17 00:00:00 2001 From: Zoltan Sogor Date: Fri, 7 Dec 2007 16:53:23 +0800 Subject: [CRYPTO] lzo: Add LZO compression algorithm support Add LZO compression algorithm support Signed-off-by: Zoltan Sogor Signed-off-by: Herbert Xu --- crypto/Kconfig | 8 +++++ crypto/Makefile | 1 + crypto/lzo.c | 106 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ crypto/tcrypt.c | 9 ++++- crypto/tcrypt.h | 82 +++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 205 insertions(+), 1 deletion(-) create mode 100644 crypto/lzo.c (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 40ae92ca..4fd14e4e 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -536,6 +536,14 @@ config CRYPTO_AUTHENC Authenc: Combined mode wrapper for IPsec. This is required for IPSec. +config CRYPTO_LZO + tristate "LZO compression algorithm" + select CRYPTO_ALGAPI + select LZO_COMPRESS + select LZO_DECOMPRESS + help + This is the LZO algorithm. + source "drivers/crypto/Kconfig" endif # if CRYPTO diff --git a/crypto/Makefile b/crypto/Makefile index 957343cb..83532ac8 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -55,6 +55,7 @@ obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o +obj-$(CONFIG_CRYPTO_LZO) += lzo.o obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o diff --git a/crypto/lzo.c b/crypto/lzo.c new file mode 100644 index 00000000..48c32883 --- /dev/null +++ b/crypto/lzo.c @@ -0,0 +1,106 @@ +/* + * Cryptographic API. + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License version 2 as published by + * the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + * more details. 
+ * + * You should have received a copy of the GNU General Public License along with + * this program; if not, write to the Free Software Foundation, Inc., 51 + * Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + * + */ + +#include +#include +#include +#include +#include + +struct lzo_ctx { + void *lzo_comp_mem; +}; + +static int lzo_init(struct crypto_tfm *tfm) +{ + struct lzo_ctx *ctx = crypto_tfm_ctx(tfm); + + ctx->lzo_comp_mem = vmalloc(LZO1X_MEM_COMPRESS); + if (!ctx->lzo_comp_mem) + return -ENOMEM; + + return 0; +} + +static void lzo_exit(struct crypto_tfm *tfm) +{ + struct lzo_ctx *ctx = crypto_tfm_ctx(tfm); + + vfree(ctx->lzo_comp_mem); +} + +static int lzo_compress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + struct lzo_ctx *ctx = crypto_tfm_ctx(tfm); + size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */ + int err; + + err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx->lzo_comp_mem); + + if (err != LZO_E_OK) + return -EINVAL; + + *dlen = tmp_len; + return 0; +} + +static int lzo_decompress(struct crypto_tfm *tfm, const u8 *src, + unsigned int slen, u8 *dst, unsigned int *dlen) +{ + int err; + size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */ + + err = lzo1x_decompress_safe(src, slen, dst, &tmp_len); + + if (err != LZO_E_OK) + return -EINVAL; + + *dlen = tmp_len; + return 0; + +} + +static struct crypto_alg alg = { + .cra_name = "lzo", + .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, + .cra_ctxsize = sizeof(struct lzo_ctx), + .cra_module = THIS_MODULE, + .cra_list = LIST_HEAD_INIT(alg.cra_list), + .cra_init = lzo_init, + .cra_exit = lzo_exit, + .cra_u = { .compress = { + .coa_compress = lzo_compress, + .coa_decompress = lzo_decompress } } +}; + +static int __init init(void) +{ + return crypto_register_alg(&alg); +} + +static void __exit fini(void) +{ + crypto_unregister_alg(&alg); +} + +module_init(init); +module_exit(fini); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("LZO Compression Algorithm"); diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index c8d3e600..943a5144 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c @@ -84,7 +84,7 @@ static char *check[] = { "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt", - "camellia", "seed", "salsa20", NULL + "camellia", "seed", "salsa20", "lzo", NULL }; static void hexdump(unsigned char *buf, unsigned int len) @@ -1292,6 +1292,8 @@ static void do_test(void) test_comp("deflate", deflate_comp_tv_template, deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS, DEFLATE_DECOMP_TEST_VECTORS); + test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template, + LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS); test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS); test_hash("hmac(md5)", hmac_md5_tv_template, HMAC_MD5_TEST_VECTORS); @@ -1550,6 +1552,11 @@ static void do_test(void) AES_GCM_DEC_TEST_VECTORS); break; + case 36: + test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template, + LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS); + break; + case 100: test_hash("hmac(md5)", hmac_md5_tv_template, HMAC_MD5_TEST_VECTORS); diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index d3c380f5..175f26a5 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h @@ -7460,6 +7460,88 @@ static struct comp_testvec deflate_decomp_tv_template[] = { }, }; +/* + * LZO test vectors (null-terminated strings). 
+ */ +#define LZO_COMP_TEST_VECTORS 2 +#define LZO_DECOMP_TEST_VECTORS 2 + +static struct comp_testvec lzo_comp_tv_template[] = { + { + .inlen = 70, + .outlen = 46, + .input = "Join us now and share the software " + "Join us now and share the software ", + .output = { 0x00, 0x0d, 0x4a, 0x6f, 0x69, 0x6e, 0x20, 0x75, + 0x73, 0x20, 0x6e, 0x6f, 0x77, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x73, 0x68, 0x61, 0x72, 0x65, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x73, 0x6f, 0x66, 0x74, + 0x77, 0x70, 0x01, 0x01, 0x4a, 0x6f, 0x69, 0x6e, + 0x3d, 0x88, 0x00, 0x11, 0x00, 0x00 }, + }, { + .inlen = 159, + .outlen = 133, + .input = "This document describes a compression method based on the LZO " + "compression algorithm. This document defines the application of " + "the LZO algorithm used in UBIFS.", + .output = { 0x00, 0x2b, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x20, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, + 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70, + 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, + 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x20, 0x62, + 0x61, 0x73, 0x65, 0x64, 0x20, 0x6f, 0x6e, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x4c, 0x5a, 0x4f, 0x2b, + 0x8c, 0x00, 0x0d, 0x61, 0x6c, 0x67, 0x6f, 0x72, + 0x69, 0x74, 0x68, 0x6d, 0x2e, 0x20, 0x20, 0x54, + 0x68, 0x69, 0x73, 0x2a, 0x54, 0x01, 0x02, 0x66, + 0x69, 0x6e, 0x65, 0x73, 0x94, 0x06, 0x05, 0x61, + 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x76, + 0x0a, 0x6f, 0x66, 0x88, 0x02, 0x60, 0x09, 0x27, + 0xf0, 0x00, 0x0c, 0x20, 0x75, 0x73, 0x65, 0x64, + 0x20, 0x69, 0x6e, 0x20, 0x55, 0x42, 0x49, 0x46, + 0x53, 0x2e, 0x11, 0x00, 0x00 }, + }, +}; + +static struct comp_testvec lzo_decomp_tv_template[] = { + { + .inlen = 133, + .outlen = 159, + .input = { 0x00, 0x2b, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x20, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, + 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70, + 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, + 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x20, 0x62, + 0x61, 0x73, 0x65, 0x64, 0x20, 0x6f, 0x6e, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x4c, 0x5a, 0x4f, 0x2b, + 0x8c, 0x00, 0x0d, 0x61, 0x6c, 0x67, 0x6f, 0x72, + 0x69, 0x74, 0x68, 0x6d, 0x2e, 0x20, 0x20, 0x54, + 0x68, 0x69, 0x73, 0x2a, 0x54, 0x01, 0x02, 0x66, + 0x69, 0x6e, 0x65, 0x73, 0x94, 0x06, 0x05, 0x61, + 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x76, + 0x0a, 0x6f, 0x66, 0x88, 0x02, 0x60, 0x09, 0x27, + 0xf0, 0x00, 0x0c, 0x20, 0x75, 0x73, 0x65, 0x64, + 0x20, 0x69, 0x6e, 0x20, 0x55, 0x42, 0x49, 0x46, + 0x53, 0x2e, 0x11, 0x00, 0x00 }, + .output = "This document describes a compression method based on the LZO " + "compression algorithm. This document defines the application of " + "the LZO algorithm used in UBIFS.", + }, { + .inlen = 46, + .outlen = 70, + .input = { 0x00, 0x0d, 0x4a, 0x6f, 0x69, 0x6e, 0x20, 0x75, + 0x73, 0x20, 0x6e, 0x6f, 0x77, 0x20, 0x61, 0x6e, + 0x64, 0x20, 0x73, 0x68, 0x61, 0x72, 0x65, 0x20, + 0x74, 0x68, 0x65, 0x20, 0x73, 0x6f, 0x66, 0x74, + 0x77, 0x70, 0x01, 0x01, 0x4a, 0x6f, 0x69, 0x6e, + 0x3d, 0x88, 0x00, 0x11, 0x00, 0x00 }, + .output = "Join us now and share the software " + "Join us now and share the software ", + }, +}; + /* * Michael MIC test vectors from IEEE 802.11i */ -- cgit v1.2.3 From f5691b66d37f0d9d0f82433232f43e9af6de95af Mon Sep 17 00:00:00 2001 From: Borislav Petkov Date: Fri, 14 Dec 2007 16:43:32 +0800 Subject: [CRYPTO] authenc: Select HASH in Kconfig i get here: ---- LD vmlinux SYSMAP System.map SYSMAP .tmp_System.map Building modules, stage 2. 
MODPOST 226 modules ERROR: "crypto_hash_type" [crypto/authenc.ko] undefined! make[1]: *** [__modpost] Error 1 make: *** [modules] Error 2 --- which fails because crypto_hash_type is declared in crypto/hash.c. You might wanna fix it like so: Signed-off-by: Borislav Petkov Signed-off-by: Herbert Xu --- crypto/Kconfig | 1 + 1 file changed, 1 insertion(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 4fd14e4e..1eb4bcdb 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -532,6 +532,7 @@ config CRYPTO_AUTHENC tristate "Authenc support" select CRYPTO_AEAD select CRYPTO_MANAGER + select CRYPTO_HASH help Authenc: Combined mode wrapper for IPsec. This is required for IPSec. -- cgit v1.2.3 From a790578f0f03c0218a045e8c063b0612a6c8d027 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Tue, 27 Nov 2007 19:48:27 +0800 Subject: [CRYPTO] blkcipher: Merge ablkcipher and blkcipher into one option/module With the impending addition of the givcipher type, both blkcipher and ablkcipher algorithms will use it to create givcipher objects. As such it no longer makes sense to split the system between ablkcipher and blkcipher. In particular, both ablkcipher.c and blkcipher.c would need to use the givcipher type which has to reside in ablkcipher.c since it shares much code with it. This patch merges the two Kconfig options as well as the modules into one. Signed-off-by: Herbert Xu --- crypto/Kconfig | 6 +----- crypto/Makefile | 6 ++++-- 2 files changed, 5 insertions(+), 7 deletions(-) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 1eb4bcdb..c4b6c912 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -24,10 +24,6 @@ config CRYPTO_ALGAPI help This option provides the API for cryptographic algorithms. -config CRYPTO_ABLKCIPHER - tristate - select CRYPTO_BLKCIPHER - config CRYPTO_AEAD tristate select CRYPTO_ALGAPI @@ -217,7 +213,7 @@ config CRYPTO_GCM config CRYPTO_CRYPTD tristate "Software async crypto daemon" - select CRYPTO_ABLKCIPHER + select CRYPTO_BLKCIPHER select CRYPTO_MANAGER help This is a generic software asynchronous crypto daemon that diff --git a/crypto/Makefile b/crypto/Makefile index 83532ac8..2a1883fa 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -8,9 +8,11 @@ crypto_algapi-$(CONFIG_PROC_FS) += proc.o crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y) obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o -obj-$(CONFIG_CRYPTO_ABLKCIPHER) += ablkcipher.o obj-$(CONFIG_CRYPTO_AEAD) += aead.o -obj-$(CONFIG_CRYPTO_BLKCIPHER) += blkcipher.o + +crypto_blkcipher-objs := ablkcipher.o +crypto_blkcipher-objs += blkcipher.o +obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o crypto_hash-objs := hash.o obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o -- cgit v1.2.3 From 20089c96d6eba471d8ae0f48e1bcbc4d8969cd02 Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Fri, 30 Nov 2007 21:38:37 +1100 Subject: [CRYPTO] seqiv: Add Sequence Number IV Generator This generator generates an IV based on a sequence number by xoring it with a salt. This algorithm is mainly useful for CTR and similar modes. This patch also sets it as the default IV generator for ctr. 
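[Editorial illustration — not part of this patch.] The construction described above amounts to: zero the IV, place the 64-bit sequence number big-endian in its low-order bytes, then XOR the whole IV with a per-transform random salt. A minimal stand-alone C sketch of that logic follows; all names here are hypothetical and this is not the kernel API, just the arithmetic:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* Build an IV from a sequence number and a salt, seqiv-style. */
    static void seqiv_sketch(uint8_t *iv, const uint8_t *salt,
                             unsigned int ivsize, uint64_t seq)
    {
            unsigned int len = ivsize < 8 ? ivsize : 8;
            unsigned int i;

            memset(iv, 0, ivsize);
            /* big-endian sequence number in the last 'len' bytes */
            for (i = 0; i < len; i++)
                    iv[ivsize - 1 - i] = (uint8_t)(seq >> (8 * i));
            /* XOR with the salt so the IV is not a bare counter */
            for (i = 0; i < ivsize; i++)
                    iv[i] ^= salt[i];
    }

    int main(void)
    {
            uint8_t salt[16] = { 0xde, 0xad, 0xbe, 0xef, 0x01, 0x23, 0x45, 0x67,
                                 0x89, 0xab, 0xcd, 0xef, 0x10, 0x32, 0x54, 0x76 };
            uint8_t iv[16];
            unsigned int i;

            seqiv_sketch(iv, salt, sizeof(iv), 5);
            for (i = 0; i < sizeof(iv); i++)
                    printf("%02x", iv[i]);
            printf("\n");
            return 0;
    }

Successive requests differ only in the sequence number, so each IV is unique as long as the sequence number does not repeat under a given salt/key.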
Signed-off-by: Herbert Xu --- crypto/Kconfig | 9 +++ crypto/Makefile | 1 + crypto/ctr.c | 2 + crypto/seqiv.c | 186 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 198 insertions(+) create mode 100644 crypto/seqiv.c (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index c4b6c912..7ad9711e 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -32,6 +32,14 @@ config CRYPTO_BLKCIPHER tristate select CRYPTO_ALGAPI +config CRYPTO_SEQIV + tristate "Sequence Number IV Generator" + select CRYPTO_BLKCIPHER + help + This IV generator generates an IV based on a sequence number by + xoring it with a salt. This algorithm is mainly useful for CTR + and similar modes. + config CRYPTO_HASH tristate select CRYPTO_ALGAPI @@ -197,6 +205,7 @@ config CRYPTO_XTS config CRYPTO_CTR tristate "CTR support" select CRYPTO_BLKCIPHER + select CRYPTO_SEQIV select CRYPTO_MANAGER help CTR: Counter mode diff --git a/crypto/Makefile b/crypto/Makefile index 968b796d..1b99b3a1 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -15,6 +15,7 @@ crypto_blkcipher-objs += blkcipher.o obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o obj-$(CONFIG_CRYPTO_BLKCIPHER) += chainiv.o obj-$(CONFIG_CRYPTO_BLKCIPHER) += eseqiv.o +obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o crypto_hash-objs := hash.o obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o diff --git a/crypto/ctr.c b/crypto/ctr.c index 1052b318..2d7425f0 100644 --- a/crypto/ctr.c +++ b/crypto/ctr.c @@ -361,6 +361,8 @@ static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb) inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize + CTR_RFC3686_NONCE_SIZE; + inst->alg.cra_blkcipher.geniv = "seqiv"; + inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx); inst->alg.cra_init = crypto_rfc3686_init_tfm; diff --git a/crypto/seqiv.c b/crypto/seqiv.c new file mode 100644 index 00000000..9c2d80d7 --- /dev/null +++ b/crypto/seqiv.c @@ -0,0 +1,186 @@ +/* + * seqiv: Sequence Number IV Generator + * + * This generator generates an IV based on a sequence number by xoring it + * with a salt. This algorithm is mainly useful for CTR and similar modes. + * + * Copyright (c) 2007 Herbert Xu + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include +#include +#include +#include +#include +#include +#include +#include + +struct seqiv_ctx { + spinlock_t lock; + u8 salt[] __attribute__ ((aligned(__alignof__(u32)))); +}; + +static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err) +{ + struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req); + struct crypto_ablkcipher *geniv; + + if (err == -EINPROGRESS) + return; + + if (err) + goto out; + + geniv = skcipher_givcrypt_reqtfm(req); + memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv)); + +out: + kfree(subreq->info); +} + +static void seqiv_complete(struct crypto_async_request *base, int err) +{ + struct skcipher_givcrypt_request *req = base->data; + + seqiv_complete2(req, err); + skcipher_givcrypt_complete(req, err); +} + +static int seqiv_givencrypt(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req); + crypto_completion_t complete; + void *data; + u8 *info; + __be64 seq; + unsigned int ivsize; + unsigned int len; + int err; + + ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv)); + + complete = req->creq.base.complete; + data = req->creq.base.data; + info = req->creq.info; + + ivsize = crypto_ablkcipher_ivsize(geniv); + + if (unlikely(!IS_ALIGNED((unsigned long)info, + crypto_ablkcipher_alignmask(geniv) + 1))) { + info = kmalloc(ivsize, req->creq.base.flags & + CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL: + GFP_ATOMIC); + if (!info) + return -ENOMEM; + + complete = seqiv_complete; + data = req; + } + + ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete, + data); + ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst, + req->creq.nbytes, info); + + len = ivsize; + if (ivsize > sizeof(u64)) { + memset(info, 0, ivsize - sizeof(u64)); + len = sizeof(u64); + } + seq = cpu_to_be64(req->seq); + memcpy(info + ivsize - len, &seq, len); + crypto_xor(info, ctx->salt, ivsize); + + memcpy(req->giv, info, ivsize); + + err = crypto_ablkcipher_encrypt(subreq); + if (unlikely(info != req->creq.info)) + seqiv_complete2(req, err); + return err; +} + +static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req) +{ + struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req); + struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + + spin_lock_bh(&ctx->lock); + if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first) + goto unlock; + + crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt; + get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv)); + +unlock: + spin_unlock_bh(&ctx->lock); + + return seqiv_givencrypt(req); +} + +static int seqiv_init(struct crypto_tfm *tfm) +{ + struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm); + struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv); + + spin_lock_init(&ctx->lock); + + tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request); + + return skcipher_geniv_init(tfm); +} + +static struct crypto_template seqiv_tmpl; + +static struct crypto_instance *seqiv_alloc(struct rtattr **tb) +{ + struct crypto_instance *inst; + + inst = skcipher_geniv_alloc(&seqiv_tmpl, tb, 0, 0); + if (IS_ERR(inst)) + goto out; + + inst->alg.cra_ablkcipher.givencrypt = seqiv_givencrypt_first; + + inst->alg.cra_init = seqiv_init; + inst->alg.cra_exit = skcipher_geniv_exit; + + inst->alg.cra_alignmask |= __alignof__(u32) - 1; + + 
inst->alg.cra_ctxsize = sizeof(struct seqiv_ctx); + inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize; + +out: + return inst; +} + +static struct crypto_template seqiv_tmpl = { + .name = "seqiv", + .alloc = seqiv_alloc, + .free = skcipher_geniv_free, + .module = THIS_MODULE, +}; + +static int __init seqiv_module_init(void) +{ + return crypto_register_template(&seqiv_tmpl); +} + +static void __exit seqiv_module_exit(void) +{ + crypto_unregister_template(&seqiv_tmpl); +} + +module_init(seqiv_module_init); +module_exit(seqiv_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Sequence Number IV Generator"); -- cgit v1.2.3 From 66e98f731b041ee313ca32d2aa2cae8057ce3a8c Mon Sep 17 00:00:00 2001 From: Joy Latten Date: Wed, 12 Dec 2007 20:25:13 +0800 Subject: [CRYPTO] ccm: Added CCM mode This patch adds Counter with CBC-MAC (CCM) support. RFC 3610 and NIST Special Publication 800-38C were referenced. Signed-off-by: Joy Latten Signed-off-by: Herbert Xu --- crypto/Kconfig | 7 + crypto/Makefile | 1 + crypto/ccm.c | 889 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 897 insertions(+) create mode 100644 crypto/ccm.c (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 7ad9711e..0d89f774 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -220,6 +220,13 @@ config CRYPTO_GCM Support for Galois/Counter Mode (GCM) and Galois Message Authentication Code (GMAC). Required for IPSec. +config CRYPTO_CCM + tristate "CCM support" + select CRYPTO_CTR + select CRYPTO_AEAD + help + Support for Counter with CBC MAC. Required for IPsec. + config CRYPTO_CRYPTD tristate "Software async crypto daemon" select CRYPTO_BLKCIPHER diff --git a/crypto/Makefile b/crypto/Makefile index 1b99b3a1..48c75837 100644 --- a/crypto/Makefile +++ b/crypto/Makefile @@ -39,6 +39,7 @@ obj-$(CONFIG_CRYPTO_LRW) += lrw.o obj-$(CONFIG_CRYPTO_XTS) += xts.o obj-$(CONFIG_CRYPTO_CTR) += ctr.o obj-$(CONFIG_CRYPTO_GCM) += gcm.o +obj-$(CONFIG_CRYPTO_CCM) += ccm.o obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o obj-$(CONFIG_CRYPTO_DES) += des_generic.o obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o diff --git a/crypto/ccm.c b/crypto/ccm.c new file mode 100644 index 00000000..7cf7e5a6 --- /dev/null +++ b/crypto/ccm.c @@ -0,0 +1,889 @@ +/* + * CCM: Counter with CBC-MAC + * + * (C) Copyright IBM Corp. 2007 - Joy Latten + * + * This program is free software; you can redistribute it and/or modify it + * under the terms of the GNU General Public License as published by the Free + * Software Foundation; either version 2 of the License, or (at your option) + * any later version. 
+ * + */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "internal.h" + +struct ccm_instance_ctx { + struct crypto_skcipher_spawn ctr; + struct crypto_spawn cipher; +}; + +struct crypto_ccm_ctx { + struct crypto_cipher *cipher; + struct crypto_ablkcipher *ctr; +}; + +struct crypto_rfc4309_ctx { + struct crypto_aead *child; + u8 nonce[3]; +}; + +struct crypto_ccm_req_priv_ctx { + u8 odata[16]; + u8 idata[16]; + u8 auth_tag[16]; + u32 ilen; + u32 flags; + struct scatterlist src[2]; + struct scatterlist dst[2]; + struct ablkcipher_request abreq; +}; + +static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx( + struct aead_request *req) +{ + unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req)); + + return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1); +} + +static int set_msg_len(u8 *block, unsigned int msglen, int csize) +{ + __be32 data; + + memset(block, 0, csize); + block += csize; + + if (csize >= 4) + csize = 4; + else if (msglen > (1 << (8 * csize))) + return -EOVERFLOW; + + data = cpu_to_be32(msglen); + memcpy(block - csize, (u8 *)&data + 4 - csize, csize); + + return 0; +} + +static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key, + unsigned int keylen) +{ + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ablkcipher *ctr = ctx->ctr; + struct crypto_cipher *tfm = ctx->cipher; + int err = 0; + + crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK); + crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) & + CRYPTO_TFM_REQ_MASK); + err = crypto_ablkcipher_setkey(ctr, key, keylen); + crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) & + CRYPTO_TFM_RES_MASK); + if (err) + goto out; + + crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK); + crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) & + CRYPTO_TFM_REQ_MASK); + err = crypto_cipher_setkey(tfm, key, keylen); + crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) & + CRYPTO_TFM_RES_MASK); + +out: + return err; +} + +static int crypto_ccm_setauthsize(struct crypto_aead *tfm, + unsigned int authsize) +{ + switch (authsize) { + case 4: + case 6: + case 8: + case 10: + case 12: + case 14: + case 16: + break; + default: + return -EINVAL; + } + + return 0; +} + +static int format_input(u8 *info, struct aead_request *req, + unsigned int cryptlen) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + unsigned int lp = req->iv[0]; + unsigned int l = lp + 1; + unsigned int m; + + m = crypto_aead_authsize(aead); + + memcpy(info, req->iv, 16); + + /* format control info per RFC 3610 and + * NIST Special Publication 800-38C + */ + *info |= (8 * ((m - 2) / 2)); + if (req->assoclen) + *info |= 64; + + return set_msg_len(info + 16 - l, cryptlen, l); +} + +static int format_adata(u8 *adata, unsigned int a) +{ + int len = 0; + + /* add control info for associated data + * RFC 3610 and NIST Special Publication 800-38C + */ + if (a < 65280) { + *(__be16 *)adata = cpu_to_be16(a); + len = 2; + } else { + *(__be16 *)adata = cpu_to_be16(0xfffe); + *(__be32 *)&adata[2] = cpu_to_be32(a); + len = 6; + } + + return len; +} + +static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n, + struct crypto_ccm_req_priv_ctx *pctx) +{ + unsigned int bs = 16; + u8 *odata = pctx->odata; + u8 *idata = pctx->idata; + int datalen, getlen; + + datalen = n; + + /* first time in here, block may be partially filled. 
*/ + getlen = bs - pctx->ilen; + if (datalen >= getlen) { + memcpy(idata + pctx->ilen, data, getlen); + crypto_xor(odata, idata, bs); + crypto_cipher_encrypt_one(tfm, odata, odata); + datalen -= getlen; + data += getlen; + pctx->ilen = 0; + } + + /* now encrypt rest of data */ + while (datalen >= bs) { + crypto_xor(odata, data, bs); + crypto_cipher_encrypt_one(tfm, odata, odata); + + datalen -= bs; + data += bs; + } + + /* check and see if there's leftover data that wasn't + * enough to fill a block. + */ + if (datalen) { + memcpy(idata + pctx->ilen, data, datalen); + pctx->ilen += datalen; + } +} + +static void get_data_to_compute(struct crypto_cipher *tfm, + struct crypto_ccm_req_priv_ctx *pctx, + struct scatterlist *sg, unsigned int len) +{ + struct scatter_walk walk; + u8 *data_src; + int n; + + scatterwalk_start(&walk, sg); + + while (len) { + n = scatterwalk_clamp(&walk, len); + if (!n) { + scatterwalk_start(&walk, sg_next(walk.sg)); + n = scatterwalk_clamp(&walk, len); + } + data_src = scatterwalk_map(&walk, 0); + + compute_mac(tfm, data_src, n, pctx); + len -= n; + + scatterwalk_unmap(data_src, 0); + scatterwalk_advance(&walk, n); + scatterwalk_done(&walk, 0, len); + if (len) + crypto_yield(pctx->flags); + } + + /* any leftover needs padding and then encrypted */ + if (pctx->ilen) { + int padlen; + u8 *odata = pctx->odata; + u8 *idata = pctx->idata; + + padlen = 16 - pctx->ilen; + memset(idata + pctx->ilen, 0, padlen); + crypto_xor(odata, idata, 16); + crypto_cipher_encrypt_one(tfm, odata, odata); + pctx->ilen = 0; + } +} + +static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain, + unsigned int cryptlen) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct crypto_cipher *cipher = ctx->cipher; + unsigned int assoclen = req->assoclen; + u8 *odata = pctx->odata; + u8 *idata = pctx->idata; + int err; + + /* format control data for input */ + err = format_input(odata, req, cryptlen); + if (err) + goto out; + + /* encrypt first block to use as start in computing mac */ + crypto_cipher_encrypt_one(cipher, odata, odata); + + /* format associated data and compute into mac */ + if (assoclen) { + pctx->ilen = format_adata(idata, assoclen); + get_data_to_compute(cipher, pctx, req->assoc, req->assoclen); + } + + /* compute plaintext into mac */ + get_data_to_compute(cipher, pctx, plain, cryptlen); + +out: + return err; +} + +static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err) +{ + struct aead_request *req = areq->data; + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + u8 *odata = pctx->odata; + + if (!err) + scatterwalk_map_and_copy(odata, req->dst, req->cryptlen, + crypto_aead_authsize(aead), 1); + aead_request_complete(req, err); +} + +static inline int crypto_ccm_check_iv(const u8 *iv) +{ + /* 2 <= L <= 8, so 1 <= L' <= 7. 
*/ + if (1 > iv[0] || iv[0] > 7) + return -EINVAL; + + return 0; +} + +static int crypto_ccm_encrypt(struct aead_request *req) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct ablkcipher_request *abreq = &pctx->abreq; + struct scatterlist *dst; + unsigned int cryptlen = req->cryptlen; + u8 *odata = pctx->odata; + u8 *iv = req->iv; + int err; + + err = crypto_ccm_check_iv(iv); + if (err) + return err; + + pctx->flags = aead_request_flags(req); + + err = crypto_ccm_auth(req, req->src, cryptlen); + if (err) + return err; + + /* Note: rfc 3610 and NIST 800-38C require counter of + * zero to encrypt auth tag. + */ + memset(iv + 15 - iv[0], 0, iv[0] + 1); + + sg_init_table(pctx->src, 2); + sg_set_buf(pctx->src, odata, 16); + scatterwalk_sg_chain(pctx->src, 2, req->src); + + dst = pctx->src; + if (req->src != req->dst) { + sg_init_table(pctx->dst, 2); + sg_set_buf(pctx->dst, odata, 16); + scatterwalk_sg_chain(pctx->dst, 2, req->dst); + dst = pctx->dst; + } + + ablkcipher_request_set_tfm(abreq, ctx->ctr); + ablkcipher_request_set_callback(abreq, pctx->flags, + crypto_ccm_encrypt_done, req); + ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv); + err = crypto_ablkcipher_encrypt(abreq); + if (err) + return err; + + /* copy authtag to end of dst */ + scatterwalk_map_and_copy(odata, req->dst, cryptlen, + crypto_aead_authsize(aead), 1); + return err; +} + +static void crypto_ccm_decrypt_done(struct crypto_async_request *areq, + int err) +{ + struct aead_request *req = areq->data; + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct crypto_aead *aead = crypto_aead_reqtfm(req); + unsigned int authsize = crypto_aead_authsize(aead); + unsigned int cryptlen = req->cryptlen - authsize; + + if (!err) { + err = crypto_ccm_auth(req, req->dst, cryptlen); + if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize)) + err = -EBADMSG; + } + aead_request_complete(req, err); +} + +static int crypto_ccm_decrypt(struct aead_request *req) +{ + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req); + struct ablkcipher_request *abreq = &pctx->abreq; + struct scatterlist *dst; + unsigned int authsize = crypto_aead_authsize(aead); + unsigned int cryptlen = req->cryptlen; + u8 *authtag = pctx->auth_tag; + u8 *odata = pctx->odata; + u8 *iv = req->iv; + int err; + + if (cryptlen < authsize) + return -EINVAL; + cryptlen -= authsize; + + err = crypto_ccm_check_iv(iv); + if (err) + return err; + + pctx->flags = aead_request_flags(req); + + scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0); + + memset(iv + 15 - iv[0], 0, iv[0] + 1); + + sg_init_table(pctx->src, 2); + sg_set_buf(pctx->src, authtag, 16); + scatterwalk_sg_chain(pctx->src, 2, req->src); + + dst = pctx->src; + if (req->src != req->dst) { + sg_init_table(pctx->dst, 2); + sg_set_buf(pctx->dst, authtag, 16); + scatterwalk_sg_chain(pctx->dst, 2, req->dst); + dst = pctx->dst; + } + + ablkcipher_request_set_tfm(abreq, ctx->ctr); + ablkcipher_request_set_callback(abreq, pctx->flags, + crypto_ccm_decrypt_done, req); + ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv); + err = crypto_ablkcipher_decrypt(abreq); + if (err) + return err; + + err = crypto_ccm_auth(req, req->dst, cryptlen); + if (err) + return err; + + /* verify */ + if (memcmp(authtag, 
odata, authsize)) + return -EBADMSG; + + return err; +} + +static int crypto_ccm_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct ccm_instance_ctx *ictx = crypto_instance_ctx(inst); + struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_cipher *cipher; + struct crypto_ablkcipher *ctr; + unsigned long align; + int err; + + cipher = crypto_spawn_cipher(&ictx->cipher); + if (IS_ERR(cipher)) + return PTR_ERR(cipher); + + ctr = crypto_spawn_skcipher(&ictx->ctr); + err = PTR_ERR(ctr); + if (IS_ERR(ctr)) + goto err_free_cipher; + + ctx->cipher = cipher; + ctx->ctr = ctr; + + align = crypto_tfm_alg_alignmask(tfm); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = align + + sizeof(struct crypto_ccm_req_priv_ctx) + + crypto_ablkcipher_reqsize(ctr); + + return 0; + +err_free_cipher: + crypto_free_cipher(cipher); + return err; +} + +static void crypto_ccm_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_cipher(ctx->cipher); + crypto_free_ablkcipher(ctx->ctr); +} + +static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb, + const char *full_name, + const char *ctr_name, + const char *cipher_name) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_alg *ctr; + struct crypto_alg *cipher; + struct ccm_instance_ctx *ictx; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER, + CRYPTO_ALG_TYPE_MASK); + err = PTR_ERR(cipher); + if (IS_ERR(cipher)) + return ERR_PTR(err); + + err = -EINVAL; + if (cipher->cra_blocksize != 16) + goto out_put_cipher; + + inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL); + err = -ENOMEM; + if (!inst) + goto out_put_cipher; + + ictx = crypto_instance_ctx(inst); + + err = crypto_init_spawn(&ictx->cipher, cipher, inst, + CRYPTO_ALG_TYPE_MASK); + if (err) + goto err_free_inst; + + crypto_set_skcipher_spawn(&ictx->ctr, inst); + err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0, + crypto_requires_sync(algt->type, + algt->mask)); + if (err) + goto err_drop_cipher; + + ctr = crypto_skcipher_spawn_alg(&ictx->ctr); + + /* Not a stream cipher? */ + err = -EINVAL; + if (ctr->cra_blocksize != 1) + goto err_drop_ctr; + + /* We want the real thing! 
*/ + if (ctr->cra_ablkcipher.ivsize != 16) + goto err_drop_ctr; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "ccm_base(%s,%s)", ctr->cra_driver_name, + cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME) + goto err_drop_ctr; + + memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME); + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = cipher->cra_priority + ctr->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = cipher->cra_alignmask | ctr->cra_alignmask | + (__alignof__(u32) - 1); + inst->alg.cra_type = &crypto_aead_type; + inst->alg.cra_aead.ivsize = 16; + inst->alg.cra_aead.maxauthsize = 16; + inst->alg.cra_ctxsize = sizeof(struct crypto_ccm_ctx); + inst->alg.cra_init = crypto_ccm_init_tfm; + inst->alg.cra_exit = crypto_ccm_exit_tfm; + inst->alg.cra_aead.setkey = crypto_ccm_setkey; + inst->alg.cra_aead.setauthsize = crypto_ccm_setauthsize; + inst->alg.cra_aead.encrypt = crypto_ccm_encrypt; + inst->alg.cra_aead.decrypt = crypto_ccm_decrypt; + +out: + crypto_mod_put(cipher); + return inst; + +err_drop_ctr: + crypto_drop_skcipher(&ictx->ctr); +err_drop_cipher: + crypto_drop_spawn(&ictx->cipher); +err_free_inst: + kfree(inst); +out_put_cipher: + inst = ERR_PTR(err); + goto out; +} + +static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb) +{ + int err; + const char *cipher_name; + char ctr_name[CRYPTO_MAX_ALG_NAME]; + char full_name[CRYPTO_MAX_ALG_NAME]; + + cipher_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(cipher_name); + if (IS_ERR(cipher_name)) + return ERR_PTR(err); + + if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", + cipher_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >= + CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name); +} + +static void crypto_ccm_free(struct crypto_instance *inst) +{ + struct ccm_instance_ctx *ctx = crypto_instance_ctx(inst); + + crypto_drop_spawn(&ctx->cipher); + crypto_drop_skcipher(&ctx->ctr); + kfree(inst); +} + +static struct crypto_template crypto_ccm_tmpl = { + .name = "ccm", + .alloc = crypto_ccm_alloc, + .free = crypto_ccm_free, + .module = THIS_MODULE, +}; + +static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb) +{ + int err; + const char *ctr_name; + const char *cipher_name; + char full_name[CRYPTO_MAX_ALG_NAME]; + + ctr_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(ctr_name); + if (IS_ERR(ctr_name)) + return ERR_PTR(err); + + cipher_name = crypto_attr_alg_name(tb[2]); + err = PTR_ERR(cipher_name); + if (IS_ERR(cipher_name)) + return ERR_PTR(err); + + if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)", + ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME) + return ERR_PTR(-ENAMETOOLONG); + + return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name); +} + +static struct crypto_template crypto_ccm_base_tmpl = { + .name = "ccm_base", + .alloc = crypto_ccm_base_alloc, + .free = crypto_ccm_free, + .module = THIS_MODULE, +}; + +static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key, + unsigned int keylen) +{ + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent); + struct crypto_aead *child = ctx->child; + int err; + + if (keylen < 3) + return -EINVAL; + + keylen -= 3; + memcpy(ctx->nonce, key + keylen, 3); + + crypto_aead_clear_flags(child, 
CRYPTO_TFM_REQ_MASK); + crypto_aead_set_flags(child, crypto_aead_get_flags(parent) & + CRYPTO_TFM_REQ_MASK); + err = crypto_aead_setkey(child, key, keylen); + crypto_aead_set_flags(parent, crypto_aead_get_flags(child) & + CRYPTO_TFM_RES_MASK); + + return err; +} + +static int crypto_rfc4309_setauthsize(struct crypto_aead *parent, + unsigned int authsize) +{ + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent); + + switch (authsize) { + case 8: + case 12: + case 16: + break; + default: + return -EINVAL; + } + + return crypto_aead_setauthsize(ctx->child, authsize); +} + +static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req) +{ + struct aead_request *subreq = aead_request_ctx(req); + struct crypto_aead *aead = crypto_aead_reqtfm(req); + struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead); + struct crypto_aead *child = ctx->child; + u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child), + crypto_aead_alignmask(child) + 1); + + /* L' */ + iv[0] = 3; + + memcpy(iv + 1, ctx->nonce, 3); + memcpy(iv + 4, req->iv, 8); + + aead_request_set_tfm(subreq, child); + aead_request_set_callback(subreq, req->base.flags, req->base.complete, + req->base.data); + aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv); + aead_request_set_assoc(subreq, req->assoc, req->assoclen); + + return subreq; +} + +static int crypto_rfc4309_encrypt(struct aead_request *req) +{ + req = crypto_rfc4309_crypt(req); + + return crypto_aead_encrypt(req); +} + +static int crypto_rfc4309_decrypt(struct aead_request *req) +{ + req = crypto_rfc4309_crypt(req); + + return crypto_aead_decrypt(req); +} + +static int crypto_rfc4309_init_tfm(struct crypto_tfm *tfm) +{ + struct crypto_instance *inst = (void *)tfm->__crt_alg; + struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst); + struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm); + struct crypto_aead *aead; + unsigned long align; + + aead = crypto_spawn_aead(spawn); + if (IS_ERR(aead)) + return PTR_ERR(aead); + + ctx->child = aead; + + align = crypto_aead_alignmask(aead); + align &= ~(crypto_tfm_ctx_alignment() - 1); + tfm->crt_aead.reqsize = sizeof(struct aead_request) + + ALIGN(crypto_aead_reqsize(aead), + crypto_tfm_ctx_alignment()) + + align + 16; + + return 0; +} + +static void crypto_rfc4309_exit_tfm(struct crypto_tfm *tfm) +{ + struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm); + + crypto_free_aead(ctx->child); +} + +static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb) +{ + struct crypto_attr_type *algt; + struct crypto_instance *inst; + struct crypto_aead_spawn *spawn; + struct crypto_alg *alg; + const char *ccm_name; + int err; + + algt = crypto_get_attr_type(tb); + err = PTR_ERR(algt); + if (IS_ERR(algt)) + return ERR_PTR(err); + + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) + return ERR_PTR(-EINVAL); + + ccm_name = crypto_attr_alg_name(tb[1]); + err = PTR_ERR(ccm_name); + if (IS_ERR(ccm_name)) + return ERR_PTR(err); + + inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); + if (!inst) + return ERR_PTR(-ENOMEM); + + spawn = crypto_instance_ctx(inst); + crypto_set_aead_spawn(spawn, inst); + err = crypto_grab_aead(spawn, ccm_name, 0, + crypto_requires_sync(algt->type, algt->mask)); + if (err) + goto out_free_inst; + + alg = crypto_aead_spawn_alg(spawn); + + err = -EINVAL; + + /* We only support 16-byte blocks. */ + if (alg->cra_aead.ivsize != 16) + goto out_drop_alg; + + /* Not a stream cipher? 
*/ + if (alg->cra_blocksize != 1) + goto out_drop_alg; + + err = -ENAMETOOLONG; + if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, + "rfc4309(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME || + snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, + "rfc4309(%s)", alg->cra_driver_name) >= + CRYPTO_MAX_ALG_NAME) + goto out_drop_alg; + + inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD; + inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC; + inst->alg.cra_priority = alg->cra_priority; + inst->alg.cra_blocksize = 1; + inst->alg.cra_alignmask = alg->cra_alignmask; + inst->alg.cra_type = &crypto_nivaead_type; + + inst->alg.cra_aead.ivsize = 8; + inst->alg.cra_aead.maxauthsize = 16; + + inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx); + + inst->alg.cra_init = crypto_rfc4309_init_tfm; + inst->alg.cra_exit = crypto_rfc4309_exit_tfm; + + inst->alg.cra_aead.setkey = crypto_rfc4309_setkey; + inst->alg.cra_aead.setauthsize = crypto_rfc4309_setauthsize; + inst->alg.cra_aead.encrypt = crypto_rfc4309_encrypt; + inst->alg.cra_aead.decrypt = crypto_rfc4309_decrypt; + + inst->alg.cra_aead.geniv = "seqiv"; + +out: + return inst; + +out_drop_alg: + crypto_drop_aead(spawn); +out_free_inst: + kfree(inst); + inst = ERR_PTR(err); + goto out; +} + +static void crypto_rfc4309_free(struct crypto_instance *inst) +{ + crypto_drop_spawn(crypto_instance_ctx(inst)); + kfree(inst); +} + +static struct crypto_template crypto_rfc4309_tmpl = { + .name = "rfc4309", + .alloc = crypto_rfc4309_alloc, + .free = crypto_rfc4309_free, + .module = THIS_MODULE, +}; + +static int __init crypto_ccm_module_init(void) +{ + int err; + + err = crypto_register_template(&crypto_ccm_base_tmpl); + if (err) + goto out; + + err = crypto_register_template(&crypto_ccm_tmpl); + if (err) + goto out_undo_base; + + err = crypto_register_template(&crypto_rfc4309_tmpl); + if (err) + goto out_undo_ccm; + +out: + return err; + +out_undo_ccm: + crypto_unregister_template(&crypto_ccm_tmpl); +out_undo_base: + crypto_unregister_template(&crypto_ccm_base_tmpl); + goto out; +} + +static void __exit crypto_ccm_module_exit(void) +{ + crypto_unregister_template(&crypto_rfc4309_tmpl); + crypto_unregister_template(&crypto_ccm_tmpl); + crypto_unregister_template(&crypto_ccm_base_tmpl); +} + +module_init(crypto_ccm_module_init); +module_exit(crypto_ccm_module_exit); + +MODULE_LICENSE("GPL"); +MODULE_DESCRIPTION("Counter with CBC MAC"); +MODULE_ALIAS("ccm_base"); +MODULE_ALIAS("rfc4309"); -- cgit v1.2.3 From 06a1e45934bca7e75d3fa40ee0bea55c568635ca Mon Sep 17 00:00:00 2001 From: Herbert Xu Date: Fri, 14 Dec 2007 10:19:27 +0800 Subject: [CRYPTO] seqiv: Add select AEAD in Kconfig Now that seqiv supports AEAD algorithms it needs to select the AEAD option. Thanks to Erez Zadok for pointing out the problem. Signed-off-by: Herbert Xu --- crypto/Kconfig | 1 + 1 file changed, 1 insertion(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 0d89f774..6c086eed 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -34,6 +34,7 @@ config CRYPTO_BLKCIPHER config CRYPTO_SEQIV tristate "Sequence Number IV Generator" + select CRYPTO_AEAD select CRYPTO_BLKCIPHER help This IV generator generates an IV based on a sequence number by -- cgit v1.2.3 From 52162760ba024a96ee814cd2b5872820e9a36ef6 Mon Sep 17 00:00:00 2001 From: Tan Swee Heng Date: Mon, 10 Dec 2007 15:52:56 +0800 Subject: [CRYPTO] salsa20_i586: Salsa20 stream cipher algorithm (i586 version) This patch contains the salsa20-i586 implementation. The original assembly code came from . 
I have reformatted it (added indents) so that it matches the other algorithms in arch/x86/crypto. Signed-off-by: Tan Swee Heng Signed-off-by: Herbert Xu --- crypto/Kconfig | 15 +++++++++++++++ 1 file changed, 15 insertions(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 6c086eed..221356b4 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -489,6 +489,21 @@ config CRYPTO_SALSA20 The Salsa20 stream cipher algorithm is designed by Daniel J. Bernstein . See +config CRYPTO_SALSA20_586 + tristate "Salsa20 stream cipher algorithm (i586) (EXPERIMENTAL)" + depends on (X86 || UML_X86) && !64BIT + depends on EXPERIMENTAL + select CRYPTO_BLKCIPHER + select CRYPTO_SALSA20 + help + Salsa20 stream cipher algorithm. + + Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT + Stream Cipher Project. See + + The Salsa20 stream cipher algorithm is designed by Daniel J. + Bernstein . See + config CRYPTO_DEFLATE tristate "Deflate compression algorithm" select CRYPTO_ALGAPI -- cgit v1.2.3 From e7a7911b0b0613ad796c4ee8e6940a3a49269774 Mon Sep 17 00:00:00 2001 From: Tan Swee Heng Date: Tue, 18 Dec 2007 00:04:40 +0800 Subject: [CRYPTO] salsa20: Add x86-64 assembly version This is the x86-64 version of the Salsa20 stream cipher algorithm. The original assembly code came from . It has been reformatted for clarity. Signed-off-by: Tan Swee Heng Signed-off-by: Herbert Xu --- crypto/Kconfig | 15 +++++++++++++++ 1 file changed, 15 insertions(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 221356b4..b0481f7b 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -504,6 +504,21 @@ config CRYPTO_SALSA20_586 The Salsa20 stream cipher algorithm is designed by Daniel J. Bernstein . See +config CRYPTO_SALSA20_X86_64 + tristate "Salsa20 stream cipher algorithm (x86_64) (EXPERIMENTAL)" + depends on (X86 || UML_X86) && 64BIT + depends on EXPERIMENTAL + select CRYPTO_BLKCIPHER + select CRYPTO_SALSA20 + help + Salsa20 stream cipher algorithm. + + Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT + Stream Cipher Project. See + + The Salsa20 stream cipher algorithm is designed by Daniel J. + Bernstein . See + config CRYPTO_DEFLATE tristate "Deflate compression algorithm" select CRYPTO_ALGAPI -- cgit v1.2.3 From 17ccfe3fdcd11e6adad9d66025c43489cfce5016 Mon Sep 17 00:00:00 2001 From: Sebastian Siewior Date: Tue, 18 Dec 2007 00:08:27 +0800 Subject: [CRYPTO] tcrypt: Add select of AEAD ERROR: "crypto_aead_setauthsize" [crypto/tcrypt.ko] undefined! ERROR: "crypto_alloc_aead" [crypto/tcrypt.ko] undefined! Signed-off-by: Sebastian Siewior Signed-off-by: Herbert Xu --- crypto/Kconfig | 1 + 1 file changed, 1 insertion(+) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index b0481f7b..93ad53a8 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -568,6 +568,7 @@ config CRYPTO_TEST tristate "Testing module" depends on m select CRYPTO_ALGAPI + select CRYPTO_AEAD help Quick & dirty crypto test module. 
-- cgit v1.2.3 From 72e039ea1455392313ff6b1ef5b63a011b66c65b Mon Sep 17 00:00:00 2001 From: Tan Swee Heng Date: Tue, 18 Dec 2007 22:45:35 +0800 Subject: [CRYPTO] salsa20-asm: Remove unnecessary dependency on CRYPTO_SALSA20 Signed-off-by: Tan Swee Heng Signed-off-by: Herbert Xu --- crypto/Kconfig | 2 -- 1 file changed, 2 deletions(-) (limited to 'crypto/Kconfig') diff --git a/crypto/Kconfig b/crypto/Kconfig index 93ad53a8..c3166a1a 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -494,7 +494,6 @@ config CRYPTO_SALSA20_586 depends on (X86 || UML_X86) && !64BIT depends on EXPERIMENTAL select CRYPTO_BLKCIPHER - select CRYPTO_SALSA20 help Salsa20 stream cipher algorithm. @@ -509,7 +508,6 @@ config CRYPTO_SALSA20_X86_64 depends on (X86 || UML_X86) && 64BIT depends on EXPERIMENTAL select CRYPTO_BLKCIPHER - select CRYPTO_SALSA20 help Salsa20 stream cipher algorithm. -- cgit v1.2.3