From 3b9b423ebebe34f349e7da5fff3b0e881f66fb89 Mon Sep 17 00:00:00 2001 From: Tim Chen Date: Wed, 1 May 2013 12:52:51 -0700 Subject: crypto: crct10dif - Simple correctness and speed test for CRCT10DIF hash These are simple tests to do sanity check of CRC T10 DIF hash. The correctness of the transform can be checked with the command modprobe tcrypt mode=47 The speed of the transform can be evaluated with the command modprobe tcrypt mode=320 Set the cpu frequency to constant and turn turbo off when running the speed test so the frequency governor will not tweak the frequency and affects the measurements. Signed-off-by: Tim Chen Signed-off-by: Herbert Xu --- crypto/testmgr.c | 10 ++++++++++ 1 file changed, 10 insertions(+) (limited to 'crypto/testmgr.c') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 5823735c..f19a392a 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -1973,6 +1973,16 @@ static const struct alg_test_desc alg_test_descs[] = { .count = CRC32C_TEST_VECTORS } } + }, { + .alg = "crct10dif", + .test = alg_test_hash, + .fips_allowed = 1, + .suite = { + .hash = { + .vecs = crct10dif_tv_template, + .count = CRCT10DIF_TEST_VECTORS + } + } }, { .alg = "cryptd(__driver-cbc-aes-aesni)", .test = alg_test_null, -- cgit v1.2.3 From 23147f4c33c4ea35956992ad516159b9046b300e Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Sat, 8 Jun 2013 12:17:42 +0300 Subject: Revert "crypto: blowfish - add AVX2/x86_64 implementation of blowfish cipher" This reverts commit 7241dc00ba8f356b890acf6f38d17cf0279d2b2d. Instruction (vpgatherdd) that this implementation relied on turned out to be slow performer on real hardware (i5-4570). The previous 4-way blowfish implementation is therefore faster and this implementation should be removed. Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/Kconfig | 18 ------------------ crypto/testmgr.c | 12 ------------ 2 files changed, 30 deletions(-) (limited to 'crypto/testmgr.c') diff --git a/crypto/Kconfig b/crypto/Kconfig index d1ca6312..4ef0ee71 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -839,24 +839,6 @@ config CRYPTO_BLOWFISH_X86_64 See also: -config CRYPTO_BLOWFISH_AVX2_X86_64 - tristate "Blowfish cipher algorithm (x86_64/AVX2)" - depends on X86 && 64BIT - select CRYPTO_ALGAPI - select CRYPTO_CRYPTD - select CRYPTO_ABLK_HELPER_X86 - select CRYPTO_BLOWFISH_COMMON - select CRYPTO_BLOWFISH_X86_64 - help - Blowfish cipher algorithm (x86_64/AVX2), by Bruce Schneier. - - This is a variable key length cipher which can use keys from 32 - bits to 448 bits in length. It's fast, simple and specifically - designed for use on "large microprocessors". 
- - See also: - - config CRYPTO_CAMELLIA tristate "Camellia cipher algorithms" depends on CRYPTO diff --git a/crypto/testmgr.c b/crypto/testmgr.c index f19a392a..27f11187 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -1660,9 +1660,6 @@ static const struct alg_test_desc alg_test_descs[] = { .alg = "__driver-cbc-aes-aesni", .test = alg_test_null, .fips_allowed = 1, - }, { - .alg = "__driver-cbc-blowfish-avx2", - .test = alg_test_null, }, { .alg = "__driver-cbc-camellia-aesni", .test = alg_test_null, @@ -1694,9 +1691,6 @@ static const struct alg_test_desc alg_test_descs[] = { .alg = "__driver-ecb-aes-aesni", .test = alg_test_null, .fips_allowed = 1, - }, { - .alg = "__driver-ecb-blowfish-avx2", - .test = alg_test_null, }, { .alg = "__driver-ecb-camellia-aesni", .test = alg_test_null, @@ -1987,9 +1981,6 @@ static const struct alg_test_desc alg_test_descs[] = { .alg = "cryptd(__driver-cbc-aes-aesni)", .test = alg_test_null, .fips_allowed = 1, - }, { - .alg = "cryptd(__driver-cbc-blowfish-avx2)", - .test = alg_test_null, }, { .alg = "cryptd(__driver-cbc-camellia-aesni)", .test = alg_test_null, @@ -2003,9 +1994,6 @@ static const struct alg_test_desc alg_test_descs[] = { .alg = "cryptd(__driver-ecb-aes-aesni)", .test = alg_test_null, .fips_allowed = 1, - }, { - .alg = "cryptd(__driver-ecb-blowfish-avx2)", - .test = alg_test_null, }, { .alg = "cryptd(__driver-ecb-camellia-aesni)", .test = alg_test_null, -- cgit v1.2.3 From 852090f3e19bf9e23365c88c824dba28b9dc57ff Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Sat, 8 Jun 2013 12:17:47 +0300 Subject: Revert "crypto: twofish - add AVX2/x86_64 assembler implementation of twofish cipher" This reverts commit b1e33ac036aa0653c3de6d5b7cced9faebcad1df. Instruction (vpgatherdd) that this implementation relied on turned out to be slow performer on real hardware (i5-4570). The previous 8-way twofish/AVX implementation is therefore faster and this implementation should be removed. Converting this implementation to use the same method as in twofish/AVX for table look-ups would give additional ~3% speed up vs twofish/AVX, but would hardly be worth of the added code and binary size. Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/Kconfig | 24 ------------------------ crypto/testmgr.c | 12 ------------ 2 files changed, 36 deletions(-) (limited to 'crypto/testmgr.c') diff --git a/crypto/Kconfig b/crypto/Kconfig index 4ef0ee71..904ffe83 100644 --- a/crypto/Kconfig +++ b/crypto/Kconfig @@ -1297,30 +1297,6 @@ config CRYPTO_TWOFISH_AVX_X86_64 See also: -config CRYPTO_TWOFISH_AVX2_X86_64 - tristate "Twofish cipher algorithm (x86_64/AVX2)" - depends on X86 && 64BIT - select CRYPTO_ALGAPI - select CRYPTO_CRYPTD - select CRYPTO_ABLK_HELPER_X86 - select CRYPTO_GLUE_HELPER_X86 - select CRYPTO_TWOFISH_COMMON - select CRYPTO_TWOFISH_X86_64 - select CRYPTO_TWOFISH_X86_64_3WAY - select CRYPTO_TWOFISH_AVX_X86_64 - select CRYPTO_LRW - select CRYPTO_XTS - help - Twofish cipher algorithm (x86_64/AVX2). - - Twofish was submitted as an AES (Advanced Encryption Standard) - candidate cipher by researchers at CounterPane Systems. It is a - 16 round block cipher supporting key sizes of 128, 192, and 256 - bits. 
- - See also: - - comment "Compression" config CRYPTO_DEFLATE diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 27f11187..b2bc5334 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -1653,9 +1653,6 @@ static const struct alg_test_desc alg_test_descs[] = { }, { .alg = "__cbc-twofish-avx", .test = alg_test_null, - }, { - .alg = "__cbc-twofish-avx2", - .test = alg_test_null, }, { .alg = "__driver-cbc-aes-aesni", .test = alg_test_null, @@ -1684,9 +1681,6 @@ static const struct alg_test_desc alg_test_descs[] = { }, { .alg = "__driver-cbc-twofish-avx", .test = alg_test_null, - }, { - .alg = "__driver-cbc-twofish-avx2", - .test = alg_test_null, }, { .alg = "__driver-ecb-aes-aesni", .test = alg_test_null, @@ -1715,9 +1709,6 @@ static const struct alg_test_desc alg_test_descs[] = { }, { .alg = "__driver-ecb-twofish-avx", .test = alg_test_null, - }, { - .alg = "__driver-ecb-twofish-avx2", - .test = alg_test_null, }, { .alg = "__ghash-pclmulqdqni", .test = alg_test_null, @@ -2018,9 +2009,6 @@ static const struct alg_test_desc alg_test_descs[] = { }, { .alg = "cryptd(__driver-ecb-twofish-avx)", .test = alg_test_null, - }, { - .alg = "cryptd(__driver-ecb-twofish-avx2)", - .test = alg_test_null, }, { .alg = "cryptd(__driver-gcm-aes-aesni)", .test = alg_test_null, -- cgit v1.2.3 From 3b685ec11c6e93404d1f4dfa3eeb8075eb1544c3 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Thu, 13 Jun 2013 17:37:40 +0300 Subject: crypto: testmgr - check that entries in alg_test_descs are in correct order Patch adds check for alg_test_descs list order, so that accidentically misplaced entries are found quicker. Duplicate entries are also checked for. Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/testmgr.c | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) (limited to 'crypto/testmgr.c') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index b2bc5334..a81c154e 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -3054,6 +3054,35 @@ static const struct alg_test_desc alg_test_descs[] = { } }; +static bool alg_test_descs_checked; + +static void alg_test_descs_check_order(void) +{ + int i; + + /* only check once */ + if (alg_test_descs_checked) + return; + + alg_test_descs_checked = true; + + for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { + int diff = strcmp(alg_test_descs[i - 1].alg, + alg_test_descs[i].alg); + + if (WARN_ON(diff > 0)) { + pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", + alg_test_descs[i - 1].alg, + alg_test_descs[i].alg); + } + + if (WARN_ON(diff == 0)) { + pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n", + alg_test_descs[i].alg); + } + } +} + static int alg_find_test(const char *alg) { int start = 0; @@ -3085,6 +3114,8 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask) int j; int rc; + alg_test_descs_check_order(); + if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { char nalg[CRYPTO_MAX_ALG_NAME]; -- cgit v1.2.3 From d9b246db19bb587cc57e348203597a52a9b7dfbf Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Thu, 13 Jun 2013 17:37:45 +0300 Subject: crypto: testmgr - test skciphers with unaligned buffers This patch adds unaligned buffer tests for blkciphers. The first new test is with one byte offset and the second test checks if cra_alignmask for driver is big enough; for example, for testing a case where cra_alignmask is set to 7, but driver really needs buffers to be aligned to 16 bytes. 
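For reference, the reasoning behind the alignmask + 1 offset can be sketched in plain user-space C. This is only an illustration, not kernel code, and the mask values are made-up examples: a buffer offset by alignmask + 1 still satisfies the alignment the driver advertises, so the crypto layer has no reason to re-align it, and a driver that silently assumes something stricter is exposed.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/*
 * Standalone illustration of the "alignmask + 1" test offset.  A driver
 * declaring cra_alignmask = 7 promises to handle any 8-byte-aligned
 * buffer.  Offsetting a page-aligned buffer by alignmask + 1 (= 8)
 * keeps that promise, so no re-alignment happens, yet a driver that
 * really needs 16-byte alignment now sees a buffer it cannot take.
 * The mask values here are hypothetical.
 */
int main(void)
{
	const uintptr_t alignmask = 7;   /* alignment the driver declares */
	const uintptr_t real_need = 16;  /* alignment it silently assumes */
	unsigned char *page = aligned_alloc(4096, 4096);
	unsigned char *data;

	if (!page)
		return 1;

	data = page + alignmask + 1;     /* offset applied by the test */

	printf("honours declared alignmask: %s\n",
	       ((uintptr_t)data & alignmask) == 0 ? "yes" : "no");
	printf("honours hidden requirement: %s\n",
	       ((uintptr_t)data % real_need) == 0 ? "yes" : "no");

	free(page);
	return 0;
}

Running this prints "yes" then "no": the buffer is fine for the declared mask but not for the hidden 16-byte requirement, which is exactly the case the second new test pass is meant to catch.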
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/testmgr.c | 33 +++++++++++++++++++++++++++++---- 1 file changed, 29 insertions(+), 4 deletions(-) (limited to 'crypto/testmgr.c') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index a81c154e..8bd185f0 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -820,7 +820,7 @@ out_nobuf: static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, struct cipher_testvec *template, unsigned int tcount, - const bool diff_dst) + const bool diff_dst, const int align_offset) { const char *algo = crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm)); @@ -876,10 +876,12 @@ static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, j++; ret = -EINVAL; - if (WARN_ON(template[i].ilen > PAGE_SIZE)) + if (WARN_ON(align_offset + template[i].ilen > + PAGE_SIZE)) goto out; data = xbuf[0]; + data += align_offset; memcpy(data, template[i].input, template[i].ilen); crypto_ablkcipher_clear_flags(tfm, ~0); @@ -900,6 +902,7 @@ static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, sg_init_one(&sg[0], data, template[i].ilen); if (diff_dst) { data = xoutbuf[0]; + data += align_offset; sg_init_one(&sgout[0], data, template[i].ilen); } @@ -941,6 +944,9 @@ static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, j = 0; for (i = 0; i < tcount; i++) { + /* alignment tests are only done with continuous buffers */ + if (align_offset != 0) + break; if (template[i].iv) memcpy(iv, template[i].iv, MAX_IVLEN); @@ -1075,15 +1081,34 @@ out_nobuf: static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, struct cipher_testvec *template, unsigned int tcount) { + unsigned int alignmask; int ret; /* test 'dst == src' case */ - ret = __test_skcipher(tfm, enc, template, tcount, false); + ret = __test_skcipher(tfm, enc, template, tcount, false, 0); if (ret) return ret; /* test 'dst != src' case */ - return __test_skcipher(tfm, enc, template, tcount, true); + ret = __test_skcipher(tfm, enc, template, tcount, true, 0); + if (ret) + return ret; + + /* test unaligned buffers, check with one byte offset */ + ret = __test_skcipher(tfm, enc, template, tcount, true, 1); + if (ret) + return ret; + + alignmask = crypto_tfm_alg_alignmask(&tfm->base); + if (alignmask) { + /* Check if alignment mask for tfm is correctly set. */ + ret = __test_skcipher(tfm, enc, template, tcount, true, + alignmask + 1); + if (ret) + return ret; + } + + return 0; } static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, -- cgit v1.2.3 From 62e8649ae9a0b4d96fe25ced1406f48ad97b5fc4 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Thu, 13 Jun 2013 17:37:50 +0300 Subject: crypto: testmgr - test AEADs with unaligned buffers This patch adds unaligned buffer tests for AEADs. The first new test is with one byte offset and the second test checks if cra_alignmask for driver is big enough; for example, for testing a case where cra_alignmask is set to 7, but driver really needs buffers to be aligned to 16 bytes. 
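The wrapper added below follows the same shape as the skcipher one above: the test body is parameterized on an offset, and the caller sweeps it over 0, 1 and alignmask + 1, running the last pass only when the transform declares a non-zero alignmask. A self-contained sketch of that dispatch pattern follows; the function names are invented for illustration, and the real code additionally toggles the dst != src case and queries the mask via crypto_tfm_alg_alignmask().

#include <stdio.h>

/* Stand-in for __test_aead()/__test_skcipher() with an align_offset. */
static int run_one_pass(unsigned int align_offset)
{
	printf("test pass at offset %u\n", align_offset);
	return 0;
}

/* Sketch of the offset sweep done by the test_aead()/test_skcipher()
 * wrappers; alignmask would come from the transform in the real code. */
static int run_all_passes(unsigned int alignmask)
{
	int ret;

	ret = run_one_pass(0);			/* aligned baseline */
	if (ret)
		return ret;

	ret = run_one_pass(1);			/* one-byte offset */
	if (ret)
		return ret;

	if (alignmask) {
		/* offset that still satisfies the declared mask */
		ret = run_one_pass(alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

int main(void)
{
	return run_all_passes(7);	/* e.g. a driver with cra_alignmask = 7 */
}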
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/testmgr.c | 37 +++++++++++++++++++++++++++++++------ 1 file changed, 31 insertions(+), 6 deletions(-) (limited to 'crypto/testmgr.c') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index 8bd185f0..f2053861 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -360,7 +360,7 @@ out_nobuf: static int __test_aead(struct crypto_aead *tfm, int enc, struct aead_testvec *template, unsigned int tcount, - const bool diff_dst) + const bool diff_dst, const int align_offset) { const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); unsigned int i, j, k, n, temp; @@ -423,15 +423,16 @@ static int __test_aead(struct crypto_aead *tfm, int enc, if (!template[i].np) { j++; - /* some tepmplates have no input data but they will + /* some templates have no input data but they will * touch input */ input = xbuf[0]; + input += align_offset; assoc = axbuf[0]; ret = -EINVAL; - if (WARN_ON(template[i].ilen > PAGE_SIZE || - template[i].alen > PAGE_SIZE)) + if (WARN_ON(align_offset + template[i].ilen > + PAGE_SIZE || template[i].alen > PAGE_SIZE)) goto out; memcpy(input, template[i].input, template[i].ilen); @@ -470,6 +471,7 @@ static int __test_aead(struct crypto_aead *tfm, int enc, if (diff_dst) { output = xoutbuf[0]; + output += align_offset; sg_init_one(&sgout[0], output, template[i].ilen + (enc ? authsize : 0)); @@ -530,6 +532,10 @@ static int __test_aead(struct crypto_aead *tfm, int enc, } for (i = 0, j = 0; i < tcount; i++) { + /* alignment tests are only done with continuous buffers */ + if (align_offset != 0) + break; + if (template[i].np) { j++; @@ -732,15 +738,34 @@ out_noxbuf: static int test_aead(struct crypto_aead *tfm, int enc, struct aead_testvec *template, unsigned int tcount) { + unsigned int alignmask; int ret; /* test 'dst == src' case */ - ret = __test_aead(tfm, enc, template, tcount, false); + ret = __test_aead(tfm, enc, template, tcount, false, 0); if (ret) return ret; /* test 'dst != src' case */ - return __test_aead(tfm, enc, template, tcount, true); + ret = __test_aead(tfm, enc, template, tcount, true, 0); + if (ret) + return ret; + + /* test unaligned buffers, check with one byte offset */ + ret = __test_aead(tfm, enc, template, tcount, true, 1); + if (ret) + return ret; + + alignmask = crypto_tfm_alg_alignmask(&tfm->base); + if (alignmask) { + /* Check if alignment mask for tfm is correctly set. */ + ret = __test_aead(tfm, enc, template, tcount, true, + alignmask + 1); + if (ret) + return ret; + } + + return 0; } static int test_cipher(struct crypto_cipher *tfm, int enc, -- cgit v1.2.3 From dd6d4677801baf77552d5f416ccee22ec48fec59 Mon Sep 17 00:00:00 2001 From: Jussi Kivilinna Date: Thu, 13 Jun 2013 17:37:55 +0300 Subject: crypto: testmgr - test hash implementations with unaligned buffers This patch adds unaligned buffer tests for hashes. The first new test is with one byte offset and the second test checks if cra_alignmask for driver is big enough; for example, for testing a case where cra_alignmask is set to 7, but driver really needs buffers to be aligned to 16 bytes. 
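The invariant these passes rely on is that a digest depends only on the bytes hashed, never on where they happen to sit in memory, so the template plaintext copied to an offset position must still produce the expected digest from the aligned pass. A toy standalone illustration, with a made-up checksum standing in for the real hash:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Trivial stand-in for a real digest; only the input bytes matter. */
static uint32_t toy_digest(const unsigned char *buf, size_t len)
{
	uint32_t sum = 0;
	size_t i;

	for (i = 0; i < len; i++)
		sum = sum * 31 + buf[i];
	return sum;
}

int main(void)
{
	static const unsigned char plaintext[] = "123456789";
	unsigned char *page = aligned_alloc(4096, 4096);
	unsigned int align_offset = 1;	/* as in the one-byte-offset pass */
	uint32_t aligned, unaligned;

	if (!page)
		return 1;

	/* aligned pass: plaintext at the start of the page */
	memcpy(page, plaintext, sizeof(plaintext) - 1);
	aligned = toy_digest(page, sizeof(plaintext) - 1);

	/* unaligned pass: the same bytes shifted by align_offset */
	memcpy(page + align_offset, plaintext, sizeof(plaintext) - 1);
	unaligned = toy_digest(page + align_offset, sizeof(plaintext) - 1);

	printf("digests %s\n", aligned == unaligned ? "match" : "differ");
	free(page);
	return 0;
}

If the two digests ever differed, it could only be because the implementation was sensitive to buffer alignment, which is precisely what the new passes are looking for.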
Signed-off-by: Jussi Kivilinna Signed-off-by: Herbert Xu --- crypto/testmgr.c | 41 +++++++++++++++++++++++++++++++++++++++-- 1 file changed, 39 insertions(+), 2 deletions(-) (limited to 'crypto/testmgr.c') diff --git a/crypto/testmgr.c b/crypto/testmgr.c index f2053861..2f006070 100644 --- a/crypto/testmgr.c +++ b/crypto/testmgr.c @@ -184,8 +184,9 @@ static int do_one_async_hash_op(struct ahash_request *req, return ret; } -static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, - unsigned int tcount, bool use_digest) +static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, + unsigned int tcount, bool use_digest, + const int align_offset) { const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); unsigned int i, j, k, temp; @@ -216,10 +217,15 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, if (template[i].np) continue; + ret = -EINVAL; + if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) + goto out; + j++; memset(result, 0, 64); hash_buff = xbuf[0]; + hash_buff += align_offset; memcpy(hash_buff, template[i].plaintext, template[i].psize); sg_init_one(&sg[0], hash_buff, template[i].psize); @@ -281,6 +287,10 @@ static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, j = 0; for (i = 0; i < tcount; i++) { + /* alignment tests are only done with continuous buffers */ + if (align_offset != 0) + break; + if (template[i].np) { j++; memset(result, 0, 64); @@ -358,6 +368,33 @@ out_nobuf: return ret; } +static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, + unsigned int tcount, bool use_digest) +{ + unsigned int alignmask; + int ret; + + ret = __test_hash(tfm, template, tcount, use_digest, 0); + if (ret) + return ret; + + /* test unaligned buffers, check with one byte offset */ + ret = __test_hash(tfm, template, tcount, use_digest, 1); + if (ret) + return ret; + + alignmask = crypto_tfm_alg_alignmask(&tfm->base); + if (alignmask) { + /* Check if alignment mask for tfm is correctly set. */ + ret = __test_hash(tfm, template, tcount, use_digest, + alignmask + 1); + if (ret) + return ret; + } + + return 0; +} + static int __test_aead(struct crypto_aead *tfm, int enc, struct aead_testvec *template, unsigned int tcount, const bool diff_dst, const int align_offset) -- cgit v1.2.3
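Finally, for context on the ordering check added earlier in the series: alg_find_test() locates entries with a binary search, which is only correct when alg_test_descs[] stays sorted and free of duplicates, and those are exactly the two conditions alg_test_descs_check_order() warns about. A minimal standalone sketch of that relationship follows; the array contents and function names below are arbitrary examples, not the kernel's.

#include <stdio.h>
#include <string.h>

static const char * const descs[] = {	/* must stay sorted, no duplicates */
	"cbc(aes)", "crc32c", "crct10dif", "ecb(aes)", "sha1",
};

#define N_DESCS (sizeof(descs) / sizeof(descs[0]))

/* Mirrors the spirit of alg_test_descs_check_order(). */
static void check_order(void)
{
	size_t i;

	for (i = 1; i < N_DESCS; i++) {
		int diff = strcmp(descs[i - 1], descs[i]);

		if (diff > 0)
			printf("wrong order: '%s' before '%s'\n",
			       descs[i - 1], descs[i]);
		if (diff == 0)
			printf("duplicate entry: '%s'\n", descs[i]);
	}
}

/* Mirrors the spirit of alg_find_test(): binary search over the table. */
static int find_test(const char *alg)
{
	int start = 0, end = (int)N_DESCS;

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(descs[i], alg);

		if (diff > 0)
			end = i;
		else if (diff < 0)
			start = i + 1;
		else
			return i;
	}
	return -1;
}

int main(void)
{
	check_order();	/* silent when the table is well formed */
	printf("crct10dif found at index %d\n", find_test("crct10dif"));
	return 0;
}

With the table sorted, the check stays silent and the lookup lands on the right entry; a misplaced or duplicated entry would be reported the first time alg_test() runs.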