path: root/crypto/authenc.c
author     Linus Torvalds <torvalds@linux-foundation.org>  2016-07-26 13:40:17 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>  2016-07-26 13:40:17 -0700
commit     b4b0e3fca7c3b9b88fc9b1ee9c306ef6385084dc (patch)
tree       1e0d0d6588042716cd61ded48ced36e8f93cc536 /crypto/authenc.c
parent     2fc3750b483ab74709f99072ac6ccc5ea9f0ae13 (diff)
parent     bb1788b73b29c3301fd303712edfcb70ad02696f (diff)
download   linux-crypto-b4b0e3fca7c3b9b88fc9b1ee9c306ef6385084dc.tar.gz
           linux-crypto-b4b0e3fca7c3b9b88fc9b1ee9c306ef6385084dc.zip
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 "Here is the crypto update for 4.8:

  API:
   - first part of skcipher low-level conversions
   - add KPP (Key-agreement Protocol Primitives) interface.

  Algorithms:
   - fix IPsec/cryptd reordering issues that affects aesni
   - RSA no longer does explicit leading zero removal
   - add SHA3
   - add DH
   - add ECDH
   - improve DRBG performance by not doing CTR by hand

  Drivers:
   - add x86 AVX2 multibuffer SHA256/512
   - add POWER8 optimised crc32c
   - add xts support to vmx
   - add DH support to qat
   - add RSA support to caam
   - add Layerscape support to caam
   - add SEC1 AEAD support to talitos
   - improve performance by chaining requests in marvell/cesa
   - add support for Araneus Alea I USB RNG
   - add support for Broadcom BCM5301 RNG
   - add support for Amlogic Meson RNG
   - add support Broadcom NSP SoC RNG"

* 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (180 commits)
  crypto: vmx - Fix aes_p8_xts_decrypt build failure
  crypto: vmx - Ignore generated files
  crypto: vmx - Adding support for XTS
  crypto: vmx - Adding asm subroutines for XTS
  crypto: skcipher - add comment for skcipher_alg->base
  crypto: testmgr - Print akcipher algorithm name
  crypto: marvell - Fix wrong flag used for GFP in mv_cesa_dma_add_iv_op
  crypto: nx - off by one bug in nx_of_update_msc()
  crypto: rsa-pkcs1pad - fix rsa-pkcs1pad request struct
  crypto: scatterwalk - Inline start/map/done
  crypto: scatterwalk - Remove unnecessary BUG in scatterwalk_start
  crypto: scatterwalk - Remove unnecessary advance in scatterwalk_pagedone
  crypto: scatterwalk - Fix test in scatterwalk_done
  crypto: api - Optimise away crypto_yield when hard preemption is on
  crypto: scatterwalk - add no-copy support to copychunks
  crypto: scatterwalk - Remove scatterwalk_bytes_sglen
  crypto: omap - Stop using crypto scatterwalk_bytes_sglen
  crypto: skcipher - Remove top-level givcipher interface
  crypto: user - Remove crypto_lookup_skcipher call
  crypto: cts - Convert to skcipher
  ...
Diffstat (limited to 'crypto/authenc.c')
-rw-r--r--  crypto/authenc.c  116
 1 file changed, 59 insertions(+), 57 deletions(-)
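
The hunks below convert authenc.c from the old ablkcipher/blkcipher interfaces to the skcipher API that this series introduces ("first part of skcipher low-level conversions"). For orientation, here is a minimal, hypothetical sketch (not taken from the patch) of how a caller drives that skcipher interface; the "cbc(aes)" algorithm name, the function name and the in-place buffer handling are illustrative assumptions only.

/*
 * Hypothetical, self-contained sketch (not part of this patch): how a
 * caller uses the skcipher interface that authenc.c is converted to
 * below.  The "cbc(aes)" algorithm name, key/buffer handling and the
 * function name are assumptions for illustration.
 */
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int example_skcipher_encrypt(const u8 *key, unsigned int keylen,
				    u8 *buf, unsigned int len, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct scatterlist sg;
	int err;

	/* Ask for a synchronous implementation so no completion handling is needed. */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free;

	{
		/* Request lives on the stack, as in crypto_authenc_copy_assoc() below. */
		SKCIPHER_REQUEST_ON_STACK(req, tfm);

		sg_init_one(&sg, buf, len);	/* encrypt in place */
		skcipher_request_set_tfm(req, tfm);
		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					      NULL, NULL);
		skcipher_request_set_crypt(req, &sg, &sg, len, iv);

		err = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	}

out_free:
	crypto_free_skcipher(tfm);
	return err;
}

With a synchronous tfm the encrypt call returns 0 or a negative errno directly; the asynchronous paths in the patch below instead pass a completion callback (e.g. crypto_authenc_encrypt_done) via skcipher_request_set_callback().
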
diff --git a/crypto/authenc.c b/crypto/authenc.c
index 55a354d5..a7e1ac78 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -32,8 +32,8 @@ struct authenc_instance_ctx {
struct crypto_authenc_ctx {
struct crypto_ahash *auth;
- struct crypto_ablkcipher *enc;
- struct crypto_blkcipher *null;
+ struct crypto_skcipher *enc;
+ struct crypto_skcipher *null;
};
struct authenc_request_ctx {
@@ -83,7 +83,7 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
{
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct crypto_ahash *auth = ctx->auth;
- struct crypto_ablkcipher *enc = ctx->enc;
+ struct crypto_skcipher *enc = ctx->enc;
struct crypto_authenc_keys keys;
int err = -EINVAL;
@@ -100,11 +100,11 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
if (err)
goto out;
- crypto_ablkcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
- crypto_ablkcipher_set_flags(enc, crypto_aead_get_flags(authenc) &
- CRYPTO_TFM_REQ_MASK);
- err = crypto_ablkcipher_setkey(enc, keys.enckey, keys.enckeylen);
- crypto_aead_set_flags(authenc, crypto_ablkcipher_get_flags(enc) &
+ crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
+ crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
+ crypto_aead_set_flags(authenc, crypto_skcipher_get_flags(enc) &
CRYPTO_TFM_RES_MASK);
out:
@@ -184,12 +184,15 @@ static int crypto_authenc_copy_assoc(struct aead_request *req)
{
struct crypto_aead *authenc = crypto_aead_reqtfm(req);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
- struct blkcipher_desc desc = {
- .tfm = ctx->null,
- };
+ SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
- return crypto_blkcipher_encrypt(&desc, req->dst, req->src,
- req->assoclen);
+ skcipher_request_set_tfm(skreq, ctx->null);
+ skcipher_request_set_callback(skreq, aead_request_flags(req),
+ NULL, NULL);
+ skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
+ NULL);
+
+ return crypto_skcipher_encrypt(skreq);
}
static int crypto_authenc_encrypt(struct aead_request *req)
@@ -199,14 +202,13 @@ static int crypto_authenc_encrypt(struct aead_request *req)
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
struct authenc_instance_ctx *ictx = aead_instance_ctx(inst);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
- struct crypto_ablkcipher *enc = ctx->enc;
+ struct crypto_skcipher *enc = ctx->enc;
unsigned int cryptlen = req->cryptlen;
- struct ablkcipher_request *abreq = (void *)(areq_ctx->tail +
- ictx->reqoff);
+ struct skcipher_request *skreq = (void *)(areq_ctx->tail +
+ ictx->reqoff);
struct scatterlist *src, *dst;
int err;
- sg_init_table(areq_ctx->src, 2);
src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
dst = src;
@@ -215,16 +217,15 @@ static int crypto_authenc_encrypt(struct aead_request *req)
if (err)
return err;
- sg_init_table(areq_ctx->dst, 2);
dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);
}
- ablkcipher_request_set_tfm(abreq, enc);
- ablkcipher_request_set_callback(abreq, aead_request_flags(req),
- crypto_authenc_encrypt_done, req);
- ablkcipher_request_set_crypt(abreq, src, dst, cryptlen, req->iv);
+ skcipher_request_set_tfm(skreq, enc);
+ skcipher_request_set_callback(skreq, aead_request_flags(req),
+ crypto_authenc_encrypt_done, req);
+ skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
- err = crypto_ablkcipher_encrypt(abreq);
+ err = crypto_skcipher_encrypt(skreq);
if (err)
return err;
@@ -240,8 +241,8 @@ static int crypto_authenc_decrypt_tail(struct aead_request *req,
struct authenc_instance_ctx *ictx = aead_instance_ctx(inst);
struct authenc_request_ctx *areq_ctx = aead_request_ctx(req);
struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff);
- struct ablkcipher_request *abreq = (void *)(areq_ctx->tail +
- ictx->reqoff);
+ struct skcipher_request *skreq = (void *)(areq_ctx->tail +
+ ictx->reqoff);
unsigned int authsize = crypto_aead_authsize(authenc);
u8 *ihash = ahreq->result + authsize;
struct scatterlist *src, *dst;
@@ -251,22 +252,19 @@ static int crypto_authenc_decrypt_tail(struct aead_request *req,
if (crypto_memneq(ihash, ahreq->result, authsize))
return -EBADMSG;
- sg_init_table(areq_ctx->src, 2);
src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
dst = src;
- if (req->src != req->dst) {
- sg_init_table(areq_ctx->dst, 2);
+ if (req->src != req->dst)
dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);
- }
- ablkcipher_request_set_tfm(abreq, ctx->enc);
- ablkcipher_request_set_callback(abreq, aead_request_flags(req),
- req->base.complete, req->base.data);
- ablkcipher_request_set_crypt(abreq, src, dst,
- req->cryptlen - authsize, req->iv);
+ skcipher_request_set_tfm(skreq, ctx->enc);
+ skcipher_request_set_callback(skreq, aead_request_flags(req),
+ req->base.complete, req->base.data);
+ skcipher_request_set_crypt(skreq, src, dst,
+ req->cryptlen - authsize, req->iv);
- return crypto_ablkcipher_decrypt(abreq);
+ return crypto_skcipher_decrypt(skreq);
}
static void authenc_verify_ahash_done(struct crypto_async_request *areq,
@@ -318,20 +316,20 @@ static int crypto_authenc_init_tfm(struct crypto_aead *tfm)
struct authenc_instance_ctx *ictx = aead_instance_ctx(inst);
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(tfm);
struct crypto_ahash *auth;
- struct crypto_ablkcipher *enc;
- struct crypto_blkcipher *null;
+ struct crypto_skcipher *enc;
+ struct crypto_skcipher *null;
int err;
auth = crypto_spawn_ahash(&ictx->auth);
if (IS_ERR(auth))
return PTR_ERR(auth);
- enc = crypto_spawn_skcipher(&ictx->enc);
+ enc = crypto_spawn_skcipher2(&ictx->enc);
err = PTR_ERR(enc);
if (IS_ERR(enc))
goto err_free_ahash;
- null = crypto_get_default_null_skcipher();
+ null = crypto_get_default_null_skcipher2();
err = PTR_ERR(null);
if (IS_ERR(null))
goto err_free_skcipher;
@@ -347,13 +345,13 @@ static int crypto_authenc_init_tfm(struct crypto_aead *tfm)
max_t(unsigned int,
crypto_ahash_reqsize(auth) +
sizeof(struct ahash_request),
- sizeof(struct ablkcipher_request) +
- crypto_ablkcipher_reqsize(enc)));
+ sizeof(struct skcipher_request) +
+ crypto_skcipher_reqsize(enc)));
return 0;
err_free_skcipher:
- crypto_free_ablkcipher(enc);
+ crypto_free_skcipher(enc);
err_free_ahash:
crypto_free_ahash(auth);
return err;
@@ -364,8 +362,8 @@ static void crypto_authenc_exit_tfm(struct crypto_aead *tfm)
struct crypto_authenc_ctx *ctx = crypto_aead_ctx(tfm);
crypto_free_ahash(ctx->auth);
- crypto_free_ablkcipher(ctx->enc);
- crypto_put_default_null_skcipher();
+ crypto_free_skcipher(ctx->enc);
+ crypto_put_default_null_skcipher2();
}
static void crypto_authenc_free(struct aead_instance *inst)
@@ -384,7 +382,7 @@ static int crypto_authenc_create(struct crypto_template *tmpl,
struct aead_instance *inst;
struct hash_alg_common *auth;
struct crypto_alg *auth_base;
- struct crypto_alg *enc;
+ struct skcipher_alg *enc;
struct authenc_instance_ctx *ctx;
const char *enc_name;
int err;
@@ -397,7 +395,8 @@ static int crypto_authenc_create(struct crypto_template *tmpl,
return -EINVAL;
auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
- CRYPTO_ALG_TYPE_AHASH_MASK);
+ CRYPTO_ALG_TYPE_AHASH_MASK |
+ crypto_requires_sync(algt->type, algt->mask));
if (IS_ERR(auth))
return PTR_ERR(auth);
@@ -421,37 +420,40 @@ static int crypto_authenc_create(struct crypto_template *tmpl,
goto err_free_inst;
crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
- err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
- crypto_requires_sync(algt->type,
- algt->mask));
+ err = crypto_grab_skcipher2(&ctx->enc, enc_name, 0,
+ crypto_requires_sync(algt->type,
+ algt->mask));
if (err)
goto err_drop_auth;
- enc = crypto_skcipher_spawn_alg(&ctx->enc);
+ enc = crypto_spawn_skcipher_alg(&ctx->enc);
ctx->reqoff = ALIGN(2 * auth->digestsize + auth_base->cra_alignmask,
auth_base->cra_alignmask + 1);
err = -ENAMETOOLONG;
if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
- "authenc(%s,%s)", auth_base->cra_name, enc->cra_name) >=
+ "authenc(%s,%s)", auth_base->cra_name,
+ enc->base.cra_name) >=
CRYPTO_MAX_ALG_NAME)
goto err_drop_enc;
if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
"authenc(%s,%s)", auth_base->cra_driver_name,
- enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
+ enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
goto err_drop_enc;
- inst->alg.base.cra_flags = enc->cra_flags & CRYPTO_ALG_ASYNC;
- inst->alg.base.cra_priority = enc->cra_priority * 10 +
+ inst->alg.base.cra_flags = (auth_base->cra_flags |
+ enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
+ inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
auth_base->cra_priority;
- inst->alg.base.cra_blocksize = enc->cra_blocksize;
+ inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
- enc->cra_alignmask;
+ enc->base.cra_alignmask;
inst->alg.base.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
- inst->alg.ivsize = enc->cra_ablkcipher.ivsize;
+ inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
+ inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
inst->alg.maxauthsize = auth->digestsize;
inst->alg.init = crypto_authenc_init_tfm;