From d23089c20adddd2d3ba409198719493695d56096 Mon Sep 17 00:00:00 2001
From: Eric Biggers
Date: Thu, 31 Jan 2019 23:51:36 -0800
Subject: crypto: aegis - fix handling chunked inputs

The generic AEGIS implementations all fail the improved AEAD tests
because they produce the wrong result with some data layouts.  The issue
is that they assume that if the skcipher_walk API gives 'nbytes' not
aligned to the walksize (a.k.a. walk.stride), then it is the end of the
data.  In fact, this can happen before the end.  Fix them.

Fixes: cc695b2e62a6 ("crypto: aegis - Add generic AEGIS AEAD implementations")
Cc: # v4.18+
Cc: Ondrej Mosnacek
Signed-off-by: Eric Biggers
Reviewed-by: Ondrej Mosnacek
Signed-off-by: Herbert Xu
---
 crypto/aegis128.c  | 14 +++++++-------
 crypto/aegis128l.c | 14 +++++++-------
 crypto/aegis256.c  | 14 +++++++-------
 3 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/crypto/aegis128.c b/crypto/aegis128.c
index 96e078a8..3718a834 100644
--- a/crypto/aegis128.c
+++ b/crypto/aegis128.c
@@ -286,19 +286,19 @@ static void crypto_aegis128_process_crypt(struct aegis_state *state,
 					  const struct aegis128_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
diff --git a/crypto/aegis128l.c b/crypto/aegis128l.c
index a210e779..275a8616 100644
--- a/crypto/aegis128l.c
+++ b/crypto/aegis128l.c
@@ -349,19 +349,19 @@ static void crypto_aegis128l_process_crypt(struct aegis_state *state,
 					   const struct aegis128l_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
diff --git a/crypto/aegis256.c b/crypto/aegis256.c
index 49882a28..ecd6b7f3 100644
--- a/crypto/aegis256.c
+++ b/crypto/aegis256.c
@@ -299,19 +299,19 @@ static void crypto_aegis256_process_crypt(struct aegis_state *state,
 					  const struct aegis256_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
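
For illustration only, and not part of the patch: the sketch below is a
small, hypothetical userspace C program (not kernel code) that walks
through the arithmetic the fix relies on.  A chunk that is not the final
one is rounded down to a whole number of stride-sized blocks, and the
leftover bytes are handed back to the walk so they are processed together
with the next chunk; only the last chunk may legitimately be a partial
block.  The names stride, nbytes and total loosely mirror walk.stride,
walk.nbytes and walk.total, and the example values are made up.

#include <stdio.h>

/* Equivalent of the kernel's round_down() for this sketch. */
static unsigned int round_down_to(unsigned int x, unsigned int m)
{
	return x - (x % m);
}

int main(void)
{
	const unsigned int stride = 16;	/* e.g. the AEGIS-128 block size */

	/* (nbytes, total) pairs as the walk might present them */
	struct { unsigned int nbytes, total; } steps[] = {
		{ 40, 100 },	/* mid-stream chunk, not block aligned  */
		{ 32,  60 },	/* mid-stream chunk, already aligned    */
		{ 28,  28 },	/* final chunk: a partial block is fine */
	};

	for (unsigned int i = 0; i < sizeof(steps) / sizeof(steps[0]); i++) {
		unsigned int nbytes = steps[i].nbytes;

		if (nbytes < steps[i].total)	/* not the end of the data */
			nbytes = round_down_to(nbytes, stride);

		/* the "hand back" amount is what the kernel code passes to
		 * skcipher_walk_done(&walk, walk.nbytes - nbytes) */
		printf("walk gave %u of %u remaining: process %u, hand back %u\n",
		       steps[i].nbytes, steps[i].total,
		       nbytes, steps[i].nbytes - nbytes);
	}
	return 0;
}

Running this prints, for the first step, "process 32, hand back 8": the
8 trailing bytes are not encrypted yet, which is exactly what the old
code got wrong by always processing (and then discarding the state for)
a partial block mid-stream.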