4.19-stable review patch. If anyone has any objections, please let me know.
------------------
From: Eric Biggers <ebiggers@google.com>
commit 0f533e67d26f228ea5dfdacc8a4bdeb487af5208 upstream.
The generic AEGIS implementations all fail the improved AEAD tests
because they produce the wrong result with some data layouts.  The
issue is that they assume that if the skcipher_walk API gives 'nbytes'
not aligned to the walksize (a.k.a. walk.stride), then it is the end
of the data.  In fact, this can happen before the end.  Fix them.
Fixes: f606a88e5823 ("crypto: aegis - Add generic AEGIS AEAD implementations")
Cc: <stable@vger.kernel.org> # v4.18+
Cc: Ondrej Mosnacek <omosnace@redhat.com>
Signed-off-by: Eric Biggers <ebiggers@google.com>
Reviewed-by: Ondrej Mosnacek <omosnace@redhat.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
---
 crypto/aegis128.c  | 14 +++++++-------
 crypto/aegis128l.c | 14 +++++++-------
 crypto/aegis256.c  | 14 +++++++-------
 3 files changed, 21 insertions(+), 21 deletions(-)
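
For reviewers who want the loop shape in isolation: below is a minimal
userspace sketch of the pattern this patch installs.  toy_walk,
toy_walk_step() and toy_walk_done() are made-up stand-ins, not the real
skcipher_walk API; they only model the behaviour the commit message
describes, namely that the walker can hand back a chunk smaller than
the stride before the end of the data, so a mid-stream chunk must be
rounded down to the stride and the unprocessed tail returned via the
"done" call so it is presented again on the next iteration.

/*
 * Toy model only (hypothetical names, not kernel code).  Mirrors the
 * fixed loop: round a mid-stream chunk down to the stride and hand the
 * remainder back, as the patch does with round_down() and
 * skcipher_walk_done(&walk, walk.nbytes - nbytes).
 */
#include <stdio.h>

#define STRIDE 16u			/* stand-in for walk.stride */

struct toy_walk {
	unsigned int total;		/* bytes still to process (walk.total)  */
	unsigned int nbytes;		/* bytes mapped this step (walk.nbytes) */
};

/* Map at most 'avail' bytes for this step. */
static void toy_walk_step(struct toy_walk *w, unsigned int avail)
{
	w->nbytes = (w->total < avail) ? w->total : avail;
}

/* Mirror skcipher_walk_done(&walk, rem): only consumed bytes advance. */
static void toy_walk_done(struct toy_walk *w, unsigned int rem)
{
	w->total -= w->nbytes - rem;
}

int main(void)
{
	/* 50 bytes total; a boundary limits the first mapping to 20 bytes. */
	struct toy_walk w = { .total = 50, .nbytes = 0 };
	unsigned int avail[] = { 20, 64, 64 };
	unsigned int step = 0;

	while (w.total) {
		unsigned int nbytes;

		toy_walk_step(&w, avail[step++]);
		nbytes = w.nbytes;

		/* The fix: only the final chunk may be a partial block. */
		if (nbytes < w.total)
			nbytes -= nbytes % STRIDE;	/* like round_down() */

		printf("step %u: mapped %u, crypt %u, hand back %u\n",
		       step, w.nbytes, nbytes, w.nbytes - nbytes);

		toy_walk_done(&w, w.nbytes - nbytes);
	}
	return 0;
}

The old code passed every mapped chunk straight to crypt_chunk(), so a
sub-stride chunk that was not actually the end of the message was
treated as a final partial block, which is the wrong-result case the
commit message describes.
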
--- a/crypto/aegis128.c
+++ b/crypto/aegis128.c
@@ -290,19 +290,19 @@ static void crypto_aegis128_process_cryp
 					const struct aegis128_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
--- a/crypto/aegis128l.c
+++ b/crypto/aegis128l.c
@@ -353,19 +353,19 @@ static void crypto_aegis128l_process_cry
 					const struct aegis128l_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 
--- a/crypto/aegis256.c
+++ b/crypto/aegis256.c
@@ -303,19 +303,19 @@ static void crypto_aegis256_process_cryp
 					const struct aegis256_ops *ops)
 {
 	struct skcipher_walk walk;
-	u8 *src, *dst;
-	unsigned int chunksize;
 
 	ops->skcipher_walk_init(&walk, req, false);
 
 	while (walk.nbytes) {
-		src = walk.src.virt.addr;
-		dst = walk.dst.virt.addr;
-		chunksize = walk.nbytes;
+		unsigned int nbytes = walk.nbytes;
 
-		ops->crypt_chunk(state, dst, src, chunksize);
+		if (nbytes < walk.total)
+			nbytes = round_down(nbytes, walk.stride);
 
-		skcipher_walk_done(&walk, 0);
+		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
+				 nbytes);
+
+		skcipher_walk_done(&walk, walk.nbytes - nbytes);
 	}
 }
 