The patch below does not apply to the 6.1-stable tree. If someone wants it applied there, or to any other stable or longterm tree, then please email the backport, including the original git commit id to stable@vger.kernel.org.
To reproduce the conflict and resubmit, you may use the following commands:
git fetch https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git/ linux-6.1.y
git checkout FETCH_HEAD
git cherry-pick -x 3d9eb180fbe8828cce43bce4c370124685b205c3
# <resolve conflicts, build, test, etc.>
git commit -s
git send-email --to 'stable@vger.kernel.org' --in-reply-to '2025082115-defensive-plasma-31fd@gregkh' --subject-prefix 'PATCH 6.1.y' HEAD^..
Possible dependencies:
thanks,
greg k-h
------------------ original commit in Linus's tree ------------------
From 3d9eb180fbe8828cce43bce4c370124685b205c3 Mon Sep 17 00:00:00 2001
From: Eric Biggers <ebiggers@kernel.org>
Date: Tue, 8 Jul 2025 12:38:29 -0700
Subject: [PATCH] crypto: x86/aegis - Add missing error checks
The skcipher_walk functions can allocate memory and can fail, so checking for errors is necessary.
Fixes: 1d373d4e8e15 ("crypto: x86 - Add optimized AEGIS implementations")
Cc: stable@vger.kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
diff --git a/arch/x86/crypto/aegis128-aesni-glue.c b/arch/x86/crypto/aegis128-aesni-glue.c
index 3cb5c193038b..f1adfba1a76e 100644
--- a/arch/x86/crypto/aegis128-aesni-glue.c
+++ b/arch/x86/crypto/aegis128-aesni-glue.c
@@ -104,10 +104,12 @@ static void crypto_aegis128_aesni_process_ad(
 	}
 }
 
-static __always_inline void
+static __always_inline int
 crypto_aegis128_aesni_process_crypt(struct aegis_state *state,
 				    struct skcipher_walk *walk, bool enc)
 {
+	int err = 0;
+
 	while (walk->nbytes >= AEGIS128_BLOCK_SIZE) {
 		if (enc)
 			aegis128_aesni_enc(state, walk->src.virt.addr,
@@ -120,7 +122,8 @@ crypto_aegis128_aesni_process_crypt(struct aegis_state *state,
 					   round_down(walk->nbytes,
 						      AEGIS128_BLOCK_SIZE));
 		kernel_fpu_end();
-		skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE);
+		err = skcipher_walk_done(walk,
+					 walk->nbytes % AEGIS128_BLOCK_SIZE);
 		kernel_fpu_begin();
 	}
 
@@ -134,9 +137,10 @@ crypto_aegis128_aesni_process_crypt(struct aegis_state *state,
 				   walk->dst.virt.addr, walk->nbytes);
 		kernel_fpu_end();
-		skcipher_walk_done(walk, 0);
+		err = skcipher_walk_done(walk, 0);
 		kernel_fpu_begin();
 	}
+	return err;
 }
 
 static struct aegis_ctx *crypto_aegis128_aesni_ctx(struct crypto_aead *aead)
@@ -169,7 +173,7 @@ static int crypto_aegis128_aesni_setauthsize(struct crypto_aead *tfm,
 	return 0;
 }
 
-static __always_inline void
+static __always_inline int
 crypto_aegis128_aesni_crypt(struct aead_request *req,
 			    struct aegis_block *tag_xor,
 			    unsigned int cryptlen, bool enc)
@@ -178,20 +182,24 @@ crypto_aegis128_aesni_crypt(struct aead_request *req,
 	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(tfm);
 	struct skcipher_walk walk;
 	struct aegis_state state;
+	int err;
 
 	if (enc)
-		skcipher_walk_aead_encrypt(&walk, req, false);
+		err = skcipher_walk_aead_encrypt(&walk, req, false);
 	else
-		skcipher_walk_aead_decrypt(&walk, req, false);
+		err = skcipher_walk_aead_decrypt(&walk, req, false);
+	if (err)
+		return err;
 
 	kernel_fpu_begin();
 
 	aegis128_aesni_init(&state, &ctx->key, req->iv);
 	crypto_aegis128_aesni_process_ad(&state, req->src, req->assoclen);
-	crypto_aegis128_aesni_process_crypt(&state, &walk, enc);
-	aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);
-
+	err = crypto_aegis128_aesni_process_crypt(&state, &walk, enc);
+	if (err == 0)
+		aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);
 	kernel_fpu_end();
+	return err;
 }
 
 static int crypto_aegis128_aesni_encrypt(struct aead_request *req)
@@ -200,8 +208,11 @@ static int crypto_aegis128_aesni_encrypt(struct aead_request *req)
 	struct aegis_block tag = {};
 	unsigned int authsize = crypto_aead_authsize(tfm);
 	unsigned int cryptlen = req->cryptlen;
+	int err;
 
-	crypto_aegis128_aesni_crypt(req, &tag, cryptlen, true);
+	err = crypto_aegis128_aesni_crypt(req, &tag, cryptlen, true);
+	if (err)
+		return err;
 
 	scatterwalk_map_and_copy(tag.bytes, req->dst,
 				 req->assoclen + cryptlen, authsize, 1);
@@ -216,11 +227,14 @@ static int crypto_aegis128_aesni_decrypt(struct aead_request *req)
 	struct aegis_block tag;
 	unsigned int authsize = crypto_aead_authsize(tfm);
 	unsigned int cryptlen = req->cryptlen - authsize;
+	int err;
 
 	scatterwalk_map_and_copy(tag.bytes, req->src,
 				 req->assoclen + cryptlen, authsize, 0);
 
-	crypto_aegis128_aesni_crypt(req, &tag, cryptlen, false);
+	err = crypto_aegis128_aesni_crypt(req, &tag, cryptlen, false);
+	if (err)
+		return err;
 
 	return crypto_memneq(tag.bytes, zeros.bytes, authsize) ? -EBADMSG : 0;
 }
linux-stable-mirror@lists.linaro.org