| author | Ard Biesheuvel <ardb@kernel.org> | 2025-12-03 17:38:06 +0100 |
|---|---|---|
| committer | Eric Biggers <ebiggers@kernel.org> | 2025-12-09 15:10:21 -0800 |
| commit | 6f7d9481920e1bc06ff21c1e6a84fdea49c6ec3d (patch) | |
| tree | 4823d1eb6c7ace05a73fcac4574bd1192382e4d4 | |
| parent | a9a8b1a383254c9f4ed7fe23b56937f8ad3ad3ab (diff) | |
crypto/arm64: sm4/xts - Merge ksimd scopes to reduce stack bloat
Merge the two ksimd scopes in the implementation of SM4-XTS to prevent
stack bloat in cases where the compiler fails to combine the stack slots
for the kernel mode FP/SIMD buffers.
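To illustrate the issue, here is a minimal sketch, not the kernel code: `BUF_SIZE`, `ksimd_buf`, and `do_simd_work()` are hypothetical stand-ins for whatever per-scope state `scoped_ksimd()` places on the stack.

```c
/* Hypothetical stand-in for a consumer of the saved FP/SIMD state. */
void do_simd_work(char *buf);

#define BUF_SIZE 512	/* assumed size, for illustration only */

void two_scopes(void)
{
	{	/* first ksimd scope */
		char ksimd_buf[BUF_SIZE];
		do_simd_work(ksimd_buf);
	}
	{	/* second ksimd scope */
		char ksimd_buf[BUF_SIZE];
		do_simd_work(ksimd_buf);
	}
	/*
	 * The two buffers have disjoint lifetimes, so the compiler may
	 * overlay them in a single stack slot, but nothing obliges it to;
	 * when it does not, the frame grows by another BUF_SIZE bytes.
	 */
}

void one_scope(void)
{
	/* merged scope: a single buffer serves both phases */
	char ksimd_buf[BUF_SIZE];

	do_simd_work(ksimd_buf);
	do_simd_work(ksimd_buf);
}
```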
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Tested-by: Arnd Bergmann <arnd@arndb.de>
Link: https://lore.kernel.org/r/20251203163803.157541-6-ardb@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
| -rw-r--r-- | arch/arm64/crypto/sm4-ce-glue.c | 42 |
1 file changed, 20 insertions(+), 22 deletions(-)
```diff
diff --git a/arch/arm64/crypto/sm4-ce-glue.c b/arch/arm64/crypto/sm4-ce-glue.c
index 5569cece5a0b..0eeabfa9ef25 100644
--- a/arch/arm64/crypto/sm4-ce-glue.c
+++ b/arch/arm64/crypto/sm4-ce-glue.c
@@ -346,11 +346,11 @@ static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
 		tail = 0;
 	}
 
-	while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
-		if (nbytes < walk.total)
-			nbytes &= ~(SM4_BLOCK_SIZE - 1);
+	scoped_ksimd() {
+		while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
+			if (nbytes < walk.total)
+				nbytes &= ~(SM4_BLOCK_SIZE - 1);
 
-		scoped_ksimd() {
 			if (encrypt)
 				sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
 					       walk.src.virt.addr, walk.iv, nbytes,
@@ -359,32 +359,30 @@ static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
 				sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
 					       walk.src.virt.addr, walk.iv, nbytes,
 					       rkey2_enc);
-		}
-		rkey2_enc = NULL;
+			rkey2_enc = NULL;
 
-		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
-		if (err)
-			return err;
-	}
+			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+			if (err)
+				return err;
+		}
 
-	if (likely(tail == 0))
-		return 0;
+		if (likely(tail == 0))
+			return 0;
 
-	/* handle ciphertext stealing */
+		/* handle ciphertext stealing */
 
-	dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
-	if (req->dst != req->src)
-		dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);
+		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
+		if (req->dst != req->src)
+			dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);
 
-	skcipher_request_set_crypt(&subreq, src, dst, SM4_BLOCK_SIZE + tail,
-				   req->iv);
+		skcipher_request_set_crypt(&subreq, src, dst,
+					   SM4_BLOCK_SIZE + tail, req->iv);
 
-	err = skcipher_walk_virt(&walk, &subreq, false);
-	if (err)
-		return err;
+		err = skcipher_walk_virt(&walk, &subreq, false);
+		if (err)
+			return err;
 
-	scoped_ksimd() {
 		if (encrypt)
 			sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
 				       walk.src.virt.addr, walk.iv, walk.nbytes,
 				       rkey2_enc);
```
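For readability, here is the post-patch control flow of the function body, reconstructed from the added and context lines of the hunks above; the diff display is cut off mid-hunk, so this sketch stops at the same point.

```c
/* Post-patch shape of sm4_xts_crypt(), pieced together from the hunks
 * above; everything now runs under one scoped_ksimd() section. */
scoped_ksimd() {
	/* bulk blocks */
	while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
		if (nbytes < walk.total)
			nbytes &= ~(SM4_BLOCK_SIZE - 1);

		if (encrypt)
			sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
				       walk.src.virt.addr, walk.iv, nbytes,
				       rkey2_enc);
		else
			sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
				       walk.src.virt.addr, walk.iv, nbytes,
				       rkey2_enc);
		rkey2_enc = NULL;

		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
		if (err)
			return err;
	}

	if (likely(tail == 0))
		return 0;

	/* handle ciphertext stealing */
	dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);

	skcipher_request_set_crypt(&subreq, src, dst,
				   SM4_BLOCK_SIZE + tail, req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	if (encrypt)
		sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
			       walk.src.virt.addr, walk.iv, walk.nbytes,
			       rkey2_enc);
	/* ... remainder not shown; the diff above is truncated here */
}
```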
