author     Ard Biesheuvel <ard.biesheuvel@linaro.org>   2018-10-08 13:16:59 +0200
committer  Herbert Xu <herbert@gondor.apana.org.au>     2018-10-12 14:20:45 +0800
commit     cc3cc48972371b3c2e94f16b6ac7bdad7fdfc93c (patch)
tree       a08b46acc09d09c546ff8e226344ee1b5f65f87e /arch/arm64/crypto
parent     22a8118d329334833cd30f2ceb36d28e8cae8a4f (diff)
crypto: arm64/aes-blk - ensure XTS mask is always loaded
Commit 2e5d2f33d1db ("crypto: arm64/aes-blk - improve XTS mask handling")
optimized away some reloads of the XTS mask vector, but failed to take into
account that calls into the XTS en/decrypt routines will take a slightly
different code path if a single block of input is split across different
buffers. So let's ensure that the first load occurs unconditionally, and
move the reload to the end so it doesn't occur needlessly.

Fixes: 2e5d2f33d1db ("crypto: arm64/aes-blk - improve XTS mask handling")
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
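For orientation, here is a minimal C-style sketch of the aes_xts_encrypt
control flow after this patch. It is hypothetical illustration, not kernel
code: the helper names (load_mask, reload_mask, derive_first_tweak, and so
on) are made up to mirror the macros and labels in aes-modes.S. The point
it shows is that a continuation call reaches the tweak update without
passing through the first-tweak setup, so the mask load has to happen
unconditionally on entry, while the reload only needs to run at the bottom
of the four-block loop.

/* Hypothetical control-flow model of aes_xts_encrypt after this patch. */

static void load_mask(void)           { /* xts_load_mask: ld1 of the mask into v8 */ }
static void reload_mask(void)         { /* xts_reload_mask */ }
static void derive_first_tweak(void)  { /* encrypt_block on the IV */ }
static void next_tweak(void)          { /* advance the tweak; consumes the mask in v8 */ }
static void process_four_blocks(void) { /* interleaved 4-way encryption */ }
static void process_tail(int n)       { (void)n; /* .Lxtsenc1x: 1..3 blocks */ }

static void xts_encrypt_flow(int first_call, int blocks)
{
        load_mask();                    /* unconditional on entry after the fix */

        if (first_call) {
                derive_first_tweak();
                goto Nx;                /* first tweak already set up */
        }
loopNx:
        next_tweak();                   /* reached directly on a continuation call,
                                           so the mask must already be loaded here */
Nx:
        if (blocks < 4) {
                process_tail(blocks);
                return;
        }
        process_four_blocks();
        blocks -= 4;
        if (blocks == 0)
                return;
        reload_mask();                  /* moved to the loop tail so it no longer
                                           runs needlessly on the entry paths */
        goto loopNx;
}

int main(void)
{
        xts_encrypt_flow(1, 8);         /* first call of a request */
        xts_encrypt_flow(0, 4);         /* continuation call: takes the "not first"
                                           path, which previously skipped the load */
        return 0;
}

In the actual assembly below, w7 plays the role of first_call, w4 holds the
block count, and v8 holds the XTS mask; the decrypt path receives the same
change.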
Diffstat (limited to 'arch/arm64/crypto')
-rw-r--r--   arch/arm64/crypto/aes-modes.S   8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/arm64/crypto/aes-modes.S b/arch/arm64/crypto/aes-modes.S
index 039738ae23f6..67700045a0e0 100644
--- a/arch/arm64/crypto/aes-modes.S
+++ b/arch/arm64/crypto/aes-modes.S
@@ -359,18 +359,17 @@ AES_ENTRY(aes_xts_encrypt)
mov x29, sp
ld1 {v4.16b}, [x6]
+ xts_load_mask v8
cbz w7, .Lxtsencnotfirst
enc_prepare w3, x5, x8
encrypt_block v4, w3, x5, x8, w7 /* first tweak */
enc_switch_key w3, x2, x8
- xts_load_mask v8
b .LxtsencNx
.Lxtsencnotfirst:
enc_prepare w3, x2, x8
.LxtsencloopNx:
- xts_reload_mask v8
next_tweak v4, v4, v8
.LxtsencNx:
subs w4, w4, #4
@@ -391,6 +390,7 @@ AES_ENTRY(aes_xts_encrypt)
st1 {v0.16b-v3.16b}, [x0], #64
mov v4.16b, v7.16b
cbz w4, .Lxtsencout
+ xts_reload_mask v8
b .LxtsencloopNx
.Lxtsenc1x:
adds w4, w4, #4
@@ -417,18 +417,17 @@ AES_ENTRY(aes_xts_decrypt)
mov x29, sp
ld1 {v4.16b}, [x6]
+ xts_load_mask v8
cbz w7, .Lxtsdecnotfirst
enc_prepare w3, x5, x8
encrypt_block v4, w3, x5, x8, w7 /* first tweak */
dec_prepare w3, x2, x8
- xts_load_mask v8
b .LxtsdecNx
.Lxtsdecnotfirst:
dec_prepare w3, x2, x8
.LxtsdecloopNx:
- xts_reload_mask v8
next_tweak v4, v4, v8
.LxtsdecNx:
subs w4, w4, #4
@@ -449,6 +448,7 @@ AES_ENTRY(aes_xts_decrypt)
st1 {v0.16b-v3.16b}, [x0], #64
mov v4.16b, v7.16b
cbz w4, .Lxtsdecout
+ xts_reload_mask v8
b .LxtsdecloopNx
.Lxtsdec1x:
adds w4, w4, #4