author	Herbert Xu <herbert@gondor.apana.org.au>	2025-04-18 11:00:20 +0800
committer	Herbert Xu <herbert@gondor.apana.org.au>	2025-04-23 15:52:46 +0800
commit	ff3cb9de53baa3da7df0e08798a5638436d8aabc (patch)
tree	c538caed6f1cbf8675d4cca9121bff44b926a392 /include/crypto
parent	201e9ec3b621c9542441eb7bd34facbc1749b975 (diff)
crypto: x86/sha512 - Use API partial block handling
Use the Crypto API partial block handling.  Also remove the unnecessary
SIMD fallback path.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
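For context, this is roughly how an arch glue module uses the new helpers once
the API handles partial blocks: update() consumes whole blocks only and returns
the leftover byte count for the core to buffer, while finup() pads and
finalises directly from the caller's data.  A minimal sketch, not taken from
the patch; sha512_transform_myarch is a hypothetical assembly block function,
and the algorithm would additionally be marked block-only in its cra_flags
(not shown):

	/* Hypothetical arch glue built on the new sha512_base helpers. */
	#include <crypto/internal/hash.h>
	#include <crypto/sha512_base.h>
	#include <linux/module.h>

	/* Provided by arch-specific assembly (hypothetical name). */
	asmlinkage void sha512_transform_myarch(struct sha512_state *sst,
						u8 const *src, int blocks);

	static int myarch_sha512_update(struct shash_desc *desc,
					const u8 *data, unsigned int len)
	{
		/* Processes whole blocks only; the return value is the number
		 * of leftover bytes, which the Crypto API partial block
		 * handling buffers on behalf of the driver. */
		return sha512_base_do_update_blocks(desc, data, len,
						    sha512_transform_myarch);
	}

	static int myarch_sha512_finup(struct shash_desc *desc,
				       const u8 *data, unsigned int len,
				       u8 *out)
	{
		/* Pads the tail, appends the 128-bit bit count and writes
		 * out the digest. */
		sha512_base_do_finup(desc, data, len, sha512_transform_myarch);
		return sha512_base_finish(desc, out);
	}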
Diffstat (limited to 'include/crypto')
-rw-r--r--	include/crypto/sha2.h	1
-rw-r--r--	include/crypto/sha512_base.h	54
2 files changed, 50 insertions, 5 deletions
diff --git a/include/crypto/sha2.h b/include/crypto/sha2.h
index a913bad5dd3b..e9ad7ab955aa 100644
--- a/include/crypto/sha2.h
+++ b/include/crypto/sha2.h
@@ -19,6 +19,7 @@
#define SHA512_DIGEST_SIZE 64
#define SHA512_BLOCK_SIZE 128
+#define SHA512_STATE_SIZE 80
#define SHA224_H0 0xc1059ed8UL
#define SHA224_H1 0x367cd507UL
diff --git a/include/crypto/sha512_base.h b/include/crypto/sha512_base.h
index 679916a84cb2..8cb172e52dc0 100644
--- a/include/crypto/sha512_base.h
+++ b/include/crypto/sha512_base.h
@@ -10,10 +10,7 @@
#include <crypto/internal/hash.h>
#include <crypto/sha2.h>
-#include <linux/crypto.h>
-#include <linux/module.h>
#include <linux/string.h>
-
#include <linux/unaligned.h>
typedef void (sha512_block_fn)(struct sha512_state *sst, u8 const *src,
			       int blocks);
@@ -93,6 +90,55 @@ static inline int sha512_base_do_update(struct shash_desc *desc,
return 0;
}
+static inline int sha512_base_do_update_blocks(struct shash_desc *desc,
+ const u8 *data,
+ unsigned int len,
+ sha512_block_fn *block_fn)
+{
+ unsigned int remain = len - round_down(len, SHA512_BLOCK_SIZE);
+ struct sha512_state *sctx = shash_desc_ctx(desc);
+
+ len -= remain;
+ sctx->count[0] += len;
+ if (sctx->count[0] < len)
+ sctx->count[1]++;
+ block_fn(sctx, data, len / SHA512_BLOCK_SIZE);
+ return remain;
+}
+
+static inline int sha512_base_do_finup(struct shash_desc *desc, const u8 *src,
+ unsigned int len,
+ sha512_block_fn *block_fn)
+{
+ unsigned int bit_offset = SHA512_BLOCK_SIZE / 8 - 2;
+ struct sha512_state *sctx = shash_desc_ctx(desc);
+ union {
+ __be64 b64[SHA512_BLOCK_SIZE / 4];
+ u8 u8[SHA512_BLOCK_SIZE * 2];
+ } block = {};
+
+ if (len >= SHA512_BLOCK_SIZE) {
+ int remain;
+
+ remain = sha512_base_do_update_blocks(desc, src, len, block_fn);
+ src += len - remain;
+ len = remain;
+ }
+
+ if (len >= bit_offset * 8)
+ bit_offset += SHA512_BLOCK_SIZE / 8;
+ memcpy(&block, src, len);
+ block.u8[len] = 0x80;
+ sctx->count[0] += len;
+ block.b64[bit_offset] = cpu_to_be64(sctx->count[1] << 3 |
+ sctx->count[0] >> 61);
+ block.b64[bit_offset + 1] = cpu_to_be64(sctx->count[0] << 3);
+ block_fn(sctx, block.u8, (bit_offset + 2) * 8 / SHA512_BLOCK_SIZE);
+ memzero_explicit(&block, sizeof(block));
+
+ return 0;
+}
+
static inline int sha512_base_do_finalize(struct shash_desc *desc,
sha512_block_fn *block_fn)
{
@@ -126,8 +172,6 @@ static inline int sha512_base_finish(struct shash_desc *desc, u8 *out)
for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be64))
put_unaligned_be64(sctx->state[i], digest++);
-
- memzero_explicit(sctx, sizeof(*sctx));
return 0;
}
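As a sanity check on the finup padding arithmetic above (reference arithmetic,
not part of the patch):

	/* SHA512_BLOCK_SIZE is 128, so bit_offset starts at 128/8 - 2 = 14:
	 * the 128-bit length field occupies b64[14..15], i.e. bytes 112..127
	 * of the final block.
	 *
	 *   len = 10:  0x80 lands at byte 10, the length fits in the same
	 *              block, and (14 + 2) * 8 / 128 = 1 block is processed.
	 *   len = 120: 120 >= 14 * 8, so the pad byte would collide with the
	 *              length field; bit_offset becomes 14 + 16 = 30, the
	 *              length moves to bytes 240..255 of the two-block
	 *              buffer, and (30 + 2) * 8 / 128 = 2 blocks are
	 *              processed.
	 */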