author	Linus Torvalds <torvalds@linux-foundation.org>	2025-05-26 13:32:06 -0700
committer	Linus Torvalds <torvalds@linux-foundation.org>	2025-05-26 13:32:06 -0700
commit	15d90a5e5524532b7456a24f4626cf28c1629c4c (patch)
tree	9f819198d724af6fd0c46fda0013a29ede2c16a0 /crypto/crc32c.c
parent	14f19dc6440f23f417c83207c117b54698aa3934 (diff)
parent	289c99bec7eed918ab37c62cbb29a2e3f58fb1fb (diff)
Merge tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux
Pull CRC updates from Eric Biggers:
 "Cleanups for the kernel's CRC (cyclic redundancy check) code:

   - Use __ro_after_init where appropriate

   - Remove unnecessary static_key on s390

   - Rename some source code files

   - Rename the crc32 and crc32c crypto API modules

   - Use subsys_initcall instead of arch_initcall

   - Restore maintainers for crc_kunit.c

   - Fold crc16_byte() into crc16.c

   - Add some SPDX license identifiers"

* tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux:
  lib/crc32: add SPDX license identifier
  lib/crc16: unexport crc16_table and crc16_byte()
  w1: ds2406: use crc16() instead of crc16_byte() loop
  MAINTAINERS: add crc_kunit.c back to CRC LIBRARY
  lib/crc: make arch-optimized code use subsys_initcall
  crypto: crc32 - remove "generic" from file and module names
  x86/crc: drop "glue" from filenames
  sparc/crc: drop "glue" from filenames
  s390/crc: drop "glue" from filenames
  powerpc/crc: rename crc32-vpmsum_core.S to crc-vpmsum-template.S
  powerpc/crc: drop "glue" from filenames
  arm64/crc: drop "glue" from filenames
  arm/crc: drop "glue" from filenames
  s390/crc32: Remove no-op module init and exit functions
  s390/crc32: Remove have_vxrs static key
  lib/crc: make the CPU feature static keys __ro_after_init
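For context on the __ro_after_init, static-key, and subsys_initcall items above, the pattern being described looks roughly like the sketch below. This is a hedged illustration, not code from the series: the key name have_fast_crc, the probe fast_crc_cpu_supported(), and the init function fast_crc_init() are hypothetical, and it is my assumption that the DEFINE_STATIC_KEY_FALSE_RO helper from <linux/jump_label.h> is the spelling involved.

#include <linux/init.h>
#include <linux/jump_label.h>

/*
 * Hypothetical CPU-feature key: it is flipped at most once during boot and
 * never written afterwards, so it can sit in memory that the kernel makes
 * read-only once init finishes (__ro_after_init).
 */
static DEFINE_STATIC_KEY_FALSE_RO(have_fast_crc);

/* Placeholder probe; real code would query the arch's CPU-feature bits. */
static bool __init fast_crc_cpu_supported(void)
{
	return false;
}

static int __init fast_crc_init(void)
{
	if (fast_crc_cpu_supported())
		static_branch_enable(&have_fast_crc);
	return 0;
}
/*
 * Registered with subsys_initcall() rather than arch_initcall(), matching
 * the "make arch-optimized code use subsys_initcall" item in the pull.
 */
subsys_initcall(fast_crc_init);

Hot paths would then branch with static_branch_likely(&have_fast_crc) to choose between the accelerated and table-based implementations; because the key is only enabled during an initcall, sealing it read-only after init is safe.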
Diffstat (limited to 'crypto/crc32c.c')
-rw-r--r--	crypto/crc32c.c	222
1 file changed, 222 insertions(+), 0 deletions(-)
diff --git a/crypto/crc32c.c b/crypto/crc32c.c
new file mode 100644
index 000000000000..b1a36d32dc50
--- /dev/null
+++ b/crypto/crc32c.c
@@ -0,0 +1,222 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Cryptographic API.
+ *
+ * CRC32C chksum
+ *
+ *@Article{castagnoli-crc,
+ * author = { Guy Castagnoli and Stefan Braeuer and Martin Herrman},
+ * title = {{Optimization of Cyclic Redundancy-Check Codes with 24
+ * and 32 Parity Bits}},
+ * journal = IEEE Transactions on Communication,
+ * year = {1993},
+ * volume = {41},
+ * number = {6},
+ * pages = {},
+ * month = {June},
+ *}
+ * Used by the iSCSI driver, possibly others, and derived from
+ * the iscsi-crc.c module of the linux-iscsi driver at
+ * http://linux-iscsi.sourceforge.net.
+ *
+ * Following the example of lib/crc32, this function is intended to be
+ * flexible and useful for all users. Modules that currently have their
+ * own crc32c, but hopefully may be able to use this one are:
+ * net/sctp (please add all your doco to here if you change to
+ * use this one!)
+ * <endoflist>
+ *
+ * Copyright (c) 2004 Cisco Systems, Inc.
+ * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
+ */
+
+#include <linux/unaligned.h>
+#include <crypto/internal/hash.h>
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/string.h>
+#include <linux/kernel.h>
+#include <linux/crc32.h>
+
+#define CHKSUM_BLOCK_SIZE 1
+#define CHKSUM_DIGEST_SIZE 4
+
+struct chksum_ctx {
+ u32 key;
+};
+
+struct chksum_desc_ctx {
+ u32 crc;
+};
+
+/*
+ * Steps through buffer one byte at a time, calculates reflected
+ * crc using table.
+ */
+
+static int chksum_init(struct shash_desc *desc)
+{
+ struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ ctx->crc = mctx->key;
+
+ return 0;
+}
+
+/*
+ * Setting the seed allows arbitrary accumulators and flexible XOR policy
+ * If your algorithm starts with ~0, then XOR with ~0 before you set
+ * the seed.
+ */
+static int chksum_setkey(struct crypto_shash *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ struct chksum_ctx *mctx = crypto_shash_ctx(tfm);
+
+ if (keylen != sizeof(mctx->key))
+ return -EINVAL;
+ mctx->key = get_unaligned_le32(key);
+ return 0;
+}
+
+static int chksum_update(struct shash_desc *desc, const u8 *data,
+ unsigned int length)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ ctx->crc = crc32c_base(ctx->crc, data, length);
+ return 0;
+}
+
+static int chksum_update_arch(struct shash_desc *desc, const u8 *data,
+ unsigned int length)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ ctx->crc = crc32c(ctx->crc, data, length);
+ return 0;
+}
+
+static int chksum_final(struct shash_desc *desc, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ put_unaligned_le32(~ctx->crc, out);
+ return 0;
+}
+
+static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out)
+{
+ put_unaligned_le32(~crc32c_base(*crcp, data, len), out);
+ return 0;
+}
+
+static int __chksum_finup_arch(u32 *crcp, const u8 *data, unsigned int len,
+ u8 *out)
+{
+ put_unaligned_le32(~crc32c(*crcp, data, len), out);
+ return 0;
+}
+
+static int chksum_finup(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ return __chksum_finup(&ctx->crc, data, len, out);
+}
+
+static int chksum_finup_arch(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
+
+ return __chksum_finup_arch(&ctx->crc, data, len, out);
+}
+
+static int chksum_digest(struct shash_desc *desc, const u8 *data,
+ unsigned int length, u8 *out)
+{
+ struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
+
+ return __chksum_finup(&mctx->key, data, length, out);
+}
+
+static int chksum_digest_arch(struct shash_desc *desc, const u8 *data,
+ unsigned int length, u8 *out)
+{
+ struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
+
+ return __chksum_finup_arch(&mctx->key, data, length, out);
+}
+
+static int crc32c_cra_init(struct crypto_tfm *tfm)
+{
+ struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
+
+ mctx->key = ~0;
+ return 0;
+}
+
+static struct shash_alg algs[] = {{
+ .digestsize = CHKSUM_DIGEST_SIZE,
+ .setkey = chksum_setkey,
+ .init = chksum_init,
+ .update = chksum_update,
+ .final = chksum_final,
+ .finup = chksum_finup,
+ .digest = chksum_digest,
+ .descsize = sizeof(struct chksum_desc_ctx),
+
+ .base.cra_name = "crc32c",
+ .base.cra_driver_name = "crc32c-generic",
+ .base.cra_priority = 100,
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_blocksize = CHKSUM_BLOCK_SIZE,
+ .base.cra_ctxsize = sizeof(struct chksum_ctx),
+ .base.cra_module = THIS_MODULE,
+ .base.cra_init = crc32c_cra_init,
+}, {
+ .digestsize = CHKSUM_DIGEST_SIZE,
+ .setkey = chksum_setkey,
+ .init = chksum_init,
+ .update = chksum_update_arch,
+ .final = chksum_final,
+ .finup = chksum_finup_arch,
+ .digest = chksum_digest_arch,
+ .descsize = sizeof(struct chksum_desc_ctx),
+
+ .base.cra_name = "crc32c",
+ .base.cra_driver_name = "crc32c-" __stringify(ARCH),
+ .base.cra_priority = 150,
+ .base.cra_flags = CRYPTO_ALG_OPTIONAL_KEY,
+ .base.cra_blocksize = CHKSUM_BLOCK_SIZE,
+ .base.cra_ctxsize = sizeof(struct chksum_ctx),
+ .base.cra_module = THIS_MODULE,
+ .base.cra_init = crc32c_cra_init,
+}};
+
+static int num_algs;
+
+static int __init crc32c_mod_init(void)
+{
+ /* register the arch flavor only if it differs from the generic one */
+ num_algs = 1 + ((crc32_optimizations() & CRC32C_OPTIMIZATION) != 0);
+
+ return crypto_register_shashes(algs, num_algs);
+}
+
+static void __exit crc32c_mod_fini(void)
+{
+ crypto_unregister_shashes(algs, num_algs);
+}
+
+subsys_initcall(crc32c_mod_init);
+module_exit(crc32c_mod_fini);
+
+MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
+MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations wrapper for lib/crc32c");
+MODULE_LICENSE("GPL");
+MODULE_ALIAS_CRYPTO("crc32c");
+MODULE_ALIAS_CRYPTO("crc32c-generic");
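As a usage note, both shash entries registered above are reachable through the crypto API under the algorithm name "crc32c"; allocation picks the higher-priority crc32c-<arch> provider when it was registered, and crc32c-generic otherwise. The sketch below shows one plausible in-kernel caller; example_crc32c() and its local names are hypothetical and not taken from the tree.

#include <crypto/hash.h>
#include <linux/err.h>

static int example_crc32c(const u8 *data, unsigned int len, u32 *result)
{
	struct crypto_shash *tfm;
	int err;

	/* Resolves to crc32c-<arch> (cra_priority 150) when available,
	 * otherwise to crc32c-generic (cra_priority 100).
	 */
	tfm = crypto_alloc_shash("crc32c", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);
		__le32 out;

		desc->tfm = tfm;
		/* Seed defaults to ~0 (crc32c_cra_init); chksum_final()
		 * stores the bit-inverted CRC in little-endian order.
		 */
		err = crypto_shash_digest(desc, data, len, (u8 *)&out);
		if (!err)
			*result = le32_to_cpu(out);
	}

	crypto_free_shash(tfm);
	return err;
}

A caller that needs a different accumulator can pass a 4-byte little-endian seed through crypto_shash_setkey(), which lands in chksum_setkey() above; per its comment, a seed for an algorithm that normally starts at ~0 should be XORed with ~0 before being set. As a quick sanity check, the commonly cited CRC-32C check value for the ASCII string "123456789" is 0xE3069283, which is what *result should hold with the default seed.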