author    Linus Torvalds <torvalds@linux-foundation.org>  2025-05-26 13:47:28 -0700
committer Linus Torvalds <torvalds@linux-foundation.org>  2025-05-26 13:47:28 -0700
commit    14418ddcc2c2055743ac7ee53d5ac2cf8a8660a7 (patch)
tree      4edb44583a986dce31f537ff3d55032118b2db3e /crypto/sha256.c
parent    15d90a5e5524532b7456a24f4626cf28c1629c4c (diff)
parent    2297554f01df6d3d4e98a3915c183ce3e491740a (diff)
Merge tag 'v6.16-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 "API:
   - Fix memcpy_sglist to handle partially overlapping SG lists
   - Use memcpy_sglist to replace null skcipher
   - Rename CRYPTO_TESTS to CRYPTO_BENCHMARK
   - Flip CRYPTO_MANAGER_DISABLE_TEST into CRYPTO_SELFTESTS
   - Hide CRYPTO_MANAGER
   - Add delayed freeing of driver crypto_alg structures

  Compression:
   - Allocate large buffers on first use instead of initialisation in scomp
   - Drop destination linearisation buffer in scomp
   - Move scomp stream allocation into acomp
   - Add acomp scatter-gather walker
   - Remove request chaining
   - Add optional async request allocation

  Hashing:
   - Remove request chaining
   - Add optional async request allocation
   - Move partial block handling into API
   - Add ahash support to hmac
   - Fix shash documentation to disallow usage in hard IRQs

  Algorithms:
   - Remove unnecessary SIMD fallback code on x86 and arm/arm64
   - Drop avx10_256 xts(aes)/ctr(aes) on x86
   - Improve avx-512 optimisations for xts(aes)
   - Move chacha arch implementations into lib/crypto
   - Move poly1305 into lib/crypto and drop unused Crypto API algorithm
   - Disable powerpc/poly1305 as it has no SIMD fallback
   - Move sha256 arch implementations into lib/crypto
   - Convert deflate to acomp
   - Set block size correctly in cbcmac

  Drivers:
   - Do not use sg_dma_len before mapping in sun8i-ss
   - Fix warm-reboot failure by making shutdown do more work in qat
   - Add locking in zynqmp-sha
   - Remove cavium/zip
   - Add support for PCI device 0x17D8 to ccp
   - Add qat_6xxx support in qat
   - Add support for RK3576 in rockchip-rng
   - Add support for i.MX8QM in caam

  Others:
   - Fix irq_fpu_usable/kernel_fpu_begin inconsistency during CPU bring-up
   - Add new SEV/SNP platform shutdown API in ccp"

* tag 'v6.16-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (382 commits)
  x86/fpu: Fix irq_fpu_usable() to return false during CPU onlining
  crypto: qat - add missing header inclusion
  crypto: api - Redo lookup on EEXIST
  Revert "crypto: testmgr - Add hash export format testing"
  crypto: marvell/cesa - Do not chain submitted requests
  crypto: powerpc/poly1305 - add depends on BROKEN for now
  Revert "crypto: powerpc/poly1305 - Add SIMD fallback"
  crypto: ccp - Add missing tee info reg for teev2
  crypto: ccp - Add missing bootloader info reg for pspv5
  crypto: sun8i-ce - move fallback ahash_request to the end of the struct
  crypto: octeontx2 - Use dynamic allocated memory region for lmtst
  crypto: octeontx2 - Initialize cptlfs device info once
  crypto: xts - Only add ecb if it is not already there
  crypto: lrw - Only add ecb if it is not already there
  crypto: testmgr - Add hash export format testing
  crypto: testmgr - Use ahash for generic tfm
  crypto: hmac - Add ahash support
  crypto: testmgr - Ignore EEXIST on shash allocation
  crypto: algapi - Add driver template support to crypto_inst_setname
  crypto: shash - Set reqsize in shash_alg
  ...
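Among the hashing changes above, "Move sha256 arch implementations into lib/crypto" means kernel code can compute a digest through the library interface without allocating a Crypto API transform; the file below is the Crypto API wrapper over those library functions. A minimal sketch of the one-shot library call (the function and buffer names here are illustrative, not part of the merge):

	#include <crypto/sha2.h>	/* sha256(), SHA256_DIGEST_SIZE */

	static void demo_sha256(const u8 *data, unsigned int len)
	{
		u8 digest[SHA256_DIGEST_SIZE];

		/* One-shot: init, update and final in a single call. */
		sha256(data, len, digest);
	}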
Diffstat (limited to 'crypto/sha256.c')
-rw-r--r--  crypto/sha256.c  |  283
1 file changed, 283 insertions(+), 0 deletions(-)
diff --git a/crypto/sha256.c b/crypto/sha256.c
new file mode 100644
index 00000000000000..4aeb213bab1173
--- /dev/null
+++ b/crypto/sha256.c
@@ -0,0 +1,283 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * Crypto API wrapper for the SHA-256 and SHA-224 library functions
+ *
+ * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
+ * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
+ * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
+ */
+#include <crypto/internal/hash.h>
+#include <crypto/internal/sha2.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+
+const u8 sha224_zero_message_hash[SHA224_DIGEST_SIZE] = {
+ 0xd1, 0x4a, 0x02, 0x8c, 0x2a, 0x3a, 0x2b, 0xc9, 0x47,
+ 0x61, 0x02, 0xbb, 0x28, 0x82, 0x34, 0xc4, 0x15, 0xa2,
+ 0xb0, 0x1f, 0x82, 0x8e, 0xa6, 0x2a, 0xc5, 0xb3, 0xe4,
+ 0x2f
+};
+EXPORT_SYMBOL_GPL(sha224_zero_message_hash);
+
+const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
+ 0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
+ 0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
+ 0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
+ 0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
+};
+EXPORT_SYMBOL_GPL(sha256_zero_message_hash);
+
+static int crypto_sha256_init(struct shash_desc *desc)
+{
+ sha256_block_init(shash_desc_ctx(desc));
+ return 0;
+}
+
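+/*
+ * Consume all full blocks of @data and return the number of leftover
+ * bytes.  With CRYPTO_AHASH_ALG_BLOCK_ONLY set, the shash core buffers
+ * the partial block, so update only ever hashes whole blocks and hands
+ * the tail back to the API.
+ */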
+static inline int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
+ unsigned int len, bool force_generic)
+{
+ struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
+ int remain = len % SHA256_BLOCK_SIZE;
+
+ sctx->count += len - remain;
+ sha256_choose_blocks(sctx->state, data, len / SHA256_BLOCK_SIZE,
+ force_generic, !force_generic);
+ return remain;
+}
+
+static int crypto_sha256_update_generic(struct shash_desc *desc, const u8 *data,
+ unsigned int len)
+{
+ return crypto_sha256_update(desc, data, len, true);
+}
+
+static int crypto_sha256_update_lib(struct shash_desc *desc, const u8 *data,
+ unsigned int len)
+{
+ sha256_update(shash_desc_ctx(desc), data, len);
+ return 0;
+}
+
+static int crypto_sha256_update_arch(struct shash_desc *desc, const u8 *data,
+ unsigned int len)
+{
+ return crypto_sha256_update(desc, data, len, false);
+}
+
+static int crypto_sha256_final_lib(struct shash_desc *desc, u8 *out)
+{
+ sha256_final(shash_desc_ctx(desc), out);
+ return 0;
+}
+
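+/*
+ * Hash any remaining full blocks, then copy the trailing partial block
+ * into the buffer that follows the state in the descriptor context
+ * (sctx + 1) and let sha256_finup() pad and finish it there.
+ */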
+static __always_inline int crypto_sha256_finup(struct shash_desc *desc,
+ const u8 *data,
+ unsigned int len, u8 *out,
+ bool force_generic)
+{
+ struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
+ unsigned int remain = len;
+ u8 *buf;
+
+ if (len >= SHA256_BLOCK_SIZE)
+ remain = crypto_sha256_update(desc, data, len, force_generic);
+ sctx->count += remain;
+ buf = memcpy(sctx + 1, data + len - remain, remain);
+ sha256_finup(sctx, buf, remain, out,
+ crypto_shash_digestsize(desc->tfm), force_generic,
+ !force_generic);
+ return 0;
+}
+
+static int crypto_sha256_finup_generic(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ return crypto_sha256_finup(desc, data, len, out, true);
+}
+
+static int crypto_sha256_finup_arch(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ return crypto_sha256_finup(desc, data, len, out, false);
+}
+
+static int crypto_sha256_digest_generic(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ crypto_sha256_init(desc);
+ return crypto_sha256_finup_generic(desc, data, len, out);
+}
+
+static int crypto_sha256_digest_lib(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ sha256(data, len, out);
+ return 0;
+}
+
+static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ crypto_sha256_init(desc);
+ return crypto_sha256_finup_arch(desc, data, len, out);
+}
+
+static int crypto_sha224_init(struct shash_desc *desc)
+{
+ sha224_block_init(shash_desc_ctx(desc));
+ return 0;
+}
+
+static int crypto_sha224_final_lib(struct shash_desc *desc, u8 *out)
+{
+ sha224_final(shash_desc_ctx(desc), out);
+ return 0;
+}
+
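+/*
+ * The lib variants export a struct sha256_state with the count rounded
+ * down to a block boundary, followed by one byte holding the length of
+ * the partial block; import reverses this layout.
+ */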
+static int crypto_sha256_import_lib(struct shash_desc *desc, const void *in)
+{
+ struct sha256_state *sctx = shash_desc_ctx(desc);
+ const u8 *p = in;
+
+ memcpy(sctx, p, sizeof(*sctx));
+ p += sizeof(*sctx);
+ sctx->count += *p;
+ return 0;
+}
+
+static int crypto_sha256_export_lib(struct shash_desc *desc, void *out)
+{
+ struct sha256_state *sctx0 = shash_desc_ctx(desc);
+ struct sha256_state sctx = *sctx0;
+ unsigned int partial;
+ u8 *p = out;
+
+ partial = sctx.count % SHA256_BLOCK_SIZE;
+ sctx.count -= partial;
+ memcpy(p, &sctx, sizeof(sctx));
+ p += sizeof(sctx);
+ *p = partial;
+ return 0;
+}
+
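+/*
+ * Each algorithm comes in three flavours: a force-generic version, a
+ * thin wrapper around the library entry points, and an arch-optimized
+ * version that is registered only when it differs from the generic
+ * code (see crypto_sha256_mod_init() below).
+ */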
+static struct shash_alg algs[] = {
+ {
+ .base.cra_name = "sha256",
+ .base.cra_driver_name = "sha256-generic",
+ .base.cra_priority = 100,
+ .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
+ CRYPTO_AHASH_ALG_FINUP_MAX,
+ .base.cra_blocksize = SHA256_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+ .digestsize = SHA256_DIGEST_SIZE,
+ .init = crypto_sha256_init,
+ .update = crypto_sha256_update_generic,
+ .finup = crypto_sha256_finup_generic,
+ .digest = crypto_sha256_digest_generic,
+ .descsize = sizeof(struct crypto_sha256_state),
+ },
+ {
+ .base.cra_name = "sha224",
+ .base.cra_driver_name = "sha224-generic",
+ .base.cra_priority = 100,
+ .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
+ CRYPTO_AHASH_ALG_FINUP_MAX,
+ .base.cra_blocksize = SHA224_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+ .digestsize = SHA224_DIGEST_SIZE,
+ .init = crypto_sha224_init,
+ .update = crypto_sha256_update_generic,
+ .finup = crypto_sha256_finup_generic,
+ .descsize = sizeof(struct crypto_sha256_state),
+ },
+ {
+ .base.cra_name = "sha256",
+ .base.cra_driver_name = "sha256-lib",
+ .base.cra_blocksize = SHA256_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+ .digestsize = SHA256_DIGEST_SIZE,
+ .init = crypto_sha256_init,
+ .update = crypto_sha256_update_lib,
+ .final = crypto_sha256_final_lib,
+ .digest = crypto_sha256_digest_lib,
+ .descsize = sizeof(struct sha256_state),
+ .statesize = sizeof(struct crypto_sha256_state) +
+ SHA256_BLOCK_SIZE + 1,
+ .import = crypto_sha256_import_lib,
+ .export = crypto_sha256_export_lib,
+ },
+ {
+ .base.cra_name = "sha224",
+ .base.cra_driver_name = "sha224-lib",
+ .base.cra_blocksize = SHA224_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+ .digestsize = SHA224_DIGEST_SIZE,
+ .init = crypto_sha224_init,
+ .update = crypto_sha256_update_lib,
+ .final = crypto_sha224_final_lib,
+ .descsize = sizeof(struct sha256_state),
+ .statesize = sizeof(struct crypto_sha256_state) +
+ SHA256_BLOCK_SIZE + 1,
+ .import = crypto_sha256_import_lib,
+ .export = crypto_sha256_export_lib,
+ },
+ {
+ .base.cra_name = "sha256",
+ .base.cra_driver_name = "sha256-" __stringify(ARCH),
+ .base.cra_priority = 300,
+ .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
+ CRYPTO_AHASH_ALG_FINUP_MAX,
+ .base.cra_blocksize = SHA256_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+ .digestsize = SHA256_DIGEST_SIZE,
+ .init = crypto_sha256_init,
+ .update = crypto_sha256_update_arch,
+ .finup = crypto_sha256_finup_arch,
+ .digest = crypto_sha256_digest_arch,
+ .descsize = sizeof(struct crypto_sha256_state),
+ },
+ {
+ .base.cra_name = "sha224",
+ .base.cra_driver_name = "sha224-" __stringify(ARCH),
+ .base.cra_priority = 300,
+ .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY |
+ CRYPTO_AHASH_ALG_FINUP_MAX,
+ .base.cra_blocksize = SHA224_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+ .digestsize = SHA224_DIGEST_SIZE,
+ .init = crypto_sha224_init,
+ .update = crypto_sha256_update_arch,
+ .finup = crypto_sha256_finup_arch,
+ .descsize = sizeof(struct crypto_sha256_state),
+ },
+};
+
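+/* Number of algs[] entries actually registered, reused at unload time. */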
+static unsigned int num_algs;
+
+static int __init crypto_sha256_mod_init(void)
+{
+ /* register the arch flavours only if they differ from generic */
+ num_algs = ARRAY_SIZE(algs);
+ BUILD_BUG_ON(ARRAY_SIZE(algs) <= 2);
+ if (!sha256_is_arch_optimized())
+ num_algs -= 2;
+ return crypto_register_shashes(algs, num_algs);
+}
+module_init(crypto_sha256_mod_init);
+
+static void __exit crypto_sha256_mod_exit(void)
+{
+ crypto_unregister_shashes(algs, num_algs);
+}
+module_exit(crypto_sha256_mod_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Crypto API wrapper for the SHA-256 and SHA-224 library functions");
+
+MODULE_ALIAS_CRYPTO("sha256");
+MODULE_ALIAS_CRYPTO("sha256-generic");
+MODULE_ALIAS_CRYPTO("sha256-" __stringify(ARCH));
+MODULE_ALIAS_CRYPTO("sha224");
+MODULE_ALIAS_CRYPTO("sha224-generic");
+MODULE_ALIAS_CRYPTO("sha224-" __stringify(ARCH));