Message-ID: <20250517022428.401622-8-ebiggers@kernel.org>
Date: Fri, 16 May 2025 19:24:27 -0700
From: Eric Biggers <ebiggers@...nel.org>
To: linux-crypto@...r.kernel.org
Cc: linux-kernel@...r.kernel.org
Subject: [PATCH 7/8] Revert "crypto: sha256 - Use the partial block API for generic"

From: Eric Biggers <ebiggers@...gle.com>

This reverts commit ff8f037d394f0900597ba527388a6eb95cd02695, which got
pushed out despite being nacked.

The library API already has to handle partial blocks, and it makes a lot
more sense to just use that.
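
For reference, the library API in question is the plain sha256_init() /
sha256_update() / sha256_final() interface from <crypto/sha2.h> (see the
sha2.h hunk below).  A minimal, illustrative sketch of a caller (the
helper name and buffers are made up for illustration, not part of this
patch) could look like:

	#include <crypto/sha2.h>

	/*
	 * Illustrative only: hash an arbitrary-length buffer.  The
	 * library tracks any partial trailing block internally, so len
	 * does not need to be a multiple of SHA256_BLOCK_SIZE.
	 */
	static void example_hash(const u8 *msg, size_t len,
				 u8 digest[SHA256_DIGEST_SIZE])
	{
		struct sha256_state sctx;

		sha256_init(&sctx);
		sha256_update(&sctx, msg, len);
		sha256_final(&sctx, digest);
	}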

Keep sha256_block_init() since drivers/crypto/padlock-sha.c is using it.

Signed-off-by: Eric Biggers <ebiggers@...gle.com>
---
 crypto/sha256.c       | 79 ++++++++++++++++++++-----------------------
 include/crypto/sha2.h |  7 +---
 2 files changed, 38 insertions(+), 48 deletions(-)

diff --git a/crypto/sha256.c b/crypto/sha256.c
index cf190114574ea..47ad7e4cc55f7 100644
--- a/crypto/sha256.c
+++ b/crypto/sha256.c
@@ -28,68 +28,48 @@ const u8 sha256_zero_message_hash[SHA256_DIGEST_SIZE] = {
 };
 EXPORT_SYMBOL_GPL(sha256_zero_message_hash);
 
 static int crypto_sha256_init(struct shash_desc *desc)
 {
-	sha256_block_init(shash_desc_ctx(desc));
+	sha256_init(shash_desc_ctx(desc));
 	return 0;
 }
 
-static inline int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
-				       unsigned int len, bool force_generic)
-{
-	struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
-	int remain = len % SHA256_BLOCK_SIZE;
-
-	sctx->count += len - remain;
-	sha256_choose_blocks(sctx->state, data, len / SHA256_BLOCK_SIZE,
-			     force_generic, !force_generic);
-	return remain;
-}
-
 static int crypto_sha256_update_generic(struct shash_desc *desc, const u8 *data,
 					unsigned int len)
 {
-	return crypto_sha256_update(desc, data, len, true);
+	sha256_update_generic(shash_desc_ctx(desc), data, len);
+	return 0;
 }
 
 static int crypto_sha256_update_arch(struct shash_desc *desc, const u8 *data,
 				     unsigned int len)
 {
 	sha256_update(shash_desc_ctx(desc), data, len);
 	return 0;
 }
 
-static int crypto_sha256_final_arch(struct shash_desc *desc, u8 *out)
+static int crypto_sha256_final_generic(struct shash_desc *desc, u8 *out)
 {
-	sha256_final(shash_desc_ctx(desc), out);
+	sha256_final_generic(shash_desc_ctx(desc), out);
 	return 0;
 }
 
-static __always_inline int crypto_sha256_finup(struct shash_desc *desc,
-					       const u8 *data,
-					       unsigned int len, u8 *out,
-					       bool force_generic)
+static int crypto_sha256_final_arch(struct shash_desc *desc, u8 *out)
 {
-	struct crypto_sha256_state *sctx = shash_desc_ctx(desc);
-	unsigned int remain = len;
-	u8 *buf;
-
-	if (len >= SHA256_BLOCK_SIZE)
-		remain = crypto_sha256_update(desc, data, len, force_generic);
-	sctx->count += remain;
-	buf = memcpy(sctx + 1, data + len - remain, remain);
-	sha256_finup(sctx, buf, remain, out,
-		     crypto_shash_digestsize(desc->tfm), force_generic,
-		     !force_generic);
+	sha256_final(shash_desc_ctx(desc), out);
 	return 0;
 }
 
 static int crypto_sha256_finup_generic(struct shash_desc *desc, const u8 *data,
 				       unsigned int len, u8 *out)
 {
-	return crypto_sha256_finup(desc, data, len, out, true);
+	struct sha256_state *sctx = shash_desc_ctx(desc);
+
+	sha256_update_generic(sctx, data, len);
+	sha256_final_generic(sctx, out);
+	return 0;
 }
 
 static int crypto_sha256_finup_arch(struct shash_desc *desc, const u8 *data,
 				    unsigned int len, u8 *out)
 {
@@ -101,12 +81,16 @@ static int crypto_sha256_finup_arch(struct shash_desc *desc, const u8 *data,
 }
 
 static int crypto_sha256_digest_generic(struct shash_desc *desc, const u8 *data,
 					unsigned int len, u8 *out)
 {
-	crypto_sha256_init(desc);
-	return crypto_sha256_finup_generic(desc, data, len, out);
+	struct sha256_state *sctx = shash_desc_ctx(desc);
+
+	sha256_init(sctx);
+	sha256_update_generic(sctx, data, len);
+	sha256_final_generic(sctx, out);
+	return 0;
 }
 
 static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data,
 				     unsigned int len, u8 *out)
 {
@@ -114,11 +98,17 @@ static int crypto_sha256_digest_arch(struct shash_desc *desc, const u8 *data,
 	return 0;
 }
 
 static int crypto_sha224_init(struct shash_desc *desc)
 {
-	sha224_block_init(shash_desc_ctx(desc));
+	sha224_init(shash_desc_ctx(desc));
+	return 0;
+}
+
+static int crypto_sha224_final_generic(struct shash_desc *desc, u8 *out)
+{
+	sha224_final_generic(shash_desc_ctx(desc), out);
 	return 0;
 }
 
 static int crypto_sha224_final_arch(struct shash_desc *desc, u8 *out)
 {
@@ -155,34 +145,39 @@ static int crypto_sha256_export_lib(struct shash_desc *desc, void *out)
 static struct shash_alg algs[] = {
 	{
 		.base.cra_name		= "sha256",
 		.base.cra_driver_name	= "sha256-generic",
 		.base.cra_priority	= 100,
-		.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
-					  CRYPTO_AHASH_ALG_FINUP_MAX,
 		.base.cra_blocksize	= SHA256_BLOCK_SIZE,
 		.base.cra_module	= THIS_MODULE,
 		.digestsize		= SHA256_DIGEST_SIZE,
 		.init			= crypto_sha256_init,
 		.update			= crypto_sha256_update_generic,
+		.final			= crypto_sha256_final_generic,
 		.finup			= crypto_sha256_finup_generic,
 		.digest			= crypto_sha256_digest_generic,
-		.descsize		= sizeof(struct crypto_sha256_state),
+		.descsize		= sizeof(struct sha256_state),
+		.statesize		= sizeof(struct crypto_sha256_state) +
+					  SHA256_BLOCK_SIZE + 1,
+		.import			= crypto_sha256_import_lib,
+		.export			= crypto_sha256_export_lib,
 	},
 	{
 		.base.cra_name		= "sha224",
 		.base.cra_driver_name	= "sha224-generic",
 		.base.cra_priority	= 100,
-		.base.cra_flags		= CRYPTO_AHASH_ALG_BLOCK_ONLY |
-					  CRYPTO_AHASH_ALG_FINUP_MAX,
 		.base.cra_blocksize	= SHA224_BLOCK_SIZE,
 		.base.cra_module	= THIS_MODULE,
 		.digestsize		= SHA224_DIGEST_SIZE,
 		.init			= crypto_sha224_init,
 		.update			= crypto_sha256_update_generic,
-		.finup			= crypto_sha256_finup_generic,
-		.descsize		= sizeof(struct crypto_sha256_state),
+		.final			= crypto_sha224_final_generic,
+		.descsize		= sizeof(struct sha256_state),
+		.statesize		= sizeof(struct crypto_sha256_state) +
+					  SHA256_BLOCK_SIZE + 1,
+		.import			= crypto_sha256_import_lib,
+		.export			= crypto_sha256_export_lib,
 	},
 	{
 		.base.cra_name		= "sha256",
 		.base.cra_driver_name	= "sha256-" __stringify(ARCH),
 		.base.cra_priority	= 300,
diff --git a/include/crypto/sha2.h b/include/crypto/sha2.h
index 4912572578dc2..f2df3bb90d11a 100644
--- a/include/crypto/sha2.h
+++ b/include/crypto/sha2.h
@@ -107,11 +107,11 @@ static inline void sha256_init(struct sha256_state *sctx)
 }
 void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len);
 void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE]);
 void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE]);
 
-static inline void sha224_block_init(struct crypto_sha256_state *sctx)
+static inline void sha224_init(struct sha256_state *sctx)
 {
 	sctx->state[0] = SHA224_H0;
 	sctx->state[1] = SHA224_H1;
 	sctx->state[2] = SHA224_H2;
 	sctx->state[3] = SHA224_H3;
@@ -119,14 +119,9 @@ static inline void sha224_block_init(struct crypto_sha256_state *sctx)
 	sctx->state[5] = SHA224_H5;
 	sctx->state[6] = SHA224_H6;
 	sctx->state[7] = SHA224_H7;
 	sctx->count = 0;
 }
-
-static inline void sha224_init(struct sha256_state *sctx)
-{
-	sha224_block_init(&sctx->ctx);
-}
 /* Simply use sha256_update as it is equivalent to sha224_update. */
 void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE]);
 
 #endif /* _CRYPTO_SHA2_H */
-- 
2.49.0

