[<prev] [next>] [<thread-prev] [thread-next>] [day] [month] [year] [list]
Message-ID: <20250623131830.2194643-3-stefanb@linux.ibm.com>
Date: Mon, 23 Jun 2025 09:18:28 -0400
From: Stefan Berger <stefanb@...ux.ibm.com>
To: linux-crypto@...r.kernel.org, herbert@...dor.apana.org.au,
davem@...emloft.net
Cc: linux-kernel@...r.kernel.org, James.Bottomley@...senPartnership.com,
dhowells@...hat.com, simo@...hat.com,
Stefan Berger <stefanb@...ux.ibm.com>
Subject: [RFC PATCH 2/4] crypto: Add shake128/256 to generic sha3 module
Extend the sha3 module with shake128 & shake256. For this, implement
functions to get (squeeze) a number of bytes or blocks from the keccak
sponge. A block here corresponds to the number of bytes available in a
buffer following a keccak permutation. On top of this functionality,
implement the general squeeze function that returns a requested number of
bytes to the user. Implement the 'final' function on top of the squeeze
function. The 'final' function will always request a fixed number of bytes
from the squeeze function and set the 'final' parameter to true, clearing
the state of the hash as usual.
Adjust the maximum hash descriptor and block sizes due to shake128.
Extend the arrays for supported hashes with entries for shake128 and
shake256.
Signed-off-by: Stefan Berger <stefanb@...ux.ibm.com>
---
crypto/hash_info.c | 4 +
crypto/sha3_generic.c | 211 +++++++++++++++++++++++++++++++++
include/crypto/algapi.h | 2 +-
include/crypto/hash.h | 5 +-
include/crypto/sha3.h | 19 +++
include/uapi/linux/hash_info.h | 2 +
6 files changed, 239 insertions(+), 4 deletions(-)
diff --git a/crypto/hash_info.c b/crypto/hash_info.c
index 9a467638c971..2e426be89463 100644
--- a/crypto/hash_info.c
+++ b/crypto/hash_info.c
@@ -32,6 +32,8 @@ const char *const hash_algo_name[HASH_ALGO__LAST] = {
[HASH_ALGO_SHA3_256] = "sha3-256",
[HASH_ALGO_SHA3_384] = "sha3-384",
[HASH_ALGO_SHA3_512] = "sha3-512",
+ [HASH_ALGO_SHAKE128] = "shake128",
+ [HASH_ALGO_SHAKE256] = "shake256",
};
EXPORT_SYMBOL_GPL(hash_algo_name);
@@ -59,5 +61,7 @@ const int hash_digest_size[HASH_ALGO__LAST] = {
[HASH_ALGO_SHA3_256] = SHA3_256_DIGEST_SIZE,
[HASH_ALGO_SHA3_384] = SHA3_384_DIGEST_SIZE,
[HASH_ALGO_SHA3_512] = SHA3_512_DIGEST_SIZE,
+ [HASH_ALGO_SHAKE128] = SHAKE128_DIGEST_SIZE,
+ [HASH_ALGO_SHAKE256] = SHAKE256_DIGEST_SIZE,
};
EXPORT_SYMBOL_GPL(hash_digest_size);
diff --git a/crypto/sha3_generic.c b/crypto/sha3_generic.c
index b103642b56ea..4782303527fe 100644
--- a/crypto/sha3_generic.c
+++ b/crypto/sha3_generic.c
@@ -29,6 +29,8 @@
#define SHA3_INLINE noinline
#endif
+#define DOMAIN_SEPARATOR_SHAKE 0x1F
+
#define KECCAK_ROUNDS 24
static const u64 keccakf_rndc[24] = {
@@ -237,6 +239,189 @@ int crypto_sha3_final(struct shash_desc *desc, u8 *out)
}
EXPORT_SYMBOL(crypto_sha3_final);
+/*
+ * crypto_shake_init - initialize the SHAKE sponge state
+ *
+ * The rate is derived from the algorithm's digest size using the
+ * c = 2 * d capacity convention: rsiz = 200 - 2 * digest_size bytes
+ * (168 for shake128, 136 for shake256, per the SHAKE*_BLOCK_SIZE
+ * definitions in <crypto/sha3.h>).
+ */
+static int crypto_shake_init(struct shash_desc *desc)
+{
+ struct shake_state *sctx = shash_desc_ctx(desc);
+ unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
+
+ sctx->rsiz = 200 - 2 * digest_size; /* rate in bytes */
+ sctx->rsizw = sctx->rsiz / 8; /* rate in 64-bit words */
+ sctx->partial = 0; /* no buffered input yet */
+ sctx->ridx = 0; /* squeeze read offset into current block */
+ sctx->finalized = false;
+ sctx->permute = false;
+
+ memset(sctx->st, 0, sizeof(sctx->st));
+ return 0;
+}
+
+/*
+ * crypto_shake_update - absorb message bytes into the sponge
+ *
+ * Input is buffered in sctx->buf until a full rate-sized block is
+ * available; full blocks are XORed into the state and permuted.
+ * Mirrors the absorb scheme of crypto_sha3_update in this file.
+ */
+static int crypto_shake_update(struct shash_desc *desc, const u8 *data,
+ unsigned int len)
+{
+ struct shake_state *sctx = shash_desc_ctx(desc);
+ unsigned int done;
+ const u8 *src;
+
+ done = 0;
+ src = data;
+
+ /* Only absorb if buffered + new input completes at least one block. */
+ if ((sctx->partial + len) > (sctx->rsiz - 1)) {
+ if (sctx->partial) {
+ /*
+ * Unsigned-wrap trick: done = -partial makes
+ * done + rsiz == rsiz - partial, the number of new
+ * bytes needed to top up the partial block, and
+ * makes the loop's "data + done" skip exactly the
+ * bytes already consumed from data.
+ */
+ done = -sctx->partial;
+ memcpy(sctx->buf + sctx->partial, data,
+ done + sctx->rsiz);
+ src = sctx->buf;
+ }
+
+ do {
+ unsigned int i;
+
+ /* XOR one rate-sized block into the state, then permute. */
+ for (i = 0; i < sctx->rsizw; i++)
+ sctx->st[i] ^= get_unaligned_le64(src + 8 * i);
+ keccakf(sctx->st);
+
+ done += sctx->rsiz;
+ src = data + done;
+ } while (done + (sctx->rsiz - 1) < len);
+
+ sctx->partial = 0;
+ }
+ /* Stash the (possibly empty) tail for the next update/finalize. */
+ memcpy(sctx->buf + sctx->partial, src, len - done);
+ sctx->partial += (len - done);
+
+ return 0;
+}
+
+/*
+ * crypto_shake_squeeze_bytes - copy up to one block of output bytes
+ * @sctx: sponge state
+ * @out:  in/out pointer to the destination; advanced past bytes written
+ * @n:    number of bytes to produce; must not cross a block boundary
+ *        (ridx + n <= rsiz)
+ *
+ * Emits output starting at byte offset ridx of the current keccak
+ * block: a byte-wise loop brings ridx up to 8-byte alignment (or
+ * handles a short tail), then whole 64-bit words are emitted
+ * little-endian. Wraps ridx to 0 and schedules a permutation when the
+ * block is exhausted.
+ */
+static void crypto_shake_squeeze_bytes(struct shake_state *sctx,
+ u8 **out, size_t n)
+{
+ size_t i, to_copy, loops;
+ __le64 *digest;
+ u8 *_out = *out;
+
+ if (n == 0)
+ return;
+
+ /* NOTE(review): prefer WARN_ON_ONCE + error return over BUG_ON here */
+ BUG_ON(sctx->ridx + n > sctx->rsiz);
+
+ /* Deferred permutation from a previous call that drained the block. */
+ if (sctx->permute) {
+ keccakf(sctx->st);
+ sctx->permute = false;
+ }
+
+ while (n) {
+ /* Either the whole (short) request, or bytes to alignment. */
+ to_copy = (n < 8) ? n : 8 - (sctx->ridx & 7);
+ if (to_copy < 8) {
+ /* Byte-wise extraction from the 64-bit state words. */
+ for (i = sctx->ridx; i < sctx->ridx + to_copy; i++)
+ *_out++ = sctx->st[i / 8] >> 8 * (i & 7);
+
+ sctx->ridx += to_copy;
+ n -= to_copy;
+ if (n == 0)
+ break;
+ }
+
+ /* ridx is now 8-byte aligned; emit whole words. */
+ BUG_ON((sctx->ridx & 7) != 0);
+ digest = (__le64 *)_out;
+ loops = n / 8;
+ for (i = sctx->ridx / 8; i < (sctx->ridx / 8) + loops; i++)
+ put_unaligned_le64(sctx->st[i], digest++);
+
+ sctx->ridx += 8 * loops;
+ n -= 8 * loops;
+ _out = (u8 *)digest;
+ }
+
+ /* Block exhausted: permute lazily before the next extraction. */
+ if (sctx->ridx == sctx->rsiz) {
+ sctx->ridx = 0;
+ sctx->permute = true;
+ }
+ *out = _out;
+}
+
+/*
+ * crypto_shake_squeeze_blocks - emit whole rate-sized output blocks
+ * @sctx:    sponge state; must be block-aligned (ridx == 0)
+ * @out:     in/out destination pointer; advanced past bytes written
+ * @nblocks: number of full blocks (rsiz bytes each) to produce
+ *
+ * The permutation is applied lazily before each block except the very
+ * first one after finalize/alignment (sctx->permute tracks whether the
+ * current state contents have already been emitted).
+ */
+static void crypto_shake_squeeze_blocks(struct shake_state *sctx,
+ u8 **out, size_t nblocks)
+{
+ __le64 *digest = (__le64 *)*out;
+ size_t i, j;
+
+ /* NOTE(review): prefer WARN_ON_ONCE + error return over BUG_ON here */
+ BUG_ON(sctx->ridx != 0);
+
+ for (i = 0; i < nblocks; i++) {
+ if (sctx->permute)
+ keccakf(sctx->st);
+ sctx->permute = true;
+
+ /* put_unaligned_le64 tolerates an unaligned destination. */
+ for (j = 0; j < sctx->rsiz / 8; j++)
+ put_unaligned_le64(sctx->st[j], digest++);
+ }
+ *out = (u8 *)digest;
+}
+
+/*
+ * crypto_shake_finalize - pad and absorb the final partial block
+ * @sctx:   sponge state
+ * @domsep: domain-separation byte appended after the message
+ *          (DOMAIN_SEPARATOR_SHAKE, 0x1F, for the SHAKE XOFs)
+ *
+ * Appends the domain separator, zero-pads to the rate, sets the final
+ * 0x80 padding bit in the last byte of the block, and XORs the padded
+ * block into the state. Idempotent: repeated calls are no-ops once
+ * finalized. The permutation itself is deferred to the first squeeze
+ * via sctx->permute.
+ */
+static void crypto_shake_finalize(struct shake_state *sctx,
+ u8 domsep)
+{
+ unsigned int inlen, i;
+
+ if (sctx->finalized)
+ return;
+
+ inlen = sctx->partial;
+ sctx->buf[inlen++] = domsep;
+ memset(sctx->buf + inlen, 0, sctx->rsiz - inlen);
+ sctx->buf[sctx->rsiz - 1] |= 0x80;
+
+ for (i = 0; i < sctx->rsizw; i++)
+ sctx->st[i] ^= get_unaligned_le64(sctx->buf + 8 * i);
+
+ sctx->finalized = true;
+ sctx->permute = true;
+}
+
+/*
+ * crypto_shake_squeeze - produce an arbitrary number of output bytes
+ * @desc:   shash request state
+ * @out:    destination buffer, at least @outlen bytes
+ * @outlen: number of bytes to squeeze
+ * @final:  if true, wipe the hash state after producing the output
+ *
+ * Finalizes the absorb phase on first use, then emits output in three
+ * steps: drain the remainder of a partially-consumed block, emit whole
+ * blocks, then the tail bytes. May be called repeatedly with
+ * final == false to extend the output stream.
+ *
+ * Always returns 0.
+ */
+static int crypto_shake_squeeze(struct shash_desc *desc,
+ u8 *out, size_t outlen,
+ bool final)
+{
+ struct shake_state *sctx = shash_desc_ctx(desc);
+ size_t nblocks, to_copy;
+
+ /* NOTE(review): guard is redundant, finalize already checks this. */
+ if (!sctx->finalized)
+ crypto_shake_finalize(sctx, DOMAIN_SEPARATOR_SHAKE);
+
+ /* Step 1: finish the block a previous squeeze left half-read. */
+ if (sctx->ridx > 0) {
+ /*
+ * NOTE(review): min() mixes size_t (outlen) with
+ * unsigned int (rsiz - ridx); min_t(size_t, ...) may be
+ * needed to satisfy the kernel's type-checked min().
+ */
+ to_copy = min(outlen, sctx->rsiz - sctx->ridx);
+
+ crypto_shake_squeeze_bytes(sctx, &out, to_copy);
+ outlen -= to_copy;
+ if (outlen == 0)
+ goto done;
+ }
+
+ /* Step 2: whole rate-sized blocks. */
+ nblocks = outlen / sctx->rsiz;
+ if (nblocks) {
+ crypto_shake_squeeze_blocks(sctx, &out, nblocks);
+ outlen -= nblocks * sctx->rsiz;
+ }
+
+ /* Step 3: remaining tail bytes (handles outlen == 0 internally). */
+ crypto_shake_squeeze_bytes(sctx, &out, outlen);
+
+done:
+ /*
+ * NOTE(review): consider memzero_explicit() so the wipe of the
+ * hash state cannot be optimized away.
+ */
+ if (final)
+ memset(sctx, 0, sizeof(*sctx));
+
+ return 0;
+}
+
+/*
+ * crypto_shake_final - shash .final hook for the SHAKE algorithms
+ *
+ * Produces the algorithm's fixed digest size (digestsize bytes) via
+ * the squeeze path and clears the state (final == true), giving SHAKE
+ * conventional fixed-length hash semantics.
+ */
+static int crypto_shake_final(struct shash_desc *desc, u8 *out)
+{
+ unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
+
+ crypto_shake_squeeze(desc, out, digest_size, true);
+
+ return 0;
+}
+
+
static struct shash_alg algs[] = { {
.digestsize = SHA3_224_DIGEST_SIZE,
.init = crypto_sha3_init,
@@ -277,6 +462,28 @@ static struct shash_alg algs[] = { {
.base.cra_driver_name = "sha3-512-generic",
.base.cra_blocksize = SHA3_512_BLOCK_SIZE,
.base.cra_module = THIS_MODULE,
+}, {
+ .digestsize = SHAKE128_DIGEST_SIZE,
+ .init = crypto_shake_init,
+ .update = crypto_shake_update,
+ .final = crypto_shake_final,
+ .squeeze = crypto_shake_squeeze,
+ .descsize = sizeof(struct shake_state),
+ .base.cra_name = "shake128",
+ .base.cra_driver_name = "shake128-generic",
+ .base.cra_blocksize = SHAKE128_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
+}, {
+ .digestsize = SHAKE256_DIGEST_SIZE,
+ .init = crypto_shake_init,
+ .update = crypto_shake_update,
+ .final = crypto_shake_final,
+ .squeeze = crypto_shake_squeeze,
+ .descsize = sizeof(struct shake_state),
+ .base.cra_name = "shake256",
+ .base.cra_driver_name = "shake256-generic",
+ .base.cra_blocksize = SHAKE256_BLOCK_SIZE,
+ .base.cra_module = THIS_MODULE,
} };
static int __init sha3_generic_mod_init(void)
@@ -303,3 +510,7 @@ MODULE_ALIAS_CRYPTO("sha3-384");
MODULE_ALIAS_CRYPTO("sha3-384-generic");
MODULE_ALIAS_CRYPTO("sha3-512");
MODULE_ALIAS_CRYPTO("sha3-512-generic");
+MODULE_ALIAS_CRYPTO("shake128");
+MODULE_ALIAS_CRYPTO("shake128-generic");
+MODULE_ALIAS_CRYPTO("shake256");
+MODULE_ALIAS_CRYPTO("shake256-generic");
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index 6e07bbc04089..be30f895fe7b 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -20,7 +20,7 @@
* static buffers that are big enough for any combination of
* algs and architectures. Ciphers have a lower maximum size.
*/
-#define MAX_ALGAPI_BLOCKSIZE 160
+#define MAX_ALGAPI_BLOCKSIZE 168 /* shake128 */
#define MAX_ALGAPI_ALIGNMASK 127
#define MAX_CIPHER_BLOCKSIZE 16
#define MAX_CIPHER_ALIGNMASK 15
diff --git a/include/crypto/hash.h b/include/crypto/hash.h
index 9072652e8e60..5d69c2d69b96 100644
--- a/include/crypto/hash.h
+++ b/include/crypto/hash.h
@@ -166,10 +166,9 @@ struct shash_desc {
#define HASH_MAX_DIGESTSIZE 64
/*
- * Worst case is hmac(sha3-224-generic). Its context is a nested 'shash_desc'
- * containing a 'struct sha3_state'.
+ * Worst case is shake128
*/
-#define HASH_MAX_DESCSIZE (sizeof(struct shash_desc) + 360)
+#define HASH_MAX_DESCSIZE (sizeof(struct shash_desc) + 384)
#define SHASH_DESC_ON_STACK(shash, ctx) \
char __##shash##_desc[sizeof(struct shash_desc) + HASH_MAX_DESCSIZE] \
diff --git a/include/crypto/sha3.h b/include/crypto/sha3.h
index 080f60c2e6b1..d99d2bfbd27f 100644
--- a/include/crypto/sha3.h
+++ b/include/crypto/sha3.h
@@ -31,4 +31,23 @@ int crypto_sha3_update(struct shash_desc *desc, const u8 *data,
unsigned int len);
int crypto_sha3_final(struct shash_desc *desc, u8 *out);
+
+#define SHAKE128_DIGEST_SIZE (128 / 8)
+#define SHAKE128_BLOCK_SIZE (200 - 2 * SHAKE128_DIGEST_SIZE)
+
+#define SHAKE256_DIGEST_SIZE (256 / 8)
+#define SHAKE256_BLOCK_SIZE (200 - 2 * SHAKE256_DIGEST_SIZE)
+
+/*
+ * Sponge state for the SHAKE128/SHAKE256 extendable-output functions.
+ *
+ * NOTE(review): buf[] is sized for the larger shake128 rate (168) so
+ * the struct is shared by both algorithms. Verify that
+ * sizeof(struct shake_state) (with padding) still fits the new
+ * HASH_MAX_DESCSIZE budget of 384 bytes in <crypto/hash.h>.
+ */
+struct shake_state {
+ u64 st[25]; /* 1600-bit keccak state */
+ unsigned int rsiz; /* rate in bytes: 200 - 2 * digest_size */
+ unsigned int rsizw; /* rate in 64-bit words */
+
+ unsigned int partial; /* bytes buffered in buf[] during absorb */
+ u8 buf[SHAKE128_BLOCK_SIZE]; /* absorb/pad staging buffer */
+ bool finalized; /* padding absorbed; no further update */
+ bool permute; /* keccakf() owed before next extraction */
+ unsigned int ridx; /* squeeze read offset in current block */
+};
+
#endif
diff --git a/include/uapi/linux/hash_info.h b/include/uapi/linux/hash_info.h
index 0af23ec196d8..97af74326d31 100644
--- a/include/uapi/linux/hash_info.h
+++ b/include/uapi/linux/hash_info.h
@@ -38,6 +38,8 @@ enum hash_algo {
HASH_ALGO_SHA3_256,
HASH_ALGO_SHA3_384,
HASH_ALGO_SHA3_512,
+ HASH_ALGO_SHAKE128,
+ HASH_ALGO_SHAKE256,
HASH_ALGO__LAST
};
--
2.49.0
Powered by blists - more mailing lists