Message-Id: <20240213101356.460376-1-arnd@kernel.org>
Date: Tue, 13 Feb 2024 11:13:44 +0100
From: Arnd Bergmann <arnd@...nel.org>
To: Herbert Xu <herbert@...dor.apana.org.au>,
"David S. Miller" <davem@...emloft.net>,
Russell King <linux@...linux.org.uk>,
Ard Biesheuvel <ardb@...nel.org>
Cc: Arnd Bergmann <arnd@...db.de>,
Nathan Chancellor <nathan@...nel.org>,
Nick Desaulniers <ndesaulniers@...gle.com>,
Bill Wendling <morbo@...gle.com>,
Justin Stitt <justinstitt@...gle.com>,
Jussi Kivilinna <jussi.kivilinna@....fi>,
linux-crypto@...r.kernel.org,
linux-arm-kernel@...ts.infradead.org,
linux-kernel@...r.kernel.org,
llvm@...ts.linux.dev
Subject: [PATCH] ARM: crypto: fix function cast warnings

From: Arnd Bergmann <arnd@...db.de>

clang-16 warns about casting between incompatible function types:

arch/arm/crypto/sha256_glue.c:37:5: error: cast from 'void (*)(u32 *, const void *, unsigned int)' (aka 'void (*)(unsigned int *, const void *, unsigned int)') to 'sha256_block_fn *' (aka 'void (*)(struct sha256_state *, const unsigned char *, int)') converts to incompatible function type [-Werror,-Wcast-function-type-strict]
   37 |                     (sha256_block_fn *)sha256_block_data_order);
      |                     ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
arch/arm/crypto/sha512-glue.c:34:3: error: cast from 'void (*)(u64 *, const u8 *, int)' (aka 'void (*)(unsigned long long *, const unsigned char *, int)') to 'sha512_block_fn *' (aka 'void (*)(struct sha512_state *, const unsigned char *, int)') converts to incompatible function type [-Werror,-Wcast-function-type-strict]
   34 |                   (sha512_block_fn *)sha512_block_data_order);
      |                   ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
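
For reference, the warning is easy to reproduce outside the kernel. Here is a
minimal standalone sketch (identifiers are illustrative, not from the kernel
tree), built with "clang -c -Werror -Wcast-function-type-strict repro.c":

  /* repro.c: the prototype the hash framework expects takes a state
   * struct, while the assembly routine takes a plain word pointer */
  struct state { unsigned int h[8]; };

  typedef void block_fn(struct state *st, const unsigned char *src, int blocks);

  void block_data_order(unsigned int *digest, const void *data,
                        unsigned int num_blks);

  /* the parameter types differ, so clang-16 rejects this cast under
   * -Wcast-function-type-strict even though the calling convention
   * happens to be identical on the target */
  block_fn *fn = (block_fn *)block_data_order;
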
Rework the sha256/sha512 code to instead go through a trivial helper
function to preserve the calling conventions.
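
The wrapper approach works because struct sha256_state (and likewise
struct sha512_state) stores the digest words in its first member, which the
BUILD_BUG_ON(offsetof(...) != 0) check in the sha256 code asserts, so only an
ordinary object-pointer cast is needed inside the helper. Sketched with the
simplified types from the example above (again illustrative, not the kernel
code):

  /* same prototype as block_fn, so it can be passed to the base layer
   * without any function-pointer cast; casting struct state * to
   * unsigned int * is valid because the digest words sit at offset 0 */
  static void block_data_order_wrapper(struct state *st,
                                       const unsigned char *src, int blocks)
  {
      block_data_order((unsigned int *)st, src, blocks);
  }
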
Fixes: c80ae7ca3726 ("crypto: arm/sha512 - accelerated SHA-512 using ARM generic ASM and NEON")
Fixes: b59e2ae3690c ("crypto: arm/sha256 - move SHA-224/256 ASM/NEON implementation to base layer")
Signed-off-by: Arnd Bergmann <arnd@...db.de>
---
 arch/arm/crypto/sha256_glue.c | 18 ++++++++++--------
 arch/arm/crypto/sha512-glue.c | 11 ++++++++---
 2 files changed, 18 insertions(+), 11 deletions(-)

diff --git a/arch/arm/crypto/sha256_glue.c b/arch/arm/crypto/sha256_glue.c
index 433ee4ddce6c..d80448d96ab3 100644
--- a/arch/arm/crypto/sha256_glue.c
+++ b/arch/arm/crypto/sha256_glue.c
@@ -27,29 +27,31 @@
 asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
 					unsigned int num_blks);
 
-int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
-			     unsigned int len)
+static void sha256_block_data_order_wrapper(struct sha256_state *sst, u8 const *src, int blocks)
 {
 	/* make sure casting to sha256_block_fn() is safe */
 	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
 
-	return sha256_base_do_update(desc, data, len,
-				(sha256_block_fn *)sha256_block_data_order);
+	return sha256_block_data_order((u32 *)sst, src, blocks);
+}
+
+int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
+			     unsigned int len)
+{
+	return sha256_base_do_update(desc, data, len, sha256_block_data_order_wrapper);
 }
 EXPORT_SYMBOL(crypto_sha256_arm_update);
 
 static int crypto_sha256_arm_final(struct shash_desc *desc, u8 *out)
 {
-	sha256_base_do_finalize(desc,
-				(sha256_block_fn *)sha256_block_data_order);
+	sha256_base_do_finalize(desc, sha256_block_data_order_wrapper);
 	return sha256_base_finish(desc, out);
 }
 
 int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
 			    unsigned int len, u8 *out)
 {
-	sha256_base_do_update(desc, data, len,
-			      (sha256_block_fn *)sha256_block_data_order);
+	sha256_base_do_update(desc, data, len, sha256_block_data_order_wrapper);
 	return crypto_sha256_arm_final(desc, out);
 }
 EXPORT_SYMBOL(crypto_sha256_arm_finup);
diff --git a/arch/arm/crypto/sha512-glue.c b/arch/arm/crypto/sha512-glue.c
index 0635a65aa488..1b2c9c0c8a5f 100644
--- a/arch/arm/crypto/sha512-glue.c
+++ b/arch/arm/crypto/sha512-glue.c
@@ -27,17 +27,22 @@ MODULE_ALIAS_CRYPTO("sha512-arm");
 
 asmlinkage void sha512_block_data_order(u64 *state, u8 const *src, int blocks);
 
+static void sha512_block_data_order_wrapper(struct sha512_state *sst, u8 const *src, int blocks)
+{
+	return sha512_block_data_order((u64 *)sst, src, blocks);
+}
+
 int sha512_arm_update(struct shash_desc *desc, const u8 *data,
 		      unsigned int len)
 {
 	return sha512_base_do_update(desc, data, len,
-			(sha512_block_fn *)sha512_block_data_order);
+			sha512_block_data_order_wrapper);
 }
 
 static int sha512_arm_final(struct shash_desc *desc, u8 *out)
 {
 	sha512_base_do_finalize(desc,
-			(sha512_block_fn *)sha512_block_data_order);
+			sha512_block_data_order_wrapper);
 	return sha512_base_finish(desc, out);
 }
 
@@ -45,7 +50,7 @@ int sha512_arm_finup(struct shash_desc *desc, const u8 *data,
 		     unsigned int len, u8 *out)
 {
 	sha512_base_do_update(desc, data, len,
-			(sha512_block_fn *)sha512_block_data_order);
+			sha512_block_data_order_wrapper);
 	return sha512_arm_final(desc, out);
 }
 
--
2.39.2