Message-ID: <20241025191454.72616-13-ebiggers@kernel.org>
Date: Fri, 25 Oct 2024 12:14:48 -0700
From: Eric Biggers <ebiggers@...nel.org>
To: linux-kernel@...r.kernel.org
Cc: linux-arch@...r.kernel.org,
linux-arm-kernel@...ts.infradead.org,
linux-crypto@...r.kernel.org,
linux-ext4@...r.kernel.org,
linux-f2fs-devel@...ts.sourceforge.net,
linux-mips@...r.kernel.org,
linux-riscv@...ts.infradead.org,
linux-s390@...r.kernel.org,
linux-scsi@...r.kernel.org,
linuxppc-dev@...ts.ozlabs.org,
loongarch@...ts.linux.dev,
sparclinux@...r.kernel.org,
x86@...nel.org,
Ard Biesheuvel <ardb@...nel.org>
Subject: [PATCH v2 12/18] x86/crc32: update prototype for crc32_pclmul_le_16()
From: Eric Biggers <ebiggers@...gle.com>

- Change the len parameter from unsigned int to size_t, so that the
library function, which takes a size_t length, can safely use this code
(a brief usage sketch follows the "---" separator below).
- Move the crc parameter to the front, as this is the usual convention.
Reviewed-by: Ard Biesheuvel <ardb@...nel.org>
Signed-off-by: Eric Biggers <ebiggers@...gle.com>
---
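Not intended for the commit message; just an illustrative sketch of how a
size_t-based caller could use the updated prototype.  The helper name
crc32_le_sketch() is made up for illustration; only crc32_pclmul_le_16(),
crc32_le(), and kernel_fpu_begin()/kernel_fpu_end() are real interfaces:

#include <linux/crc32.h>
#include <asm/fpu/api.h>

u32 crc32_pclmul_le_16(u32 crc, const u8 *p, size_t len);

static u32 crc32_le_sketch(u32 crc, const u8 *p, size_t len)
{
	/*
	 * Assumes p is already 16-byte aligned, as the real glue code
	 * (crc32_pclmul_le) ensures before calling the assembly.
	 */
	size_t main_len = len & ~15UL;	/* asm wants a multiple of 16 */

	if (main_len >= 64) {		/* asm also wants >= 64 bytes */
		kernel_fpu_begin();
		crc = crc32_pclmul_le_16(crc, p, main_len);
		kernel_fpu_end();
		p += main_len;
		len -= main_len;
	}
	return crc32_le(crc, p, len);	/* generic code handles the tail */
}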
arch/x86/crypto/crc32-pclmul_asm.S | 19 +++++++++----------
arch/x86/crypto/crc32-pclmul_glue.c | 4 ++--
2 files changed, 11 insertions(+), 12 deletions(-)
diff --git a/arch/x86/crypto/crc32-pclmul_asm.S b/arch/x86/crypto/crc32-pclmul_asm.S
index 5d31137e2c7d..f9637789cac1 100644
--- a/arch/x86/crypto/crc32-pclmul_asm.S
+++ b/arch/x86/crypto/crc32-pclmul_asm.S
@@ -56,30 +56,29 @@
.octa 0x00000001F701164100000001DB710641
#define CONSTANT %xmm0
#ifdef __x86_64__
-#define BUF %rdi
-#define LEN %rsi
-#define CRC %edx
+#define CRC %edi
+#define BUF %rsi
+#define LEN %rdx
#else
-#define BUF %eax
-#define LEN %edx
-#define CRC %ecx
+#define CRC %eax
+#define BUF %edx
+#define LEN %ecx
#endif
.text
/**
* Calculate crc32
- * BUF - buffer (16 bytes aligned)
- * LEN - sizeof buffer (16 bytes aligned), LEN should be grater than 63
* CRC - initial crc32
+ * BUF - buffer (16 bytes aligned)
+ * LEN - sizeof buffer (16 bytes aligned), LEN should be greater than 63
* return %eax crc32
- * uint crc32_pclmul_le_16(unsigned char const *buffer,
- * size_t len, uint crc32)
+ * u32 crc32_pclmul_le_16(u32 crc, const u8 *buffer, size_t len);
*/
SYM_FUNC_START(crc32_pclmul_le_16) /* buffer and buffer size are 16 bytes aligned */
movdqa (BUF), %xmm1
movdqa 0x10(BUF), %xmm2
diff --git a/arch/x86/crypto/crc32-pclmul_glue.c b/arch/x86/crypto/crc32-pclmul_glue.c
index 9f5e342b9845..9d14eac51c5b 100644
--- a/arch/x86/crypto/crc32-pclmul_glue.c
+++ b/arch/x86/crypto/crc32-pclmul_glue.c
@@ -44,11 +44,11 @@
#define PCLMUL_MIN_LEN 64L /* minimum size of buffer
* for crc32_pclmul_le_16 */
#define SCALE_F 16L /* size of xmm register */
#define SCALE_F_MASK (SCALE_F - 1)
-u32 crc32_pclmul_le_16(unsigned char const *buffer, size_t len, u32 crc32);
+u32 crc32_pclmul_le_16(u32 crc, const u8 *buffer, size_t len);
static u32 __attribute__((pure))
crc32_pclmul_le(u32 crc, unsigned char const *p, size_t len)
{
unsigned int iquotient;
@@ -69,11 +69,11 @@ static u32 __attribute__((pure))
}
iquotient = len & (~SCALE_F_MASK);
iremainder = len & SCALE_F_MASK;
kernel_fpu_begin();
- crc = crc32_pclmul_le_16(p, iquotient, crc);
+ crc = crc32_pclmul_le_16(crc, p, iquotient);
kernel_fpu_end();
if (iremainder)
crc = crc32_le(crc, p + iquotient, iremainder);
--
2.47.0