Message-ID: <98B4759B69BED4D1+20240912033403.1944625-1-wangyuli@uniontech.com>
Date: Thu, 12 Sep 2024 11:34:03 +0800
From: WangYuli <wangyuli@...ontech.com>
To: herbert@...dor.apana.org.au,
davem@...emloft.net,
tsbogend@...ha.franken.de
Cc: linux-crypto@...r.kernel.org,
linux-mips@...r.kernel.org,
linux-kernel@...r.kernel.org,
marcin.nowakowski@...s.com,
jhogan@...nel.org,
ralf@...ux-mips.org,
linux-mips@...ux-mips.org,
ebiggers@...gle.com,
guanwentao@...ontech.com,
zhanjun@...ontech.com,
WangYuli <wangyuli@...ontech.com>,
"Maciej W . Rozycki" <macro@...am.me.uk>
Subject: [RESEND PATCH v3] MIPS: crypto: Clean up useless assignment operations

When the "len & sizeof(u32)" branch is entered on a 64-bit CPU, len is
already known to be less than 8, because the preceding u64 loop has
consumed every full 8-byte chunk. After the single 32-bit step, fewer
than 4 bytes can remain, and the subsequent u16/u8 tests only inspect
bits 1 and 0 of len, which subtracting sizeof(u32) would not change
anyway. The trailing "len -= sizeof(u32)" is therefore a useless
assignment on 64-bit CPUs; drop it.

While at it, replace the `while' loops with equivalent `for' loops to
make the code structure a little cleaner.

Suggested-by: Maciej W. Rozycki <macro@...am.me.uk>
Link: https://lore.kernel.org/all/alpine.DEB.2.21.2406281713040.43454@angie.orcam.me.uk/
Suggested-by: Herbert Xu <herbert@...dor.apana.org.au>
Link: https://lore.kernel.org/all/ZtqZpzMH_qMQqzyc@gondor.apana.org.au/
Signed-off-by: Guan Wentao <guanwentao@...ontech.com>
Signed-off-by: WangYuli <wangyuli@...ontech.com>
---
arch/mips/crypto/crc32-mips.c | 70 ++++++++++++++++++-----------------
1 file changed, 37 insertions(+), 33 deletions(-)
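
Note for reviewers (not part of the commit message): the reasoning above
can be checked with a small standalone C sketch. The helper name
tail_steps below is made up for illustration only; the point is that once
the u64 loop is done, len < 8, so the u32/u16/u8 tests each examine a
single bit of len, and subtracting sizeof(u32) would only clear a bit
that is never looked at again.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Standalone sketch: count the tail chunks left after the 8-byte loop.
 * The caller guarantees len < 8 here, just like the kernel code after
 * its u64 loop, so each branch below runs at most once.
 */
static unsigned int tail_steps(unsigned int len)
{
	unsigned int steps = 0;

	assert(len < sizeof(uint64_t));

	if (len & sizeof(uint32_t)) {
		/*
		 * "len -= sizeof(uint32_t)" would only clear bit 2 of len;
		 * the u16/u8 tests below read bits 1 and 0, so the
		 * subtraction is dead code.
		 */
		steps++;
	}
	if (len & sizeof(uint16_t))
		steps++;
	if (len & sizeof(uint8_t))
		steps++;

	return steps;
}

int main(void)
{
	unsigned int len;

	for (len = 0; len < 8; len++)
		printf("len=%u -> %u tail chunks\n", len, tail_steps(len));
	return 0;
}

Compiling and running it prints the number of tail chunks for every
len < 8, which matches what the reworked tail handling does.
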
diff --git a/arch/mips/crypto/crc32-mips.c b/arch/mips/crypto/crc32-mips.c
index ec6d58008f8e..2a59b85f88aa 100644
--- a/arch/mips/crypto/crc32-mips.c
+++ b/arch/mips/crypto/crc32-mips.c
@@ -77,24 +77,26 @@ static u32 crc32_mips_le_hw(u32 crc_, const u8 *p, unsigned int len)
 {
 	u32 crc = crc_;
 
-#ifdef CONFIG_64BIT
-	while (len >= sizeof(u64)) {
-		u64 value = get_unaligned_le64(p);
-
-		CRC32(crc, value, d);
-		p += sizeof(u64);
-		len -= sizeof(u64);
-	}
-
-	if (len & sizeof(u32)) {
-#else /* !CONFIG_64BIT */
-	while (len >= sizeof(u32)) {
-#endif
-		u32 value = get_unaligned_le32(p);
-
-		CRC32(crc, value, w);
-		p += sizeof(u32);
-		len -= sizeof(u32);
+	if (IS_ENABLED(CONFIG_64BIT)) {
+		for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
+			u64 value = get_unaligned_le64(p);
+
+			CRC32(crc, value, d);
+		}
+
+		if (len & sizeof(u32)) {
+			u32 value = get_unaligned_le32(p);
+
+			CRC32(crc, value, w);
+			p += sizeof(u32);
+		}
+	} else {
+		for (; len >= sizeof(u32); len -= sizeof(u32)) {
+			u32 value = get_unaligned_le32(p);
+
+			CRC32(crc, value, w);
+			p += sizeof(u32);
+		}
 	}
 
 	if (len & sizeof(u16)) {
@@ -117,24 +119,26 @@ static u32 crc32c_mips_le_hw(u32 crc_, const u8 *p, unsigned int len)
 {
 	u32 crc = crc_;
 
-#ifdef CONFIG_64BIT
-	while (len >= sizeof(u64)) {
-		u64 value = get_unaligned_le64(p);
+	if (IS_ENABLED(CONFIG_64BIT)) {
+		for (; len >= sizeof(u64); p += sizeof(u64), len -= sizeof(u64)) {
+			u64 value = get_unaligned_le64(p);
 
-		CRC32C(crc, value, d);
-		p += sizeof(u64);
-		len -= sizeof(u64);
-	}
+			CRC32C(crc, value, d);
+		}
 
-	if (len & sizeof(u32)) {
-#else /* !CONFIG_64BIT */
-	while (len >= sizeof(u32)) {
-#endif
-		u32 value = get_unaligned_le32(p);
+		if (len & sizeof(u32)) {
+			u32 value = get_unaligned_le32(p);
+
+			CRC32C(crc, value, w);
+			p += sizeof(u32);
+		}
+	} else {
+		for (; len >= sizeof(u32); len -= sizeof(u32)) {
+			u32 value = get_unaligned_le32(p);
 
-		CRC32C(crc, value, w);
-		p += sizeof(u32);
-		len -= sizeof(u32);
+			CRC32C(crc, value, w);
+			p += sizeof(u32);
+		}
 	}
 
 	if (len & sizeof(u16)) {
--
2.43.4