Message-ID: <175577973263.1420.12918884895191770948.tip-bot2@tip-bot2>
Date: Thu, 21 Aug 2025 12:35:31 -0000
From: "tip-bot2 for Uros Bizjak" <tip-bot2@...utronix.de>
To: linux-tip-commits@...r.kernel.org
Cc: Uros Bizjak <ubizjak@...il.com>, "Borislav Petkov (AMD)" <bp@...en8.de>,
Herbert Xu <herbert@...dor.apana.org.au>, x86@...nel.org,
linux-kernel@...r.kernel.org
Subject: [tip: x86/cleanups] crypto: X86 - Remove CONFIG_AS_VAES

The following commit has been merged into the x86/cleanups branch of tip:

Commit-ID:     4593311290006793a38a9cbd91d4a65b63cd7b76
Gitweb:        https://git.kernel.org/tip/4593311290006793a38a9cbd91d4a65b63cd7b76
Author:        Uros Bizjak <ubizjak@...il.com>
AuthorDate:    Tue, 19 Aug 2025 10:57:50 +02:00
Committer:     Borislav Petkov (AMD) <bp@...en8.de>
CommitterDate: Thu, 21 Aug 2025 12:23:28 +02:00

crypto: X86 - Remove CONFIG_AS_VAES

The current minimum required version of binutils is 2.30, which supports
the VAES instruction mnemonics.

Remove the check for assembler support of VAES instructions and all
related conditional-compilation macros.

No functional change intended.

Signed-off-by: Uros Bizjak <ubizjak@...il.com>
Signed-off-by: Borislav Petkov (AMD) <bp@...en8.de>
Acked-by: Herbert Xu <herbert@...dor.apana.org.au>
Link: https://lore.kernel.org/20250819085855.333380-2-ubizjak@gmail.com
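
For context: the removed AS_VAES symbol was probed at build time with the
as-instr Kconfig macro, which roughly amounts to feeding a single instruction
to the assembler (via $(CC)) and testing the exit status. A minimal hand-run
sketch of such a probe, assuming an x86-64 host with gcc and GNU binutils on
the PATH (the in-tree macro also passes CLANG_FLAGS and uses the configured
compiler, so this is only an approximation):

  $ echo 'vaesenc %ymm0, %ymm1, %ymm2' | \
        gcc -Wa,--fatal-warnings -c -x assembler-with-cpp -o /dev/null - && \
        echo 'assembler accepts VAES mnemonics'

With binutils >= 2.30 or the LLVM integrated assembler this always succeeds,
which is what makes the config symbol redundant.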
---
arch/x86/Kconfig.assembler | 5 -----
arch/x86/crypto/Makefile | 2 +-
arch/x86/crypto/aes-ctr-avx-x86_64.S | 4 ++--
arch/x86/crypto/aes-xts-avx-x86_64.S | 4 ++--
arch/x86/crypto/aesni-intel_glue.c | 14 +++++++-------
5 files changed, 12 insertions(+), 17 deletions(-)
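
Note that only the build-time gate on CONFIG_AS_VPCLMULQDQ remains; whether
the VAES code paths are actually used is still decided at run time from CPU
feature bits, as the register_avx_algs() hunk further down shows. A condensed
sketch of that run-time check (kernel-internal API; the helper name
vaes_cpu_usable() is made up here for illustration and is not part of the
patch):

  #include <linux/types.h>      /* bool */
  #include <asm/cpufeature.h>   /* boot_cpu_has(), X86_FEATURE_* */

  /* True when the CPU can run the VAES/VPCLMULQDQ-based implementations. */
  static bool vaes_cpu_usable(void)
  {
          return boot_cpu_has(X86_FEATURE_AVX2) &&
                 boot_cpu_has(X86_FEATURE_VAES) &&
                 boot_cpu_has(X86_FEATURE_VPCLMULQDQ);
  }
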
diff --git a/arch/x86/Kconfig.assembler b/arch/x86/Kconfig.assembler
index 6b95be0..8d18084 100644
--- a/arch/x86/Kconfig.assembler
+++ b/arch/x86/Kconfig.assembler
@@ -6,11 +6,6 @@ config AS_AVX512
help
Supported by binutils >= 2.25 and LLVM integrated assembler
-config AS_VAES
- def_bool $(as-instr,vaesenc %ymm0$(comma)%ymm1$(comma)%ymm2)
- help
- Supported by binutils >= 2.30 and LLVM integrated assembler
-
config AS_VPCLMULQDQ
def_bool $(as-instr,vpclmulqdq \$0x10$(comma)%ymm0$(comma)%ymm1$(comma)%ymm2)
help
diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile
index d402963..50f1c04 100644
--- a/arch/x86/crypto/Makefile
+++ b/arch/x86/crypto/Makefile
@@ -47,7 +47,7 @@ aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o
aesni-intel-$(CONFIG_64BIT) += aes-ctr-avx-x86_64.o \
aes-gcm-aesni-x86_64.o \
aes-xts-avx-x86_64.o
-ifeq ($(CONFIG_AS_VAES)$(CONFIG_AS_VPCLMULQDQ),yy)
+ifeq ($(CONFIG_AS_VPCLMULQDQ),y)
aesni-intel-$(CONFIG_64BIT) += aes-gcm-avx10-x86_64.o
endif
diff --git a/arch/x86/crypto/aes-ctr-avx-x86_64.S b/arch/x86/crypto/aes-ctr-avx-x86_64.S
index bbbfd80..ec957b2 100644
--- a/arch/x86/crypto/aes-ctr-avx-x86_64.S
+++ b/arch/x86/crypto/aes-ctr-avx-x86_64.S
@@ -552,7 +552,7 @@ SYM_TYPED_FUNC_START(aes_xctr_crypt_aesni_avx)
_aes_ctr_crypt 1
SYM_FUNC_END(aes_xctr_crypt_aesni_avx)
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
+#if defined(CONFIG_AS_VPCLMULQDQ)
.set VL, 32
.set USE_AVX512, 0
SYM_TYPED_FUNC_START(aes_ctr64_crypt_vaes_avx2)
@@ -570,4 +570,4 @@ SYM_FUNC_END(aes_ctr64_crypt_vaes_avx512)
SYM_TYPED_FUNC_START(aes_xctr_crypt_vaes_avx512)
_aes_ctr_crypt 1
SYM_FUNC_END(aes_xctr_crypt_vaes_avx512)
-#endif // CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ
+#endif // CONFIG_AS_VPCLMULQDQ
diff --git a/arch/x86/crypto/aes-xts-avx-x86_64.S b/arch/x86/crypto/aes-xts-avx-x86_64.S
index db79cdf..e44e568 100644
--- a/arch/x86/crypto/aes-xts-avx-x86_64.S
+++ b/arch/x86/crypto/aes-xts-avx-x86_64.S
@@ -886,7 +886,7 @@ SYM_TYPED_FUNC_START(aes_xts_decrypt_aesni_avx)
_aes_xts_crypt 0
SYM_FUNC_END(aes_xts_decrypt_aesni_avx)
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
+#if defined(CONFIG_AS_VPCLMULQDQ)
.set VL, 32
.set USE_AVX512, 0
SYM_TYPED_FUNC_START(aes_xts_encrypt_vaes_avx2)
@@ -904,4 +904,4 @@ SYM_FUNC_END(aes_xts_encrypt_vaes_avx512)
SYM_TYPED_FUNC_START(aes_xts_decrypt_vaes_avx512)
_aes_xts_crypt 0
SYM_FUNC_END(aes_xts_decrypt_vaes_avx512)
-#endif /* CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ */
+#endif /* CONFIG_AS_VPCLMULQDQ */
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 061b1ce..d5a2f5b 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -828,7 +828,7 @@ static struct skcipher_alg skcipher_algs_##suffix[] = {{ \
}}
DEFINE_AVX_SKCIPHER_ALGS(aesni_avx, "aesni-avx", 500);
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
+#if defined(CONFIG_AS_VPCLMULQDQ)
DEFINE_AVX_SKCIPHER_ALGS(vaes_avx2, "vaes-avx2", 600);
DEFINE_AVX_SKCIPHER_ALGS(vaes_avx512, "vaes-avx512", 800);
#endif
@@ -912,7 +912,7 @@ struct aes_gcm_key_avx10 {
#define FLAG_RFC4106 BIT(0)
#define FLAG_ENC BIT(1)
#define FLAG_AVX BIT(2)
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
+#if defined(CONFIG_AS_VPCLMULQDQ)
# define FLAG_AVX10_256 BIT(3)
# define FLAG_AVX10_512 BIT(4)
#else
@@ -1519,7 +1519,7 @@ DEFINE_GCM_ALGS(aesni_avx, FLAG_AVX,
"generic-gcm-aesni-avx", "rfc4106-gcm-aesni-avx",
AES_GCM_KEY_AESNI_SIZE, 500);
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
+#if defined(CONFIG_AS_VPCLMULQDQ)
/* aes_gcm_algs_vaes_avx10_256 */
DEFINE_GCM_ALGS(vaes_avx10_256, FLAG_AVX10_256,
"generic-gcm-vaes-avx10_256", "rfc4106-gcm-vaes-avx10_256",
@@ -1529,7 +1529,7 @@ DEFINE_GCM_ALGS(vaes_avx10_256, FLAG_AVX10_256,
DEFINE_GCM_ALGS(vaes_avx10_512, FLAG_AVX10_512,
"generic-gcm-vaes-avx10_512", "rfc4106-gcm-vaes-avx10_512",
AES_GCM_KEY_AVX10_SIZE, 800);
-#endif /* CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ */
+#endif /* CONFIG_AS_VPCLMULQDQ */
static int __init register_avx_algs(void)
{
@@ -1551,7 +1551,7 @@ static int __init register_avx_algs(void)
* Similarly, the assembler support was added at about the same time.
* For simplicity, just always check for VAES and VPCLMULQDQ together.
*/
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
+#if defined(CONFIG_AS_VPCLMULQDQ)
if (!boot_cpu_has(X86_FEATURE_AVX2) ||
!boot_cpu_has(X86_FEATURE_VAES) ||
!boot_cpu_has(X86_FEATURE_VPCLMULQDQ) ||
@@ -1592,7 +1592,7 @@ static int __init register_avx_algs(void)
ARRAY_SIZE(aes_gcm_algs_vaes_avx10_512));
if (err)
return err;
-#endif /* CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ */
+#endif /* CONFIG_AS_VPCLMULQDQ */
return 0;
}
@@ -1607,7 +1607,7 @@ static void unregister_avx_algs(void)
{
unregister_skciphers(skcipher_algs_aesni_avx);
unregister_aeads(aes_gcm_algs_aesni_avx);
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
+#if defined(CONFIG_AS_VPCLMULQDQ)
unregister_skciphers(skcipher_algs_vaes_avx2);
unregister_skciphers(skcipher_algs_vaes_avx512);
unregister_aeads(aes_gcm_algs_vaes_avx10_256);