Message-ID: <20250402002420.89233-9-ebiggers@kernel.org>
Date: Tue, 1 Apr 2025 17:24:08 -0700
From: Eric Biggers <ebiggers@...nel.org>
To: linux-crypto@...r.kernel.org
Cc: linux-kernel@...r.kernel.org,
x86@...nel.org
Subject: [PATCH v2 8/9] crypto: x86/sm4 - stop using the SIMD helper
From: Eric Biggers <ebiggers@...gle.com>
Stop wrapping skcipher and aead algorithms with the crypto SIMD helper
(crypto/simd.c). The only purpose of doing so was to work around x86
not always supporting kernel-mode FPU in softirqs. Specifically, if a
hardirq interrupted a task context kernel-mode FPU section and then a
softirq was run at the end of that hardirq, that softirq could not
use kernel-mode FPU. This has now been fixed. In combination with the
fact that the skcipher and aead APIs only support task and softirq
contexts, these can now just use kernel-mode FPU unconditionally on x86.
This simplifies the code and improves performance.
Signed-off-by: Eric Biggers <ebiggers@...gle.com>
---
arch/x86/crypto/Kconfig | 2 --
arch/x86/crypto/sm4_aesni_avx2_glue.c | 31 ++++++++++-----------------
arch/x86/crypto/sm4_aesni_avx_glue.c | 31 ++++++++++-----------------
3 files changed, 22 insertions(+), 42 deletions(-)
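[Note for context: with the SIMD wrapper gone, correctness relies on the
glue code's existing pattern of bracketing the SIMD work with
kernel_fpu_begin()/kernel_fpu_end(), which after the earlier fix is valid
in both task and softirq context. Below is a condensed, illustrative
sketch of that pattern, loosely based on the ECB path in these files; it
assumes the existing sm4_aesni_avx_crypt8() asm entry point, and
sm4_ecb_do_crypt() is a hypothetical name for illustration, not a
function added by this patch:

	/*
	 * Illustrative sketch, not the exact code in this patch: the bulk
	 * path does its SIMD work inside an explicit kernel-mode FPU
	 * section, so no "__"-prefixed internal algorithm and no SIMD
	 * wrapper are needed anymore.
	 */
	static int sm4_ecb_do_crypt(struct skcipher_request *req,
				    const u32 *rkey)
	{
		struct skcipher_walk walk;
		unsigned int nbytes;
		int err;

		err = skcipher_walk_virt(&walk, req, false);
		while ((nbytes = walk.nbytes) > 0) {
			const u8 *src = walk.src.virt.addr;
			u8 *dst = walk.dst.virt.addr;

			kernel_fpu_begin();
			while (nbytes >= 8 * SM4_BLOCK_SIZE) {
				sm4_aesni_avx_crypt8(rkey, dst, src, 8);
				src += 8 * SM4_BLOCK_SIZE;
				dst += 8 * SM4_BLOCK_SIZE;
				nbytes -= 8 * SM4_BLOCK_SIZE;
			}
			/* (partial-chunk handling omitted for brevity) */
			kernel_fpu_end();
			err = skcipher_walk_done(&walk, nbytes);
		}
		return err;
	}
]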
diff --git a/arch/x86/crypto/Kconfig b/arch/x86/crypto/Kconfig
index 51c74a496126d..afc1a05e663dd 100644
--- a/arch/x86/crypto/Kconfig
+++ b/arch/x86/crypto/Kconfig
@@ -190,11 +190,10 @@ config CRYPTO_SERPENT_AVX2_X86_64
config CRYPTO_SM4_AESNI_AVX_X86_64
tristate "Ciphers: SM4 with modes: ECB, CBC, CTR (AES-NI/AVX)"
depends on X86 && 64BIT
select CRYPTO_SKCIPHER
- select CRYPTO_SIMD
select CRYPTO_ALGAPI
select CRYPTO_SM4
help
Length-preserving ciphers: SM4 cipher algorithms
(OSCCA GB/T 32907-2016) with ECB, CBC, and CTR modes
@@ -211,11 +210,10 @@ config CRYPTO_SM4_AESNI_AVX_X86_64
config CRYPTO_SM4_AESNI_AVX2_X86_64
tristate "Ciphers: SM4 with modes: ECB, CBC, CTR (AES-NI/AVX2)"
depends on X86 && 64BIT
select CRYPTO_SKCIPHER
- select CRYPTO_SIMD
select CRYPTO_ALGAPI
select CRYPTO_SM4
select CRYPTO_SM4_AESNI_AVX_X86_64
help
Length-preserving ciphers: SM4 cipher algorithms
diff --git a/arch/x86/crypto/sm4_aesni_avx2_glue.c b/arch/x86/crypto/sm4_aesni_avx2_glue.c
index 1148fd4cd57f8..fec0ab7a63dd4 100644
--- a/arch/x86/crypto/sm4_aesni_avx2_glue.c
+++ b/arch/x86/crypto/sm4_aesni_avx2_glue.c
@@ -6,15 +6,14 @@
*
* Copyright (c) 2021, Alibaba Group.
* Copyright (c) 2021 Tianjia Zhang <tianjia.zhang@...ux.alibaba.com>
*/
+#include <asm/fpu/api.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
-#include <asm/simd.h>
-#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-avx.h"
#define SM4_CRYPT16_BLOCK_SIZE (SM4_BLOCK_SIZE * 16)
@@ -46,14 +45,13 @@ static int ctr_crypt(struct skcipher_request *req)
}
static struct skcipher_alg sm4_aesni_avx2_skciphers[] = {
{
.base = {
- .cra_name = "__ecb(sm4)",
- .cra_driver_name = "__ecb-sm4-aesni-avx2",
+ .cra_name = "ecb(sm4)",
+ .cra_driver_name = "ecb-sm4-aesni-avx2",
.cra_priority = 500,
- .cra_flags = CRYPTO_ALG_INTERNAL,
.cra_blocksize = SM4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct sm4_ctx),
.cra_module = THIS_MODULE,
},
.min_keysize = SM4_KEY_SIZE,
@@ -62,14 +60,13 @@ static struct skcipher_alg sm4_aesni_avx2_skciphers[] = {
.setkey = sm4_skcipher_setkey,
.encrypt = sm4_avx_ecb_encrypt,
.decrypt = sm4_avx_ecb_decrypt,
}, {
.base = {
- .cra_name = "__cbc(sm4)",
- .cra_driver_name = "__cbc-sm4-aesni-avx2",
+ .cra_name = "cbc(sm4)",
+ .cra_driver_name = "cbc-sm4-aesni-avx2",
.cra_priority = 500,
- .cra_flags = CRYPTO_ALG_INTERNAL,
.cra_blocksize = SM4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct sm4_ctx),
.cra_module = THIS_MODULE,
},
.min_keysize = SM4_KEY_SIZE,
@@ -79,14 +76,13 @@ static struct skcipher_alg sm4_aesni_avx2_skciphers[] = {
.setkey = sm4_skcipher_setkey,
.encrypt = sm4_cbc_encrypt,
.decrypt = cbc_decrypt,
}, {
.base = {
- .cra_name = "__ctr(sm4)",
- .cra_driver_name = "__ctr-sm4-aesni-avx2",
+ .cra_name = "ctr(sm4)",
+ .cra_driver_name = "ctr-sm4-aesni-avx2",
.cra_priority = 500,
- .cra_flags = CRYPTO_ALG_INTERNAL,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct sm4_ctx),
.cra_module = THIS_MODULE,
},
.min_keysize = SM4_KEY_SIZE,
@@ -98,13 +94,10 @@ static struct skcipher_alg sm4_aesni_avx2_skciphers[] = {
.encrypt = ctr_crypt,
.decrypt = ctr_crypt,
}
};
-static struct simd_skcipher_alg *
-simd_sm4_aesni_avx2_skciphers[ARRAY_SIZE(sm4_aesni_avx2_skciphers)];
-
static int __init sm4_init(void)
{
const char *feature_name;
if (!boot_cpu_has(X86_FEATURE_AVX) ||
@@ -119,20 +112,18 @@ static int __init sm4_init(void)
&feature_name)) {
pr_info("CPU feature '%s' is not supported.\n", feature_name);
return -ENODEV;
}
- return simd_register_skciphers_compat(sm4_aesni_avx2_skciphers,
- ARRAY_SIZE(sm4_aesni_avx2_skciphers),
- simd_sm4_aesni_avx2_skciphers);
+ return crypto_register_skciphers(sm4_aesni_avx2_skciphers,
+ ARRAY_SIZE(sm4_aesni_avx2_skciphers));
}
static void __exit sm4_exit(void)
{
- simd_unregister_skciphers(sm4_aesni_avx2_skciphers,
- ARRAY_SIZE(sm4_aesni_avx2_skciphers),
- simd_sm4_aesni_avx2_skciphers);
+ crypto_unregister_skciphers(sm4_aesni_avx2_skciphers,
+ ARRAY_SIZE(sm4_aesni_avx2_skciphers));
}
module_init(sm4_init);
module_exit(sm4_exit);
diff --git a/arch/x86/crypto/sm4_aesni_avx_glue.c b/arch/x86/crypto/sm4_aesni_avx_glue.c
index 85b4ca78b47b5..72867fc49ce8e 100644
--- a/arch/x86/crypto/sm4_aesni_avx_glue.c
+++ b/arch/x86/crypto/sm4_aesni_avx_glue.c
@@ -6,15 +6,14 @@
*
* Copyright (c) 2021, Alibaba Group.
* Copyright (c) 2021 Tianjia Zhang <tianjia.zhang@...ux.alibaba.com>
*/
+#include <asm/fpu/api.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
-#include <asm/simd.h>
-#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/sm4.h>
#include "sm4-avx.h"
#define SM4_CRYPT8_BLOCK_SIZE (SM4_BLOCK_SIZE * 8)
@@ -261,14 +260,13 @@ static int ctr_crypt(struct skcipher_request *req)
}
static struct skcipher_alg sm4_aesni_avx_skciphers[] = {
{
.base = {
- .cra_name = "__ecb(sm4)",
- .cra_driver_name = "__ecb-sm4-aesni-avx",
+ .cra_name = "ecb(sm4)",
+ .cra_driver_name = "ecb-sm4-aesni-avx",
.cra_priority = 400,
- .cra_flags = CRYPTO_ALG_INTERNAL,
.cra_blocksize = SM4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct sm4_ctx),
.cra_module = THIS_MODULE,
},
.min_keysize = SM4_KEY_SIZE,
@@ -277,14 +275,13 @@ static struct skcipher_alg sm4_aesni_avx_skciphers[] = {
.setkey = sm4_skcipher_setkey,
.encrypt = sm4_avx_ecb_encrypt,
.decrypt = sm4_avx_ecb_decrypt,
}, {
.base = {
- .cra_name = "__cbc(sm4)",
- .cra_driver_name = "__cbc-sm4-aesni-avx",
+ .cra_name = "cbc(sm4)",
+ .cra_driver_name = "cbc-sm4-aesni-avx",
.cra_priority = 400,
- .cra_flags = CRYPTO_ALG_INTERNAL,
.cra_blocksize = SM4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct sm4_ctx),
.cra_module = THIS_MODULE,
},
.min_keysize = SM4_KEY_SIZE,
@@ -294,14 +291,13 @@ static struct skcipher_alg sm4_aesni_avx_skciphers[] = {
.setkey = sm4_skcipher_setkey,
.encrypt = sm4_cbc_encrypt,
.decrypt = cbc_decrypt,
}, {
.base = {
- .cra_name = "__ctr(sm4)",
- .cra_driver_name = "__ctr-sm4-aesni-avx",
+ .cra_name = "ctr(sm4)",
+ .cra_driver_name = "ctr-sm4-aesni-avx",
.cra_priority = 400,
- .cra_flags = CRYPTO_ALG_INTERNAL,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct sm4_ctx),
.cra_module = THIS_MODULE,
},
.min_keysize = SM4_KEY_SIZE,
@@ -313,13 +309,10 @@ static struct skcipher_alg sm4_aesni_avx_skciphers[] = {
.encrypt = ctr_crypt,
.decrypt = ctr_crypt,
}
};
-static struct simd_skcipher_alg *
-simd_sm4_aesni_avx_skciphers[ARRAY_SIZE(sm4_aesni_avx_skciphers)];
-
static int __init sm4_init(void)
{
const char *feature_name;
if (!boot_cpu_has(X86_FEATURE_AVX) ||
@@ -333,20 +326,18 @@ static int __init sm4_init(void)
&feature_name)) {
pr_info("CPU feature '%s' is not supported.\n", feature_name);
return -ENODEV;
}
- return simd_register_skciphers_compat(sm4_aesni_avx_skciphers,
- ARRAY_SIZE(sm4_aesni_avx_skciphers),
- simd_sm4_aesni_avx_skciphers);
+ return crypto_register_skciphers(sm4_aesni_avx_skciphers,
+ ARRAY_SIZE(sm4_aesni_avx_skciphers));
}
static void __exit sm4_exit(void)
{
- simd_unregister_skciphers(sm4_aesni_avx_skciphers,
- ARRAY_SIZE(sm4_aesni_avx_skciphers),
- simd_sm4_aesni_avx_skciphers);
+ crypto_unregister_skciphers(sm4_aesni_avx_skciphers,
+ ARRAY_SIZE(sm4_aesni_avx_skciphers));
}
module_init(sm4_init);
module_exit(sm4_exit);
--
2.49.0