Message-ID: <08b21e5705b45fe799463d4fa708c7b73fa24625.1544471415.git.davejwatson@fb.com>
Date: Mon, 10 Dec 2018 19:57:12 +0000
From: Dave Watson <davejwatson@...com>
To: Herbert Xu <herbert@...dor.apana.org.au>,
Junaid Shahid <junaids@...gle.com>,
Steffen Klassert <steffen.klassert@...unet.com>,
"linux-crypto@...r.kernel.org" <linux-crypto@...r.kernel.org>
CC: Doron Roberts-Kedes <doronrk@...com>,
Sabrina Dubroca <sd@...asysnail.net>,
"linux-kernel@...r.kernel.org" <linux-kernel@...r.kernel.org>,
Stephan Mueller <smueller@...onox.de>
Subject: [PATCH 03/12] x86/crypto: aesni: Macro-ify func save/restore

Macro-ify function save and restore. These will be used in the new
scatter/gather update functions added later in this series.

Signed-off-by: Dave Watson <davejwatson@...com>
---
arch/x86/crypto/aesni-intel_avx-x86_64.S | 94 +++++++++---------------
1 file changed, 36 insertions(+), 58 deletions(-)
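
(Notes below the fold, not intended for the commit message:)

The idea is that the scatter/gather update entry points added later in
this series can reuse the same two macros instead of duplicating the
push/pop/stack-alignment sequence at each entry. A rough sketch of how
such an entry point would look, using a hypothetical function name for
illustration and assuming the gen2 (AVX) variants:

ENTRY(aesni_gcm_enc_update_avx_gen2)    # hypothetical name, sketch only
	FUNC_SAVE               # push r12-r15, save rsp in r14,
				# reserve VARIABLE_OFFSET and align rsp to 64
	GCM_ENC_DEC INITIAL_BLOCKS_AVX GHASH_8_ENCRYPT_8_PARALLEL_AVX GHASH_LAST_8_AVX GHASH_MUL_AVX ENC
	FUNC_RESTORE            # restore rsp from r14, pop r15-r12
	ret
ENDPROC(aesni_gcm_enc_update_avx_gen2)
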
diff --git a/arch/x86/crypto/aesni-intel_avx-x86_64.S b/arch/x86/crypto/aesni-intel_avx-x86_64.S
index 284f1b8b88fc..dd895f69399b 100644
--- a/arch/x86/crypto/aesni-intel_avx-x86_64.S
+++ b/arch/x86/crypto/aesni-intel_avx-x86_64.S
@@ -247,6 +247,30 @@ VARIABLE_OFFSET = 16*8
# Utility Macros
################################
+.macro FUNC_SAVE
+ #the number of pushes must equal STACK_OFFSET
+ push %r12
+ push %r13
+ push %r14
+ push %r15
+
+ mov %rsp, %r14
+
+
+
+ sub $VARIABLE_OFFSET, %rsp
+ and $~63, %rsp # align rsp to 64 bytes
+.endm
+
+.macro FUNC_RESTORE
+ mov %r14, %rsp
+
+ pop %r15
+ pop %r14
+ pop %r13
+ pop %r12
+.endm
+
# Encryption of a single block
.macro ENCRYPT_SINGLE_BLOCK XMM0
vpxor (arg1), \XMM0, \XMM0
@@ -264,22 +288,6 @@ VARIABLE_OFFSET = 16*8
# clobbering all xmm registers
# clobbering r10, r11, r12, r13, r14, r15
.macro GCM_ENC_DEC INITIAL_BLOCKS GHASH_8_ENCRYPT_8_PARALLEL GHASH_LAST_8 GHASH_MUL ENC_DEC
-
- #the number of pushes must equal STACK_OFFSET
- push %r12
- push %r13
- push %r14
- push %r15
-
- mov %rsp, %r14
-
-
-
-
- sub $VARIABLE_OFFSET, %rsp
- and $~63, %rsp # align rsp to 64 bytes
-
-
vmovdqu HashKey(arg2), %xmm13 # xmm13 = HashKey
mov arg5, %r13 # save the number of bytes of plaintext/ciphertext
@@ -566,12 +574,6 @@ _T_16\@:
vmovdqu %xmm9, (%r10)
_return_T_done\@:
- mov %r14, %rsp
-
- pop %r15
- pop %r14
- pop %r13
- pop %r12
.endm
#ifdef CONFIG_AS_AVX
@@ -1511,18 +1513,7 @@ _initial_blocks_done\@:
# u8 *hash_subkey)# /* H, the Hash sub key input. Data starts on a 16-byte boundary. */
#############################################################
ENTRY(aesni_gcm_precomp_avx_gen2)
- #the number of pushes must equal STACK_OFFSET
- push %r12
- push %r13
- push %r14
- push %r15
-
- mov %rsp, %r14
-
-
-
- sub $VARIABLE_OFFSET, %rsp
- and $~63, %rsp # align rsp to 64 bytes
+ FUNC_SAVE
vmovdqu (arg3), %xmm6 # xmm6 = HashKey
@@ -1546,12 +1537,7 @@ ENTRY(aesni_gcm_precomp_avx_gen2)
PRECOMPUTE_AVX %xmm6, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5
- mov %r14, %rsp
-
- pop %r15
- pop %r14
- pop %r13
- pop %r12
+ FUNC_RESTORE
ret
ENDPROC(aesni_gcm_precomp_avx_gen2)
@@ -1573,7 +1559,9 @@ ENDPROC(aesni_gcm_precomp_avx_gen2)
# Valid values are 16 (most likely), 12 or 8. */
###############################################################################
ENTRY(aesni_gcm_enc_avx_gen2)
+ FUNC_SAVE
GCM_ENC_DEC INITIAL_BLOCKS_AVX GHASH_8_ENCRYPT_8_PARALLEL_AVX GHASH_LAST_8_AVX GHASH_MUL_AVX ENC
+ FUNC_RESTORE
ret
ENDPROC(aesni_gcm_enc_avx_gen2)
@@ -1595,7 +1583,9 @@ ENDPROC(aesni_gcm_enc_avx_gen2)
# Valid values are 16 (most likely), 12 or 8. */
###############################################################################
ENTRY(aesni_gcm_dec_avx_gen2)
+ FUNC_SAVE
GCM_ENC_DEC INITIAL_BLOCKS_AVX GHASH_8_ENCRYPT_8_PARALLEL_AVX GHASH_LAST_8_AVX GHASH_MUL_AVX DEC
+ FUNC_RESTORE
ret
ENDPROC(aesni_gcm_dec_avx_gen2)
#endif /* CONFIG_AS_AVX */
@@ -2525,18 +2515,7 @@ _initial_blocks_done\@:
# Data starts on a 16-byte boundary. */
#############################################################
ENTRY(aesni_gcm_precomp_avx_gen4)
- #the number of pushes must equal STACK_OFFSET
- push %r12
- push %r13
- push %r14
- push %r15
-
- mov %rsp, %r14
-
-
-
- sub $VARIABLE_OFFSET, %rsp
- and $~63, %rsp # align rsp to 64 bytes
+ FUNC_SAVE
vmovdqu (arg3), %xmm6 # xmm6 = HashKey
@@ -2560,12 +2539,7 @@ ENTRY(aesni_gcm_precomp_avx_gen4)
PRECOMPUTE_AVX2 %xmm6, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5
- mov %r14, %rsp
-
- pop %r15
- pop %r14
- pop %r13
- pop %r12
+ FUNC_RESTORE
ret
ENDPROC(aesni_gcm_precomp_avx_gen4)
@@ -2588,7 +2562,9 @@ ENDPROC(aesni_gcm_precomp_avx_gen4)
# Valid values are 16 (most likely), 12 or 8. */
###############################################################################
ENTRY(aesni_gcm_enc_avx_gen4)
+ FUNC_SAVE
GCM_ENC_DEC INITIAL_BLOCKS_AVX2 GHASH_8_ENCRYPT_8_PARALLEL_AVX2 GHASH_LAST_8_AVX2 GHASH_MUL_AVX2 ENC
+ FUNC_RESTORE
ret
ENDPROC(aesni_gcm_enc_avx_gen4)
@@ -2610,7 +2586,9 @@ ENDPROC(aesni_gcm_enc_avx_gen4)
# Valid values are 16 (most likely), 12 or 8. */
###############################################################################
ENTRY(aesni_gcm_dec_avx_gen4)
+ FUNC_SAVE
GCM_ENC_DEC INITIAL_BLOCKS_AVX2 GHASH_8_ENCRYPT_8_PARALLEL_AVX2 GHASH_LAST_8_AVX2 GHASH_MUL_AVX2 DEC
+ FUNC_RESTORE
ret
ENDPROC(aesni_gcm_dec_avx_gen4)
--
2.17.1