Date:	Fri, 17 Jul 2015 11:47:29 -0500
From:	Josh Poimboeuf <jpoimboe@...hat.com>
To:	Thomas Gleixner <tglx@...utronix.de>,
	Ingo Molnar <mingo@...hat.com>,
	"H. Peter Anvin" <hpa@...or.com>
Cc:	Michal Marek <mmarek@...e.cz>,
	Peter Zijlstra <peterz@...radead.org>,
	Andy Lutomirski <luto@...nel.org>,
	Borislav Petkov <bp@...en8.de>,
	Linus Torvalds <torvalds@...ux-foundation.org>,
	Andi Kleen <andi@...stfloor.org>,
	Pedro Alves <palves@...hat.com>, x86@...nel.org,
	live-patching@...r.kernel.org, linux-kernel@...r.kernel.org
Subject: [RFC PATCH 13/21] x86/asm/crypto: Fix frame pointer usage in aesni-intel_asm.S

aesni-intel_asm.S has several callable non-leaf functions that don't
honor CONFIG_FRAME_POINTER; this can result in bad stack traces.

Create stack frames for them when CONFIG_FRAME_POINTER is enabled.

Signed-off-by: Josh Poimboeuf <jpoimboe@...hat.com>
---
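
Note for reviewers unfamiliar with the macros: FRAME and ENDFRAME come
from <asm/frame.h> (added by the #include below) and emit a
frame-pointer prologue/epilogue only when CONFIG_FRAME_POINTER is
enabled; otherwise they expand to nothing, so non-frame-pointer builds
are unaffected.  A rough sketch of the assumed expansion on x86_64
(illustrative, not the literal macro source; the 32-bit build uses
%ebp/%esp instead):

	.macro FRAME
	push	%rbp			# save the caller's frame pointer
	mov	%rsp, %rbp		# point %rbp at the new frame
	.endm

	.macro ENDFRAME
	pop	%rbp			# restore the caller's frame pointer
	.endm

With the frame set up this way, a frame-pointer-based unwinder can walk
from these functions back to their callers, which is what fixes the
stack traces.
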
 arch/x86/crypto/aesni-intel_asm.S | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/arch/x86/crypto/aesni-intel_asm.S b/arch/x86/crypto/aesni-intel_asm.S
index 6bd2c6c..3df557b 100644
--- a/arch/x86/crypto/aesni-intel_asm.S
+++ b/arch/x86/crypto/aesni-intel_asm.S
@@ -31,6 +31,7 @@
 
 #include <linux/linkage.h>
 #include <asm/inst.h>
+#include <asm/frame.h>
 
 /*
  * The following macros are used to move an (un)aligned 16 byte value to/from
@@ -1800,6 +1801,7 @@ ENDPROC(_key_expansion_256b)
  *                   unsigned int key_len)
  */
 ENTRY(aesni_set_key)
+	FRAME
 #ifndef __x86_64__
 	pushl KEYP
 	movl 8(%esp), KEYP		# ctx
@@ -1905,6 +1907,7 @@ ENTRY(aesni_set_key)
 #ifndef __x86_64__
 	popl KEYP
 #endif
+	ENDFRAME
 	ret
 ENDPROC(aesni_set_key)
 
@@ -1912,6 +1915,7 @@ ENDPROC(aesni_set_key)
  * void aesni_enc(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
  */
 ENTRY(aesni_enc)
+	FRAME
 #ifndef __x86_64__
 	pushl KEYP
 	pushl KLEN
@@ -1927,6 +1931,7 @@ ENTRY(aesni_enc)
 	popl KLEN
 	popl KEYP
 #endif
+	ENDFRAME
 	ret
 ENDPROC(aesni_enc)
 
@@ -2101,6 +2106,7 @@ ENDPROC(_aesni_enc4)
  * void aesni_dec (struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
  */
 ENTRY(aesni_dec)
+	FRAME
 #ifndef __x86_64__
 	pushl KEYP
 	pushl KLEN
@@ -2117,6 +2123,7 @@ ENTRY(aesni_dec)
 	popl KLEN
 	popl KEYP
 #endif
+	ENDFRAME
 	ret
 ENDPROC(aesni_dec)
 
@@ -2292,6 +2299,7 @@ ENDPROC(_aesni_dec4)
  *		      size_t len)
  */
 ENTRY(aesni_ecb_enc)
+	FRAME
 #ifndef __x86_64__
 	pushl LEN
 	pushl KEYP
@@ -2342,6 +2350,7 @@ ENTRY(aesni_ecb_enc)
 	popl KEYP
 	popl LEN
 #endif
+	ENDFRAME
 	ret
 ENDPROC(aesni_ecb_enc)
 
@@ -2350,6 +2359,7 @@ ENDPROC(aesni_ecb_enc)
  *		      size_t len);
  */
 ENTRY(aesni_ecb_dec)
+	FRAME
 #ifndef __x86_64__
 	pushl LEN
 	pushl KEYP
@@ -2401,6 +2411,7 @@ ENTRY(aesni_ecb_dec)
 	popl KEYP
 	popl LEN
 #endif
+	ENDFRAME
 	ret
 ENDPROC(aesni_ecb_dec)
 
@@ -2409,6 +2420,7 @@ ENDPROC(aesni_ecb_dec)
  *		      size_t len, u8 *iv)
  */
 ENTRY(aesni_cbc_enc)
+	FRAME
 #ifndef __x86_64__
 	pushl IVP
 	pushl LEN
@@ -2443,6 +2455,7 @@ ENTRY(aesni_cbc_enc)
 	popl LEN
 	popl IVP
 #endif
+	ENDFRAME
 	ret
 ENDPROC(aesni_cbc_enc)
 
@@ -2451,6 +2464,7 @@ ENDPROC(aesni_cbc_enc)
  *		      size_t len, u8 *iv)
  */
 ENTRY(aesni_cbc_dec)
+	FRAME
 #ifndef __x86_64__
 	pushl IVP
 	pushl LEN
@@ -2534,6 +2548,7 @@ ENTRY(aesni_cbc_dec)
 	popl LEN
 	popl IVP
 #endif
+	ENDFRAME
 	ret
 ENDPROC(aesni_cbc_dec)
 
@@ -2598,6 +2613,7 @@ ENDPROC(_aesni_inc)
  *		      size_t len, u8 *iv)
  */
 ENTRY(aesni_ctr_enc)
+	FRAME
 	cmp $16, LEN
 	jb .Lctr_enc_just_ret
 	mov 480(KEYP), KLEN
@@ -2651,6 +2667,7 @@ ENTRY(aesni_ctr_enc)
 .Lctr_enc_ret:
 	movups IV, (IVP)
 .Lctr_enc_just_ret:
+	ENDFRAME
 	ret
 ENDPROC(aesni_ctr_enc)
 
@@ -2677,6 +2694,7 @@ ENDPROC(aesni_ctr_enc)
  *			 bool enc, u8 *iv)
  */
 ENTRY(aesni_xts_crypt8)
+	FRAME
 	cmpb $0, %cl
 	movl $0, %ecx
 	movl $240, %r10d
@@ -2777,6 +2795,7 @@ ENTRY(aesni_xts_crypt8)
 	pxor INC, STATE4
 	movdqu STATE4, 0x70(OUTP)
 
+	ENDFRAME
 	ret
 ENDPROC(aesni_xts_crypt8)
 
-- 
2.1.0

