---
 arch/x86/crypto/aes-x86_64-asm_64.S | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

--- a/arch/x86/crypto/aes-x86_64-asm_64.S
+++ b/arch/x86/crypto/aes-x86_64-asm_64.S
@@ -90,13 +90,13 @@ FUNC:	subq	$8, R16;		\
 #define round(TAB,OFFSET,r1,r2,r3,r4,r5,r6,r7,r8,ra,rb,rc,rd) \
	movzbl	r2 ## H,r5 ## E;	\
	movzbl	r2 ## L,r6 ## E;	\
+	movq	r4,R8;			\
+	shrl	$16,r4 ## E;		\
	movl	TAB+1024(,r5,4),r5 ## E;\
-	movw	r4 ## X,r2 ## X;	\
	movl	TAB(,r6,4),r6 ## E;	\
-	roll	$16,r2 ## E;		\
-	shrl	$16,r4 ## E;		\
	movzbl	r4 ## H,r7 ## E;	\
	movzbl	r4 ## L,r4 ## E;	\
+	shrl	$16,r2 ## E;		\
	xorl	OFFSET(r8),ra ## E;	\
	xorl	OFFSET+4(r8),rb ## E;	\
	xorl	TAB+3072(,r7,4),r5 ## E;\
@@ -123,7 +123,7 @@ FUNC:	subq	$8, R16;		\
	xorl	TAB(,r1,4),r3 ## E;	\
	movzbl	r2 ## H,r1 ## E;	\
	movzbl	r2 ## L,r7 ## E;	\
-	shrl	$16,r2 ## E;		\
+	movq	R8,r2;			\
	xorl	TAB+3072(,r1,4),r3 ## E;\
	xorl	TAB+2048(,r7,4),r4 ## E;\
	movzbl	r2 ## H,r1 ## E;	\