 arch/x86/include/asm/uaccess_64.h | 41 +++++++++++++++++++++++++++------------
 1 file changed, 29 insertions(+), 12 deletions(-)

diff --git a/arch/x86/include/asm/uaccess_64.h b/arch/x86/include/asm/uaccess_64.h
index f2c02e4469cc..017665052036 100644
--- a/arch/x86/include/asm/uaccess_64.h
+++ b/arch/x86/include/asm/uaccess_64.h
@@ -12,6 +12,9 @@
 #include <asm/cpufeatures.h>
 #include <asm/page.h>
 
+#define INLINE_COPY_FROM_USER
+#define INLINE_COPY_TO_USER
+
 #ifdef CONFIG_ADDRESS_MASKING
 /*
  * Mask out tag bits from the address.
@@ -101,22 +104,36 @@ static inline bool __access_ok(const void __user *ptr, unsigned long size)
 __must_check unsigned long
 rep_movs_alternative(void *to, const void *from, unsigned len);
 
+#define statically_true(x) (__builtin_constant_p(x) && (x))
+
 static __always_inline __must_check unsigned long
 copy_user_generic(void *to, const void *from, unsigned long len)
 {
         stac();
-        /*
-         * If CPU has FSRM feature, use 'rep movs'.
-         * Otherwise, use rep_movs_alternative.
-         */
-        asm volatile(
-                "1:\n\t"
-                ALTERNATIVE("rep movsb",
-                            "call rep_movs_alternative", ALT_NOT(X86_FEATURE_FSRM))
-                "2:\n"
-                _ASM_EXTABLE_UA(1b, 2b)
-                :"+c" (len), "+D" (to), "+S" (from), ASM_CALL_CONSTRAINT
-                : : "memory", "rax");
+        if (statically_true(len >= 64 && !(len & 7))) {
+                len >>= 3;
+                asm volatile(
+                        "\n1:\t"
+                        "rep movsq"
+                        "\n2:\n"
+                        _ASM_EXTABLE_UA(1b, 2b)
+                        :"+c" (len), "+D" (to), "+S" (from)
+                        : :"memory");
+                len <<= 3;
+        } else {
+                /*
+                 * If CPU has FSRM feature, use 'rep movs'.
+                 * Otherwise, use rep_movs_alternative.
+                 */
+                asm volatile(
+                        "1:\n\t"
+                        ALTERNATIVE("rep movsb",
+                                    "call rep_movs_alternative", ALT_NOT(X86_FEATURE_FSRM))
+                        "2:\n"
+                        _ASM_EXTABLE_UA(1b, 2b)
+                        :"+c" (len), "+D" (to), "+S" (from), ASM_CALL_CONSTRAINT
+                        : : "memory", "rax");
+        }
         clac();
         return len;
 }
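
A note on how the new branch works: statically_true() is built on __builtin_constant_p(), which is non-zero only when the compiler can prove the whole expression at build time. For a constant copy size that is at least 64 bytes and a multiple of 8, the compiler therefore keeps only the 'rep movsq' path (the byte count is shifted down to a quadword count for RCX and shifted back afterwards so the "bytes not copied" return value stays in bytes), while any runtime-variable size still goes through the existing FSRM / rep_movs_alternative path.

Below is a minimal userspace sketch of that selection mechanism, for illustration only: it is not kernel code, copy_sketch() is a hypothetical stand-in for copy_user_generic(), and memcpy() stands in for the inline asm copy loops; only the statically_true() macro is taken verbatim from the patch.

/*
 * Illustrative userspace sketch (not from the patch): shows how
 * statically_true() lets the compiler pick a branch at build time.
 */
#include <stdio.h>
#include <string.h>

#define statically_true(x) (__builtin_constant_p(x) && (x))

/* Returns 1 when the compile-time "large, 8-byte multiple" branch is used. */
static inline __attribute__((always_inline)) int
copy_sketch(void *to, const void *from, unsigned long len)
{
        if (statically_true(len >= 64 && !(len & 7))) {
                memcpy(to, from, len);  /* stand-in for the 'rep movsq' loop */
                return 1;
        }
        memcpy(to, from, len);          /* stand-in for the FSRM/alternative path */
        return 0;
}

int main(int argc, char **argv)
{
        static char src[4096] = "example", dst[4096];
        /* >= 64 and a multiple of 8 at run time, but not a compile-time constant */
        unsigned long runtime_len = 64 + 8 * (unsigned long)(argc & 15);

        (void)argv;
        printf("constant %zu bytes -> fast branch: %d\n",
               sizeof(src), copy_sketch(dst, src, sizeof(src)));
        printf("runtime %lu bytes  -> fast branch: %d\n",
               runtime_len, copy_sketch(dst, src, runtime_len));
        return 0;
}

Built with optimizations enabled (e.g. gcc -O2), the first call prints 1 because the condition folds to a constant after inlining and the other branch is discarded; the second prints 0 even though the runtime value happens to satisfy the size test, mirroring how the patch only takes the 'rep movsq' path for sizes the compiler can prove at build time.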