Use "asm goto" with outputs for clang testing --- arch/x86/include/asm/uaccess.h | 72 +++++++++++++++++++----------------------- 1 file changed, 32 insertions(+), 40 deletions(-) diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h index c8247a84244b..9e8b04d4560a 100644 --- a/arch/x86/include/asm/uaccess.h +++ b/arch/x86/include/asm/uaccess.h @@ -279,65 +279,52 @@ do { \ } while (0) #ifdef CONFIG_X86_32 -#define __get_user_asm_u64(x, ptr, retval, errret) \ +#define __get_user_asm_u64(x, ptr, label) \ ({ \ __typeof__(ptr) __ptr = (ptr); \ - asm volatile("\n" \ - "1: movl %2,%%eax\n" \ - "2: movl %3,%%edx\n" \ - "3:\n" \ - ".section .fixup,\"ax\"\n" \ - "4: mov %4,%0\n" \ - " xorl %%eax,%%eax\n" \ - " xorl %%edx,%%edx\n" \ - " jmp 3b\n" \ - ".previous\n" \ - _ASM_EXTABLE_UA(1b, 4b) \ - _ASM_EXTABLE_UA(2b, 4b) \ - : "=r" (retval), "=&A"(x) \ - : "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1), \ - "i" (errret), "0" (retval)); \ + asm_volatile_goto("\n" \ + "1: movl %1,%%eax\n" \ + "2: movl %2,%%edx\n" \ + _ASM_EXTABLE_UA(1b, %l3) \ + _ASM_EXTABLE_UA(2b, %l3) \ + : "=&A"(x) \ + : "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1) \ + : : label); \ }) #else -#define __get_user_asm_u64(x, ptr, retval, errret) \ - __get_user_asm(x, ptr, retval, "q", "", "=r", errret) +#define __get_user_asm_u64(x, ptr, label) \ + __get_user_asm(x, ptr, "q", "", "=r", label) #endif -#define __get_user_size(x, ptr, size, retval, errret) \ +#define __get_user_size(x, ptr, size, label) \ do { \ - retval = 0; \ __chk_user_ptr(ptr); \ switch (size) { \ case 1: \ - __get_user_asm(x, ptr, retval, "b", "b", "=q", errret); \ + __get_user_asm(x, ptr, "b", "b", "=q", label); \ break; \ case 2: \ - __get_user_asm(x, ptr, retval, "w", "w", "=r", errret); \ + __get_user_asm(x, ptr, "w", "w", "=r", label); \ break; \ case 4: \ - __get_user_asm(x, ptr, retval, "l", "k", "=r", errret); \ + __get_user_asm(x, ptr, "l", "k", "=r", label); \ break; \ case 8: \ - __get_user_asm_u64(x, ptr, retval, errret); \ + __get_user_asm_u64(x, ptr, label); \ break; \ default: \ (x) = __get_user_bad(); \ } \ } while (0) -#define __get_user_asm(x, addr, err, itype, rtype, ltype, errret) \ - asm volatile("\n" \ - "1: mov"itype" %2,%"rtype"1\n" \ - "2:\n" \ - ".section .fixup,\"ax\"\n" \ - "3: mov %3,%0\n" \ - " xor"itype" %"rtype"1,%"rtype"1\n" \ - " jmp 2b\n" \ - ".previous\n" \ - _ASM_EXTABLE_UA(1b, 3b) \ - : "=r" (err), ltype(x) \ - : "m" (__m(addr)), "i" (errret), "0" (err)) +#define __get_user_asm(x, addr, itype, rtype, ltype, label) \ + asm_volatile_goto("\n" \ + "1: mov"itype" %1,%"rtype"0\n" \ + _ASM_EXTABLE_UA(1b, %l2) \ + : ltype(x) \ + : "m" (__m(addr)) \ + : : label) #define __put_user_nocheck(x, ptr, size) \ ({ \ @@ -356,14 +343,21 @@ __pu_label: \ #define __get_user_nocheck(x, ptr, size) \ ({ \ + __label__ __gu_label; \ int __gu_err; \ __inttype(*(ptr)) __gu_val; \ __typeof__(ptr) __gu_ptr = (ptr); \ __typeof__(size) __gu_size = (size); \ __uaccess_begin_nospec(); \ - __get_user_size(__gu_val, __gu_ptr, __gu_size, __gu_err, -EFAULT); \ + __get_user_size(__gu_val, __gu_ptr, __gu_size, __gu_label); \ __uaccess_end(); \ (x) = (__force __typeof__(*(ptr)))__gu_val; \ + __gu_err = 0; \ + if (0) { \ +__gu_label: \ + __uaccess_end(); \ + __gu_err = -EFAULT; \ + } \ __builtin_expect(__gu_err, 0); \ }) @@ -494,11 +488,9 @@ static __must_check __always_inline bool user_access_begin(const void __user *pt #define unsafe_get_user(x, ptr, err_label) \ do { \ - int __gu_err; \ __inttype(*(ptr)) __gu_val; \ - 
---
 arch/x86/include/asm/uaccess.h | 72 +++++++++++++++++++----------------------
 1 file changed, 32 insertions(+), 40 deletions(-)

diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index c8247a84244b..9e8b04d4560a 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -279,65 +279,52 @@ do {									\
 } while (0)
 
 #ifdef CONFIG_X86_32
-#define __get_user_asm_u64(x, ptr, retval, errret)			\
+#define __get_user_asm_u64(x, ptr, label)				\
 ({									\
 	__typeof__(ptr) __ptr = (ptr);					\
-	asm volatile("\n"						\
-		     "1:	movl %2,%%eax\n"			\
-		     "2:	movl %3,%%edx\n"			\
-		     "3:\n"						\
-		     ".section .fixup,\"ax\"\n"				\
-		     "4:	mov %4,%0\n"				\
-		     "	xorl %%eax,%%eax\n"				\
-		     "	xorl %%edx,%%edx\n"				\
-		     "	jmp 3b\n"					\
-		     ".previous\n"					\
-		     _ASM_EXTABLE_UA(1b, 4b)				\
-		     _ASM_EXTABLE_UA(2b, 4b)				\
-		     : "=r" (retval), "=&A"(x)				\
-		     : "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1),	\
-		       "i" (errret), "0" (retval));			\
+	asm_volatile_goto("\n"						\
+		     "1:	movl %1,%%eax\n"			\
+		     "2:	movl %2,%%edx\n"			\
+		     _ASM_EXTABLE_UA(1b, %l3)				\
+		     _ASM_EXTABLE_UA(2b, %l3)				\
+		     : "=&A"(x)						\
+		     : "m" (__m(__ptr)), "m" __m(((u32 __user *)(__ptr)) + 1)	\
+		     : : label);					\
 })
 
 #else
-#define __get_user_asm_u64(x, ptr, retval, errret) \
-	__get_user_asm(x, ptr, retval, "q", "", "=r", errret)
+#define __get_user_asm_u64(x, ptr, label)				\
+	__get_user_asm(x, ptr, "q", "", "=r", label)
 #endif
 
-#define __get_user_size(x, ptr, size, retval, errret)			\
+#define __get_user_size(x, ptr, size, label)				\
 do {									\
-	retval = 0;							\
 	__chk_user_ptr(ptr);						\
 	switch (size) {							\
 	case 1:								\
-		__get_user_asm(x, ptr, retval, "b", "b", "=q", errret);	\
+		__get_user_asm(x, ptr, "b", "b", "=q", label);		\
 		break;							\
 	case 2:								\
-		__get_user_asm(x, ptr, retval, "w", "w", "=r", errret);	\
+		__get_user_asm(x, ptr, "w", "w", "=r", label);		\
 		break;							\
 	case 4:								\
-		__get_user_asm(x, ptr, retval, "l", "k", "=r", errret);	\
+		__get_user_asm(x, ptr, "l", "k", "=r", label);		\
 		break;							\
 	case 8:								\
-		__get_user_asm_u64(x, ptr, retval, errret);		\
+		__get_user_asm_u64(x, ptr, label);			\
 		break;							\
 	default:							\
 		(x) = __get_user_bad();					\
 	}								\
 } while (0)
 
-#define __get_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
-	asm volatile("\n"						\
-		     "1:	mov"itype" %2,%"rtype"1\n"		\
-		     "2:\n"						\
-		     ".section .fixup,\"ax\"\n"				\
-		     "3:	mov %3,%0\n"				\
-		     "	xor"itype" %"rtype"1,%"rtype"1\n"		\
-		     "	jmp 2b\n"					\
-		     ".previous\n"					\
-		     _ASM_EXTABLE_UA(1b, 3b)				\
-		     : "=r" (err), ltype(x)				\
-		     : "m" (__m(addr)), "i" (errret), "0" (err))
+#define __get_user_asm(x, addr, itype, rtype, ltype, label)		\
+	asm_volatile_goto("\n"						\
+		     "1:	mov"itype" %1,%"rtype"0\n"		\
+		     _ASM_EXTABLE_UA(1b, %l2)				\
+		     : ltype(x)						\
+		     : "m" (__m(addr))					\
+		     : : label)
 
 #define __put_user_nocheck(x, ptr, size)				\
 ({									\
@@ -356,14 +343,21 @@ __pu_label:							\
 
 #define __get_user_nocheck(x, ptr, size)				\
 ({									\
+	__label__ __gu_label;						\
 	int __gu_err;							\
 	__inttype(*(ptr)) __gu_val;					\
 	__typeof__(ptr) __gu_ptr = (ptr);				\
 	__typeof__(size) __gu_size = (size);				\
 	__uaccess_begin_nospec();					\
-	__get_user_size(__gu_val, __gu_ptr, __gu_size, __gu_err, -EFAULT); \
+	__get_user_size(__gu_val, __gu_ptr, __gu_size, __gu_label);	\
 	__uaccess_end();						\
 	(x) = (__force __typeof__(*(ptr)))__gu_val;			\
+	__gu_err = 0;							\
+	if (0) {							\
+__gu_label:								\
+		__uaccess_end();					\
+		__gu_err = -EFAULT;					\
+	}								\
 	__builtin_expect(__gu_err, 0);					\
 })
 
@@ -494,11 +488,9 @@ static __must_check __always_inline bool user_access_begin(const void __user *pt
 
 #define unsafe_get_user(x, ptr, err_label)					\
 do {										\
-	int __gu_err;								\
 	__inttype(*(ptr)) __gu_val;						\
-	__get_user_size(__gu_val, (ptr), sizeof(*(ptr)), __gu_err, -EFAULT);	\
+	__get_user_size(__gu_val, (ptr), sizeof(*(ptr)), err_label);		\
 	(x) = (__force __typeof__(*(ptr)))__gu_val;				\
-	if (unlikely(__gu_err)) goto err_label;					\
 } while (0)
 
 /*