Message-ID: <20211105180152.oi7h3p6fb6c5pjkf@treble>
Date:   Fri, 5 Nov 2021 11:01:52 -0700
From:   Josh Poimboeuf <jpoimboe@...hat.com>
To:     Peter Zijlstra <peterz@...radead.org>
Cc:     x86@...nel.org, linux-kernel@...r.kernel.org, mark.rutland@....com,
        dvyukov@...gle.com, seanjc@...gle.com, pbonzini@...hat.com,
        mbenes@...e.cz
Subject: Re: [PATCH 20/22] x86,word-at-a-time: Remove .fixup usage

On Fri, Nov 05, 2021 at 06:10:43PM +0100, Peter Zijlstra wrote:
> +#ifdef CONFIG_CC_HAS_ASM_GOTO_OUTPUT
> +
> +static inline unsigned long load_unaligned_zeropad(const void *addr)
> +{
> +	unsigned long offset, data;
> +	unsigned long ret;
> +
> +	asm_volatile_goto(
> +		"1:	mov %[mem], %[ret]\n"
> +
> +		_ASM_EXTABLE(1b, %l[do_exception])
> +
> +		: [ret] "=&r" (ret)
> +		: [mem] "m" (*(unsigned long *)addr)
> +		: : do_exception);
> +
> +out:
> +	return ret;
> +
> +do_exception: __cold;
> +
> +	offset = (unsigned long)addr & (sizeof(long) - 1);
> +	addr = (void *)((unsigned long)addr & ~(sizeof(long) - 1));
> +	data = *(unsigned long *)addr;
> +	ret = data >> offset * 8;
> +	goto out;

Superfluous goto; can't it just return here?
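i.e. something like this (untested), which also drops the now-unused
out: label:

		: : do_exception);

	return ret;

do_exception: __cold;

	/* Same unaligned fixup as before, just returning directly: */
	offset = (unsigned long)addr & (sizeof(long) - 1);
	addr = (void *)((unsigned long)addr & ~(sizeof(long) - 1));
	data = *(unsigned long *)addr;
	return data >> offset * 8;
}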

>  static inline unsigned long load_unaligned_zeropad(const void *addr)
>  {
> -	unsigned long ret, dummy;
> +	unsigned long offset, data;
> +	unsigned long ret, err = 0;
>  
> -	asm(
> -		"1:\tmov %2,%0\n"
> +	asm(	"1:	mov %[mem], %[ret]\n"
>  		"2:\n"
> -		".section .fixup,\"ax\"\n"
> -		"3:\t"
> -		"lea %2,%1\n\t"
> -		"and %3,%1\n\t"
> -		"mov (%1),%0\n\t"
> -		"leal %2,%%ecx\n\t"
> -		"andl %4,%%ecx\n\t"
> -		"shll $3,%%ecx\n\t"
> -		"shr %%cl,%0\n\t"
> -		"jmp 2b\n"
> -		".previous\n"
> -		_ASM_EXTABLE(1b, 3b)
> -		:"=&r" (ret),"=&c" (dummy)
> -		:"m" (*(unsigned long *)addr),
> -		 "i" (-sizeof(unsigned long)),
> -		 "i" (sizeof(unsigned long)-1));
> +
> +		_ASM_EXTABLE_FAULT(1b, 2b)
> +
> +		: [ret] "=&r" (ret), "+a" (err)
> +		: [mem] "m" (*(unsigned long *)addr));
> +
> +	if (unlikely(err)) {
> +		offset = (unsigned long)addr & (sizeof(long) - 1);
> +		addr = (void *)((unsigned long)addr & ~(sizeof(long) - 1));
> +		data = *(unsigned long *)addr;
> +		ret = data >> offset * 8;
> +	}
> +
>  	return ret;

This adds a (normally not taken) conditional jump; wouldn't a straight
jmp over the fixup be better?

i.e.

	1: mov %[mem], %[ret]
	jmp 2f
	... fixup code ...
	2:
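
In the asm that might look something like this (untested sketch; the
fixup body would be the old lea/and/mov/shr sequence from the .fixup
section, minus the trailing jmp back since it now falls through to 2,
and it would need the "=&c" (dummy) clobber back, while err and the
if (unlikely(err)) block go away entirely):

	asm(	"1:	mov %[mem], %[ret]\n"
		"	jmp 2f\n"
		"3:	/* fixup: realign the load and shift out the bad bytes */\n"
		"2:\n"

		_ASM_EXTABLE(1b, 3b)

		: [ret] "=&r" (ret), "=&c" (dummy)
		: [mem] "m" (*(unsigned long *)addr));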

-- 
Josh
