 arch/x86/include/asm/cmpxchg_32.h | 28 ++++++++++++----------------
 1 file changed, 12 insertions(+), 16 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index ed2797f132ce..b19985520f3c 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -88,18 +88,17 @@ static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp,
 
 #define __arch_cmpxchg64_emu(_ptr, _old, _new, _lock_loc, _lock)	\
 ({									\
-	union __u64_halves o = { .full = (_old), },			\
-			   n = { .full = (_new), };			\
+	__u64 o = (_old);						\
+	union __u64_halves n = { .full = (_new), };			\
 									\
 	asm volatile(ALTERNATIVE(_lock_loc				\
 				 "call cmpxchg8b_emu",			\
-				 _lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
-		     : [ptr] "+m" (*(_ptr)),				\
-		       "+a" (o.low), "+d" (o.high)			\
-		     : "b" (n.low), "c" (n.high), "S" (_ptr)		\
+				 _lock "cmpxchg8b 0(%[ptr])", X86_FEATURE_CX8) \
+		     : "+A" (o)						\
+		     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)	\
 		     : "memory");					\
 									\
-	o.full;								\
+	o;								\
 })
 
 static __always_inline u64 arch_cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
@@ -116,22 +115,19 @@ static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64
 
 #define __arch_try_cmpxchg64_emu(_ptr, _oldp, _new, _lock_loc, _lock)	\
 ({									\
-	union __u64_halves o = { .full = *(_oldp), },			\
-			   n = { .full = (_new), };			\
+	__u64 o = *(_oldp);						\
+	union __u64_halves n = { .full = (_new), };			\
 	bool ret;							\
 									\
 	asm volatile(ALTERNATIVE(_lock_loc				\
 				 "call cmpxchg8b_emu",			\
-				 _lock "cmpxchg8b %[ptr]", X86_FEATURE_CX8) \
+				 _lock "cmpxchg8b 0(%[ptr])", X86_FEATURE_CX8) \
 		     CC_SET(e)						\
-		     : CC_OUT(e) (ret),					\
-		       [ptr] "+m" (*(_ptr)),				\
-		       "+a" (o.low), "+d" (o.high)			\
-		     : "b" (n.low), "c" (n.high), "S" (_ptr)		\
+		     : CC_OUT(e) (ret), "+A" (o)			\
+		     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)	\
 		     : "memory");					\
 									\
-	if (unlikely(!ret))						\
-	*(_oldp) = o.full;						\
+	*(_oldp) = o;							\
 									\
 	likely(ret);							\
 })
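
For reference, here is a minimal standalone sketch (not part of the patch) of
the "+A" constraint the new code relies on: on x86-32, "A" binds a 64-bit
value to the edx:eax register pair, which is exactly the implicit
comparand/result pair of cmpxchg8b. This is illustrative userspace code
assuming gcc/clang with -m32 on a CX8-capable CPU; the function name is
hypothetical and it deliberately uses a plain "+m" operand rather than the
patch's 0(%[ptr]) form, since it does not go through cmpxchg8b_emu:

	#include <stdint.h>
	#include <stdio.h>

	static uint64_t cmpxchg64_demo(volatile uint64_t *ptr,
				       uint64_t old, uint64_t new)
	{
		uint32_t new_low  = (uint32_t)new;
		uint32_t new_high = (uint32_t)(new >> 32);

		/*
		 * cmpxchg8b compares edx:eax with *ptr; on a match it
		 * stores ecx:ebx, otherwise it loads *ptr into edx:eax.
		 * "+A" covers both halves of 'old' with one operand, so
		 * no union of low/high words is needed.
		 */
		asm volatile("lock cmpxchg8b %[ptr]"
			     : "+A" (old), [ptr] "+m" (*ptr)
			     : "b" (new_low), "c" (new_high)
			     : "memory");

		return old;	/* previous value, as with arch_cmpxchg64() */
	}

	int main(void)
	{
		uint64_t v = 0x1122334455667788ULL;
		uint64_t prev = cmpxchg64_demo(&v, 0x1122334455667788ULL, 42);

		/* prev is the old value; v is now 42 because prev matched */
		printf("prev=%#llx v=%llu\n",
		       (unsigned long long)prev, (unsigned long long)v);
		return 0;
	}

Note the remaining difference from the patch: the kernel variant also feeds
the pointer in "S" (esi) because the cmpxchg8b_emu fallback takes it there,
which is why the CX8 alternative addresses memory as 0(%[ptr]) instead of a
separate "+m" operand.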