#include <stdint.h>

typedef uint64_t u64;
typedef uint32_t u32;

/* 8-byte-aligned u64, required for a valid cmpxchg8b target on i386. */
#define __aligned_u64 u64 __attribute__((aligned(8)))
#define LOCK_PREFIX "lock "
#define likely(x)   __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)

/* PAUSE hint for spin-wait loops ("rep; nop" encodes the PAUSE insn). */
static inline void cpu_relax(void)
{
	__asm__ volatile("rep; nop" ::: "memory");
}

/*
 * Atomically compare *ptr with old; if equal, store new.
 * Returns the value *ptr held before the operation, so the caller
 * detects success by (return == old).
 *
 * The "=A"/"0" (edx:eax pair) constraint is only meaningful on i386;
 * on other targets fall back to the GCC __atomic builtin, which has
 * identical compare-and-swap semantics.
 */
static inline u64 cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
{
#if defined(__i386__)
	u64 prev;
	__asm__ volatile(LOCK_PREFIX "cmpxchg8b %1"
		     : "=A" (prev), "+m" (*ptr)
		     : "b" ((u32)new), "c" ((u32)(new >> 32)), "0" (old)
		     : "memory");
	return prev;
#else
	/* On failure the builtin writes the observed value into 'old'. */
	__atomic_compare_exchange_n(ptr, &old, new, 0,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return old;
#endif
}

/*
 * Legacy CAS loop: compare the cmpxchg return value against the
 * expected value by hand.  't' gates the loop (in the real lockref
 * code this is "spinlock not held"); up to 100 attempts, pausing
 * between retries.  SUCCESS must transfer control out of the loop.
 */
#define CMPXCHG_LOOP_OLD(CODE, SUCCESS) do { \
	int retry = 100; \
	__aligned_u64 old = *lockref; \
	while (t) { \
		__aligned_u64 new = old, prev = old; \
		CODE \
		old = cmpxchg64(lockref, old, new); \
		if (likely(old == prev)) { \
			SUCCESS; \
		} \
		if (!--retry) \
			break; \
		cpu_relax(); \
	} \
} while (0)

/* Increment *lockref once via the legacy CAS loop; no-op when !t. */
void lockref_get_old(u64 *lockref, _Bool t)
{
	CMPXCHG_LOOP_OLD(
		new++;
	,
		return;
	);
}

/*
 * try_cmpxchg built on top of plain cmpxchg64: returns true on
 * success; on failure updates *_oldp to the observed value so the
 * caller can retry without an extra load.
 */
#define try_cmpxchg64_fallback(_ptr, _oldp, _new) \
({ \
	__typeof__(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = cmpxchg64((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})

/* CAS loop using the try_cmpxchg fallback wrapper (no manual compare). */
#define CMPXCHG_LOOP_FALLBACK(CODE, SUCCESS) do { \
	int retry = 100; \
	__aligned_u64 old = *lockref; \
	while (t) { \
		__aligned_u64 new = old; \
		CODE \
		if (likely(try_cmpxchg64_fallback(lockref, &old, new))) { \
			SUCCESS; \
		} \
		if (!--retry) \
			break; \
		cpu_relax(); \
	} \
} while (0)

/* Increment *lockref once via the fallback try_cmpxchg loop; no-op when !t. */
void lockref_get_fallback(u64 *lockref, _Bool t)
{
	CMPXCHG_LOOP_FALLBACK(
		new++;
	,
		return;
	);
}

/*
 * Native try_cmpxchg: success comes straight from the ZF flag
 * ("=@ccz"), and cmpxchg8b itself deposits the observed value in
 * edx:eax on failure — no separate compare or reload needed.
 * i386-only asm; portable builtin otherwise (same semantics).
 */
static inline _Bool try_cmpxchg64(volatile u64 *ptr, u64 *pold, u64 new)
{
#if defined(__i386__)
	_Bool success;
	u64 old = *pold;
	__asm__ volatile(LOCK_PREFIX "cmpxchg8b %[ptr]"
		     : "=@ccz" (success), [ptr] "+m" (*ptr), "+A" (old)
		     : "b" ((u32)new), "c" ((u32)(new >> 32))
		     : "memory");
	if (unlikely(!success))
		*pold = old;
	return success;
#else
	return __atomic_compare_exchange_n(ptr, pold, new, 0,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
#endif
}

/* CAS loop using the flag-returning try_cmpxchg64. */
#define CMPXCHG_LOOP_NEW(CODE, SUCCESS) do { \
	int retry = 100; \
	__aligned_u64 old = *lockref; \
	while (t) { \
		__aligned_u64 new = old; \
		CODE \
		if (likely(try_cmpxchg64(lockref, &old, new))) { \
			SUCCESS; \
		} \
		if (!--retry) \
			break; \
		cpu_relax(); \
	} \
} while (0)

/* Increment *lockref once via the flag-based try_cmpxchg loop; no-op when !t. */
void lockref_get_new(u64 *lockref, _Bool t)
{
	CMPXCHG_LOOP_NEW(
		new++;
	,
		return;
	);
}