diff --git a/arch/x86/include/asm/bitops.h b/arch/x86/include/asm/bitops.h
index 02b47a6..56fd9cc 100644
--- a/arch/x86/include/asm/bitops.h
+++ b/arch/x86/include/asm/bitops.h
@@ -41,6 +41,16 @@
 #define CONST_MASK_ADDR(nr, addr)	BITOP_ADDR((void *)(addr) + ((nr)>>3))
 #define CONST_MASK(nr)			(1 << ((nr) & 7))
 
+/*
+ * How to pass the bit-number operand to the bitops instructions. Taking
+ * it as unsigned long makes gcc generate 64-bit operations on 64-bit kernels.
+ */
+#ifdef CONFIG_X86_64
+#define IDX(nr) "Jr" (nr)
+#else
+#define IDX(nr) "Ir" (nr)
+#endif
+
 /**
  * set_bit - Atomically set a bit in memory
  * @nr: the bit to set
@@ -57,7 +67,7 @@
  * restricted to acting on a single-word quantity.
  */
 static __always_inline void
-set_bit(unsigned int nr, volatile unsigned long *addr)
+set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	if (IS_IMMEDIATE(nr)) {
 		asm volatile(LOCK_PREFIX "orb %1,%0"
@@ -66,7 +76,7 @@ set_bit(unsigned int nr, volatile unsigned long *addr)
 			: "memory");
 	} else {
 		asm volatile(LOCK_PREFIX "bts %1,%0"
-			: BITOP_ADDR(addr) : "Ir" (nr) : "memory");
+			: BITOP_ADDR(addr) : IDX(nr) : "memory");
 	}
 }
 
@@ -79,9 +89,9 @@ set_bit(unsigned int nr, volatile unsigned long *addr)
  * If it's called on the same region of memory simultaneously, the effect
  * may be that only one operation succeeds.
  */
-static inline void __set_bit(int nr, volatile unsigned long *addr)
+static inline void __set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	asm volatile("bts %1,%0" : ADDR : "Ir" (nr) : "memory");
+	asm volatile("bts %1,%0" : ADDR : IDX(nr) : "memory");
 }
 
 /**
@@ -95,7 +105,7 @@ static inline void __set_bit(int nr, volatile unsigned long *addr)
  * in order to ensure changes are visible on other processors.
  */
 static __always_inline void
-clear_bit(int nr, volatile unsigned long *addr)
+clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	if (IS_IMMEDIATE(nr)) {
 		asm volatile(LOCK_PREFIX "andb %1,%0"
@@ -104,7 +114,7 @@ clear_bit(int nr, volatile unsigned long *addr)
 	} else {
 		asm volatile(LOCK_PREFIX "btr %1,%0"
 			: BITOP_ADDR(addr)
-			: "Ir" (nr));
+			: IDX(nr));
 	}
 }
 
@@ -116,15 +126,15 @@
 * clear_bit() is atomic and implies release semantics before the memory
 * operation. It can be used for an unlock.
 */
-static inline void clear_bit_unlock(unsigned nr, volatile unsigned long *addr)
+static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
 {
 	barrier();
 	clear_bit(nr, addr);
 }
 
-static inline void __clear_bit(int nr, volatile unsigned long *addr)
+static inline void __clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	asm volatile("btr %1,%0" : ADDR : "Ir" (nr));
+	asm volatile("btr %1,%0" : ADDR : IDX(nr));
 }
 
 /*
@@ -139,7 +149,7 @@ static inline void __clear_bit(int nr, volatile unsigned long *addr)
 * No memory barrier is required here, because x86 cannot reorder stores past
 * older loads. Same principle as spin_unlock.
 */
-static inline void __clear_bit_unlock(unsigned nr, volatile unsigned long *addr)
+static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
 {
 	barrier();
 	__clear_bit(nr, addr);
@@ -157,9 +167,9 @@ static inline void __clear_bit_unlock(unsigned nr, volatile unsigned long *addr)
 * If it's called on the same region of memory simultaneously, the effect
 * may be that only one operation succeeds.
 */
-static inline void __change_bit(int nr, volatile unsigned long *addr)
+static inline void __change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	asm volatile("btc %1,%0" : ADDR : "Ir" (nr));
+	asm volatile("btc %1,%0" : ADDR : IDX(nr));
 }
 
 /**
@@ -171,7 +181,7 @@ static inline void __change_bit(int nr, volatile unsigned long *addr)
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
-static inline void change_bit(int nr, volatile unsigned long *addr)
+static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	if (IS_IMMEDIATE(nr)) {
 		asm volatile(LOCK_PREFIX "xorb %1,%0"
@@ -180,7 +190,7 @@ static inline void change_bit(int nr, volatile unsigned long *addr)
 	} else {
 		asm volatile(LOCK_PREFIX "btc %1,%0"
 			: BITOP_ADDR(addr)
-			: "Ir" (nr));
+			: IDX(nr));
 	}
 }
 
@@ -192,12 +202,12 @@
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
-static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
 	asm volatile(LOCK_PREFIX "bts %2,%1\n\t"
-		     "sbb %0,%0" : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");
+		     "sbb %0,%0" : "=r" (oldbit), ADDR : IDX(nr) : "memory");
 
 	return oldbit;
 }
@@ -210,7 +220,7 @@
 * This is the same as test_and_set_bit on x86.
 */
 static __always_inline int
-test_and_set_bit_lock(int nr, volatile unsigned long *addr)
+test_and_set_bit_lock(unsigned long nr, volatile unsigned long *addr)
 {
 	return test_and_set_bit(nr, addr);
 }
@@ -224,14 +234,14 @@ test_and_set_bit_lock(int nr, volatile unsigned long *addr)
 * If two examples of this operation race, one can appear to succeed
 * but actually fail. You must protect multiple accesses with a lock.
 */
-static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
 	asm("bts %2,%1\n\t"
 	    "sbb %0,%0"
 	    : "=r" (oldbit), ADDR
-	    : "Ir" (nr));
+	    : IDX(nr));
 	return oldbit;
 }
 
@@ -243,13 +253,13 @@
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
-static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
 	asm volatile(LOCK_PREFIX "btr %2,%1\n\t"
 		     "sbb %0,%0"
-		     : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");
+		     : "=r" (oldbit), ADDR : IDX(nr) : "memory");
 
 	return oldbit;
 }
@@ -263,26 +273,26 @@
 * If two examples of this operation race, one can appear to succeed
 * but actually fail. You must protect multiple accesses with a lock.
 */
-static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
 	asm volatile("btr %2,%1\n\t"
 		     "sbb %0,%0"
 		     : "=r" (oldbit), ADDR
-		     : "Ir" (nr));
+		     : IDX(nr));
 	return oldbit;
 }
 
 /* WARNING: non atomic and it can be reordered!
 */
-static inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
+static inline int __test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
 	asm volatile("btc %2,%1\n\t"
 		     "sbb %0,%0"
		     : "=r" (oldbit), ADDR
-		     : "Ir" (nr) : "memory");
+		     : IDX(nr) : "memory");
 	return oldbit;
 }
 
@@ -295,31 +305,31 @@ static inline int __test_and_change_bit(int nr, volatile unsigned long *addr)
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
-static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
+static inline int test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
 {
 	int oldbit;
 
 	asm volatile(LOCK_PREFIX "btc %2,%1\n\t"
 		     "sbb %0,%0"
-		     : "=r" (oldbit), ADDR : "Ir" (nr) : "memory");
+		     : "=r" (oldbit), ADDR : IDX(nr) : "memory");
 
 	return oldbit;
 }
 
-static __always_inline int constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
+static __always_inline int constant_test_bit(unsigned long nr, const volatile unsigned long *addr)
 {
 	return ((1UL << (nr % BITS_PER_LONG)) &
 		(((unsigned long *)addr)[nr / BITS_PER_LONG])) != 0;
 }
 
-static inline int variable_test_bit(int nr, volatile const unsigned long *addr)
+static inline int variable_test_bit(unsigned long nr, volatile const unsigned long *addr)
 {
 	int oldbit;
 
 	asm volatile("bt %2,%1\n\t"
 		     "sbb %0,%0"
 		     : "=r" (oldbit)
-		     : "m" (*(unsigned long *)addr), "Ir" (nr));
+		     : "m" (*(unsigned long *)addr), IDX(nr));
 
 	return oldbit;
 }
@@ -330,7 +340,7 @@ static inline int variable_test_bit(int nr, volatile const unsigned long *addr)
 * @nr: bit number to test
 * @addr: Address to start counting from
 */
-static int test_bit(int nr, const volatile unsigned long *addr);
+static int test_bit(unsigned long nr, const volatile unsigned long *addr);
 #endif
 
 #define test_bit(nr, addr)			\
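
Why the wider index matters: the bt-family instructions, when given a memory
operand and a register bit offset, treat the full register as a signed bit
displacement from the base address and perform the word selection themselves.
With the old "int nr" prototypes, bit numbers of 2^31 and above are truncated
or sign-extended before they ever reach the instruction. Below is a minimal
user-space sketch of that behaviour, assuming x86-64 and gcc-style inline asm;
the set_bit64() helper is hypothetical and not part of the patch.

	#include <stdio.h>

	/* Same pattern as the patched __set_bit(): an unsigned long index
	 * makes the assembler emit the 64-bit form of bts. */
	static inline void set_bit64(unsigned long nr, volatile unsigned long *addr)
	{
		/* With a memory operand, bts selects the target word itself:
		 * bit nr lands in addr[nr / 64], at position nr % 64. The
		 * "memory" clobber covers words beyond *addr, as in the patch. */
		asm volatile("bts %1,%0" : "+m" (*addr) : "r" (nr) : "memory");
	}

	int main(void)
	{
		unsigned long bitmap[2] = { 0, 0 };

		set_bit64(65, bitmap);		/* sets bit 1 of bitmap[1] */
		printf("bitmap[1] = %#lx\n", bitmap[1]);	/* prints 0x2 */
		return 0;
	}

The sketch uses a plain "r" constraint for simplicity; the patch's IDX() also
allows an immediate encoding when nr is a compile-time constant. gcc's "I"
constraint only accepts constants 0..31, which suits 32-bit operands, while
"J" accepts 0..63 and so matches the 64-bit instruction forms.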