Lose constraints on cmpxchg

From: Jan Beulich

While only cosmetic for x86-64, this adjusts the cmpxchg code apparently
inherited from i386 to use more generic constraints. The "q" constraint
restricts byte operands to %eax/%ebx/%ecx/%edx on i386; in 64-bit mode
the low byte of every general-purpose register is addressable, so "q"
and "r" denote the same register class there and the generic "r"
constraint suffices.

Signed-off-by: Jan Beulich
Signed-off-by: Andi Kleen

Index: linux/include/asm-x86_64/system.h
===================================================================
--- linux.orig/include/asm-x86_64/system.h
+++ linux/include/asm-x86_64/system.h
@@ -247,25 +247,25 @@ static inline unsigned long __cmpxchg(vo
 	case 1:
 		__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
 				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
 				     : "memory");
 		return prev;
 	case 2:
 		__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
 				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
 				     : "memory");
 		return prev;
 	case 4:
 		__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
 				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
 				     : "memory");
 		return prev;
 	case 8:
 		__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
 				     : "=a"(prev)
-				     : "q"(new), "m"(*__xg(ptr)), "0"(old)
+				     : "r"(new), "m"(*__xg(ptr)), "0"(old)
 				     : "memory");
 		return prev;
 	}
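
For illustration only (not part of the patch): a minimal user-space sketch
of the relaxed byte-size case. The function name cmpxchg8 and the test
harness are hypothetical; it assumes GCC-style inline asm compiled for an
x86-64 target, where %b1 can name the low byte of any register the
compiler picks for the "r" operand.

	#include <stdio.h>

	/* Byte compare-and-exchange using the generic "r" constraint.
	 * On i386 this would need "q" so that %b1 stays within
	 * %al/%bl/%cl/%dl; on x86-64 any GP register works.
	 */
	static unsigned char cmpxchg8(volatile unsigned char *ptr,
				      unsigned char old, unsigned char new)
	{
		unsigned char prev;
		__asm__ __volatile__("lock; cmpxchgb %b1,%2"
				     : "=a" (prev)
				     : "r" (new), "m" (*ptr), "0" (old)
				     : "memory");
		return prev;
	}

	int main(void)
	{
		volatile unsigned char v = 1;
		/* %al holds 1, matches v, so v becomes 2 and prev is 1 */
		unsigned char prev = cmpxchg8(&v, 1, 2);
		printf("prev=%u v=%u\n", (unsigned)prev, (unsigned)v);
		return 0;
	}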