x86, asm: Clean up and simplify <asm/cmpxchg.h>
author     H. Peter Anvin <hpa@linux.intel.com>
           Wed, 28 Jul 2010 22:18:35 +0000 (15:18 -0700)
committer  H. Peter Anvin <hpa@linux.intel.com>
           Wed, 28 Jul 2010 22:24:09 +0000 (15:24 -0700)
Remove the __xg() hack to create a memory barrier near xchg and
cmpxchg; it has been there since 1.3.11 but should not be necessary
with "asm volatile" and a "memory" clobber, neither of which were
there in the original implementation.

However, we *should* make this a volatile reference.

Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
LKML-Reference: <AANLkTikAmaDPji-TVDarmG1yD=fwbffcsmEU=YEuP+8r@mail.gmail.com>
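
For context, a minimal standalone sketch of the before/after pattern (simplified
user-space GNU C on x86; xchg32_old()/xchg32_new() are illustrative names, not
kernel code): the old __xg() cast to an oversized dummy struct tried to make gcc
treat the "+m" operand as covering arbitrary memory, while "asm volatile" plus
the "memory" clobber already forbid reordering, and a properly typed volatile
pointer keeps the access itself from being elided:

    #include <stdint.h>
    #include <stdio.h>

    /* Old style: cast through a huge dummy struct so the "+m" operand
     * looks like it may touch arbitrary memory. */
    struct __xchg_dummy { unsigned long a[100]; };
    #define __xg(x) ((struct __xchg_dummy *)(x))

    static inline uint32_t xchg32_old(void *ptr, uint32_t x)
    {
            asm volatile("xchgl %0,%1"
                         : "=r" (x), "+m" (*__xg(ptr))
                         : "0" (x)
                         : "memory");
            return x;
    }

    /* New style: a volatile pointer of the correct width; "asm volatile"
     * and the "memory" clobber already act as the compiler barrier. */
    static inline uint32_t xchg32_new(volatile uint32_t *ptr, uint32_t x)
    {
            asm volatile("xchgl %0,%1"
                         : "=r" (x), "+m" (*ptr)
                         : "0" (x)
                         : "memory");
            return x;
    }

    int main(void)
    {
            volatile uint32_t v = 1;
            uint32_t old = xchg32_new(&v, 2);
            printf("old=%u now=%u\n", old, (unsigned)v);
            return 0;
    }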

arch/x86/include/asm/cmpxchg_32.h
arch/x86/include/asm/cmpxchg_64.h

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 20955ea..f5bd1fd 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
 extern void __xchg_wrong_size(void);
 
 /*
- * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
- * Note 2: xchg has side effect, so that attribute volatile is necessary,
- *       but generally the primitive is invalid, *ptr is output argument. --ANK
+ * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
+ * Since this is generally used to protect other memory information, we
+ * use "asm volatile" and "memory" clobbers to prevent gcc from moving
+ * information around.
  */
-
-struct __xchg_dummy {
-       unsigned long a[100];
-};
-#define __xg(x) ((struct __xchg_dummy *)(x))
-
 #define __xchg(x, ptr, size)                                           \
 ({                                                                     \
        __typeof(*(ptr)) __x = (x);                                     \
        switch (size) {                                                 \
        case 1:                                                         \
-               asm volatile("xchgb %b0,%1"                             \
-                            : "=q" (__x), "+m" (*__xg(ptr))            \
+       {                                                               \
+               volatile u8 *__ptr = (volatile u8 *)(ptr);              \
+               asm volatile("xchgb %0,%1"                              \
+                            : "=q" (__x), "+m" (*__ptr)                \
                             : "0" (__x)                                \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 2:                                                         \
-               asm volatile("xchgw %w0,%1"                             \
-                            : "=r" (__x), "+m" (*__xg(ptr))            \
+       {                                                               \
+               volatile u16 *__ptr = (volatile u16 *)(ptr);            \
+               asm volatile("xchgw %0,%1"                              \
+                            : "=r" (__x), "+m" (*__ptr)                \
                             : "0" (__x)                                \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 4:                                                         \
+       {                                                               \
+               volatile u32 *__ptr = (volatile u32 *)(ptr);            \
                asm volatile("xchgl %0,%1"                              \
-                            : "=r" (__x), "+m" (*__xg(ptr))            \
+                            : "=r" (__x), "+m" (*__ptr)                \
                             : "0" (__x)                                \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        default:                                                        \
                __xchg_wrong_size();                                    \
        }                                                               \
@@ -94,23 +98,32 @@ extern void __cmpxchg_wrong_size(void);
        __typeof__(*(ptr)) __new = (new);                               \
        switch (size) {                                                 \
        case 1:                                                         \
-               asm volatile(lock "cmpxchgb %b2,%1"                     \
-                            : "=a" (__ret), "+m" (*__xg(ptr))          \
+       {                                                               \
+               volatile u8 *__ptr = (volatile u8 *)(ptr);              \
+               asm volatile(lock "cmpxchgb %2,%1"                      \
+                            : "=a" (__ret), "+m" (*__ptr)              \
                             : "q" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 2:                                                         \
-               asm volatile(lock "cmpxchgw %w2,%1"                     \
-                            : "=a" (__ret), "+m" (*__xg(ptr))          \
+       {                                                               \
+               volatile u16 *__ptr = (volatile u16 *)(ptr);            \
+               asm volatile(lock "cmpxchgw %2,%1"                      \
+                            : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 4:                                                         \
+       {                                                               \
+               volatile u32 *__ptr = (volatile u32 *)(ptr);            \
                asm volatile(lock "cmpxchgl %2,%1"                      \
-                            : "=a" (__ret), "+m" (*__xg(ptr))          \
+                            : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        default:                                                        \
                __cmpxchg_wrong_size();                                 \
        }                                                               \
@@ -148,31 +161,27 @@ extern void __cmpxchg_wrong_size(void);
                                               (unsigned long long)(n)))
 #endif
 
-static inline unsigned long long __cmpxchg64(volatile void *ptr,
-                                            unsigned long long old,
-                                            unsigned long long new)
+static inline u64 __cmpxchg64(volatile u64 *ptr, u64 old, u64 new)
 {
-       unsigned long long prev;
+       u64 prev;
        asm volatile(LOCK_PREFIX "cmpxchg8b %1"
                     : "=A" (prev),
-                      "+m" (*__xg(ptr))
-                    : "b" ((unsigned long)new),
-                      "c" ((unsigned long)(new >> 32)),
+                      "+m" (*ptr)
+                    : "b" ((u32)new),
+                      "c" ((u32)(new >> 32)),
                       "0" (old)
                     : "memory");
        return prev;
 }
 
-static inline unsigned long long __cmpxchg64_local(volatile void *ptr,
-                                                  unsigned long long old,
-                                                  unsigned long long new)
+static inline u64 __cmpxchg64_local(volatile u64 *ptr, u64 old, u64 new)
 {
-       unsigned long long prev;
+       u64 prev;
        asm volatile("cmpxchg8b %1"
                     : "=A" (prev),
-                      "+m" (*__xg(ptr))
-                    : "b" ((unsigned long)new),
-                      "c" ((unsigned long)(new >> 32)),
+                      "+m" (*ptr)
+                    : "b" ((u32)new),
+                      "c" ((u32)(new >> 32)),
                       "0" (old)
                     : "memory");
        return prev;
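
The operand plumbing for cmpxchg8b above is easy to misread: "=A" binds the
64-bit old/result value to the EDX:EAX register pair, and "b"/"c" feed the low
and high halves of the new value through EBX and ECX. As an illustration only
(a hypothetical caller, assuming the kernel's u64 type and the __cmpxchg64()
defined above), a 64-bit atomic add on 32-bit x86 is the classic
compare-exchange retry loop:

    /* Hypothetical caller: advance a 64-bit counter atomically on
     * 32-bit x86.  cmpxchg8b compares EDX:EAX with *ctr and, on match,
     * stores ECX:EBX -- exactly what the "=A", "b" and "c" constraints
     * in __cmpxchg64() arrange. */
    static inline u64 counter_add64(volatile u64 *ctr, u64 delta)
    {
            u64 old, new;
            do {
                    old = *ctr;             /* snapshot the current value */
                    new = old + delta;      /* value we want to install */
            } while (__cmpxchg64(ctr, old, new) != old);    /* lost a race: retry */
            return new;
    }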
diff --git a/arch/x86/include/asm/cmpxchg_64.h b/arch/x86/include/asm/cmpxchg_64.h
index 9596e7c..423ae58 100644
--- a/arch/x86/include/asm/cmpxchg_64.h
+++ b/arch/x86/include/asm/cmpxchg_64.h
@@ -3,8 +3,6 @@
 
 #include <asm/alternative.h> /* Provides LOCK_PREFIX */
 
-#define __xg(x) ((volatile long *)(x))
-
 static inline void set_64bit(volatile u64 *ptr, u64 val)
 {
        *ptr = val;
@@ -14,38 +12,51 @@ extern void __xchg_wrong_size(void);
 extern void __cmpxchg_wrong_size(void);
 
 /*
- * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
- * Note 2: xchg has side effect, so that attribute volatile is necessary,
- *       but generally the primitive is invalid, *ptr is output argument. --ANK
+ * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
+ * Since this is generally used to protect other memory information, we
+ * use "asm volatile" and "memory" clobbers to prevent gcc from moving
+ * information around.
  */
 #define __xchg(x, ptr, size)                                           \
 ({                                                                     \
        __typeof(*(ptr)) __x = (x);                                     \
        switch (size) {                                                 \
        case 1:                                                         \
-               asm volatile("xchgb %b0,%1"                             \
-                            : "=q" (__x), "+m" (*__xg(ptr))            \
+       {                                                               \
+               volatile u8 *__ptr = (volatile u8 *)(ptr);              \
+               asm volatile("xchgb %0,%1"                              \
+                            : "=q" (__x), "+m" (*__ptr)                \
                             : "0" (__x)                                \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 2:                                                         \
-               asm volatile("xchgw %w0,%1"                             \
-                            : "=r" (__x), "+m" (*__xg(ptr))            \
+       {                                                               \
+               volatile u16 *__ptr = (volatile u16 *)(ptr);            \
+               asm volatile("xchgw %0,%1"                              \
+                            : "=r" (__x), "+m" (*__ptr)                \
                             : "0" (__x)                                \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 4:                                                         \
-               asm volatile("xchgl %k0,%1"                             \
-                            : "=r" (__x), "+m" (*__xg(ptr))            \
+       {                                                               \
+               volatile u32 *__ptr = (volatile u32 *)(ptr);            \
+               asm volatile("xchgl %0,%1"                              \
+                            : "=r" (__x), "+m" (*__ptr)                \
                             : "0" (__x)                                \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 8:                                                         \
+       {                                                               \
+               volatile u64 *__ptr = (volatile u64 *)(ptr);            \
                asm volatile("xchgq %0,%1"                              \
-                            : "=r" (__x), "+m" (*__xg(ptr))            \
+                            : "=r" (__x), "+m" (*__ptr)                \
                             : "0" (__x)                                \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        default:                                                        \
                __xchg_wrong_size();                                    \
        }                                                               \
@@ -69,29 +80,41 @@ extern void __cmpxchg_wrong_size(void);
        __typeof__(*(ptr)) __new = (new);                               \
        switch (size) {                                                 \
        case 1:                                                         \
-               asm volatile(lock "cmpxchgb %b2,%1"                     \
-                            : "=a" (__ret), "+m" (*__xg(ptr))          \
+       {                                                               \
+               volatile u8 *__ptr = (volatile u8 *)(ptr);              \
+               asm volatile(lock "cmpxchgb %2,%1"                      \
+                            : "=a" (__ret), "+m" (*__ptr)              \
                             : "q" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 2:                                                         \
-               asm volatile(lock "cmpxchgw %w2,%1"                     \
-                            : "=a" (__ret), "+m" (*__xg(ptr))          \
+       {                                                               \
+               volatile u16 *__ptr = (volatile u16 *)(ptr);            \
+               asm volatile(lock "cmpxchgw %2,%1"                      \
+                            : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 4:                                                         \
-               asm volatile(lock "cmpxchgl %k2,%1"                     \
-                            : "=a" (__ret), "+m" (*__xg(ptr))          \
+       {                                                               \
+               volatile u32 *__ptr = (volatile u32 *)(ptr);            \
+               asm volatile(lock "cmpxchgl %2,%1"                      \
+                            : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        case 8:                                                         \
+       {                                                               \
+               volatile u64 *__ptr = (volatile u64 *)(ptr);            \
                asm volatile(lock "cmpxchgq %2,%1"                      \
-                            : "=a" (__ret), "+m" (*__xg(ptr))          \
+                            : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
+       }                                                               \
        default:                                                        \
                __cmpxchg_wrong_size();                                 \
        }                                                               \
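
For completeness, a sketch of how this macro family is typically consumed
(atomic_max32() is a hypothetical helper, assuming the kernel's cmpxchg()
wrapper that routes through __cmpxchg() with LOCK_PREFIX): the size switch
above picks the cmpxchgl case because the operand is 4 bytes wide.

    /* Hypothetical helper: raise *max to val unless another CPU already
     * stored something bigger.  cmpxchg() returns the value *max held
     * before the attempt, so equality with 'seen' means we won the race. */
    static inline void atomic_max32(volatile u32 *max, u32 val)
    {
            u32 seen = *max;
            while (seen < val) {
                    u32 prev = cmpxchg(max, seen, val);
                    if (prev == seen)
                            break;          /* our val is now installed */
                    seen = prev;            /* somebody else won; re-check */
            }
    }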