These changes are the raw update of the kernel sources to linux-4.4.6-rt14.
[kvmfornfv.git] kernel/arch/arm/include/asm/cmpxchg.h
index 2386e97..97882f9 100644
@@ -35,11 +35,11 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
        unsigned int tmp;
 #endif
 
-       smp_mb();
        prefetchw((const void *)ptr);
 
        switch (size) {
 #if __LINUX_ARM_ARCH__ >= 6
+#ifndef CONFIG_CPU_V6 /* MIN ARCH >= V6K */
        case 1:
                asm volatile("@ __xchg1\n"
                "1:     ldrexb  %0, [%3]\n"
@@ -50,6 +50,17 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
                        : "r" (x), "r" (ptr)
                        : "memory", "cc");
                break;
+       case 2:
+               asm volatile("@ __xchg2\n"
+               "1:     ldrexh  %0, [%3]\n"
+               "       strexh  %1, %2, [%3]\n"
+               "       teq     %1, #0\n"
+               "       bne     1b"
+                       : "=&r" (ret), "=&r" (tmp)
+                       : "r" (x), "r" (ptr)
+                       : "memory", "cc");
+               break;
+#endif
        case 4:
                asm volatile("@ __xchg4\n"
                "1:     ldrex   %0, [%3]\n"
@@ -94,16 +105,18 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
                break;
 #endif
        default:
+               /* Cause a link-time error, the xchg() size is not supported */
                __bad_xchg(ptr, size), ret = 0;
                break;
        }
-       smp_mb();
 
        return ret;
 }
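
With the __xchg2 case added above, a 2-byte exchange on ARMv6K and later now maps onto the ldrexh/strexh loop instead of falling through to __bad_xchg() and failing at link time. A hypothetical caller-side sketch (claim_slot() and its semantics are invented for this note, not taken from the kernel):

	/* Hypothetical usage, not from this patch: atomically take over a
	 * 16-bit slot and learn who held it before.  On ARMv6K+ this now
	 * compiles to the ldrexh/strexh loop added above. */
	static inline u16 claim_slot(u16 *slot)
	{
		return xchg(slot, 0xffffU);	/* returns the previous owner id */
	}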
 
-#define xchg(ptr,x) \
-       ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
+#define xchg_relaxed(ptr, x) ({                                                \
+       (__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr),           \
+                                  sizeof(*(ptr)));                     \
+})
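
This and the other rewritten macros below (cmpxchg_relaxed, cmpxchg_local, cmpxchg64_relaxed) all switch to the GCC statement-expression form, whose value is that of the last expression in the block. A minimal stand-alone illustration (square() and demo() are invented for this note):

	/* A ({ ... }) block is a GCC extension that evaluates to its final
	 * expression, so a multi-statement macro can still be used where an
	 * expression is expected, exactly like xchg_relaxed() above. */
	#define square(x) ({			\
		typeof(x) __v = (x);		\
		__v * __v;			\
	})

	static int demo(void)
	{
		return square(3);	/* valid inside a function body */
	}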
 
 #include <asm-generic/cmpxchg-local.h>
 
@@ -114,23 +127,25 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 #error "SMP is not supported on this platform"
 #endif
 
+#define xchg xchg_relaxed
+
 /*
  * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
  * them available.
  */
-#define cmpxchg_local(ptr, o, n)                                              \
-       ((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
-                       (unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg_local(ptr, o, n) ({                                    \
+       (__typeof(*ptr))__cmpxchg_local_generic((ptr),                  \
+                                               (unsigned long)(o),     \
+                                               (unsigned long)(n),     \
+                                               sizeof(*(ptr)));        \
+})
+
 #define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
 
-#ifndef CONFIG_SMP
 #include <asm-generic/cmpxchg.h>
-#endif
 
 #else  /* min ARCH >= ARMv6 */
 
-#define __HAVE_ARCH_CMPXCHG 1
-
 extern void __bad_cmpxchg(volatile void *ptr, int size);
 
 /*
@@ -191,23 +206,12 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
        return oldval;
 }
 
-static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
-                                        unsigned long new, int size)
-{
-       unsigned long ret;
-
-       smp_mb();
-       ret = __cmpxchg(ptr, old, new, size);
-       smp_mb();
-
-       return ret;
-}
-
-#define cmpxchg(ptr,o,n)                                               \
-       ((__typeof__(*(ptr)))__cmpxchg_mb((ptr),                        \
-                                         (unsigned long)(o),           \
-                                         (unsigned long)(n),           \
-                                         sizeof(*(ptr))))
+#define cmpxchg_relaxed(ptr,o,n) ({                                    \
+       (__typeof__(*(ptr)))__cmpxchg((ptr),                            \
+                                     (unsigned long)(o),               \
+                                     (unsigned long)(n),               \
+                                     sizeof(*(ptr)));                  \
+})
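
Only the relaxed form is defined here now: it guarantees atomicity of the compare-and-swap but imposes no memory ordering, and the fully ordered cmpxchg() is expected to be rebuilt generically (see the closing note at the end of this diff). A hypothetical usage sketch where relaxed semantics are enough (struct obj and obj_get_unless_zero() are invented for illustration):

	/* Hypothetical sketch, not from this patch: take a reference only if
	 * the object is still live.  Only atomicity is needed here, so the
	 * relaxed form suffices; callers needing ordering add barriers. */
	struct obj {
		unsigned int refs;
	};

	static inline bool obj_get_unless_zero(struct obj *o)
	{
		unsigned int old = READ_ONCE(o->refs);

		while (old) {
			unsigned int prev = cmpxchg_relaxed(&o->refs, old, old + 1);

			if (prev == old)
				return true;	/* reference taken */
			old = prev;		/* raced, retry with the fresh value */
		}
		return false;			/* already zero, object going away */
	}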
 
 static inline unsigned long __cmpxchg_local(volatile void *ptr,
                                            unsigned long old,
@@ -229,6 +233,13 @@ static inline unsigned long __cmpxchg_local(volatile void *ptr,
        return ret;
 }
 
+#define cmpxchg_local(ptr, o, n) ({                                    \
+       (__typeof(*ptr))__cmpxchg_local((ptr),                          \
+                                       (unsigned long)(o),             \
+                                       (unsigned long)(n),             \
+                                       sizeof(*(ptr)));                \
+})
+
 static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
                                             unsigned long long old,
                                             unsigned long long new)
@@ -254,36 +265,13 @@ static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
        return oldval;
 }
 
-static inline unsigned long long __cmpxchg64_mb(unsigned long long *ptr,
-                                               unsigned long long old,
-                                               unsigned long long new)
-{
-       unsigned long long ret;
-
-       smp_mb();
-       ret = __cmpxchg64(ptr, old, new);
-       smp_mb();
-
-       return ret;
-}
-
-#define cmpxchg_local(ptr,o,n)                                         \
-       ((__typeof__(*(ptr)))__cmpxchg_local((ptr),                     \
-                                      (unsigned long)(o),              \
-                                      (unsigned long)(n),              \
-                                      sizeof(*(ptr))))
-
-#define cmpxchg64(ptr, o, n)                                           \
-       ((__typeof__(*(ptr)))__cmpxchg64_mb((ptr),                      \
-                                       (unsigned long long)(o),        \
-                                       (unsigned long long)(n)))
-
-#define cmpxchg64_relaxed(ptr, o, n)                                   \
-       ((__typeof__(*(ptr)))__cmpxchg64((ptr),                         \
+#define cmpxchg64_relaxed(ptr, o, n) ({                                        \
+       (__typeof__(*(ptr)))__cmpxchg64((ptr),                          \
                                        (unsigned long long)(o),        \
-                                       (unsigned long long)(n)))
+                                       (unsigned long long)(n));       \
+})
 
-#define cmpxchg64_local(ptr, o, n)     cmpxchg64_relaxed((ptr), (o), (n))
+#define cmpxchg64_local(ptr, o, n) cmpxchg64_relaxed((ptr), (o), (n))
 
 #endif /* __LINUX_ARM_ARCH__ >= 6 */
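
After this change the file no longer provides the fully ordered xchg(), cmpxchg() or cmpxchg64() itself: the explicit smp_mb() calls and the __cmpxchg_mb()/__cmpxchg64_mb() wrappers are gone. On ARMv6 and later the ordered forms are expected to come from the generic layer in include/linux/atomic.h of this kernel series, which brackets the _relaxed operation with barriers roughly as sketched below (paraphrased here, not part of this patch); on the pre-ARMv6, UP-only configuration, xchg is simply aliased to xchg_relaxed above.

	/* Paraphrased sketch of the linux/atomic.h fallback: when only the
	 * _relaxed operation is provided by the architecture, the fully
	 * ordered one is built by wrapping it in barriers. */
	#define __atomic_op_fence(op, args...)				\
	({								\
		typeof(op##_relaxed(args)) __ret;			\
		smp_mb__before_atomic();				\
		__ret = op##_relaxed(args);				\
		smp_mb__after_atomic();					\
		__ret;							\
	})

	#ifndef xchg
	#define xchg(...)	__atomic_op_fence(xchg, __VA_ARGS__)
	#endif

	#ifndef cmpxchg
	#define cmpxchg(...)	__atomic_op_fence(cmpxchg, __VA_ARGS__)
	#endif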