These changes are the raw update of the kernel sources to linux-4.4.6-rt14.
diff --git a/kernel/arch/ia64/include/asm/atomic.h b/kernel/arch/ia64/include/asm/atomic.h
index 0bf0350..8dfb5f6 100644
--- a/kernel/arch/ia64/include/asm/atomic.h
+++ b/kernel/arch/ia64/include/asm/atomic.h
 #define ATOMIC_INIT(i)         { (i) }
 #define ATOMIC64_INIT(i)       { (i) }
 
-#define atomic_read(v)         ACCESS_ONCE((v)->counter)
-#define atomic64_read(v)       ACCESS_ONCE((v)->counter)
+#define atomic_read(v)         READ_ONCE((v)->counter)
+#define atomic64_read(v)       READ_ONCE((v)->counter)
 
-#define atomic_set(v,i)                (((v)->counter) = (i))
-#define atomic64_set(v,i)      (((v)->counter) = (i))
+#define atomic_set(v,i)                WRITE_ONCE(((v)->counter), (i))
+#define atomic64_set(v,i)      WRITE_ONCE(((v)->counter), (i))
 
 #define ATOMIC_OP(op, c_op)                                            \
 static __inline__ int                                                  \
@@ -45,8 +45,6 @@ ia64_atomic_##op (int i, atomic_t *v)                                 \
 ATOMIC_OP(add, +)
 ATOMIC_OP(sub, -)
 
-#undef ATOMIC_OP
-
 #define atomic_add_return(i,v)                                         \
 ({                                                                     \
        int __ia64_aar_i = (i);                                         \
@@ -71,6 +69,16 @@ ATOMIC_OP(sub, -)
                : ia64_atomic_sub(__ia64_asr_i, v);                     \
 })
 
+ATOMIC_OP(and, &)
+ATOMIC_OP(or, |)
+ATOMIC_OP(xor, ^)
+
+#define atomic_and(i,v)        (void)ia64_atomic_and(i,v)
+#define atomic_or(i,v) (void)ia64_atomic_or(i,v)
+#define atomic_xor(i,v)        (void)ia64_atomic_xor(i,v)
+
+#undef ATOMIC_OP
+
 #define ATOMIC64_OP(op, c_op)                                          \
 static __inline__ long                                                 \
 ia64_atomic64_##op (__s64 i, atomic64_t *v)                            \
@@ -89,8 +97,6 @@ ia64_atomic64_##op (__s64 i, atomic64_t *v)                           \
 ATOMIC64_OP(add, +)
 ATOMIC64_OP(sub, -)
 
-#undef ATOMIC64_OP
-
 #define atomic64_add_return(i,v)                                       \
 ({                                                                     \
        long __ia64_aar_i = (i);                                        \
@@ -115,6 +121,16 @@ ATOMIC64_OP(sub, -)
                : ia64_atomic64_sub(__ia64_asr_i, v);                   \
 })
 
+ATOMIC64_OP(and, &)
+ATOMIC64_OP(or, |)
+ATOMIC64_OP(xor, ^)
+
+#define atomic64_and(i,v)      (void)ia64_atomic64_and(i,v)
+#define atomic64_or(i,v)       (void)ia64_atomic64_or(i,v)
+#define atomic64_xor(i,v)      (void)ia64_atomic64_xor(i,v)
+
+#undef ATOMIC64_OP
+
 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
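
The hunks above do two things: atomic_read()/atomic64_read() and atomic_set()/atomic64_set() switch from ACCESS_ONCE() to the READ_ONCE()/WRITE_ONCE() accessors, and the ATOMIC_OP()/ATOMIC64_OP() macros are kept alive long enough to also generate ia64_atomic_{and,or,xor}() (and their 64-bit counterparts) before being #undef'd, with atomic_and()/atomic_or()/atomic_xor() wrappers that discard the return value. The following is a minimal user-space sketch of that macro pattern, not the kernel code itself: it assumes GCC/Clang __atomic builtins as stand-ins for ia64's cmpxchg.acq and for READ_ONCE()/WRITE_ONCE(), and the model_atomic_* names are hypothetical.

/*
 * User-space model of the ATOMIC_OP() pattern added above.
 * Assumption: __atomic builtins replace ia64_cmpxchg()/READ_ONCE();
 * illustrative only, not the kernel implementation.
 */
#include <stdio.h>

typedef struct { volatile int counter; } atomic_t;

#define atomic_read(v)   __atomic_load_n(&(v)->counter, __ATOMIC_RELAXED)
#define atomic_set(v, i) __atomic_store_n(&(v)->counter, (i), __ATOMIC_RELAXED)

/* One compare-and-swap retry loop per operation, like ATOMIC_OP(op, c_op). */
#define ATOMIC_OP(op, c_op)						\
static inline int							\
model_atomic_##op(int i, atomic_t *v)					\
{									\
	int old, new;							\
									\
	do {								\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (!__atomic_compare_exchange_n(&v->counter, &old, new,	\
					      0, __ATOMIC_ACQUIRE,	\
					      __ATOMIC_RELAXED));	\
	return new;							\
}

ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

/* The kernel wrappers simply throw the generated return value away. */
#define atomic_and(i, v) ((void)model_atomic_and((i), (v)))
#define atomic_or(i, v)  ((void)model_atomic_or((i), (v)))
#define atomic_xor(i, v) ((void)model_atomic_xor((i), (v)))

int main(void)
{
	atomic_t v;

	atomic_set(&v, 0xff);
	atomic_and(0x0f, &v);	/* 0xff & 0x0f = 0x0f */
	atomic_or(0x30, &v);	/* 0x0f | 0x30 = 0x3f */
	atomic_xor(0x01, &v);	/* 0x3f ^ 0x01 = 0x3e */
	printf("0x%02x\n", atomic_read(&v));
	return 0;
}

As in the patched header, the and/or/xor wrappers cast the result to void, so callers that need the updated value still go through the *_return() forms built from the same operations.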