x86: Provide atomic_{or,xor,and}
author: Peter Zijlstra <peterz@infradead.org>
Wed, 23 Apr 2014 18:28:37 +0000 (20:28 +0200)
committer: Thomas Gleixner <tglx@linutronix.de>
Mon, 27 Jul 2015 12:06:23 +0000 (14:06 +0200)
Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
arch/x86/include/asm/atomic.h
arch/x86/include/asm/atomic64_32.h
arch/x86/include/asm/atomic64_64.h

index e9168955c42f4ee8b18e726e28ecacf39b75a7f2..f3a3ec0406946ecdff0cbb71ae1388a9a281a6bd 100644 (file)
@@ -182,6 +182,23 @@ static inline int atomic_xchg(atomic_t *v, int new)
        return xchg(&v->counter, new);
 }
 
+#define ATOMIC_OP(op)                                                  \
+static inline void atomic_##op(int i, atomic_t *v)                     \
+{                                                                      \
+       asm volatile(LOCK_PREFIX #op"l %1,%0"                           \
+                       : "+m" (v->counter)                             \
+                       : "ir" (i)                                      \
+                       : "memory");                                    \
+}
+
+#define CONFIG_ARCH_HAS_ATOMIC_OR
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
+#undef ATOMIC_OP
+
 /**
  * __atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
@@ -219,15 +236,15 @@ static __always_inline short int atomic_inc_short(short int *v)
        return *v;
 }
 
-/* These are x86-specific, used by some header files */
-#define atomic_clear_mask(mask, addr)                          \
-       asm volatile(LOCK_PREFIX "andl %0,%1"                   \
-                    : : "r" (~(mask)), "m" (*(addr)) : "memory")
+static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
+{
+       atomic_and(~mask, v);
+}
 
-#define atomic_set_mask(mask, addr)                            \
-       asm volatile(LOCK_PREFIX "orl %0,%1"                    \
-                    : : "r" ((unsigned)(mask)), "m" (*(addr))  \
-                    : "memory")
+static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
+{
+       atomic_or(mask, v);
+}
 
 #ifdef CONFIG_X86_32
 # include <asm/atomic64_32.h>
index b154de75c90cb54f3639bb49b04f6a01b3237fc4..a11c30b77fb57dfe75b9645ac4f6197d67048e9d 100644 (file)
@@ -313,4 +313,18 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
 #undef alternative_atomic64
 #undef __alternative_atomic64
 
+#define ATOMIC64_OP(op, c_op)                                          \
+static inline void atomic64_##op(long long i, atomic64_t *v)           \
+{                                                                      \
+       long long old, c = 0;                                           \
+       while ((old = atomic64_cmpxchg(v, c, c c_op i)) != c)           \
+               c = old;                                                \
+}
+
+ATOMIC64_OP(and, &)
+ATOMIC64_OP(or, |)
+ATOMIC64_OP(xor, ^)
+
+#undef ATOMIC64_OP
+
 #endif /* _ASM_X86_ATOMIC64_32_H */
index b965f9e03f2a04b2291fc0b62d537b6d9416bdaf..50e33eff58de7fde09c770e229c9a30fccf49d03 100644 (file)
@@ -220,4 +220,19 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
        return dec;
 }
 
+#define ATOMIC64_OP(op)                                                        \
+static inline void atomic64_##op(long i, atomic64_t *v)                        \
+{                                                                      \
+       asm volatile(LOCK_PREFIX #op"q %1,%0"                           \
+                       : "+m" (v->counter)                             \
+                       : "er" (i)                                      \
+                       : "memory");                                    \
+}
+
+ATOMIC64_OP(and)
+ATOMIC64_OP(or)
+ATOMIC64_OP(xor)
+
+#undef ATOMIC64_OP
+
 #endif /* _ASM_X86_ATOMIC64_64_H */