mn10300: Provide atomic_{or,xor,and}
author: Peter Zijlstra <peterz@infradead.org>
Wed, 23 Apr 2014 17:50:20 +0000 (19:50 +0200)
committer: Thomas Gleixner <tglx@linutronix.de>
Mon, 27 Jul 2015 12:06:23 +0000 (14:06 +0200)
Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
arch/mn10300/include/asm/atomic.h

index 5be655e83e709dbaf762efac0e3befea8dfd0d9f..03eea8158cf955330b332b0e34de0beb08d20aff 100644 (file)
@@ -89,6 +89,12 @@ static inline int atomic_##op##_return(int i, atomic_t *v)           \
 ATOMIC_OPS(add)
 ATOMIC_OPS(sub)
 
+#define CONFIG_ARCH_HAS_ATOMIC_OR
+
+ATOMIC_OP(and)
+ATOMIC_OP(or)
+ATOMIC_OP(xor)
+
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
@@ -134,31 +140,9 @@ static inline void atomic_dec(atomic_t *v)
  *
  * Atomically clears the bits set in mask from the memory word specified.
  */
-static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
+static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
 {
-#ifdef CONFIG_SMP
-       int status;
-
-       asm volatile(
-               "1:     mov     %3,(_AAR,%2)    \n"
-               "       mov     (_ADR,%2),%0    \n"
-               "       and     %4,%0           \n"
-               "       mov     %0,(_ADR,%2)    \n"
-               "       mov     (_ADR,%2),%0    \n"     /* flush */
-               "       mov     (_ASR,%2),%0    \n"
-               "       or      %0,%0           \n"
-               "       bne     1b              \n"
-               : "=&r"(status), "=m"(*addr)
-               : "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(~mask)
-               : "memory", "cc");
-#else
-       unsigned long flags;
-
-       mask = ~mask;
-       flags = arch_local_cli_save();
-       *addr &= mask;
-       arch_local_irq_restore(flags);
-#endif
+       atomic_and(~mask, v);
 }
 
 /**
@@ -168,30 +152,9 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
  *
  * Atomically sets the bits set in mask from the memory word specified.
  */
-static inline void atomic_set_mask(unsigned long mask, unsigned long *addr)
+static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
 {
-#ifdef CONFIG_SMP
-       int status;
-
-       asm volatile(
-               "1:     mov     %3,(_AAR,%2)    \n"
-               "       mov     (_ADR,%2),%0    \n"
-               "       or      %4,%0           \n"
-               "       mov     %0,(_ADR,%2)    \n"
-               "       mov     (_ADR,%2),%0    \n"     /* flush */
-               "       mov     (_ASR,%2),%0    \n"
-               "       or      %0,%0           \n"
-               "       bne     1b              \n"
-               : "=&r"(status), "=m"(*addr)
-               : "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(mask)
-               : "memory", "cc");
-#else
-       unsigned long flags;
-
-       flags = arch_local_cli_save();
-       *addr |= mask;
-       arch_local_irq_restore(flags);
-#endif
+       atomic_or(mask, v);
 }
 
 #endif /* __KERNEL__ */