arm64: atomics: implement atomic{,64}_cmpxchg using cmpxchg
author Will Deacon <will.deacon@arm.com>
Thu, 4 Jun 2015 16:46:37 +0000 (17:46 +0100)
committer Will Deacon <will.deacon@arm.com>
Mon, 27 Jul 2015 14:28:53 +0000 (15:28 +0100)
We don't need duplicate cmpxchg implementations, so use cmpxchg to
implement atomic{,64}_cmpxchg, like we do for xchg already.

Reviewed-by: Steve Capper <steve.capper@arm.com>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
arch/arm64/include/asm/atomic.h
arch/arm64/include/asm/atomic_ll_sc.h
arch/arm64/include/asm/atomic_lse.h

index 0b26da365f3be2bfa059a817fc3aec519d1e9f76..35a67783cfa088d4166de9ff7ed6f993b899c09b 100644 (file)
@@ -56,6 +56,7 @@
 #define atomic_read(v)                 READ_ONCE((v)->counter)
 #define atomic_set(v, i)               (((v)->counter) = (i))
 #define atomic_xchg(v, new)            xchg(&((v)->counter), (new))
+#define atomic_cmpxchg(v, old, new)    cmpxchg(&((v)->counter), (old), (new))
 
 #define atomic_inc(v)                  atomic_add(1, (v))
 #define atomic_dec(v)                  atomic_sub(1, (v))
@@ -75,6 +76,7 @@
 #define atomic64_read                  atomic_read
 #define atomic64_set                   atomic_set
 #define atomic64_xchg                  atomic_xchg
+#define atomic64_cmpxchg               atomic_cmpxchg
 
 #define atomic64_inc(v)                        atomic64_add(1, (v))
 #define atomic64_dec(v)                        atomic64_sub(1, (v))
index 50d6abd3c43997f0c3319f37f1cfd73a16fb4964..5e2d1db3a1db56aec94cdf2a3bb45a9888719179 100644 (file)
@@ -93,29 +93,6 @@ ATOMIC_OP(xor, eor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-__LL_SC_INLINE int
-__LL_SC_PREFIX(atomic_cmpxchg(atomic_t *ptr, int old, int new))
-{
-       unsigned long tmp;
-       int oldval;
-
-       asm volatile("// atomic_cmpxchg\n"
-"      prfm    pstl1strm, %2\n"
-"1:    ldxr    %w1, %2\n"
-"      eor     %w0, %w1, %w3\n"
-"      cbnz    %w0, 2f\n"
-"      stlxr   %w0, %w4, %2\n"
-"      cbnz    %w0, 1b\n"
-"      dmb     ish\n"
-"2:"
-       : "=&r" (tmp), "=&r" (oldval), "+Q" (ptr->counter)
-       : "Lr" (old), "r" (new)
-       : "memory");
-
-       return oldval;
-}
-__LL_SC_EXPORT(atomic_cmpxchg);
-
 #define ATOMIC64_OP(op, asm_op)                                                \
 __LL_SC_INLINE void                                                    \
 __LL_SC_PREFIX(atomic64_##op(long i, atomic64_t *v))                   \
@@ -172,29 +149,6 @@ ATOMIC64_OP(xor, eor)
 #undef ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
 
-__LL_SC_INLINE long
-__LL_SC_PREFIX(atomic64_cmpxchg(atomic64_t *ptr, long old, long new))
-{
-       long oldval;
-       unsigned long res;
-
-       asm volatile("// atomic64_cmpxchg\n"
-"      prfm    pstl1strm, %2\n"
-"1:    ldxr    %1, %2\n"
-"      eor     %0, %1, %3\n"
-"      cbnz    %w0, 2f\n"
-"      stlxr   %w0, %4, %2\n"
-"      cbnz    %w0, 1b\n"
-"      dmb     ish\n"
-"2:"
-       : "=&r" (res), "=&r" (oldval), "+Q" (ptr->counter)
-       : "Lr" (old), "r" (new)
-       : "memory");
-
-       return oldval;
-}
-__LL_SC_EXPORT(atomic64_cmpxchg);
-
 __LL_SC_INLINE long
 __LL_SC_PREFIX(atomic64_dec_if_positive(atomic64_t *v))
 {
index a3d21e7cee4f2efa1d023e958a977cda28f3cadc..30e5cbcfc707cd2194215a44590f4f2eb9306c2f 100644 (file)
@@ -149,28 +149,6 @@ static inline int atomic_sub_return(int i, atomic_t *v)
        return w0;
 }
 
-static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
-{
-       register unsigned long x0 asm ("x0") = (unsigned long)ptr;
-       register int w1 asm ("w1") = old;
-       register int w2 asm ("w2") = new;
-
-       asm volatile(ARM64_LSE_ATOMIC_INSN(
-       /* LL/SC */
-       "       nop\n"
-       __LL_SC_ATOMIC(cmpxchg)
-       "       nop",
-       /* LSE atomics */
-       "       mov     w30, %w[old]\n"
-       "       casal   w30, %w[new], %[v]\n"
-       "       mov     %w[ret], w30")
-       : [ret] "+r" (x0), [v] "+Q" (ptr->counter)
-       : [old] "r" (w1), [new] "r" (w2)
-       : "x30", "memory");
-
-       return x0;
-}
-
 #undef __LL_SC_ATOMIC
 
 #define __LL_SC_ATOMIC64(op)   __LL_SC_CALL(atomic64_##op)
@@ -296,27 +274,6 @@ static inline long atomic64_sub_return(long i, atomic64_t *v)
 
        return x0;
 }
-static inline long atomic64_cmpxchg(atomic64_t *ptr, long old, long new)
-{
-       register unsigned long x0 asm ("x0") = (unsigned long)ptr;
-       register long x1 asm ("x1") = old;
-       register long x2 asm ("x2") = new;
-
-       asm volatile(ARM64_LSE_ATOMIC_INSN(
-       /* LL/SC */
-       "       nop\n"
-       __LL_SC_ATOMIC64(cmpxchg)
-       "       nop",
-       /* LSE atomics */
-       "       mov     x30, %[old]\n"
-       "       casal   x30, %[new], %[v]\n"
-       "       mov     %[ret], x30")
-       : [ret] "+r" (x0), [v] "+Q" (ptr->counter)
-       : [old] "r" (x1), [new] "r" (x2)
-       : "x30", "memory");
-
-       return x0;
-}
 
 static inline long atomic64_dec_if_positive(atomic64_t *v)
 {