arm64: barriers: wire up new barrier options
author		Will Deacon <will.deacon@arm.com>
		Fri, 2 May 2014 15:24:11 +0000 (16:24 +0100)
committer	Mark Brown <broonie@kernel.org>
		Thu, 23 Oct 2014 18:47:06 +0000 (19:47 +0100)
Now that all callers of the barrier macros are updated to pass the
mandatory options, update the macros so the option is actually used.

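As a quick illustration (not part of the commit message itself), the "#opt"
stringification splices the caller's option straight into the instruction
string at preprocessing time, for example:

	dmb(ish);	/* expands to: asm volatile("dmb ish" : : : "memory") */
	dsb(sy);	/* expands to: asm volatile("dsb sy" : : : "memory") */
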
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
(cherry picked from commit 493e68747e07b69da3d746352525a1ebd6b61d82)
Signed-off-by: Mark Brown <broonie@kernel.org>
Conflicts:
	arch/arm64/include/asm/barrier.h

diff --git a/arch/arm64/include/asm/barrier.h b/arch/arm64/include/asm/barrier.h
index c98d0a88916a4bfd824f11a4014e9d719518a24b..9a4c3d5b402e35acc8cb7e5636db73cb8a62e365 100644
--- a/arch/arm64/include/asm/barrier.h
+++ b/arch/arm64/include/asm/barrier.h
 #define wfi()          asm volatile("wfi" : : : "memory")
 
 #define isb()          asm volatile("isb" : : : "memory")
-#define dmb(opt)       asm volatile("dmb sy" : : : "memory")
-#define dsb(opt)       asm volatile("dsb sy" : : : "memory")
+#define dmb(opt)       asm volatile("dmb " #opt : : : "memory")
+#define dsb(opt)       asm volatile("dsb " #opt : : : "memory")
 
 #define mb()           dsb(sy)
-#define rmb()          asm volatile("dsb ld" : : : "memory")
-#define wmb()          asm volatile("dsb st" : : : "memory")
+#define rmb()          dsb(ld)
+#define wmb()          dsb(st)
 
 #ifndef CONFIG_SMP
 #define smp_mb()       barrier()
 #define smp_rmb()      barrier()
 #define smp_wmb()      barrier()
 #else
-#define smp_mb()       asm volatile("dmb ish" : : : "memory")
-#define smp_rmb()      asm volatile("dmb ishld" : : : "memory")
-#define smp_wmb()      asm volatile("dmb ishst" : : : "memory")
+
+#define smp_mb()       dmb(ish)
+#define smp_rmb()      dmb(ishld)
+#define smp_wmb()      dmb(ishst)
+
+#define smp_store_release(p, v)                                                \
+do {                                                                   \
+       compiletime_assert_atomic_type(*p);                             \
+       switch (sizeof(*p)) {                                           \
+       case 4:                                                         \
+               asm volatile ("stlr %w1, %0"                            \
+                               : "=Q" (*p) : "r" (v) : "memory");      \
+               break;                                                  \
+       case 8:                                                         \
+               asm volatile ("stlr %1, %0"                             \
+                               : "=Q" (*p) : "r" (v) : "memory");      \
+               break;                                                  \
+       }                                                               \
+} while (0)
+
+#define smp_load_acquire(p)                                            \
+({                                                                     \
+       typeof(*p) ___p1;                                               \
+       compiletime_assert_atomic_type(*p);                             \
+       switch (sizeof(*p)) {                                           \
+       case 4:                                                         \
+               asm volatile ("ldar %w0, %1"                            \
+                       : "=r" (___p1) : "Q" (*p) : "memory");          \
+               break;                                                  \
+       case 8:                                                         \
+               asm volatile ("ldar %0, %1"                             \
+                       : "=r" (___p1) : "Q" (*p) : "memory");          \
+               break;                                                  \
+       }                                                               \
+       ___p1;                                                          \
+})
+
 #endif
 
 #define read_barrier_depends()         do { } while(0)
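
For context only (not part of this commit), a minimal sketch of how the
smp_store_release()/smp_load_acquire() pair carried in by the conflict
resolution is meant to be paired; "flag", "data" and use() are hypothetical
names used purely for illustration:

	/* Producer: publish data, then the flag with release semantics (STLR). */
	data = 42;
	smp_store_release(&flag, 1);

	/* Consumer: read the flag with acquire semantics (LDAR), then the data. */
	if (smp_load_acquire(&flag))
		use(data);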