3 #ifndef CDSLIB_COMPILER_GCC_AMD64_CXX11_ATOMIC_H
4 #define CDSLIB_COMPILER_GCC_AMD64_CXX11_ATOMIC_H
7 #include <cds/compiler/gcc/x86/cxx11_atomic32.h>
10 namespace cds { namespace cxx11_atomic {
11 namespace platform { CDS_CXX11_INLINE_NAMESPACE namespace gcc { CDS_CXX11_INLINE_NAMESPACE namespace amd64 {
12 # ifndef CDS_CXX11_INLINE_NAMESPACE_SUPPORT
13 // primitives up to 32bit + fences
14 using namespace cds::cxx11_atomic::platform::gcc::x86;
        //-----------------------------------------------------------------------------
        // 64bit primitives
        //-----------------------------------------------------------------------------
22 static inline bool cas64_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
24 static_assert( sizeof(T) == 8, "Illegal size of operand" );
25 assert( cds::details::is_aligned( pDest, 8 ));
28 fence_before(mo_success);
29 __asm__ __volatile__ (
30 "lock ; cmpxchgq %[desired], %[pDest]"
31 : [prev] "+a" (prev), [pDest] "+m" (*pDest)
32 : [desired] "r" (desired)
34 bool success = (prev == expected);
37 fence_after(mo_success);
44 static inline bool cas64_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
46 return cas64_strong( pDest, expected, desired, mo_success, mo_fail );
50 static inline T load64( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
52 static_assert( sizeof(T) == 8, "Illegal size of operand" );
53 assert( order == memory_order_relaxed
54 || order == memory_order_consume
55 || order == memory_order_acquire
56 || order == memory_order_seq_cst
59 assert( cds::details::is_aligned( pSrc, 8 ));
62 fence_after_load( order );
68 static inline T exchange64( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
70 static_assert( sizeof(T) == 8, "Illegal size of operand" );
71 assert( cds::details::is_aligned( pDest, 8 ));
74 __asm__ __volatile__ (
75 "xchgq %[v], %[pDest]"
76 : [v] "+r" (v), [pDest] "+m" (*pDest)
83 static inline void store64( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
85 static_assert( sizeof(T) == 8, "Illegal size of operand" );
86 assert( order == memory_order_relaxed
87 || order == memory_order_release
88 || order == memory_order_seq_cst
91 assert( cds::details::is_aligned( pDest, 8 ));
93 if (order != memory_order_seq_cst) {
98 exchange64( pDest, val, order);
102 # define CDS_ATOMIC_fetch64_add_defined
103 template <typename T>
104 static inline T fetch64_add( T volatile * pDest, T v, memory_order order) CDS_NOEXCEPT
106 static_assert( sizeof(T) == 8, "Illegal size of operand" );
107 assert( cds::details::is_aligned( pDest, 8 ));
110 __asm__ __volatile__ (
111 "lock ; xaddq %[v], %[pDest]"
112 : [v] "+r" (v), [pDest] "+m" (*pDest)
118 # define CDS_ATOMIC_fetch64_sub_defined
119 template <typename T>
120 static inline T fetch64_sub( T volatile * pDest, T v, memory_order order) CDS_NOEXCEPT
122 static_assert( sizeof(T) == 8, "Illegal size of operand" );
123 assert( cds::details::is_aligned( pDest, 8 ));
126 __asm__ __volatile__ (
128 "lock ; xaddq %[v], %[pDest]"
129 : [v] "+r" (v), [pDest] "+m" (*pDest)
        //-----------------------------------------------------------------------------
        // pointer primitives
        //-----------------------------------------------------------------------------
140 template <typename T>
141 static inline T * exchange_ptr( T * volatile * pDest, T * v, memory_order order ) CDS_NOEXCEPT
143 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
145 return (T *) exchange64( (uint64_t volatile *) pDest, (uint64_t) v, order );
148 template <typename T>
149 static inline void store_ptr( T * volatile * pDest, T * src, memory_order order ) CDS_NOEXCEPT
151 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
152 assert( order == memory_order_relaxed
153 || order == memory_order_release
154 || order == memory_order_seq_cst
158 if ( order != memory_order_seq_cst ) {
159 fence_before( order );
163 exchange_ptr( pDest, src, order );
167 template <typename T>
168 static inline T * load_ptr( T * volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
170 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
171 assert( order == memory_order_relaxed
172 || order == memory_order_consume
173 || order == memory_order_acquire
174 || order == memory_order_seq_cst
179 fence_after_load( order );
183 template <typename T>
184 static inline bool cas_ptr_strong( T * volatile * pDest, T *& expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
186 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
188 return cas64_strong( (uint64_t volatile *) pDest, *reinterpret_cast<uint64_t *>( &expected ), (uint64_t) desired, mo_success, mo_fail );
191 template <typename T>
192 static inline bool cas_ptr_weak( T * volatile * pDest, T *& expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
194 return cas_ptr_strong( pDest, expected, desired, mo_success, mo_fail );
197 }} // namespace gcc::amd64
199 #ifndef CDS_CXX11_INLINE_NAMESPACE_SUPPORT
200 using namespace gcc::amd64;
202 } // namespace platform
204 }} // namespace cds::cxx11_atomic
207 #endif // #ifndef CDSLIB_COMPILER_GCC_AMD64_CXX11_ATOMIC_H