3 #ifndef __CDS_COMPILER_GCC_SPARC_CXX11_ATOMIC_H
4 #define __CDS_COMPILER_GCC_SPARC_CXX11_ATOMIC_H
11 1. [Doug Lea "JSR-133 Cookbook for Compiler Writers"]:
13 Acquire semantics: load; LoadLoad+LoadStore
14 Release semantics: LoadStore+StoreStore; store
16 2. boost::atomic library by Helge Bahmann
17 3. OpenSparc source code
20 #if CDS_OS_TYPE == CDS_OS_LINUX
21 # define CDS_SPARC_RMO_MEMORY_MODEL
24 #define CDS_SPARC_MB_FULL "membar #Sync \n\t"
25 #ifdef CDS_SPARC_RMO_MEMORY_MODEL
26 // RMO memory model (Linux only?..) Untested
27 # define CDS_SPARC_MB_LL_LS "membar #LoadLoad|#LoadStore \n\t"
28 # define CDS_SPARC_MB_LS_SS "membar #LoadStore|#StoreStore \n\t"
29 # define CDS_SPARC_MB_LL_LS_SS "membar #LoadLoad|#LoadStore|#StoreStore \n\t"
31 // TSO memory model (default; Solaris uses this model)
32 # define CDS_SPARC_MB_LL_LS
33 # define CDS_SPARC_MB_LS_SS
34 # define CDS_SPARC_MB_LL_LS_SS
37 #define CDS_SPARC_MB_ACQ CDS_SPARC_MB_LL_LS
38 #define CDS_SPARC_MB_REL CDS_SPARC_MB_LS_SS
39 #define CDS_SPARC_MB_ACQ_REL CDS_SPARC_MB_LL_LS_SS
40 #define CDS_SPARC_MB_SEQ_CST CDS_SPARC_MB_FULL
43 namespace cds { namespace cxx11_atomics {
44 namespace platform { CDS_CXX11_INLINE_NAMESPACE namespace gcc { CDS_CXX11_INLINE_NAMESPACE namespace Sparc {
46 static inline void fence_before( memory_order order ) CDS_NOEXCEPT
49 case memory_order_relaxed:
50 case memory_order_acquire:
51 case memory_order_consume:
53 case memory_order_release:
54 case memory_order_acq_rel:
55 __asm__ __volatile__ ( "" CDS_SPARC_MB_REL ::: "memory" );
57 case memory_order_seq_cst:
58 __asm__ __volatile__ ( "" CDS_SPARC_MB_FULL ::: "memory" );
63 static inline void fence_after( memory_order order ) CDS_NOEXCEPT
66 case memory_order_relaxed:
67 case memory_order_consume:
68 case memory_order_release:
70 case memory_order_acquire:
71 case memory_order_acq_rel:
72 __asm__ __volatile__ ( "" CDS_SPARC_MB_ACQ ::: "memory" );
74 case memory_order_seq_cst:
75 __asm__ __volatile__ ( "" CDS_SPARC_MB_FULL ::: "memory" );
81 //-----------------------------------------------------------------------------
83 //-----------------------------------------------------------------------------
84 static inline void thread_fence(memory_order order) CDS_NOEXCEPT
88 case memory_order_relaxed:
89 case memory_order_consume:
91 case memory_order_acquire:
92 __asm__ __volatile__ ( "" CDS_SPARC_MB_ACQ ::: "memory" );
94 case memory_order_release:
95 __asm__ __volatile__ ( "" CDS_SPARC_MB_REL ::: "memory" );
97 case memory_order_acq_rel:
98 __asm__ __volatile__ ( "" CDS_SPARC_MB_ACQ_REL ::: "memory" );
100 case memory_order_seq_cst:
101 __asm__ __volatile__ ( "" CDS_SPARC_MB_SEQ_CST ::: "memory" );
107 static inline void signal_fence(memory_order order) CDS_NOEXCEPT
109 // C++11: 29.8.8: only compiler optimization, no hardware instructions
112 case memory_order_relaxed:
114 case memory_order_consume:
115 case memory_order_release:
116 case memory_order_acquire:
117 case memory_order_acq_rel:
118 case memory_order_seq_cst:
119 CDS_COMPILER_RW_BARRIER;
125 //-----------------------------------------------------------------------------
126 // atomic flag primitives
127 //-----------------------------------------------------------------------------
129 typedef unsigned char atomic_flag_type;
130 static inline bool atomic_flag_tas( atomic_flag_type volatile * pFlag, memory_order order ) CDS_NOEXCEPT
132 atomic_flag_type fCur;
133 fence_before( order );
134 __asm__ __volatile__(
135 "ldstub [%[pFlag]], %[fCur] \n\t"
140 fence_after( order );
144 static inline void atomic_flag_clear( atomic_flag_type volatile * pFlag, memory_order order ) CDS_NOEXCEPT
146 fence_before( order );
147 __asm__ __volatile__(
149 "stub %%g0, [%[pFlag]] \n\t"
150 :: [pFlag] "r"(pFlag)
153 fence_after( order );
156 //-----------------------------------------------------------------------------
158 //-----------------------------------------------------------------------------
160 template <typename T>
161 static inline void store32( T volatile * pDest, T src, memory_order order ) CDS_NOEXCEPT
163 static_assert( sizeof(T) == 4, "Illegal size of operand" );
164 assert( order == memory_order_relaxed
165 || order == memory_order_release
166 || order == memory_order_seq_cst
168 assert( pDest != NULL );
175 template <typename T>
176 static inline T load32( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
178 static_assert( sizeof(T) == 4, "Illegal size of operand" );
179 assert( order == memory_order_relaxed
180 || order == memory_order_consume
181 || order == memory_order_acquire
182 || order == memory_order_seq_cst
184 assert( pSrc != NULL );
192 template <typename T>
193 static inline bool cas32_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
195 static_assert( sizeof(T) == 4, "Illegal size of operand" );
196 assert( pDest != NULL );
198 fence_before( mo_success );
199 __asm__ __volatile__(
200 "cas [%[pDest]], %[expected], %[desired]"
201 : [desired] "+r" (desired)
202 : [pDest] "r" (pDest), [expected] "r" (expected)
206 // desired contains current value
208 bool bSuccess = desired == expected;
210 fence_after( mo_success );
212 fence_after(mo_fail);
219 template <typename T>
220 static inline bool cas32_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
222 return cas32_strong( pDest, expected, desired, mo_success, mo_fail );
225 template <typename T>
226 static inline T exchange32( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
228 static_assert( sizeof(T) == 4, "Illegal size of operand" );
229 assert( pDest != NULL );
231 // This primitive could be implemented via "swap" instruction but "swap" is deprecated in UltraSparc
233 T cur = load32( pDest, memory_order_relaxed );
234 do {} while ( !cas32_strong( pDest, cur, v, order, memory_order_relaxed ));
238 //-----------------------------------------------------------------------------
240 //-----------------------------------------------------------------------------
242 template <typename T>
243 static inline T load64( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
245 static_assert( sizeof(T) == 8, "Illegal size of operand" );
246 assert( order == memory_order_relaxed
247 || order == memory_order_consume
248 || order == memory_order_acquire
249 || order == memory_order_seq_cst
251 assert( pSrc != NULL );
259 template <typename T>
260 static inline void store64( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
262 static_assert( sizeof(T) == 8, "Illegal size of operand" );
263 assert( order == memory_order_relaxed
264 || order == memory_order_release
265 || order == memory_order_seq_cst
267 assert( pDest != NULL );
275 template <typename T>
276 static inline bool cas64_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
278 static_assert( sizeof(T) == 8, "Illegal size of operand" );
279 assert( pDest != NULL );
281 fence_before( mo_success );
282 __asm__ __volatile__(
283 "casx [%[pDest]], %[expected], %[desired]"
284 : [desired] "+r" (desired)
285 : [pDest] "r" (pDest), [expected] "r" (expected)
289 // desired contains current value
291 bool bSuccess = desired == expected;
293 fence_after( mo_success );
296 fence_after(mo_fail);
303 template <typename T>
304 static inline bool cas64_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
306 return cas64_strong( pDest, expected, desired, mo_success, mo_fail );
309 template <typename T>
310 static inline T exchange64( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
312 static_assert( sizeof(T) == 8, "Illegal size of operand" );
313 assert( pDest != NULL );
315 T cur = load64( pDest, memory_order_relaxed );
316 do {} while ( !cas64_strong( pDest, cur, v, order, memory_order_relaxed ));
320 //-----------------------------------------------------------------------------
322 //-----------------------------------------------------------------------------
324 template <typename T>
325 static inline void store8( T volatile * pDest, T src, memory_order order ) CDS_NOEXCEPT
327 static_assert( sizeof(T) == 1, "Illegal size of operand" );
328 assert( order == memory_order_relaxed
329 || order == memory_order_release
330 || order == memory_order_seq_cst
332 assert( pDest != NULL );
334 fence_before( order );
336 fence_after( order );
339 template <typename T>
340 static inline T load8( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
342 static_assert( sizeof(T) == 1, "Illegal size of operand" );
343 assert( order == memory_order_relaxed
344 || order == memory_order_consume
345 || order == memory_order_acquire
346 || order == memory_order_seq_cst
348 assert( pSrc != NULL );
350 fence_before( order );
352 fence_after( order );
356 template <typename T>
357 static inline bool cas8_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
359 static_assert( sizeof(T) == 1, "Illegal size of operand" );
360 assert( pDest != NULL );
366 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
368 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
369 size_t const nCharIdx = (size_t)( uintptr_t( pDest ) & 0x03 );
376 uDesired.w = pDest32->w;
377 uExpected.c[nCharIdx] = expected;
378 uDesired.c[nCharIdx] = desired;
380 bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
381 if ( bSuccess || uExpected.c[nCharIdx] != expected )
385 expected = uExpected.c[nCharIdx];
389 template <typename T>
390 static inline bool cas8_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
392 static_assert( sizeof(T) == 1, "Illegal size of operand" );
393 assert( pDest != NULL );
399 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
401 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
402 size_t const nCharIdx = (size_t)( uintptr_t( pDest ) & 0x03 );
407 uDesired.w = pDest32->w;
408 uExpected.c[nCharIdx] = expected;
409 uDesired.c[nCharIdx] = desired;
411 bool bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
413 expected = uExpected.c[nCharIdx];
417 template <typename T>
418 static inline T exchange8( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
420 static_assert( sizeof(T) == 1, "Illegal size of operand" );
421 assert( pDest != NULL );
423 T cur = load8( pDest, memory_order_relaxed );
424 do {} while ( !cas8_strong( pDest, cur, v, order, memory_order_relaxed ));
428 //-----------------------------------------------------------------------------
430 //-----------------------------------------------------------------------------
432 template <typename T>
433 static inline T load16( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
435 static_assert( sizeof(T) == 2, "Illegal size of operand" );
436 assert( order == memory_order_relaxed
437 || order == memory_order_consume
438 || order == memory_order_acquire
439 || order == memory_order_seq_cst
441 assert( pSrc != NULL );
443 fence_before( order );
445 fence_after( order );
449 template <typename T>
450 static inline void store16( T volatile * pDest, T src, memory_order order ) CDS_NOEXCEPT
452 static_assert( sizeof(T) == 2, "Illegal size of operand" );
453 assert( order == memory_order_relaxed
454 || order == memory_order_release
455 || order == memory_order_seq_cst
457 assert( pDest != NULL );
464 template <typename T>
465 static inline bool cas16_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
467 static_assert( sizeof(T) == 2, "Illegal size of operand" );
468 assert( pDest != NULL );
474 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
476 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
477 size_t const nIdx = (size_t)( (uintptr_t( pDest ) >> 1) & 0x01 );
484 uDesired.w = pDest32->w;
485 uExpected.c[nIdx] = expected;
486 uDesired.c[nIdx] = desired;
488 bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
489 if ( bSuccess || uExpected.c[nIdx] != expected )
493 expected = uExpected.c[nIdx];
497 template <typename T>
498 static inline bool cas16_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
500 static_assert( sizeof(T) == 2, "Illegal size of operand" );
501 assert( pDest != NULL );
507 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
509 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
510 size_t const nIdx = (size_t)( (uintptr_t( pDest ) >> 1) & 0x01 );
515 uDesired.w = pDest32->w;
516 uExpected.c[nIdx] = expected;
517 uDesired.c[nIdx] = desired;
519 bool bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
521 expected = uExpected.c[nIdx];
525 template <typename T>
526 static inline T exchange16( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
528 static_assert( sizeof(T) == 2, "Illegal size of operand" );
529 assert( pDest != NULL );
531 T cur = load16( pDest, memory_order_relaxed );
532 do {} while ( !cas16_strong( pDest, cur, v, order, memory_order_relaxed ));
536 //-----------------------------------------------------------------------------
537 // pointer primitives
538 //-----------------------------------------------------------------------------
540 template <typename T>
541 static inline void store_ptr( T * volatile * pDest, T * src, memory_order order ) CDS_NOEXCEPT
543 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
544 assert( order == memory_order_relaxed
545 || order == memory_order_release
546 || order == memory_order_seq_cst
548 assert( pDest != NULL );
555 template <typename T>
556 static inline T * load_ptr( T * volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
558 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
559 assert( order == memory_order_relaxed
560 || order == memory_order_consume
561 || order == memory_order_acquire
562 || order == memory_order_seq_cst
564 assert( pSrc != NULL );
566 fence_before( order );
568 fence_after( order );
572 template <typename T>
573 static inline bool cas_ptr_strong( T * volatile * pDest, T *& expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
575 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
577 return cas64_strong( (uint64_t volatile *) pDest, *reinterpret_cast<uint64_t *>( &expected ), (uint64_t) desired, mo_success, mo_fail );
580 template <typename T>
581 static inline bool cas_ptr_weak( T * volatile * pDest, T *& expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
583 return cas_ptr_strong( pDest, expected, desired, mo_success, mo_fail );
586 template <typename T>
587 static inline T * exchange_ptr( T * volatile * pDest, T * v, memory_order order ) CDS_NOEXCEPT
589 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
590 return (T *) exchange64( (uint64_t volatile *) pDest, (uint64_t) v, order );
593 }} // namespace gcc::Sparc
595 #ifndef CDS_CXX11_INLINE_NAMESPACE_SUPPORT
596 using namespace gcc::Sparc;
598 } // namespace platform
599 }} // namespace cds::cxx11_atomics
602 #undef CDS_SPARC_MB_ACQ
603 #undef CDS_SPARC_MB_REL
604 #undef CDS_SPARC_MB_SEQ_CST
605 #undef CDS_SPARC_MB_FULL
606 #undef CDS_SPARC_MB_LL_LS
607 #undef CDS_SPARC_MB_LS_SS
608 #undef CDS_SPARC_MB_LL_LS_SS
610 #endif // #ifndef __CDS_COMPILER_GCC_SPARC_CXX11_ATOMIC_H