2 This file is a part of libcds - Concurrent Data Structures library
4 (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2017
6 Source code repo: http://github.com/khizmax/libcds/
7 Download: http://sourceforge.net/projects/libcds/files/
9 Redistribution and use in source and binary forms, with or without
10 modification, are permitted provided that the following conditions are met:
12 * Redistributions of source code must retain the above copyright notice, this
13 list of conditions and the following disclaimer.
15 * Redistributions in binary form must reproduce the above copyright notice,
16 this list of conditions and the following disclaimer in the documentation
17 and/or other materials provided with the distribution.
19 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23 FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #ifndef CDSLIB_COMPILER_GCC_SPARC_CXX11_ATOMIC_H
32 #define CDSLIB_COMPILER_GCC_SPARC_CXX11_ATOMIC_H
1. [Doug Lea "The JSR-133 Cookbook for Compiler Writers"]:
41 Acquire semantics: load; LoadLoad+LoadStore
42 Release semantics: LoadStore+StoreStore; store
44 2. boost::atomic library by Helge Bahman
45 3. OpenSparc source code
48 #if CDS_OS_TYPE == CDS_OS_LINUX
49 # define CDS_SPARC_RMO_MEMORY_MODEL
52 #define CDS_SPARC_MB_FULL "membar #Sync \n\t"
53 #ifdef CDS_SPARC_RMO_MEMORY_MODEL
54 // RMO memory model (Linux only?..) Untested
55 # define CDS_SPARC_MB_LL_LS "membar #LoadLoad|#LoadStore \n\t"
56 # define CDS_SPARC_MB_LS_SS "membar #LoadStore|#StoreStore \n\t"
57 # define CDS_SPARC_MB_LL_LS_SS "membar #LoadLoad|#LoadStore|#StoreStore \n\t"
59 // TSO memory model (default; Solaris uses this model)
60 # define CDS_SPARC_MB_LL_LS
61 # define CDS_SPARC_MB_LS_SS
62 # define CDS_SPARC_MB_LL_LS_SS
65 #define CDS_SPARC_MB_ACQ CDS_SPARC_MB_LL_LS
66 #define CDS_SPARC_MB_REL CDS_SPARC_MB_LS_SS
67 #define CDS_SPARC_MB_ACQ_REL CDS_SPARC_MB_LL_LS_SS
68 #define CDS_SPARC_MB_SEQ_CST CDS_SPARC_MB_FULL
71 namespace cds { namespace cxx11_atomic {
72 namespace platform { CDS_CXX11_INLINE_NAMESPACE namespace gcc { CDS_CXX11_INLINE_NAMESPACE namespace Sparc {
74 static inline void fence_before( memory_order order ) CDS_NOEXCEPT
77 case memory_order_relaxed:
78 case memory_order_acquire:
79 case memory_order_consume:
81 case memory_order_release:
82 case memory_order_acq_rel:
83 __asm__ __volatile__ ( "" CDS_SPARC_MB_REL ::: "memory" );
85 case memory_order_seq_cst:
86 __asm__ __volatile__ ( "" CDS_SPARC_MB_FULL ::: "memory" );
91 static inline void fence_after( memory_order order ) CDS_NOEXCEPT
94 case memory_order_relaxed:
95 case memory_order_consume:
96 case memory_order_release:
98 case memory_order_acquire:
99 case memory_order_acq_rel:
100 __asm__ __volatile__ ( "" CDS_SPARC_MB_ACQ ::: "memory" );
102 case memory_order_seq_cst:
103 __asm__ __volatile__ ( "" CDS_SPARC_MB_FULL ::: "memory" );
109 //-----------------------------------------------------------------------------
111 //-----------------------------------------------------------------------------
112 static inline void thread_fence(memory_order order) CDS_NOEXCEPT
116 case memory_order_relaxed:
117 case memory_order_consume:
119 case memory_order_acquire:
120 __asm__ __volatile__ ( "" CDS_SPARC_MB_ACQ ::: "memory" );
122 case memory_order_release:
123 __asm__ __volatile__ ( "" CDS_SPARC_MB_REL ::: "memory" );
125 case memory_order_acq_rel:
126 __asm__ __volatile__ ( "" CDS_SPARC_MB_ACQ_REL ::: "memory" );
128 case memory_order_seq_cst:
129 __asm__ __volatile__ ( "" CDS_SPARC_MB_SEQ_CST ::: "memory" );
135 static inline void signal_fence(memory_order order) CDS_NOEXCEPT
137 // C++11: 29.8.8: only compiler optimization, no hardware instructions
140 case memory_order_relaxed:
142 case memory_order_consume:
143 case memory_order_release:
144 case memory_order_acquire:
145 case memory_order_acq_rel:
146 case memory_order_seq_cst:
147 CDS_COMPILER_RW_BARRIER;
153 //-----------------------------------------------------------------------------
154 // atomic flag primitives
155 //-----------------------------------------------------------------------------
157 typedef unsigned char atomic_flag_type;
158 static inline bool atomic_flag_tas( atomic_flag_type volatile * pFlag, memory_order order ) CDS_NOEXCEPT
160 atomic_flag_type fCur;
161 fence_before( order );
162 __asm__ __volatile__(
163 "ldstub [%[pFlag]], %[fCur] \n\t"
168 fence_after( order );
172 static inline void atomic_flag_clear( atomic_flag_type volatile * pFlag, memory_order order ) CDS_NOEXCEPT
174 fence_before( order );
175 __asm__ __volatile__(
177 "stub %%g0, [%[pFlag]] \n\t"
178 :: [pFlag] "r"(pFlag)
181 fence_after( order );
184 //-----------------------------------------------------------------------------
186 //-----------------------------------------------------------------------------
188 template <typename T>
189 static inline void store32( T volatile * pDest, T src, memory_order order ) CDS_NOEXCEPT
191 static_assert( sizeof(T) == 4, "Illegal size of operand" );
192 assert( order == memory_order_relaxed
193 || order == memory_order_release
194 || order == memory_order_seq_cst
203 template <typename T>
204 static inline T load32( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
206 static_assert( sizeof(T) == 4, "Illegal size of operand" );
207 assert( order == memory_order_relaxed
208 || order == memory_order_consume
209 || order == memory_order_acquire
210 || order == memory_order_seq_cst
220 template <typename T>
221 static inline bool cas32_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
223 static_assert( sizeof(T) == 4, "Illegal size of operand" );
226 fence_before( mo_success );
227 __asm__ __volatile__(
228 "cas [%[pDest]], %[expected], %[desired]"
229 : [desired] "+r" (desired)
230 : [pDest] "r" (pDest), [expected] "r" (expected)
234 // desired contains current value
236 bool bSuccess = desired == expected;
238 fence_after( mo_success );
240 fence_after(mo_fail);
247 template <typename T>
248 static inline bool cas32_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
250 return cas32_strong( pDest, expected, desired, mo_success, mo_fail );
253 template <typename T>
254 static inline T exchange32( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
256 static_assert( sizeof(T) == 4, "Illegal size of operand" );
259 // This primitive could be implemented via "swap" instruction but "swap" is deprecated in UltraSparc
261 T cur = load32( pDest, memory_order_relaxed );
262 do {} while ( !cas32_strong( pDest, cur, v, order, memory_order_relaxed ));
266 //-----------------------------------------------------------------------------
268 //-----------------------------------------------------------------------------
270 template <typename T>
271 static inline T load64( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
273 static_assert( sizeof(T) == 8, "Illegal size of operand" );
274 assert( order == memory_order_relaxed
275 || order == memory_order_consume
276 || order == memory_order_acquire
277 || order == memory_order_seq_cst
287 template <typename T>
288 static inline void store64( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
290 static_assert( sizeof(T) == 8, "Illegal size of operand" );
291 assert( order == memory_order_relaxed
292 || order == memory_order_release
293 || order == memory_order_seq_cst
303 template <typename T>
304 static inline bool cas64_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
306 static_assert( sizeof(T) == 8, "Illegal size of operand" );
309 fence_before( mo_success );
310 __asm__ __volatile__(
311 "casx [%[pDest]], %[expected], %[desired]"
312 : [desired] "+r" (desired)
313 : [pDest] "r" (pDest), [expected] "r" (expected)
317 // desired contains current value
319 bool bSuccess = desired == expected;
321 fence_after( mo_success );
324 fence_after(mo_fail);
331 template <typename T>
332 static inline bool cas64_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
334 return cas64_strong( pDest, expected, desired, mo_success, mo_fail );
337 template <typename T>
338 static inline T exchange64( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
340 static_assert( sizeof(T) == 8, "Illegal size of operand" );
343 T cur = load64( pDest, memory_order_relaxed );
344 do {} while ( !cas64_strong( pDest, cur, v, order, memory_order_relaxed ));
348 //-----------------------------------------------------------------------------
350 //-----------------------------------------------------------------------------
352 template <typename T>
353 static inline void store8( T volatile * pDest, T src, memory_order order ) CDS_NOEXCEPT
355 static_assert( sizeof(T) == 1, "Illegal size of operand" );
356 assert( order == memory_order_relaxed
357 || order == memory_order_release
358 || order == memory_order_seq_cst
362 fence_before( order );
364 fence_after( order );
367 template <typename T>
368 static inline T load8( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
370 static_assert( sizeof(T) == 1, "Illegal size of operand" );
371 assert( order == memory_order_relaxed
372 || order == memory_order_consume
373 || order == memory_order_acquire
374 || order == memory_order_seq_cst
378 fence_before( order );
380 fence_after( order );
384 template <typename T>
385 static inline bool cas8_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
387 static_assert( sizeof(T) == 1, "Illegal size of operand" );
394 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
396 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
397 size_t const nCharIdx = (size_t)( uintptr_t( pDest ) & 0x03 );
404 uDesired.w = pDest32->w;
405 uExpected.c[nCharIdx] = expected;
406 uDesired.c[nCharIdx] = desired;
408 bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
409 if ( bSuccess || uExpected.c[nCharIdx] != expected )
413 expected = uExpected.c[nCharIdx];
417 template <typename T>
418 static inline bool cas8_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
420 static_assert( sizeof(T) == 1, "Illegal size of operand" );
427 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
429 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
430 size_t const nCharIdx = (size_t)( uintptr_t( pDest ) & 0x03 );
435 uDesired.w = pDest32->w;
436 uExpected.c[nCharIdx] = expected;
437 uDesired.c[nCharIdx] = desired;
439 bool bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
441 expected = uExpected.c[nCharIdx];
445 template <typename T>
446 static inline T exchange8( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
448 static_assert( sizeof(T) == 1, "Illegal size of operand" );
451 T cur = load8( pDest, memory_order_relaxed );
452 do {} while ( !cas8_strong( pDest, cur, v, order, memory_order_relaxed ));
456 //-----------------------------------------------------------------------------
458 //-----------------------------------------------------------------------------
460 template <typename T>
461 static inline T load16( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
463 static_assert( sizeof(T) == 2, "Illegal size of operand" );
464 assert( order == memory_order_relaxed
465 || order == memory_order_consume
466 || order == memory_order_acquire
467 || order == memory_order_seq_cst
471 fence_before( order );
473 fence_after( order );
477 template <typename T>
478 static inline void store16( T volatile * pDest, T src, memory_order order ) CDS_NOEXCEPT
480 static_assert( sizeof(T) == 2, "Illegal size of operand" );
481 assert( order == memory_order_relaxed
482 || order == memory_order_release
483 || order == memory_order_seq_cst
492 template <typename T>
493 static inline bool cas16_strong( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
495 static_assert( sizeof(T) == 2, "Illegal size of operand" );
502 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
504 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
505 size_t const nIdx = (size_t)( (uintptr_t( pDest ) >> 1) & 0x01 );
512 uDesired.w = pDest32->w;
513 uExpected.c[nIdx] = expected;
514 uDesired.c[nIdx] = desired;
516 bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
517 if ( bSuccess || uExpected.c[nIdx] != expected )
521 expected = uExpected.c[nIdx];
525 template <typename T>
526 static inline bool cas16_weak( T volatile * pDest, T& expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
528 static_assert( sizeof(T) == 2, "Illegal size of operand" );
535 static_assert( sizeof(u32) == sizeof(uint32_t), "Argument size error" );
537 u32 volatile * pDest32 = (u32 *)( uintptr_t( pDest ) & ~0x03 );
538 size_t const nIdx = (size_t)( (uintptr_t( pDest ) >> 1) & 0x01 );
543 uDesired.w = pDest32->w;
544 uExpected.c[nIdx] = expected;
545 uDesired.c[nIdx] = desired;
547 bool bSuccess = cas32_weak( reinterpret_cast<uint32_t volatile *>(pDest32), uExpected.w, uDesired.w, mo_success, mo_fail );
549 expected = uExpected.c[nIdx];
553 template <typename T>
554 static inline T exchange16( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
556 static_assert( sizeof(T) == 2, "Illegal size of operand" );
559 T cur = load16( pDest, memory_order_relaxed );
560 do {} while ( !cas16_strong( pDest, cur, v, order, memory_order_relaxed ));
564 //-----------------------------------------------------------------------------
565 // pointer primitives
566 //-----------------------------------------------------------------------------
568 template <typename T>
569 static inline void store_ptr( T * volatile * pDest, T * src, memory_order order ) CDS_NOEXCEPT
571 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
572 assert( order == memory_order_relaxed
573 || order == memory_order_release
574 || order == memory_order_seq_cst
583 template <typename T>
584 static inline T * load_ptr( T * volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
586 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
587 assert( order == memory_order_relaxed
588 || order == memory_order_consume
589 || order == memory_order_acquire
590 || order == memory_order_seq_cst
594 fence_before( order );
596 fence_after( order );
600 template <typename T>
601 static inline bool cas_ptr_strong( T * volatile * pDest, T *& expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
603 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
605 return cas64_strong( (uint64_t volatile *) pDest, *reinterpret_cast<uint64_t *>( &expected ), (uint64_t) desired, mo_success, mo_fail );
608 template <typename T>
609 static inline bool cas_ptr_weak( T * volatile * pDest, T *& expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
611 return cas_ptr_strong( pDest, expected, desired, mo_success, mo_fail );
614 template <typename T>
615 static inline T * exchange_ptr( T * volatile * pDest, T * v, memory_order order ) CDS_NOEXCEPT
617 static_assert( sizeof(T *) == sizeof(void *), "Illegal size of operand" );
618 return (T *) exchange64( (uint64_t volatile *) pDest, (uint64_t) v, order );
621 }} // namespace gcc::Sparc
623 #ifndef CDS_CXX11_INLINE_NAMESPACE_SUPPORT
624 using namespace gcc::Sparc;
626 } // namespace platform
627 }} // namespace cds::cxx11_atomic
630 #undef CDS_SPARC_MB_ACQ
631 #undef CDS_SPARC_MB_REL
632 #undef CDS_SPARC_MB_SEQ_CST
633 #undef CDS_SPARC_MB_FULL
634 #undef CDS_SPARC_MB_LL_LS
635 #undef CDS_SPARC_MB_LS_SS
636 #undef CDS_SPARC_MB_LL_LS_SS
#endif // #ifndef CDSLIB_COMPILER_GCC_SPARC_CXX11_ATOMIC_H