Implemented support for ARMv8 (64-bit ARM)
[libcds.git] / cds / compiler / cxx11_atomic.h
1 /*
2     This file is a part of libcds - Concurrent Data Structures library
3
4     (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2016
5
6     Source code repo: http://github.com/khizmax/libcds/
7     Download: http://sourceforge.net/projects/libcds/files/
8
9     Redistribution and use in source and binary forms, with or without
10     modification, are permitted provided that the following conditions are met:
11
12     * Redistributions of source code must retain the above copyright notice, this
13       list of conditions and the following disclaimer.
14
15     * Redistributions in binary form must reproduce the above copyright notice,
16       this list of conditions and the following disclaimer in the documentation
17       and/or other materials provided with the distribution.
18
19     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22     DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23     FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24     DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25     SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26     CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27     OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28     OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31 #ifndef CDSLIB_COMPILER_CXX11_ATOMIC_H
32 #define CDSLIB_COMPILER_CXX11_ATOMIC_H
33 //@cond
34
35 #include <type_traits>  // make_unsigned
36 #include <cds/details/defs.h>
37 #include <cds/details/aligned_type.h>
38
39 namespace cds { namespace cxx11_atomic {
40     typedef enum memory_order {
41         memory_order_relaxed,
42         memory_order_consume,
43         memory_order_acquire,
44         memory_order_release,
45         memory_order_acq_rel,
46         memory_order_seq_cst
47     } memory_order;
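    // Home-grown memory_order enumeration mirroring std::memory_order; the constants are
    // consumed by the platform back-ends included below. Illustrative use of the low-level
    // ops defined later in this header (a sketch only; ordinary code is expected to go
    // through the atomic<T> wrapper declared further down instead):
    //
    //      int x = 0;
    //      details::atomic_integral_ops<int, sizeof(int)>::atomic_store_explicit(
    //          &x, 5, memory_order_release );
    //      int y = details::atomic_integral_ops<int, sizeof(int)>::atomic_load_explicit(
    //          &x, memory_order_acquire );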
48
49 }}  // namespace cds::cxx11_atomic
50
51
52 #if CDS_COMPILER == CDS_COMPILER_MSVC || (CDS_COMPILER == CDS_COMPILER_INTEL && CDS_OS_INTERFACE == CDS_OSI_WINDOWS)
53 #   if CDS_PROCESSOR_ARCH == CDS_PROCESSOR_X86
54 #       include <cds/compiler/vc/x86/cxx11_atomic.h>
55 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_AMD64
56 #       include <cds/compiler/vc/amd64/cxx11_atomic.h>
57 #   else
58 #       error "MS VC++ compiler: unsupported processor architecture"
59 #   endif
60 #elif CDS_COMPILER == CDS_COMPILER_GCC || CDS_COMPILER == CDS_COMPILER_CLANG || CDS_COMPILER == CDS_COMPILER_INTEL
61 #   if CDS_PROCESSOR_ARCH == CDS_PROCESSOR_X86
62 #       include <cds/compiler/gcc/x86/cxx11_atomic.h>
63 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_AMD64
64 #       include <cds/compiler/gcc/amd64/cxx11_atomic.h>
65 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_IA64
66 #       include <cds/compiler/gcc/ia64/cxx11_atomic.h>
67 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_SPARC
68 #       include <cds/compiler/gcc/sparc/cxx11_atomic.h>
69 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_PPC64
70 #       include <cds/compiler/gcc/ppc64/cxx11_atomic.h>
71 //#   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_ARM7
72 //#       include <cds/compiler/gcc/arm7/cxx11_atomic.h>
73 #   else
74 #       error "GCC-family compiler (GCC/Clang/ICC): unsupported processor architecture. Use native C++11 std::atomic or boost.atomic instead"
75 #   endif
76 #else
77 #   error "Undefined compiler"
78 #endif
79
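// The headers above populate the platform namespace with the raw primitives used by the
// generic operation templates below: store8/16/32/64, load8/16/32/64, exchangeN,
// casN_weak/casN_strong, the *_ptr variants for pointers and, optionally, native
// fetchN_add/sub/and/or/xor (advertised via the CDS_ATOMIC_fetchN_<op>_defined macros).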
80 namespace cds { namespace cxx11_atomic {
81
82     // forward declarations
83     template <class T>
84     struct atomic;
85
86     namespace details {
87
88         template <typename T, size_t Size, typename Primary = T >
89         struct atomic_generic_ops;
90
91         template <typename T, size_t Size>
92         struct atomic_integral_ops;
93
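        // primary_type<N> maps an operand size in bytes to the fixed-width unsigned
        // integer type the platform primitives actually operate on.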
94         template <size_t TypeSize>
95         struct primary_type;
96
97         template <>
98         struct primary_type<1>
99         {
100             typedef std::uint8_t type;
101         };
102         template <>
103         struct primary_type<2>
104         {
105             typedef std::uint16_t type;
106         };
107         template <>
108         struct primary_type<4>
109         {
110             typedef std::uint32_t type;
111         };
112         template <>
113         struct primary_type<8>
114         {
115             typedef std::uint64_t type;
116         };
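        // A 16-byte primary type is only available on 64-bit builds with double-width
        // CAS (DCAS) support.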
117 #if CDS_BUILD_BITS == 64 && CDS_DCAS_SUPPORT
118         template <>
119         struct primary_type<16>
120         {
121             typedef unsigned __int128 type;     // 'unsigned' cannot be combined with the __int128_t typedef; use the built-in __int128
122         };
123 #endif
124
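        // make_atomic_primary<T, Primary> adapts a user type T to its primary representation:
        // ptr() reinterprets pointers, val()/ref() convert values and references passed to the
        // platform primitives, and ret() converts results back to T.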
125         template <typename T, typename Primary>
126         struct make_atomic_primary
127         {
128             typedef T       source_type;
129             typedef Primary primary_type;
130
131             static primary_type volatile * ptr( source_type volatile * p ) CDS_NOEXCEPT
132             {
133                 return reinterpret_cast<primary_type volatile *>(p);
134             }
135             static primary_type const volatile * ptr( source_type const volatile * p ) CDS_NOEXCEPT
136             {
137                 return reinterpret_cast<primary_type const volatile *>(p);
138             }
139
140             static primary_type val( source_type v ) CDS_NOEXCEPT
141             {
142                 return *reinterpret_cast<primary_type*>(&v);
143             }
144
145             static primary_type& ref( source_type& v ) CDS_NOEXCEPT
146             {
147                 return reinterpret_cast<primary_type&>(v);
148             }
149
150             static primary_type const& ref( source_type const& v ) CDS_NOEXCEPT
151             {
152                 return reinterpret_cast<primary_type const&>(v);
153             }
154
155             static source_type ret( primary_type r ) CDS_NOEXCEPT
156             {
157                 return *reinterpret_cast<source_type *>(&r);
158             }
159         };
160
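        // Identity mapping for the common case when T already is the primary type.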
161         template <typename T>
162         struct make_atomic_primary<T, T>
163         {
164             typedef T source_type;
165             typedef T primary_type;
166
167             static primary_type volatile * ptr( source_type volatile * p ) CDS_NOEXCEPT
168             {
169                 return p;
170             }
171             static primary_type const volatile * ptr( source_type const volatile * p ) CDS_NOEXCEPT
172             {
173                 return p;
174             }
175
176             static primary_type val( source_type v ) CDS_NOEXCEPT
177             {
178                 return v;
179             }
180
181             static primary_type& ref( source_type& v ) CDS_NOEXCEPT
182             {
183                 return v;
184             }
185
186             static source_type ret( primary_type r ) CDS_NOEXCEPT
187             {
188                 return r;
189             }
190         };
191
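        // Fallback fetch_and/fetch_or/fetch_xor implemented as a CAS loop over the unsigned
        // representation of T; used when the platform does not provide a native
        // fetch-and-op primitive of the corresponding size.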
192         template <typename T>
193         struct atomic_integral_bitwise_ops
194         {
195         public:
196             typedef typename std::make_unsigned<T>::type unsigned_type;
197             typedef atomic_generic_ops<unsigned_type, sizeof(unsigned_type)> atomic_ops;
198
199             static T fetch_and(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
200             {
201                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
202                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
203                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur & unsigned_type(val), order, memory_order_relaxed ));
204                 return T(cur);
205             }
206
207             static T fetch_or(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
208             {
209                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
210                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
211                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur | unsigned_type(val), order, memory_order_relaxed ));
212                 return T(cur);
213             }
214
215             static T fetch_xor(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
216             {
217                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
218                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
219                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur ^ unsigned_type(val), order, memory_order_relaxed ));
220                 return T(cur);
221             }
222         };
223
224
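        // Every operand size follows the same pattern: atomic_generic_ops<T, Size, Primary>
        // wraps the platform store/load/exchange/CAS primitives of that size (the non-_explicit
        // forms default to memory_order_seq_cst), and atomic_integral_ops<T, Size> adds the
        // arithmetic and bitwise fetch-operations, preferring a native platform primitive when
        // CDS_ATOMIC_fetch<Size>_<op>_defined is set and falling back to a CAS loop otherwise.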
225         // 8-bit atomic operations
226
227         template <typename T, typename Primary>
228         struct atomic_generic_ops< T, 1, Primary >
229         {
230             typedef make_atomic_primary<T, Primary> primary;
231
232             // store
233             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
234             {
235                 platform::store8( primary::ptr(pDest), primary::val(v), order );
236             }
237             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
238             {
239                 platform::store8( primary::ptr(pDest), primary::val(v), order );
240             }
241             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
242             {
243                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
244             }
245             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
246             {
247                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
248             }
249
250             // load
251             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
252             {
253                 return primary::ret( platform::load8( primary::ptr(pSrc), order ));
254             }
255             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
256             {
257                 return primary::ret( platform::load8( primary::ptr(pSrc), order ));
258             }
259             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
260             {
261                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
262             }
263             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
264             {
265                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
266             }
267
268             // exchange
269             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
270             {
271                 return primary::ret( platform::exchange8( primary::ptr(pDest), primary::val(val), order ));
272             }
273             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
274             {
275                 return primary::ret( platform::exchange8( primary::ptr(pDest), primary::val(val), order ));
276             }
277             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
278             {
279                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
280             }
281             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
282             {
283                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
284             }
285
286             // cas
287             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
288             {
289                 assert( expected );
290                 return platform::cas8_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
291             }
292             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
293             {
294                 assert( expected );
295                 return platform::cas8_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
296             }
297             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
298             {
299                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
300             }
301             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
302             {
303                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
304             }
305             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
306             {
307                 assert( expected );
308                 return platform::cas8_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
309             }
310             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
311             {
312                 assert( expected );
313                 return platform::cas8_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
314             }
315             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
316             {
317                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
318             }
319             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
320             {
321                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
322             }
323         };
324
325         template <typename T>
326         struct atomic_integral_ops< T, 1 >
327             : atomic_generic_ops<T, 1, T >
328             , atomic_integral_bitwise_ops<T>
329         {
330             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
331
332             // fetch_add
333             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
334             {
335 #           ifdef CDS_ATOMIC_fetch8_add_defined
336                 return platform::fetch8_add( pDest, val, order );
337 #           else
338                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
339                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
340                 return cur;
341 #           endif
342             }
343             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
344             {
345                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
346             }
347             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
348             {
349                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
350             }
351             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
352             {
353                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
354             }
355
356             // fetch_sub
357             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
358             {
359 #           ifdef CDS_ATOMIC_fetch8_sub_defined
360                 return platform::fetch8_sub( pDest, val, order );
361 #           else
362                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
363                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
364                 return cur;
365 #           endif
366             }
367             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
368             {
369                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
370             }
371             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
372             {
373                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
374             }
375             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
376             {
377                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
378             }
379
380             // fetch_and
381             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
382             {
383 #           ifdef CDS_ATOMIC_fetch8_and_defined
384                 return platform::fetch8_and( pDest, val, order );
385 #           else
386                 return bitwise_ops::fetch_and( pDest, val, order );
387 #           endif
388             }
389             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
390             {
391                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
392             }
393             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
394             {
395                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
396             }
397             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
398             {
399                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
400             }
401
402             // fetch_or
403             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
404             {
405 #           ifdef CDS_ATOMIC_fetch8_or_defined
406                 return platform::fetch8_or( pDest, val, order );
407 #           else
408                 return bitwise_ops::fetch_or( pDest, val, order );
409 #           endif
410             }
411             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
412             {
413                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
414             }
415             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
416             {
417                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
418             }
419             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
420             {
421                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
422             }
423
424             // fetch_xor
425             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
426             {
427 #           ifdef CDS_ATOMIC_fetch8_xor_defined
428                 return platform::fetch8_xor( pDest, val, order );
429 #           else
430                 return bitwise_ops::fetch_xor( pDest, val, order );
431 #           endif
432             }
433             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
434             {
435                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
436             }
437             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
438             {
439                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
440             }
441             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
442             {
443                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
444             }
445         };
446
447         // 16-bit atomic operations
448
449         template <typename T, typename Primary>
450         struct atomic_generic_ops< T, 2, Primary >
451         {
452             typedef make_atomic_primary<T, Primary> primary;
453
454             // store
455             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
456             {
457                 platform::store16( primary::ptr(pDest), primary::val(v), order );
458             }
459             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
460             {
461                 platform::store16( primary::ptr(pDest), primary::val(v), order );
462             }
463             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
464             {
465                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
466             }
467             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
468             {
469                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
470             }
471
472             // load
473             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
474             {
475                 return primary::ret( platform::load16( primary::ptr(pSrc), order ));
476             }
477             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
478             {
479                 return primary::ret( platform::load16( primary::ptr(pSrc), order ));
480             }
481             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
482             {
483                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
484             }
485             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
486             {
487                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
488             }
489
490             // exchange
491             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
492             {
493                 return primary::ret( platform::exchange16( primary::ptr(pDest), primary::val(val), order ));
494             }
495             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
496             {
497                 return primary::ret( platform::exchange16( primary::ptr(pDest), primary::val(val), order ));
498             }
499             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
500             {
501                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
502             }
503             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
504             {
505                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
506             }
507
508             // cas
509             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
510             {
511                 assert( expected );
512                 return platform::cas16_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
513             }
514             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
515             {
516                 assert( expected );
517                 return platform::cas16_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
518             }
519             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
520             {
521                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
522             }
523             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
524             {
525                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
526             }
527             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
528             {
529                 assert( expected );
530                 return platform::cas16_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
531             }
532             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
533             {
534                 assert( expected );
535                 return platform::cas16_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
536             }
537             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
538             {
539                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
540             }
541             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
542             {
543                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
544             }
545         };
546
547         template <typename T>
548         struct atomic_integral_ops< T, 2 >
549             : atomic_generic_ops< T, 2, T >
550             , atomic_integral_bitwise_ops<T>
551         {
552             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
553
554             // fetch_add
555             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
556             {
557 #           ifdef CDS_ATOMIC_fetch16_add_defined
558                 return platform::fetch16_add( pDest, val, order );
559 #           else
560                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
561                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
562                 return cur;
563 #           endif
564             }
565             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
566             {
567                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
568             }
569             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
570             {
571                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
572             }
573             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
574             {
575                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
576             }
577
578             // fetch_sub
579             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
580             {
581 #           ifdef CDS_ATOMIC_fetch16_sub_defined
582                 return platform::fetch16_sub( pDest, val, order );
583 #           else
584                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
585                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
586                 return cur;
587 #           endif
588             }
589             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
590             {
591                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
592             }
593             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
594             {
595                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
596             }
597             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
598             {
599                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
600             }
601
602             // fetch_and
603             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
604             {
605 #           ifdef CDS_ATOMIC_fetch16_and_defined
606                 return platform::fetch16_and( pDest, val, order );
607 #           else
608                 return bitwise_ops::fetch_and( pDest, val, order );
609 #           endif
610             }
611             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
612             {
613                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
614             }
615             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
616             {
617                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
618             }
619             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
620             {
621                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
622             }
623
624             // fetch_or
625             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
626             {
627 #           ifdef CDS_ATOMIC_fetch16_or_defined
628                 return platform::fetch16_or( pDest, val, order );
629 #           else
630                 return bitwise_ops::fetch_or( pDest, val, order );
631 #           endif
632             }
633             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
634             {
635                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
636             }
637             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
638             {
639                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
640             }
641             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
642             {
643                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
644             }
645
646             // fetch_xor
647             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
648             {
649 #           ifdef CDS_ATOMIC_fetch16_xor_defined
650                 return platform::fetch16_xor( pDest, val, order );
651 #           else
652                 return bitwise_ops::fetch_xor( pDest, val, order );
653 #           endif
654             }
655             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
656             {
657                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
658             }
659             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
660             {
661                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
662             }
663             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
664             {
665                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
666             }
667         };
668
669         // 32-bit atomic operations
670
671         template <typename T, typename Primary>
672         struct atomic_generic_ops< T, 4, Primary >
673         {
674             typedef make_atomic_primary<T, Primary> primary;
675
676             // store
677             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
678             {
679                 platform::store32( primary::ptr(pDest), primary::val(v), order );
680             }
681             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
682             {
683                 platform::store32( primary::ptr(pDest), primary::val(v), order );
684             }
685             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
686             {
687                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
688             }
689             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
690             {
691                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
692             }
693
694             // load
695             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
696             {
697                 return primary::ret( platform::load32( primary::ptr(pSrc), order ));
698             }
699             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
700             {
701                 return primary::ret( platform::load32( primary::ptr(pSrc), order ));
702             }
703             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
704             {
705                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
706             }
707             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
708             {
709                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
710             }
711
712             // exchange
713             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
714             {
715                 return primary::ret( platform::exchange32( primary::ptr(pDest), primary::val(val), order ));
716             }
717             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
718             {
719                 return primary::ret( platform::exchange32( primary::ptr(pDest), primary::val(val), order ));
720             }
721             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
722             {
723                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
724             }
725             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
726             {
727                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
728             }
729
730             // cas
731             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
732             {
733                 assert( expected );
734                 return platform::cas32_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
735             }
736             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
737             {
738                 assert( expected );
739                 return platform::cas32_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
740             }
741             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
742             {
743                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
744             }
745             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
746             {
747                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
748             }
749             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
750             {
751                 assert( expected );
752                 return platform::cas32_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
753             }
754             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
755             {
756                 assert( expected );
757                 return platform::cas32_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
758             }
759             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
760             {
761                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
762             }
763             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
764             {
765                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
766             }
767         };
768
769         template <typename T>
770         struct atomic_integral_ops< T, 4 >
771             : atomic_generic_ops< T, 4, T >
772             , atomic_integral_bitwise_ops<T>
773         {
774             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
775             // fetch_add
776             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
777             {
778 #           ifdef CDS_ATOMIC_fetch32_add_defined
779                 return platform::fetch32_add( pDest, val, order );
780 #           else
781                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
782                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
783                 return cur;
784 #           endif
785             }
786             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
787             {
788                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
789             }
790             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
791             {
792                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
793             }
794             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
795             {
796                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
797             }
798
799             // fetch_sub
800             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
801             {
802 #           ifdef CDS_ATOMIC_fetch32_sub_defined
803                 return platform::fetch32_sub( pDest, val, order );
804 #           else
805                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
806                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
807                 return cur;
808 #           endif
809             }
810             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
811             {
812                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
813             }
814             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
815             {
816                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
817             }
818             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
819             {
820                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
821             }
822
823             // fetch_and
824             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
825             {
826 #           ifdef CDS_ATOMIC_fetch32_and_defined
827                 return platform::fetch32_and( pDest, val, order );
828 #           else
829                 return bitwise_ops::fetch_and( pDest, val, order );
830 #           endif
831             }
832             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
833             {
834                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
835             }
836             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
837             {
838                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
839             }
840             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
841             {
842                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
843             }
844
845             // fetch_or
846             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
847             {
848 #           ifdef CDS_ATOMIC_fetch32_or_defined
849                 return platform::fetch32_or( pDest, val, order );
850 #           else
851                 return bitwise_ops::fetch_or( pDest, val, order );
852 #           endif
853             }
854             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
855             {
856                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
857             }
858             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
859             {
860                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
861             }
862             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
863             {
864                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
865             }
866
867             // fetch_xor
868             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
869             {
870 #           ifdef CDS_ATOMIC_fetch32_xor_defined
871                 return platform::fetch32_xor( pDest, val, order );
872 #           else
873                 return bitwise_ops::fetch_xor( pDest, val, order );
874 #           endif
875             }
876             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
877             {
878                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
879             }
880             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
881             {
882                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
883             }
884             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
885             {
886                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
887             }
888         };
889
890
891         // 64-bit atomic operations
892
893         template <typename T, typename Primary>
894         struct atomic_generic_ops< T, 8, Primary >
895         {
896             typedef make_atomic_primary<T, Primary> primary;
897
898             // store
899             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
900             {
901                 platform::store64( primary::ptr(pDest), primary::val(v), order );
902             }
903             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
904             {
905                 platform::store64( primary::ptr(pDest), primary::val(v), order );
906             }
907             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
908             {
909                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
910             }
911             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
912             {
913                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
914             }
915
916             // load
917             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
918             {
919                 return primary::ret( platform::load64( primary::ptr(pSrc), order ));
920             }
921             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
922             {
923                 return primary::ret( platform::load64( primary::ptr(pSrc), order ));
924             }
925             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
926             {
927                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
928             }
929             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
930             {
931                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
932             }
933
934             // exchange
935             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
936             {
937                 return primary::ret( platform::exchange64( primary::ptr(pDest), primary::val(val), order ));
938             }
939             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
940             {
941                 return primary::ret( platform::exchange64( primary::ptr(pDest), primary::val(val), order ));
942             }
943             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
944             {
945                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
946             }
947             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
948             {
949                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
950             }
951
952             // cas
953             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
954             {
955                 assert( expected );
956                 return platform::cas64_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
957             }
958             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
959             {
960                 assert( expected );
961                 return platform::cas64_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
962             }
963             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
964             {
965                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
966             }
967             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
968             {
969                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
970             }
971             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
972             {
973                 assert( expected );
974                 return platform::cas64_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
975             }
976             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
977             {
978                 assert( expected );
979                 return platform::cas64_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
980             }
981             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
982             {
983                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
984             }
985             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
986             {
987                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
988             }
989         };
990
991
992         template <typename T>
993         struct atomic_integral_ops< T, 8 >
994             : atomic_generic_ops< T, 8, T >
995             , atomic_integral_bitwise_ops<T>
996         {
997             typedef atomic_integral_bitwise_ops<T>  bitwise_ops;
998             typedef atomic_generic_ops<T, 8, T>     general_ops;
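            // Unlike the smaller sizes, the CAS-loop fallbacks below name the inherited
            // load/compare-exchange operations explicitly through general_ops.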
999
1000             // fetch_add
1001             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1002             {
1003 #           ifdef CDS_ATOMIC_fetch64_add_defined
1004                 return platform::fetch64_add( pDest, val, order );
1005 #           else
1006                 T cur = general_ops::atomic_load_explicit( pDest, memory_order_relaxed );
1007                 do {} while ( !general_ops::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
1008                 return cur;
1009 #           endif
1010             }
1011             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1012             {
1013                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1014             }
1015             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
1016             {
1017                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1018             }
1019             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
1020             {
1021                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1022             }
1023
1024             // fetch_sub
1025             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1026             {
1027 #           ifdef CDS_ATOMIC_fetch64_sub_defined
1028                 return platform::fetch64_sub( pDest, val, order );
1029 #           else
1030                 T cur = general_ops::atomic_load_explicit( pDest, memory_order_relaxed );
1031                 do {} while ( !general_ops::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
1032                 return cur;
1033 #           endif
1034             }
1035             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1036             {
1037                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1038             }
1039             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
1040             {
1041                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1042             }
1043             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
1044             {
1045                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1046             }
1047
1048             // fetch_and
1049             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1050             {
1051 #           ifdef CDS_ATOMIC_fetch64_and_defined
1052                 return platform::fetch64_and( pDest, val, order );
1053 #           else
1054                 return bitwise_ops::fetch_and( pDest, val, order );
1055 #           endif
1056             }
1057             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1058             {
1059                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1060             }
1061             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
1062             {
1063                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
1064             }
1065             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
1066             {
1067                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
1068             }
1069
1070             // fetch_or
1071             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1072             {
1073 #           ifdef CDS_ATOMIC_fetch64_or_defined
1074                 return platform::fetch64_or( pDest, val, order );
1075 #           else
1076                 return bitwise_ops::fetch_or( pDest, val, order );
1077 #           endif
1078             }
1079             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1080             {
1081                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1082             }
1083             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
1084             {
1085                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
1086             }
1087             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
1088             {
1089                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
1090             }
1091
1092             // fetch_xor
1093             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1094             {
1095 #           ifdef CDS_ATOMIC_fetch64_xor_defined
1096                 return platform::fetch64_xor( pDest, val, order );
1097 #           else
1098                 return bitwise_ops::fetch_xor( pDest, val, order );
1099 #           endif
1100             }
1101             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1102             {
1103                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1104             }
1105             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
1106             {
1107                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
1108             }
1109             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
1110             {
1111                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
1112             }
1113         };
1114
1115
1116         // atomic pointer operations
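        // Same operation set for pointer types, routed through the platform *_ptr
        // primitives (store_ptr, load_ptr, exchange_ptr, cas_ptr_weak/cas_ptr_strong).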
1117         template <typename T>
1118         struct atomic_pointer_base
1119         {
1120             // store
1121             static void atomic_store_explicit( T * volatile * pDest, T * v, memory_order order ) CDS_NOEXCEPT
1122             {
1123                 platform::store_ptr( pDest, v, order );
1124             }
1125             static void atomic_store_explicit( T * * pDest, T * v, memory_order order ) CDS_NOEXCEPT
1126             {
1127                 platform::store_ptr( pDest, v, order );
1128             }
1129             static void atomic_store( T * volatile * pDest, T * v ) CDS_NOEXCEPT
1130             {
1131                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
1132             }
1133             static void atomic_store( T * * pDest, T * v ) CDS_NOEXCEPT
1134             {
1135                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
1136             }
1137
1138             // load
1139             static T * atomic_load_explicit( T * volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
1140             {
1141                 return platform::load_ptr( pSrc, order );
1142             }
1143             static T * atomic_load_explicit( T * const * pSrc, memory_order order ) CDS_NOEXCEPT
1144             {
1145                 return platform::load_ptr( pSrc, order );
1146             }
1147             static T * atomic_load( T * volatile const * pSrc ) CDS_NOEXCEPT
1148             {
1149                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
1150             }
1151             static T * atomic_load( T * const * pSrc ) CDS_NOEXCEPT
1152             {
1153                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
1154             }
1155
1156             // exchange
1157             static T * atomic_exchange_explicit( T * volatile * pDest, T * val, memory_order order ) CDS_NOEXCEPT
1158             {
1159                 return platform::exchange_ptr( pDest, val, order );
1160             }
1161             static T * atomic_exchange_explicit( T * * pDest, T * val, memory_order order ) CDS_NOEXCEPT
1162             {
1163                 return platform::exchange_ptr( pDest, val, order );
1164             }
1165             static T * atomic_exchange( T * volatile * pDest, T * val ) CDS_NOEXCEPT
1166             {
1167                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
1168             }
1169             static T * atomic_exchange( T * * pDest, T * val ) CDS_NOEXCEPT
1170             {
1171                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
1172             }
1173
1174             // cas
1175             static bool atomic_compare_exchange_weak_explicit( T * volatile * pDest, T * * expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1176             {
1177                 assert( expected );
1178                 return platform::cas_ptr_weak( pDest, *expected, desired, mo_success, mo_fail );
1179             }
1180             static bool atomic_compare_exchange_weak_explicit( T * * pDest, T * * expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1181             {
1182                 assert( expected );
1183                 return platform::cas_ptr_weak( pDest, *expected, desired, mo_success, mo_fail );
1184             }
1185             static bool atomic_compare_exchange_weak( T * volatile * pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1186             {
1187                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1188             }
1189             static bool atomic_compare_exchange_weak( T ** pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1190             {
1191                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1192             }
1193             static bool atomic_compare_exchange_strong_explicit( T * volatile * pDest, T ** expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1194             {
1195                 assert( expected );
1196                 return platform::cas_ptr_strong( pDest, *expected, desired, mo_success, mo_fail );
1197             }
1198             static bool atomic_compare_exchange_strong_explicit( T ** pDest, T ** expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1199             {
1200                 assert( expected );
1201                 return platform::cas_ptr_strong( pDest, *expected, desired, mo_success, mo_fail );
1202             }
1203             static bool atomic_compare_exchange_strong( T * volatile * pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1204             {
1205                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1206             }
1207             static bool atomic_compare_exchange_strong( T ** pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1208             {
1209                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1210             }
1211         };
1212
1213         template <typename T>
1214         struct atomic_pointer: public atomic_pointer_base<T>
1215         {
1216             typedef atomic_pointer_base<T> base_class;
1217             // fetch_add
1218             static T * atomic_fetch_add_explicit(T * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1219             {
1220 #           ifdef CDS_ATOMIC_fetch_ptr_add_defined
1221                 return platform::fetch_ptr_add( pDest, val, order );
1222 #           else
1223                 T * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1224                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
1225                 return cur;
1226 #           endif
1227             }
1228             static T * atomic_fetch_add_explicit(T * * pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1229             {
1230                 return atomic_fetch_add_explicit( reinterpret_cast<T * volatile *>( pDest ), val, order );
1231             }
1232             static T * atomic_fetch_add( T * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1233             {
1234                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1235             }
1236             static T * atomic_fetch_add( T ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1237             {
1238                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1239             }
1240
1241             // fetch_sub
1242             static T * atomic_fetch_sub_explicit(T * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1243             {
1244 #           ifdef CDS_ATOMIC_fetch_ptr_sub_defined
1245                 return platform::fetch_ptr_sub( pDest, val, order );
1246 #           else
1247                 T * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1248                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
1249                 return cur;
1250 #           endif
1251             }
1252             static T * atomic_fetch_sub_explicit(T ** pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1253             {
1254                 return atomic_fetch_sub_explicit( reinterpret_cast<T * volatile *>( pDest ), val, order );
1255             }
1256             static T * atomic_fetch_sub( T * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1257             {
1258                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1259             }
1260             static T * atomic_fetch_sub( T ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1261             {
1262                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1263             }
1264         };
1265
1266         template <>
1267         struct atomic_pointer<void>: public atomic_pointer_base<void>
1268         {
1269             typedef atomic_pointer_base<void>   base_class;
1270
1271             // fetch_add
1272             static void * atomic_fetch_add_explicit(void * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1273             {
1274                 void * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1275                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, reinterpret_cast<char *>(cur) + val, order, memory_order_relaxed ));
1276                 return cur;
1277             }
1278             static void * atomic_fetch_add_explicit(void * * pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1279             {
1280                 return atomic_fetch_add_explicit( reinterpret_cast<void * volatile *>( pDest ), val, order );
1281             }
1282             static void * atomic_fetch_add( void * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1283             {
1284                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1285             }
1286             static void * atomic_fetch_add( void ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1287             {
1288                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1289             }
1290
1291             // fetch_sub
1292             static void * atomic_fetch_sub_explicit(void * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1293             {
1294                 void * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1295                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, reinterpret_cast<char *>(cur) - val, order, memory_order_relaxed ));
1296                 return cur;
1297             }
1298             static void * atomic_fetch_sub_explicit(void ** pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1299             {
1300                 return atomic_fetch_sub_explicit( reinterpret_cast<void * volatile *>( pDest ), val, order );
1301             }
1302             static void * atomic_fetch_sub( void * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1303             {
1304                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1305             }
1306             static void * atomic_fetch_sub( void ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1307             {
1308                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1309             }
1310         };
1311
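/*
    Note on the atomic_pointer<void> specialization above: pointer arithmetic is not
    defined for void*, so fetch_add / fetch_sub step the address byte-wise through a
    char* cast. Illustrative use (variable names are hypothetical):

        void * volatile cursor = buffer_start;
        void * prev = atomic_pointer<void>::atomic_fetch_add_explicit( &cursor, 16, memory_order_relaxed );
        // prev holds the old address; cursor now points 16 bytes further into the buffer
*/
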
1312         template <typename T>
1313         struct atomic_integral
1314         {
1315         private:
1316             typename cds::details::aligned_type<T, sizeof(T)>::type volatile m_val;
1317             //T volatile  m_val;
1318             typedef atomic_integral_ops<T, sizeof(T)>   atomic_ops;
1319         public:
1320             typedef T   atomic_type;
1321         public:
1322             bool is_lock_free() const volatile CDS_NOEXCEPT
1323             {
1324                 return true;
1325             }
1326             bool is_lock_free() const CDS_NOEXCEPT
1327             {
1328                 return true;
1329             }
1330             void store(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1331             {
1332                 atomic_ops::atomic_store_explicit( &m_val, val, order );
1333             }
1334             void store(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1335             {
1336                 atomic_ops::atomic_store_explicit( &m_val, val, order );
1337             }
1338
1339             T load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1340             {
1341                 return atomic_ops::atomic_load_explicit( &m_val, order );
1342             }
1343             T load(memory_order order  = memory_order_seq_cst) const CDS_NOEXCEPT
1344             {
1345                 return atomic_ops::atomic_load_explicit( &m_val, order );
1346             }
1347
1348             operator T() const volatile CDS_NOEXCEPT
1349             {
1350                 return load();
1351             }
1352             operator T() const CDS_NOEXCEPT
1353             {
1354                 return load();
1355             }
1356
1357             T exchange(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1358             {
1359                 return atomic_ops::atomic_exchange_explicit( &m_val, val, order );
1360             }
1361             T exchange(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1362             {
1363                 return atomic_ops::atomic_exchange_explicit( &m_val, val, order );
1364             }
1365
1366             bool compare_exchange_weak(T& expected, T desired , memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1367             {
1368                 return atomic_ops::atomic_compare_exchange_weak_explicit( &m_val, &expected, desired, success_order, failure_order );
1369             }
1370             bool compare_exchange_weak(T& expected, T desired , memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1371             {
1372                 return atomic_ops::atomic_compare_exchange_weak_explicit( &m_val, &expected, desired, success_order, failure_order );
1373             }
1374             bool compare_exchange_strong(T& expected, T desired , memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1375             {
1376                 return atomic_ops::atomic_compare_exchange_strong_explicit( &m_val, &expected, desired, success_order, failure_order );
1377             }
1378             bool compare_exchange_strong(T& expected, T desired , memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1379             {
1380                 return atomic_ops::atomic_compare_exchange_strong_explicit( &m_val, &expected, desired, success_order, failure_order );
1381             }
1382             bool compare_exchange_weak(T& expected, T desired , memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1383             {
1384                 return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1385             }
1386             bool compare_exchange_weak(T& expected, T desired , memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1387             {
1388                 return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1389             }
1390             bool compare_exchange_strong(T& expected, T desired , memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1391             {
1392                 return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1393             }
1394             bool compare_exchange_strong(T& expected, T desired , memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1395             {
1396                 return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1397             }
1398
1399             T fetch_add(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1400             {
1401                 return atomic_ops::atomic_fetch_add_explicit( &m_val, val, order );
1402             }
1403             T fetch_add(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1404             {
1405                 return atomic_ops::atomic_fetch_add_explicit( &m_val, val, order );
1406             }
1407             T fetch_sub(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1408             {
1409                 return atomic_ops::atomic_fetch_sub_explicit( &m_val, val, order );
1410             }
1411             T fetch_sub(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1412             {
1413                 return atomic_ops::atomic_fetch_sub_explicit( &m_val, val, order );
1414             }
1415             T fetch_and(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1416             {
1417                 return atomic_ops::atomic_fetch_and_explicit( &m_val, val, order );
1418             }
1419             T fetch_and(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1420             {
1421                 return atomic_ops::atomic_fetch_and_explicit( &m_val, val, order );
1422             }
1423
1424             T fetch_or(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1425             {
1426                 return atomic_ops::atomic_fetch_or_explicit( &m_val, val, order );
1427             }
1428             T fetch_or(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1429             {
1430                 return atomic_ops::atomic_fetch_or_explicit( &m_val, val, order );
1431             }
1432             T fetch_xor(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1433             {
1434                 return atomic_ops::atomic_fetch_xor_explicit( &m_val, val, order );
1435             }
1436             T fetch_xor(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1437             {
1438                 return atomic_ops::atomic_fetch_xor_explicit( &m_val, val, order );
1439             }
1440
1441             atomic_integral() = default;
1442             CDS_CONSTEXPR atomic_integral(T val) CDS_NOEXCEPT
1443                 : m_val(val)
1444                 {}
1445
1446             atomic_integral(const atomic_integral&) = delete;
1447             atomic_integral& operator=(const atomic_integral&) = delete;
1448             atomic_integral& operator=(const atomic_integral&) volatile = delete;
1449
1450             T operator=(T val) volatile CDS_NOEXCEPT
1451             {
1452                 store(val);
1453                 return val;
1454             }
1455             T operator=(T val) CDS_NOEXCEPT
1456             {
1457                 store(val);
1458                 return val;
1459             }
1460
1461             // Post inc/dec
1462             T operator++(int) volatile CDS_NOEXCEPT
1463             {
1464                 return fetch_add( 1 );
1465             }
1466             T operator++(int) CDS_NOEXCEPT
1467             {
1468                 return fetch_add( 1 );
1469             }
1470             T operator--(int) volatile CDS_NOEXCEPT
1471             {
1472                 return fetch_sub( 1 );
1473             }
1474             T operator--(int) CDS_NOEXCEPT
1475             {
1476                 return fetch_sub( 1 );
1477             }
1478
1479             // Pre inc/dec
1480             T operator++() volatile CDS_NOEXCEPT
1481             {
1482                 return fetch_add( 1 ) + 1;
1483             }
1484             T operator++() CDS_NOEXCEPT
1485             {
1486                 return fetch_add( 1 ) + 1;
1487             }
1488             T operator--() volatile CDS_NOEXCEPT
1489             {
1490                 return fetch_sub( 1 ) - 1;
1491             }
1492             T operator--() CDS_NOEXCEPT
1493             {
1494                 return fetch_sub( 1 ) - 1;
1495             }
1496
1497             // op=
1498             T operator+=(T val) volatile CDS_NOEXCEPT
1499             {
1500                 return fetch_add( val ) + val;
1501             }
1502             T operator+=(T val) CDS_NOEXCEPT
1503             {
1504                 return fetch_add( val ) + val;
1505             }
1506             T operator-=(T val) volatile CDS_NOEXCEPT
1507             {
1508                 return fetch_sub( val ) - val;
1509             }
1510             T operator-=(T val) CDS_NOEXCEPT
1511             {
1512                 return fetch_sub( val ) - val;
1513             }
1514             T operator&=(T val) volatile CDS_NOEXCEPT
1515             {
1516                 return fetch_and( val ) & val;
1517             }
1518             T operator&=(T val) CDS_NOEXCEPT
1519             {
1520                 return fetch_and( val ) & val;
1521             }
1522             T operator|=(T val) volatile CDS_NOEXCEPT
1523             {
1524                 return fetch_or( val ) | val;
1525             }
1526             T operator|=(T val) CDS_NOEXCEPT
1527             {
1528                 return fetch_or( val ) | val;
1529             }
1530             T operator^=(T val) volatile CDS_NOEXCEPT
1531             {
1532                 return fetch_xor( val ) ^ val;
1533             }
1534             T operator^=(T val) CDS_NOEXCEPT
1535             {
1536                 return fetch_xor( val ) ^ val;
1537             }
1538         };
1539
1540         template <typename Type>
1541         struct select_primary_type {
1542             typedef typename details::primary_type<sizeof(Type)>::type type;
1543         };
1544         template <>
1545         struct select_primary_type<bool> {
1546             typedef bool type;
1547         };
1548
1549     }   // namespace details
1550
1551     template <class T>
1552     struct atomic
1553     {
1554     private:
1555         typedef details::atomic_generic_ops<T, sizeof(T), typename details::select_primary_type<T>::type >  atomic_ops;
1556
1557         T volatile m_data;
1558     public:
1559         bool is_lock_free() const volatile CDS_NOEXCEPT
1560         {
1561             return true;
1562         }
1563         bool is_lock_free() const CDS_NOEXCEPT
1564         {
1565             return true;
1566         }
1567
1568         void store(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1569         {
1570             atomic_ops::atomic_store_explicit( &m_data, val, order );
1571         }
1572         void store(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1573         {
1574             atomic_ops::atomic_store_explicit( &m_data, val, order );
1575         }
1576
1577         T load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1578         {
1579             return atomic_ops::atomic_load_explicit( &m_data, order );
1580         }
1581         T load(memory_order order = memory_order_seq_cst) const CDS_NOEXCEPT
1582         {
1583            return atomic_ops::atomic_load_explicit( &m_data, order );
1584         }
1585
1586         operator T() const volatile CDS_NOEXCEPT
1587         {
1588             return load();
1589         }
1590         operator T() const CDS_NOEXCEPT
1591         {
1592             return load();
1593         }
1594
1595         T exchange(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1596         {
1597             return atomic_ops::atomic_exchange_explicit( &m_data, val, order );
1598         }
1599         T exchange(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1600         {
1601             return atomic_ops::atomic_exchange_explicit( &m_data, val, order );
1602         }
1603
1604         bool compare_exchange_weak(T& expected, T desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1605         {
1606             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_data, &expected, desired, success_order, failure_order );
1607         }
1608         bool compare_exchange_weak(T& expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1609         {
1610             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_data, &expected, desired, success_order, failure_order );
1611         }
1612         bool compare_exchange_strong(T& expected, T desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1613         {
1614             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_data, &expected, desired, success_order, failure_order );
1615         }
1616         bool compare_exchange_strong(T& expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1617         {
1618             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_data, &expected, desired, success_order, failure_order );
1619         }
1620         bool compare_exchange_weak(T& expected, T desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1621         {
1622             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1623         }
1624         bool compare_exchange_weak(T& expected, T desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1625         {
1626             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1627         }
1628         bool compare_exchange_strong(T& expected, T desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1629         {
1630             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1631         }
1632         bool compare_exchange_strong(T& expected, T desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1633         {
1634             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1635         }
1636
1637         atomic() = default;
1638         CDS_CONSTEXPR atomic(T val)
1639             : m_data( val )
1640             {}
1641
1642         atomic(const atomic&) = delete;
1643         atomic& operator=(const atomic&) = delete;
1644         atomic& operator=(const atomic&) volatile = delete;
1645
1646         T operator=(T val) volatile CDS_NOEXCEPT
1647         {
1648             store( val );
1649             return val;
1650         }
1651         T operator=(T val) CDS_NOEXCEPT
1652         {
1653             store( val );
1654             return val;
1655         }
1656     };
1657
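/*
    The primary atomic<T> template above maps T onto an integral type of the same size
    (details::select_primary_type), so it is meant for trivially copyable types whose
    sizeof matches a word the platform can handle atomically. A minimal usage sketch,
    assuming an 8-byte POD, <cstdint>, and native 64-bit atomics (illustrative only):

        struct version_ptr {
            std::uint32_t   idx;
            std::uint32_t   ver;
        };

        cds::cxx11_atomic::atomic<version_ptr> vp;
        vp.store( version_ptr(), memory_order_relaxed );

        version_ptr expected = vp.load( memory_order_acquire );
        version_ptr desired  = { expected.idx, expected.ver + 1 };
        vp.compare_exchange_strong( expected, desired, memory_order_release, memory_order_relaxed );
*/
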
1658 #   define CDS_DECLARE_ATOMIC_INTEGRAL( _type ) \
1659     template <> \
1660     struct atomic<_type>: public details::atomic_integral<_type> \
1661     { \
1662     private: \
1663         typedef details::atomic_integral<_type>   base_class  ; \
1664     public: \
1665         atomic() = default; \
1666         atomic(_type val) CDS_NOEXCEPT : base_class(val) {} \
1667         atomic(const atomic&) = delete; \
1668         atomic& operator=(const atomic&) = delete; \
1669         atomic& operator=(const atomic&) volatile = delete; \
1670         _type operator=(_type val) volatile CDS_NOEXCEPT { return base_class::operator=(val); } \
1671         _type operator=(_type val) CDS_NOEXCEPT { return base_class::operator=(val); } \
1672     };
1673
1674     CDS_DECLARE_ATOMIC_INTEGRAL(char)
1675     CDS_DECLARE_ATOMIC_INTEGRAL(signed char)
1676     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned char)
1677     CDS_DECLARE_ATOMIC_INTEGRAL(short)
1678     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned short)
1679     CDS_DECLARE_ATOMIC_INTEGRAL(int)
1680     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned int)
1681     CDS_DECLARE_ATOMIC_INTEGRAL(long)
1682     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned long)
1683     CDS_DECLARE_ATOMIC_INTEGRAL(long long)
1684     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned long long)
1685 //#if CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400
1686 //    CDS_DECLARE_ATOMIC_INTEGRAL(char16_t)
1687 //    CDS_DECLARE_ATOMIC_INTEGRAL(char32_t)
1688 //#endif
1689 //    CDS_DECLARE_ATOMIC_INTEGRAL(wchar_t)
1690
1691 #   undef CDS_DECLARE_ATOMIC_INTEGRAL
1692
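/*
    Usage sketch for the integral specializations declared above (illustrative only):

        cds::cxx11_atomic::atomic<int> counter( 0 );

        counter.fetch_add( 1, memory_order_relaxed );    // relaxed increment
        ++counter;                                       // pre-increment == fetch_add(1) + 1
        int snapshot = counter.load( memory_order_acquire );

        int expected = snapshot;
        // compare_exchange_weak may fail spuriously, so it normally runs in a retry loop;
        // on failure 'expected' is reloaded with the value actually stored
        while ( !counter.compare_exchange_weak( expected, expected + 1, memory_order_release, memory_order_relaxed ))
            ;
*/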
1693
1694     template <typename T>
1695     class atomic<T *>
1696     {
1697     private:
1698         T * volatile m_ptr;
1699         typedef details::atomic_pointer<T>  atomic_ops;
1700     public:
1701         bool is_lock_free() const volatile CDS_NOEXCEPT
1702         {
1703             return true;
1704         }
1705         bool is_lock_free() const CDS_NOEXCEPT
1706         {
1707             return true;
1708         }
1709
1710         void store(T * val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1711         {
1712             atomic_ops::atomic_store_explicit( &m_ptr, val, order );
1713         }
1714         void store(T * val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1715         {
1716             atomic_ops::atomic_store_explicit( &m_ptr, val, order );
1717         }
1718
1719         T * load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1720         {
1721             return atomic_ops::atomic_load_explicit( &m_ptr, order );
1722         }
1723         T * load(memory_order order = memory_order_seq_cst) const CDS_NOEXCEPT
1724         {
1725             return atomic_ops::atomic_load_explicit( &m_ptr, order );
1726         }
1727
1728         operator T *() const volatile CDS_NOEXCEPT
1729         {
1730             return load();
1731         }
1732         operator T *() const CDS_NOEXCEPT
1733         {
1734             return load();
1735         }
1736
1737         T * exchange(T * val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1738         {
1739             return atomic_ops::atomic_exchange_explicit( &m_ptr, val, order );
1740         }
1741         T * exchange(T * val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1742         {
1743             return atomic_ops::atomic_exchange_explicit( &m_ptr, val, order );
1744         }
1745
1746         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1747         {
1748             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1749         }
1750         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1751         {
1752             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1753         }
1754         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1755         {
1756             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1757         }
1758         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1759         {
1760             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1761         }
1762         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1763         {
1764             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1765         }
1766         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1767         {
1768             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1769         }
1770         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1771         {
1772             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1773         }
1774         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1775         {
1776             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1777         }
1778
1779         T * fetch_add(ptrdiff_t offset, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1780         {
1781             return atomic_ops::atomic_fetch_add_explicit( &m_ptr, offset, order );
1782         }
1783         T * fetch_add(ptrdiff_t offset, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1784         {
1785             return atomic_ops::atomic_fetch_add_explicit( &m_ptr, offset, order );
1786         }
1787
1788         T * fetch_sub(ptrdiff_t offset, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1789         {
1790             return atomic_ops::atomic_fetch_sub_explicit( &m_ptr, offset, order );
1791         }
1792         T * fetch_sub(ptrdiff_t offset, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1793         {
1794             return atomic_ops::atomic_fetch_sub_explicit( &m_ptr, offset, order );
1795         }
1796
1797         atomic() = default;
1798         CDS_CONSTEXPR atomic(T * val) CDS_NOEXCEPT
1799             : m_ptr( val )
1800         {}
1801
1802         atomic(const atomic&) = delete;
1803         atomic& operator=(const atomic&) = delete;
1804         atomic& operator=(const atomic&) volatile = delete;
1805
1806         T * operator=(T * val) volatile CDS_NOEXCEPT
1807         {
1808             store( val );
1809             return val;
1810         }
1811         T * operator=(T * val) CDS_NOEXCEPT
1812         {
1813             store( val );
1814             return val;
1815         }
1816     };
1817
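/*
    Usage sketch for atomic<T *> (illustrative only): the head of a Treiber-style stack,
    pushed with a compare_exchange_weak retry loop. 'node' is a hypothetical type.

        struct node {
            node *  next;
            int     value;
        };
        cds::cxx11_atomic::atomic<node *> head( nullptr );

        void push( node * n )
        {
            n->next = head.load( memory_order_relaxed );
            // release on success publishes n->value to the thread that later pops n
            while ( !head.compare_exchange_weak( n->next, n, memory_order_release, memory_order_relaxed ))
                ;   // on failure n->next is refreshed with the current head
        }
*/
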
1818     // Atomic typedefs
1819     typedef atomic<bool>            atomic_bool;
1820     typedef atomic<char>            atomic_char;
1821     typedef atomic<signed char>     atomic_schar;
1822     typedef atomic<unsigned char>   atomic_uchar;
1823     typedef atomic<short>           atomic_short;
1824     typedef atomic<unsigned short>  atomic_ushort;
1825     typedef atomic<int>             atomic_int;
1826     typedef atomic<unsigned int>    atomic_uint;
1827     typedef atomic<long>            atomic_long;
1828     typedef atomic<unsigned long>   atomic_ulong;
1829     typedef atomic<long long>       atomic_llong;
1830     typedef atomic<unsigned long long> atomic_ullong;
1831 #if ( CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400 ) || CDS_COMPILER == CDS_COMPILER_CLANG
1832     typedef atomic<char16_t>        atomic_char16_t;
1833     typedef atomic<char32_t>        atomic_char32_t;
1834 #endif
1835     typedef atomic<wchar_t>         atomic_wchar_t;
1836
1837
1838     typedef atomic<std::int_least8_t>    atomic_int_least8_t;
1839     typedef atomic<std::uint_least8_t>   atomic_uint_least8_t;
1840     typedef atomic<std::int_least16_t>   atomic_int_least16_t;
1841     typedef atomic<std::uint_least16_t>  atomic_uint_least16_t;
1842     typedef atomic<std::int_least32_t>   atomic_int_least32_t;
1843     typedef atomic<std::uint_least32_t>  atomic_uint_least32_t;
1844     typedef atomic<std::int_least64_t>   atomic_int_least64_t;
1845     typedef atomic<std::uint_least64_t>  atomic_uint_least64_t;
1846     typedef atomic<std::int_fast8_t>     atomic_int_fast8_t;
1847     typedef atomic<std::uint_fast8_t>    atomic_uint_fast8_t;
1848     typedef atomic<std::int_fast16_t>    atomic_int_fast16_t;
1849     typedef atomic<std::uint_fast16_t>   atomic_uint_fast16_t;
1850     typedef atomic<std::int_fast32_t>    atomic_int_fast32_t;
1851     typedef atomic<std::uint_fast32_t>   atomic_uint_fast32_t;
1852     typedef atomic<std::int_fast64_t>    atomic_int_fast64_t;
1853     typedef atomic<std::uint_fast64_t>   atomic_uint_fast64_t;
1854     typedef atomic<intptr_t>             atomic_intptr_t;
1855     typedef atomic<uintptr_t>            atomic_uintptr_t;
1856     typedef atomic<size_t>               atomic_size_t;
1857     typedef atomic<ptrdiff_t>            atomic_ptrdiff_t;
1858     typedef atomic<std::intmax_t>        atomic_intmax_t;
1859     typedef atomic<std::uintmax_t>       atomic_uintmax_t;
1860
1861     template <class T>
1862     static inline bool atomic_is_lock_free(const volatile atomic<T> * p) CDS_NOEXCEPT
1863     {
1864         return p->is_lock_free();
1865     }
1866
1867     template <class T>
1868     static inline bool atomic_is_lock_free(const atomic<T> * p ) CDS_NOEXCEPT
1869     {
1870         return p->is_lock_free();
1871     }
1872
1873     /*
1874     template <class T>
1875     static inline void atomic_init(volatile atomic<T> * p, T val) CDS_NOEXCEPT
1876     {
1877         p->init( val );
1878     }
1879
1880     template <class T>
1881     static inline void atomic_init( atomic<T> * p, T val) CDS_NOEXCEPT
1882     {
1883         p->init( val );
1884     }
1885     */
1886
1887     template <class T>
1888     static inline void atomic_store(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1889     {
1890         p->store(val);
1891     }
1892     template <class T>
1893     static inline void atomic_store(atomic<T>* p, T val) CDS_NOEXCEPT
1894     {
1895         p->store( val );
1896     }
1897
1898     template <class T>
1899     static inline void atomic_store_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1900     {
1901         p->store( val, order );
1902     }
1903     template <class T>
1904     static inline void atomic_store_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1905     {
1906         p->store( val, order );
1907     }
1908
1909     template <class T>
1910     static inline T atomic_load(const volatile atomic<T>* p) CDS_NOEXCEPT
1911     {
1912         return p->load();
1913     }
1914     template <class T>
1915     static inline T atomic_load(const atomic<T>* p) CDS_NOEXCEPT
1916     {
1917         return p->load();
1918     }
1919
1920     template <class T>
1921     static inline T atomic_load_explicit(const volatile atomic<T>* p, memory_order order) CDS_NOEXCEPT
1922     {
1923         return p->load( order );
1924     }
1925     template <class T>
1926     static inline T atomic_load_explicit(const atomic<T>* p, memory_order order) CDS_NOEXCEPT
1927     {
1928         return p->load( order );
1929     }
1930
1931     template <class T>
1932     static inline T atomic_exchange(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1933     {
1934         return p->exchange( val );
1935     }
1936     template <class T>
1937     static inline T atomic_exchange(atomic<T>* p, T val ) CDS_NOEXCEPT
1938     {
1939         return p->exchange( val );
1940     }
1941
1942     template <class T>
1943     static inline T atomic_exchange_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1944     {
1945         return p->exchange( val, order );
1946     }
1947     template <class T>
1948     static inline T atomic_exchange_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1949     {
1950         return p->exchange( val, order );
1951     }
1952
1953     template <class T>
1954     static inline bool atomic_compare_exchange_weak(volatile atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1955     {
1956         return p->compare_exchange_weak( *expected, desired );
1957     }
1958     template <class T>
1959     static inline bool atomic_compare_exchange_weak(atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1960     {
1961         return p->compare_exchange_weak( *expected, desired );
1962     }
1963
1964     template <class T>
1965     static inline bool atomic_compare_exchange_strong(volatile atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1966     {
1967         return p->compare_exchange_strong( *expected, desired );
1968     }
1969     template <class T>
1970     static inline bool atomic_compare_exchange_strong(atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1971     {
1972         return p->compare_exchange_strong( *expected, desired );
1973     }
1974
1975     template <class T>
1976     static inline bool atomic_compare_exchange_weak_explicit(volatile atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1977     {
1978         return p->compare_exchange_weak( *expected, desired, success_order, failure_order );
1979     }
1980     template <class T>
1981     static inline bool atomic_compare_exchange_weak_explicit(atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1982     {
1983         return p->compare_exchange_weak( *expected, desired, success_order, failure_order );
1984     }
1985
1986     template <class T>
1987     static inline bool atomic_compare_exchange_strong_explicit(volatile atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1988     {
1989         return p->compare_exchange_strong( *expected, desired, success_order, failure_order );
1990     }
1991     template <class T>
1992     static inline bool atomic_compare_exchange_strong_explicit(atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1993     {
1994         return p->compare_exchange_strong( *expected, desired, success_order, failure_order );
1995     }
1996
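/*
    The non-member functions above and below mirror the C++11 <atomic> free-function
    interface. A minimal retry-loop sketch written against them (illustrative only;
    'refs' is a hypothetical reference counter):

        cds::cxx11_atomic::atomic<unsigned int> refs( 1 );

        unsigned int cur = atomic_load_explicit( &refs, memory_order_relaxed );
        // increment only while the object is still alive (cur != 0)
        while ( cur != 0
            && !atomic_compare_exchange_weak_explicit( &refs, &cur, cur + 1,
                                                       memory_order_acquire, memory_order_relaxed ))
        {}
*/
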
1997     template <class T>
1998     static inline T atomic_fetch_add(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1999     {
2000         return p->fetch_add( val );
2001     }
2002     template <class T>
2003     static inline T atomic_fetch_add(atomic<T>* p, T val) CDS_NOEXCEPT
2004     {
2005         return p->fetch_add( val );
2006     }
2007     template <class T>
2008     static inline T * atomic_fetch_add(volatile atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2009     {
2010         return p->fetch_add( offset );
2011     }
2012     template <class T>
2013     static inline T * atomic_fetch_add(atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2014     {
2015         return p->fetch_add( offset );
2016     }
2017
2018     template <class T>
2019     static inline T atomic_fetch_add_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2020     {
2021         return p->fetch_add( val, order );
2022     }
2023     template <class T>
2024     static inline T atomic_fetch_add_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2025     {
2026         return p->fetch_add( val, order );
2027     }
2028     template <class T>
2029     static inline T * atomic_fetch_add_explicit(volatile atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2030     {
2031         return p->fetch_add( offset, order );
2032     }
2033     template <class T>
2034     static inline T * atomic_fetch_add_explicit(atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2035     {
2036         return p->fetch_add( offset, order );
2037     }
2038
2039     template <class T>
2040     static inline T atomic_fetch_sub(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2041     {
2042         return p->fetch_sub( val );
2043     }
2044     template <class T>
2045     static inline T atomic_fetch_sub(atomic<T>* p, T val) CDS_NOEXCEPT
2046     {
2047         return p->fetch_sub( val );
2048     }
2049     template <class T>
2050     static inline T * atomic_fetch_sub(volatile atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2051     {
2052         return p->fetch_sub( offset );
2053     }
2054     template <class T>
2055     static inline T * atomic_fetch_sub(atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2056     {
2057         return p->fetch_sub( offset );
2058     }
2059
2060     template <class T>
2061     static inline T atomic_fetch_sub_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2062     {
2063         return p->fetch_sub( val, order );
2064     }
2065     template <class T>
2066     static inline T atomic_fetch_sub_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2067     {
2068         return p->fetch_sub( val, order );
2069     }
2070     template <class T>
2071     static inline T * atomic_fetch_sub_explicit(volatile atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2072     {
2073         return p->fetch_sub( offset, order );
2074     }
2075     template <class T>
2076     static inline T * atomic_fetch_sub_explicit(atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2077     {
2078         return p->fetch_sub( offset, order );
2079     }
2080
2081     template <class T>
2082     static inline T atomic_fetch_and(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2083     {
2084         return p->fetch_and( val );
2085     }
2086     template <class T>
2087     static inline T atomic_fetch_and(atomic<T>* p, T val) CDS_NOEXCEPT
2088     {
2089         return p->fetch_and( val );
2090     }
2091
2092     template <class T>
2093     static inline T atomic_fetch_and_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2094     {
2095         return p->fetch_and( val, order );
2096     }
2097     template <class T>
2098     static inline T atomic_fetch_and_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2099     {
2100         return p->fetch_and( val, order );
2101     }
2102
2103     template <class T>
2104     static inline T atomic_fetch_or(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2105     {
2106         return p->fetch_or( val );
2107     }
2108     template <class T>
2109     static inline T atomic_fetch_or(atomic<T>* p, T val) CDS_NOEXCEPT
2110     {
2111         return p->fetch_or( val );
2112     }
2113
2114     template <class T>
2115     static inline T atomic_fetch_or_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2116     {
2117         return p->fetch_or( val, order );
2118     }
2119     template <class T>
2120     static inline T atomic_fetch_or_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2121     {
2122         return p->fetch_or( val, order );
2123     }
2124
2125     template <class T>
2126     static inline T atomic_fetch_xor(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2127     {
2128         return p->fetch_xor( val );
2129     }
2130     template <class T>
2131     static inline T atomic_fetch_xor(atomic<T>* p, T val) CDS_NOEXCEPT
2132     {
2133         return p->fetch_xor( val );
2134     }
2135
2136     template <class T>
2137     static inline T atomic_fetch_xor_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2138     {
2139         return p->fetch_xor( val, order );
2140     }
2141     template <class T>
2142     static inline T atomic_fetch_xor_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2143     {
2144         return p->fetch_xor( val, order );
2145     }
2146
2147     // Atomic flag type
2148     typedef struct atomic_flag
2149     {
2150         void clear( memory_order order = memory_order_seq_cst ) volatile CDS_NOEXCEPT
2151         {
2152             assert( order != memory_order_acquire
2153                 && order != memory_order_acq_rel
2154                 && order != memory_order_consume
2155                 );
2156             platform::atomic_flag_clear( &m_Flag, order );
2157         }
2158         void clear( memory_order order = memory_order_seq_cst ) CDS_NOEXCEPT
2159         {
2160             assert( order != memory_order_acquire
2161                 && order != memory_order_acq_rel
2162                 && order != memory_order_consume
2163                 );
2164             platform::atomic_flag_clear( &m_Flag, order );
2165         }
2166
2167         bool test_and_set( memory_order order = memory_order_seq_cst ) volatile CDS_NOEXCEPT
2168         {
2169             return platform::atomic_flag_tas( &m_Flag, order );
2170         }
2171         bool test_and_set( memory_order order = memory_order_seq_cst ) CDS_NOEXCEPT
2172         {
2173             return platform::atomic_flag_tas( &m_Flag, order );
2174         }
2175
2176         atomic_flag() = default;
2177
2178         atomic_flag(const atomic_flag&) = delete;
2179         atomic_flag& operator=(const atomic_flag&) = delete;
2180         atomic_flag& operator=(const atomic_flag&) volatile = delete;
2181
2182         platform::atomic_flag_type volatile m_Flag;
2183     } atomic_flag;
2184
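/*
    Usage sketch for atomic_flag (illustrative only): a minimal test-and-set spin lock.
    Note that the defaulted constructor leaves m_Flag uninitialized, so the flag should
    be cleared once before it is shared between threads.

        cds::cxx11_atomic::atomic_flag spin;    // clear it before first use

        void lock()
        {
            // spin until the previous value was clear; acquire pairs with the release in unlock()
            while ( spin.test_and_set( memory_order_acquire ))
                ;
        }
        void unlock()
        {
            spin.clear( memory_order_release );
        }
*/
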
2185     static inline bool atomic_flag_test_and_set(volatile atomic_flag* p) CDS_NOEXCEPT
2186     {
2187         return p->test_and_set();
2188     }
2189     static inline bool atomic_flag_test_and_set(atomic_flag * p) CDS_NOEXCEPT
2190     {
2191         return p->test_and_set();
2192     }
2193     static inline bool atomic_flag_test_and_set_explicit(volatile atomic_flag* p, memory_order order) CDS_NOEXCEPT
2194     {
2195         return p->test_and_set( order );
2196     }
2197     static inline bool atomic_flag_test_and_set_explicit(atomic_flag* p, memory_order order) CDS_NOEXCEPT
2198     {
2199         return p->test_and_set( order );
2200     }
2201     static inline void atomic_flag_clear(volatile atomic_flag* p) CDS_NOEXCEPT
2202     {
2203         return p->clear();
2204     }
2205     static inline void atomic_flag_clear(atomic_flag* p) CDS_NOEXCEPT
2206     {
2207         return p->clear();
2208     }
2209     static inline void atomic_flag_clear_explicit(volatile atomic_flag* p, memory_order order) CDS_NOEXCEPT
2210     {
2211         return p->clear( order );
2212     }
2213     static inline void atomic_flag_clear_explicit(atomic_flag* p, memory_order order) CDS_NOEXCEPT
2214     {
2215         return p->clear( order );
2216     }
2217
2218     // Fences
2219     static inline void atomic_thread_fence(memory_order order) CDS_NOEXCEPT
2220     {
2221         platform::thread_fence( order );
2222         CDS_COMPILER_RW_BARRIER;
2223     }
2224     static inline void atomic_signal_fence(memory_order order) CDS_NOEXCEPT
2225     {
2226         platform::signal_fence( order );
2227     }
2228
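/*
    Fence usage sketch (illustrative only): a release fence paired with an acquire fence
    around relaxed accesses; 'data' and 'ready' are hypothetical shared atomics.

        // producer thread
        data.store( 42, memory_order_relaxed );
        atomic_thread_fence( memory_order_release );
        ready.store( true, memory_order_relaxed );

        // consumer thread
        if ( ready.load( memory_order_relaxed )) {
            atomic_thread_fence( memory_order_acquire );
            assert( data.load( memory_order_relaxed ) == 42 );  // guaranteed by the fence pairing
        }
*/
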
2229 }}  // namespace cds::cxx11_atomic
2230
2231 //@endcond
2232 #endif // #ifndef CDSLIB_COMPILER_CXX11_ATOMIC_H