1 /*
2     This file is a part of libcds - Concurrent Data Structures library
3
4     (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2016
5
6     Source code repo: http://github.com/khizmax/libcds/
7     Download: http://sourceforge.net/projects/libcds/files/
8     
9     Redistribution and use in source and binary forms, with or without
10     modification, are permitted provided that the following conditions are met:
11
12     * Redistributions of source code must retain the above copyright notice, this
13       list of conditions and the following disclaimer.
14
15     * Redistributions in binary form must reproduce the above copyright notice,
16       this list of conditions and the following disclaimer in the documentation
17       and/or other materials provided with the distribution.
18
19     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22     DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23     FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24     DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25     SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26     CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27     OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28     OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.     
29 */
30
31 #ifndef CDSLIB_COMPILER_CXX11_ATOMIC_H
32 #define CDSLIB_COMPILER_CXX11_ATOMIC_H
33 //@cond
34
35 #include <type_traits>  // make_unsigned
36 #include <cds/details/defs.h>
37 #include <cds/details/aligned_type.h>
38
39 namespace cds { namespace cxx11_atomic {
40     typedef enum memory_order {
41         memory_order_relaxed,
42         memory_order_consume,
43         memory_order_acquire,
44         memory_order_release,
45         memory_order_acq_rel,
46         memory_order_seq_cst
47     } memory_order;
48
49 }}  // namespace cds::cxx11_atomic
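// The memory_order enumerators above mirror std::memory_order and carry the
// same acquire/release semantics; the per-platform back-ends included below
// translate them into the required compiler and CPU fences.  Minimal usage
// sketch (hypothetical variable, relying on the 32-bit specialization defined
// later in this header; illustration only):
//
//      int v = 0;
//      int observed = cds::cxx11_atomic::details::atomic_generic_ops<int, sizeof(int)>::
//          atomic_load_explicit( &v, cds::cxx11_atomic::memory_order_acquire );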
50
51
52 #if CDS_COMPILER == CDS_COMPILER_MSVC || (CDS_COMPILER == CDS_COMPILER_INTEL && CDS_OS_INTERFACE == CDS_OSI_WINDOWS)
53 #   if CDS_PROCESSOR_ARCH == CDS_PROCESSOR_X86
54 #       include <cds/compiler/vc/x86/cxx11_atomic.h>
55 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_AMD64
56 #       include <cds/compiler/vc/amd64/cxx11_atomic.h>
57 #   else
58 #       error "MS VC++ compiler: unsupported processor architecture"
59 #   endif
60 #elif CDS_COMPILER == CDS_COMPILER_GCC || CDS_COMPILER == CDS_COMPILER_CLANG || CDS_COMPILER == CDS_COMPILER_INTEL
61 #   if CDS_PROCESSOR_ARCH == CDS_PROCESSOR_X86
62 #       include <cds/compiler/gcc/x86/cxx11_atomic.h>
63 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_AMD64
64 #       include <cds/compiler/gcc/amd64/cxx11_atomic.h>
65 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_IA64
66 #       include <cds/compiler/gcc/ia64/cxx11_atomic.h>
67 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_SPARC
68 #       include <cds/compiler/gcc/sparc/cxx11_atomic.h>
69 #   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_PPC64
70 #       include <cds/compiler/gcc/ppc64/cxx11_atomic.h>
71 //#   elif CDS_PROCESSOR_ARCH == CDS_PROCESSOR_ARM7
72 //#       include <cds/compiler/gcc/arm7/cxx11_atomic.h>
73 #   else
74 #       error "GCC compiler: unsupported processor architecture. Use native C++11 atomics or boost.atomic instead"
75 #   endif
76 #else
77 #   error "Undefined compiler"
78 #endif
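// Each back-end header selected above populates namespace
// cds::cxx11_atomic::platform with size-suffixed primitives - load8/16/32/64,
// store8/16/32/64, exchangeN, casN_weak, casN_strong and, optionally,
// fetchN_add and friends - that the generic layer below delegates to.  The
// exact signatures live in the per-architecture headers; the calls in this
// file assume roughly this shape (illustrative sketch, not copied from a
// back-end):
//
//      namespace platform {
//          template <typename T>
//          T load32( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT;
//          template <typename T>
//          void store32( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT;
//      }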
79
80 namespace cds { namespace cxx11_atomic {
81
82     // forward declarations
83     template <class T>
84     struct atomic;
85
86     namespace details {
87
88         template <typename T, size_t Size, typename Primary = T >
89         struct atomic_generic_ops;
90
91         template <typename T, size_t Size>
92         struct atomic_integral_ops;
93
94         template <size_t TypeSize>
95         struct primary_type;
96
97         template <>
98         struct primary_type<1>
99         {
100             typedef std::uint8_t type;
101         };
102         template <>
103         struct primary_type<2>
104         {
105             typedef std::uint16_t type;
106         };
107         template <>
108         struct primary_type<4>
109         {
110             typedef std::uint32_t type;
111         };
112         template <>
113         struct primary_type<8>
114         {
115             typedef std::uint64_t type;
116         };
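        // primary_type<N> maps an operand size to the unsigned integer of the
        // same width, so any trivially-copyable type of 1, 2, 4 or 8 bytes can
        // be funnelled through the integer primitives of the platform layer.
        // Illustrative check (hypothetical struct, sketch only):
        //
        //      struct tag { std::uint16_t id; };
        //      typedef primary_type< sizeof(tag) >::type prim;    // std::uint16_t
        //      static_assert( sizeof(prim) == sizeof(tag), "width must match" );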
117
118         template <typename T, typename Primary>
119         struct make_atomic_primary
120         {
121             typedef T       source_type;
122             typedef Primary primary_type;
123
124             static primary_type volatile * ptr( source_type volatile * p ) CDS_NOEXCEPT
125             {
126                 return reinterpret_cast<primary_type volatile *>(p);
127             }
128             static primary_type const volatile * ptr( source_type const volatile * p ) CDS_NOEXCEPT
129             {
130                 return reinterpret_cast<primary_type const volatile *>(p);
131             }
132
133             static primary_type val( source_type v ) CDS_NOEXCEPT
134             {
135                 return *reinterpret_cast<primary_type*>(&v);
136             }
137
138             static primary_type& ref( source_type& v ) CDS_NOEXCEPT
139             {
140                 return reinterpret_cast<primary_type&>(v);
141             }
142
143             static primary_type const& ref( source_type const& v ) CDS_NOEXCEPT
144             {
145                 return reinterpret_cast<primary_type const&>(v);
146             }
147
148             static source_type ret( primary_type r ) CDS_NOEXCEPT
149             {
150                 return *reinterpret_cast<source_type *>(&r);
151             }
152         };
153
154         template <typename T>
155         struct make_atomic_primary<T, T>
156         {
157             typedef T source_type;
158             typedef T primary_type;
159
160             static primary_type volatile * ptr( source_type volatile * p ) CDS_NOEXCEPT
161             {
162                 return p;
163             }
164             static primary_type const volatile * ptr( source_type const volatile * p ) CDS_NOEXCEPT
165             {
166                 return p;
167             }
168
169             static primary_type val( source_type v ) CDS_NOEXCEPT
170             {
171                 return v;
172             }
173
174             static primary_type& ref( source_type& v ) CDS_NOEXCEPT
175             {
176                 return v;
177             }
178
179             static source_type ret( primary_type r ) CDS_NOEXCEPT
180             {
181                 return r;
182             }
183         };
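        // make_atomic_primary<T, Primary> adapts values of T to the Primary
        // (equally sized unsigned integer) type the platform primitives work
        // on: ptr()/val()/ref() convert the arguments, ret() converts the
        // result back.  The <T, T> specialization above is the identity
        // pass-through used when T already is a primary integral type.
        // Bitwise round-trip sketch (hypothetical struct, illustration only):
        //
        //      struct color { std::uint8_t r, g, b, a; };
        //      typedef make_atomic_primary<color, std::uint32_t> cvt;
        //      color c  = { 1, 2, 3, 4 };
        //      color c2 = cvt::ret( cvt::val( c ));    // same bytes as c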
184
185         template <typename T>
186         struct atomic_integral_bitwise_ops
187         {
188         public:
189             typedef typename std::make_unsigned<T>::type unsigned_type;
190             typedef atomic_generic_ops<unsigned_type, sizeof(unsigned_type)> atomic_ops;
191
192             static T fetch_and(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
193             {
194                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
195                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
196                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur & unsigned_type(val), order, memory_order_relaxed ));
197                 return T(cur);
198             }
199
200             static T fetch_or(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
201             {
202                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
203                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
204                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur | unsigned_type(val), order, memory_order_relaxed ));
205                 return T(cur);
206             }
207
208             static T fetch_xor(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
209             {
210                 unsigned_type cur = atomic_ops::atomic_load_explicit( reinterpret_cast<unsigned_type volatile *>(pDest), memory_order_relaxed );
211                 do {} while ( !atomic_ops::atomic_compare_exchange_weak_explicit(
212                     reinterpret_cast<unsigned_type volatile *>(pDest), &cur, cur ^ unsigned_type(val), order, memory_order_relaxed ));
213                 return T(cur);
214             }
215         };
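        // The helpers above supply fetch_and/fetch_or/fetch_xor for platforms
        // whose back-end has no native bitwise RMW primitive: the destination
        // is reinterpreted as the unsigned twin of T and the canonical CAS
        // loop retries until the update lands.  The same pattern written with
        // std::atomic, for comparison only (assumes <atomic>; not part of this
        // library):
        //
        //      unsigned fetch_or_emulated( std::atomic<unsigned>& a, unsigned mask )
        //      {
        //          unsigned cur = a.load( std::memory_order_relaxed );
        //          while ( !a.compare_exchange_weak( cur, cur | mask,
        //                      std::memory_order_seq_cst, std::memory_order_relaxed ))
        //              ;   // on failure cur is refreshed with the current value
        //          return cur; // value observed immediately before the update
        //      }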
216
217
218         // 8-bit atomic operations
219
220         template <typename T, typename Primary>
221         struct atomic_generic_ops< T, 1, Primary >
222         {
223             typedef make_atomic_primary<T, Primary> primary;
224
225             // store
226             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
227             {
228                 platform::store8( primary::ptr(pDest), primary::val(v), order );
229             }
230             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
231             {
232                 platform::store8( primary::ptr(pDest), primary::val(v), order );
233             }
234             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
235             {
236                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
237             }
238             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
239             {
240                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
241             }
242
243             // load
244             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
245             {
246                 return primary::ret( platform::load8( primary::ptr(pSrc), order ));
247             }
248             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
249             {
250                 return primary::ret( platform::load8( primary::ptr(pSrc), order ));
251             }
252             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
253             {
254                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
255             }
256             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
257             {
258                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
259             }
260
261             // exchange
262             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
263             {
264                 return primary::ret( platform::exchange8( primary::ptr(pDest), primary::val(val), order ));
265             }
266             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
267             {
268                 return primary::ret( platform::exchange8( primary::ptr(pDest), primary::val(val), order ));
269             }
270             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
271             {
272                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
273             }
274             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
275             {
276                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
277             }
278
279             // cas
280             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
281             {
282                 assert( expected );
283                 return platform::cas8_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
284             }
285             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
286             {
287                 assert( expected );
288                 return platform::cas8_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
289             }
290             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
291             {
292                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
293             }
294             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
295             {
296                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
297             }
298             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
299             {
300                 assert( expected );
301                 return platform::cas8_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
302             }
303             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
304             {
305                 assert( expected );
306                 return platform::cas8_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
307             }
308             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
309             {
310                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
311             }
312             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
313             {
314                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
315             }
316         };
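        // atomic_generic_ops<T, 1, Primary> above establishes the pattern the
        // 16-, 32- and 64-bit specializations below repeat: every operation
        // comes in volatile / non-volatile and *_explicit / seq_cst flavours
        // and is funnelled through make_atomic_primary into the matching
        // platform::*8 primitive.  Usage sketch with a one-byte flag
        // (hypothetical variable names, illustration only):
        //
        //      std::uint8_t flag = 0;
        //      typedef atomic_generic_ops<std::uint8_t, 1> ops8;
        //      ops8::atomic_store_explicit( &flag, std::uint8_t(1), memory_order_release );
        //      std::uint8_t seen = ops8::atomic_load_explicit( &flag, memory_order_acquire );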
317
318         template <typename T>
319         struct atomic_integral_ops< T, 1 >
320             : atomic_generic_ops<T, 1, T >
321             , atomic_integral_bitwise_ops<T>
322         {
323             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
324
325             // fetch_add
326             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
327             {
328 #           ifdef CDS_ATOMIC_fetch8_add_defined
329                 return platform::fetch8_add( pDest, val, order );
330 #           else
331                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
332                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
333                 return cur;
334 #           endif
335             }
336             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
337             {
338                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
339             }
340             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
341             {
342                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
343             }
344             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
345             {
346                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
347             }
348
349             // fetch_sub
350             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
351             {
352 #           ifdef CDS_ATOMIC_fetch8_sub_defined
353                 return platform::fetch8_sub( pDest, val, order );
354 #           else
355                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
356                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
357                 return cur;
358 #           endif
359             }
360             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
361             {
362                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
363             }
364             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
365             {
366                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
367             }
368             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
369             {
370                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
371             }
372
373             // fetch_and
374             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
375             {
376 #           ifdef CDS_ATOMIC_fetch8_and_defined
377                 return platform::fetch8_and( pDest, val, order );
378 #           else
379                 return bitwise_ops::fetch_and( pDest, val, order );
380 #           endif
381             }
382             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
383             {
384                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
385             }
386             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
387             {
388                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
389             }
390             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
391             {
392                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
393             }
394
395             // fetch_or
396             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
397             {
398 #           ifdef CDS_ATOMIC_fetch8_or_defined
399                 return platform::fetch8_or( pDest, val, order );
400 #           else
401                 return bitwise_ops::fetch_or( pDest, val, order );
402 #           endif
403             }
404             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
405             {
406                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
407             }
408             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
409             {
410                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
411             }
412             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
413             {
414                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
415             }
416
417             // fetch_xor
418             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
419             {
420 #           ifdef CDS_ATOMIC_fetch8_xor_defined
421                 return platform::fetch8_xor( pDest, val, order );
422 #           else
423                 return bitwise_ops::fetch_xor( pDest, val, order );
424 #           endif
425             }
426             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
427             {
428                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
429             }
430             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
431             {
432                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
433             }
434             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
435             {
436                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
437             }
438         };
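        // atomic_integral_ops<T, 1> layers the arithmetic and bitwise RMW
        // operations on top of the generic ops.  Each operation prefers a
        // native primitive when the back-end advertises one through a
        // CDS_ATOMIC_fetch8_<op>_defined macro, and otherwise falls back to
        // the CAS loop (add/sub) or to atomic_integral_bitwise_ops (and/or/xor).
        // A back-end opts in roughly like this (sketch of the expected shape,
        // not copied from a real platform header):
        //
        //      #define CDS_ATOMIC_fetch8_add_defined
        //      namespace platform {
        //          template <typename T>
        //          T fetch8_add( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT;
        //      }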
439
440         // 16-bit atomic operations
441
442         template <typename T, typename Primary>
443         struct atomic_generic_ops< T, 2, Primary >
444         {
445             typedef make_atomic_primary<T, Primary> primary;
446
447             // store
448             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
449             {
450                 platform::store16( primary::ptr(pDest), primary::val(v), order );
451             }
452             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
453             {
454                 platform::store16( primary::ptr(pDest), primary::val(v), order );
455             }
456             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
457             {
458                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
459             }
460             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
461             {
462                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
463             }
464
465             // load
466             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
467             {
468                 return primary::ret( platform::load16( primary::ptr(pSrc), order ));
469             }
470             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
471             {
472                 return primary::ret( platform::load16( primary::ptr(pSrc), order ));
473             }
474             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
475             {
476                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
477             }
478             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
479             {
480                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
481             }
482
483             // exchange
484             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
485             {
486                 return primary::ret( platform::exchange16( primary::ptr(pDest), primary::val(val), order ));
487             }
488             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
489             {
490                 return primary::ret( platform::exchange16( primary::ptr(pDest), primary::val(val), order ));
491             }
492             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
493             {
494                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
495             }
496             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
497             {
498                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
499             }
500
501             // cas
502             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
503             {
504                 assert( expected );
505                 return platform::cas16_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
506             }
507             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
508             {
509                 assert( expected );
510                 return platform::cas16_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
511             }
512             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
513             {
514                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
515             }
516             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
517             {
518                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
519             }
520             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
521             {
522                 assert( expected );
523                 return platform::cas16_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
524             }
525             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
526             {
527                 assert( expected );
528                 return platform::cas16_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
529             }
530             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
531             {
532                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
533             }
534             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
535             {
536                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
537             }
538         };
539
540         template <typename T>
541         struct atomic_integral_ops< T, 2 >
542             : atomic_generic_ops< T, 2, T >
543             , atomic_integral_bitwise_ops<T>
544         {
545             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
546
547             // fetch_add
548             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
549             {
550 #           ifdef CDS_ATOMIC_fetch16_add_defined
551                 return platform::fetch16_add( pDest, val, order );
552 #           else
553                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
554                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
555                 return cur;
556 #           endif
557             }
558             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
559             {
560                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
561             }
562             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
563             {
564                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
565             }
566             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
567             {
568                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
569             }
570
571             // fetch_sub
572             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
573             {
574 #           ifdef CDS_ATOMIC_fetch16_sub_defined
575                 return platform::fetch16_sub( pDest, val, order );
576 #           else
577                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
578                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
579                 return cur;
580 #           endif
581             }
582             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
583             {
584                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
585             }
586             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
587             {
588                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
589             }
590             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
591             {
592                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
593             }
594
595             // fetch_and
596             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
597             {
598 #           ifdef CDS_ATOMIC_fetch16_and_defined
599                 return platform::fetch16_and( pDest, val, order );
600 #           else
601                 return bitwise_ops::fetch_and( pDest, val, order );
602 #           endif
603             }
604             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
605             {
606                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
607             }
608             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
609             {
610                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
611             }
612             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
613             {
614                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
615             }
616
617             // fetch_or
618             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
619             {
620 #           ifdef CDS_ATOMIC_fetch16_or_defined
621                 return platform::fetch16_or( pDest, val, order );
622 #           else
623                 return bitwise_ops::fetch_or( pDest, val, order );
624 #           endif
625             }
626             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
627             {
628                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
629             }
630             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
631             {
632                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
633             }
634             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
635             {
636                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
637             }
638
639             // fetch_xor
640             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
641             {
642 #           ifdef CDS_ATOMIC_fetch16_xor_defined
643                 return platform::fetch16_xor( pDest, val, order );
644 #           else
645                 return bitwise_ops::fetch_xor( pDest, val, order );
646 #           endif
647             }
648             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
649             {
650                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
651             }
652             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
653             {
654                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
655             }
656             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
657             {
658                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
659             }
660         };
661
662         // 32-bit atomic operations
663
664         template <typename T, typename Primary>
665         struct atomic_generic_ops< T, 4, Primary >
666         {
667             typedef make_atomic_primary<T, Primary> primary;
668
669             // store
670             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
671             {
672                 platform::store32( primary::ptr(pDest), primary::val(v), order );
673             }
674             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
675             {
676                 platform::store32( primary::ptr(pDest), primary::val(v), order );
677             }
678             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
679             {
680                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
681             }
682             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
683             {
684                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
685             }
686
687             // load
688             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
689             {
690                 return primary::ret( platform::load32( primary::ptr(pSrc), order ));
691             }
692             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
693             {
694                 return primary::ret( platform::load32( primary::ptr(pSrc), order ));
695             }
696             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
697             {
698                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
699             }
700             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
701             {
702                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
703             }
704
705             // exchange
706             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
707             {
708                 return primary::ret( platform::exchange32( primary::ptr(pDest), primary::val(val), order ));
709             }
710             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
711             {
712                 return primary::ret( platform::exchange32( primary::ptr(pDest), primary::val(val), order ));
713             }
714             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
715             {
716                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
717             }
718             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
719             {
720                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
721             }
722
723             // cas
724             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
725             {
726                 assert( expected );
727                 return platform::cas32_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
728             }
729             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
730             {
731                 assert( expected );
732                 return platform::cas32_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
733             }
734             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
735             {
736                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
737             }
738             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
739             {
740                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
741             }
742             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
743             {
744                 assert( expected );
745                 return platform::cas32_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
746             }
747             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
748             {
749                 assert( expected );
750                 return platform::cas32_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
751             }
752             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
753             {
754                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
755             }
756             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
757             {
758                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
759             }
760         };
761
762         template <typename T>
763         struct atomic_integral_ops< T, 4 >
764             : atomic_generic_ops< T, 4, T >
765             , atomic_integral_bitwise_ops<T>
766         {
767             typedef atomic_integral_bitwise_ops<T> bitwise_ops;
768             // fetch_add
769             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
770             {
771 #           ifdef CDS_ATOMIC_fetch32_add_defined
772                 return platform::fetch32_add( pDest, val, order );
773 #           else
774                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
775                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
776                 return cur;
777 #           endif
778             }
779             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
780             {
781                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
782             }
783             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
784             {
785                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
786             }
787             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
788             {
789                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
790             }
791
792             // fetch_sub
793             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
794             {
795 #           ifdef CDS_ATOMIC_fetch32_sub_defined
796                 return platform::fetch32_sub( pDest, val, order );
797 #           else
798                 T cur = atomic_load_explicit( pDest, memory_order_relaxed );
799                 do {} while ( !atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
800                 return cur;
801 #           endif
802             }
803             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
804             {
805                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
806             }
807             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
808             {
809                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
810             }
811             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
812             {
813                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
814             }
815
816             // fetch_and
817             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
818             {
819 #           ifdef CDS_ATOMIC_fetch32_and_defined
820                 return platform::fetch32_and( pDest, val, order );
821 #           else
822                 return bitwise_ops::fetch_and( pDest, val, order );
823 #           endif
824             }
825             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
826             {
827                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
828             }
829             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
830             {
831                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
832             }
833             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
834             {
835                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
836             }
837
838             // fetch_or
839             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
840             {
841 #           ifdef CDS_ATOMIC_fetch32_or_defined
842                 return platform::fetch32_or( pDest, val, order );
843 #           else
844                 return bitwise_ops::fetch_or( pDest, val, order );
845 #           endif
846             }
847             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
848             {
849                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
850             }
851             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
852             {
853                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
854             }
855             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
856             {
857                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
858             }
859
860             // fetch_xor
861             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
862             {
863 #           ifdef CDS_ATOMIC_fetch32_xor_defined
864                 return platform::fetch32_xor( pDest, val, order );
865 #           else
866                 return bitwise_ops::fetch_xor( pDest, val, order );
867 #           endif
868             }
869             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
870             {
871                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
872             }
873             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
874             {
875                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
876             }
877             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
878             {
879                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
880             }
881         };
882
883
884         // 64-bit atomic operations
885
886         template <typename T, typename Primary>
887         struct atomic_generic_ops< T, 8, Primary >
888         {
889             typedef make_atomic_primary<T, Primary> primary;
890
891             // store
892             static void atomic_store_explicit( T volatile * pDest, T v, memory_order order ) CDS_NOEXCEPT
893             {
894                 platform::store64( primary::ptr(pDest), primary::val(v), order );
895             }
896             static void atomic_store_explicit( T * pDest, T v, memory_order order ) CDS_NOEXCEPT
897             {
898                 platform::store64( primary::ptr(pDest), primary::val(v), order );
899             }
900             static void atomic_store( T volatile * pDest, T v ) CDS_NOEXCEPT
901             {
902                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
903             }
904             static void atomic_store( T * pDest, T v ) CDS_NOEXCEPT
905             {
906                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
907             }
908
909             // load
910             static T atomic_load_explicit( T volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
911             {
912                 return primary::ret( platform::load64( primary::ptr(pSrc), order ));
913             }
914             static T atomic_load_explicit( T const * pSrc, memory_order order ) CDS_NOEXCEPT
915             {
916                 return primary::ret( platform::load64( primary::ptr(pSrc), order ));
917             }
918             static T atomic_load( T volatile const * pSrc ) CDS_NOEXCEPT
919             {
920                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
921             }
922             static T atomic_load( T const * pSrc ) CDS_NOEXCEPT
923             {
924                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
925             }
926
927             // exchange
928             static T atomic_exchange_explicit( T volatile * pDest, T val, memory_order order ) CDS_NOEXCEPT
929             {
930                 return primary::ret( platform::exchange64( primary::ptr(pDest), primary::val(val), order ));
931             }
932             static T atomic_exchange_explicit( T * pDest, T val, memory_order order ) CDS_NOEXCEPT
933             {
934                 return primary::ret( platform::exchange64( primary::ptr(pDest), primary::val(val), order ));
935             }
936             static T atomic_exchange( T volatile * pDest, T val ) CDS_NOEXCEPT
937             {
938                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
939             }
940             static T atomic_exchange( T * pDest, T val ) CDS_NOEXCEPT
941             {
942                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
943             }
944
945             // cas
946             static bool atomic_compare_exchange_weak_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
947             {
948                 assert( expected );
949                 return platform::cas64_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
950             }
951             static bool atomic_compare_exchange_weak_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
952             {
953                 assert( expected );
954                 return platform::cas64_weak( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
955             }
956             static bool atomic_compare_exchange_weak( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
957             {
958                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
959             }
960             static bool atomic_compare_exchange_weak( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
961             {
962                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
963             }
964             static bool atomic_compare_exchange_strong_explicit( T volatile * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
965             {
966                 assert( expected );
967                 return platform::cas64_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
968             }
969             static bool atomic_compare_exchange_strong_explicit( T * pDest, T * expected, T desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
970             {
971                 assert( expected );
972                 return platform::cas64_strong( primary::ptr(pDest), primary::ref(*expected), primary::val(desired), mo_success, mo_fail );
973             }
974             static bool atomic_compare_exchange_strong( T volatile * pDest, T * expected, T desired ) CDS_NOEXCEPT
975             {
976                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
977             }
978             static bool atomic_compare_exchange_strong( T * pDest, T * expected, T desired ) CDS_NOEXCEPT
979             {
980                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
981             }
982         };
983
984
985         template <typename T>
986         struct atomic_integral_ops< T, 8 >
987             : atomic_generic_ops< T, 8, T >
988             , atomic_integral_bitwise_ops<T>
989         {
990             typedef atomic_integral_bitwise_ops<T>  bitwise_ops;
991             typedef atomic_generic_ops<T, 8, T>     general_ops;   // names the inherited generic operations used by the fall-back paths below
992
993             // fetch_add
994             static T atomic_fetch_add_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
995             {
996 #           ifdef CDS_ATOMIC_fetch64_add_defined
997                 return platform::fetch64_add( pDest, val, order );
998 #           else
999                 T cur = general_ops::atomic_load_explicit( pDest, memory_order_relaxed );
1000                 do {} while ( !general_ops::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
1001                 return cur;
1002 #           endif
1003             }
1004             static T atomic_fetch_add_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1005             {
1006                 return atomic_fetch_add_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1007             }
1008             static T atomic_fetch_add( T volatile * pDest, T val ) CDS_NOEXCEPT
1009             {
1010                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1011             }
1012             static T atomic_fetch_add( T * pDest, T val ) CDS_NOEXCEPT
1013             {
1014                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1015             }
1016
1017             // fetch_sub
1018             static T atomic_fetch_sub_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1019             {
1020 #           ifdef CDS_ATOMIC_fetch64_sub_defined
1021                 return platform::fetch64_sub( pDest, val, order );
1022 #           else
1023                 T cur = general_ops::atomic_load_explicit( pDest, memory_order_relaxed );
1024                 do {} while ( !general_ops::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
1025                 return cur;
1026 #           endif
1027             }
1028             static T atomic_fetch_sub_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1029             {
1030                 return atomic_fetch_sub_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1031             }
1032             static T atomic_fetch_sub( T volatile * pDest, T val ) CDS_NOEXCEPT
1033             {
1034                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1035             }
1036             static T atomic_fetch_sub( T * pDest, T val ) CDS_NOEXCEPT
1037             {
1038                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1039             }
1040
1041             // fetch_and
1042             static T atomic_fetch_and_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1043             {
1044 #           ifdef CDS_ATOMIC_fetch64_and_defined
1045                 return platform::fetch64_and( pDest, val, order );
1046 #           else
1047                 return bitwise_ops::fetch_and( pDest, val, order );
1048 #           endif
1049             }
1050             static T atomic_fetch_and_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1051             {
1052                 return atomic_fetch_and_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1053             }
1054             static T atomic_fetch_and( T volatile * pDest, T val ) CDS_NOEXCEPT
1055             {
1056                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
1057             }
1058             static T atomic_fetch_and( T * pDest, T val ) CDS_NOEXCEPT
1059             {
1060                 return atomic_fetch_and_explicit( pDest, val, memory_order_seq_cst );
1061             }
1062
1063             // fetch_or
1064             static T atomic_fetch_or_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1065             {
1066 #           ifdef CDS_ATOMIC_fetch64_or_defined
1067                 return platform::fetch64_or( pDest, val, order );
1068 #           else
1069                 return bitwise_ops::fetch_or( pDest, val, order );
1070 #           endif
1071             }
1072             static T atomic_fetch_or_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1073             {
1074                 return atomic_fetch_or_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1075             }
1076             static T atomic_fetch_or( T volatile * pDest, T val ) CDS_NOEXCEPT
1077             {
1078                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
1079             }
1080             static T atomic_fetch_or( T * pDest, T val ) CDS_NOEXCEPT
1081             {
1082                 return atomic_fetch_or_explicit( pDest, val, memory_order_seq_cst );
1083             }
1084
1085             // fetch_xor
1086             static T atomic_fetch_xor_explicit(T volatile * pDest, T val, memory_order order) CDS_NOEXCEPT
1087             {
1088 #           ifdef CDS_ATOMIC_fetch64_xor_defined
1089                 return platform::fetch64_xor( pDest, val, order );
1090 #           else
1091                 return bitwise_ops::fetch_xor( pDest, val, order );
1092 #           endif
1093             }
1094             static T atomic_fetch_xor_explicit(T * pDest, T val , memory_order order) CDS_NOEXCEPT
1095             {
1096                 return atomic_fetch_xor_explicit( reinterpret_cast<T volatile *>( pDest ), val, order );
1097             }
1098             static T atomic_fetch_xor( T volatile * pDest, T val ) CDS_NOEXCEPT
1099             {
1100                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
1101             }
1102             static T atomic_fetch_xor( T * pDest, T val ) CDS_NOEXCEPT
1103             {
1104                 return atomic_fetch_xor_explicit( pDest, val, memory_order_seq_cst );
1105             }
1106         };
1107
1108
1109         // atomic pointer operations
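        // Unlike the sized specializations above, the pointer operations pass
        // T* straight to platform::load_ptr / store_ptr / exchange_ptr /
        // cas_ptr_* without going through make_atomic_primary.  Usage sketch
        // (hypothetical node type, illustration only):
        //
        //      struct node;
        //      node * head = nullptr;
        //      node * old  = atomic_pointer_base<node>::atomic_exchange_explicit(
        //                        &head, static_cast<node*>(nullptr), memory_order_acq_rel );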
1110         template <typename T>
1111         struct atomic_pointer_base
1112         {
1113             // store
1114             static void atomic_store_explicit( T * volatile * pDest, T * v, memory_order order ) CDS_NOEXCEPT
1115             {
1116                 platform::store_ptr( pDest, v, order );
1117             }
1118             static void atomic_store_explicit( T * * pDest, T * v, memory_order order ) CDS_NOEXCEPT
1119             {
1120                 platform::store_ptr( pDest, v, order );
1121             }
1122             static void atomic_store( T * volatile * pDest, T * v ) CDS_NOEXCEPT
1123             {
1124                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
1125             }
1126             static void atomic_store( T * * pDest, T * v ) CDS_NOEXCEPT
1127             {
1128                 atomic_store_explicit( pDest, v, memory_order_seq_cst );
1129             }
1130
1131             // load
1132             static T * atomic_load_explicit( T * volatile const * pSrc, memory_order order ) CDS_NOEXCEPT
1133             {
1134                 return platform::load_ptr( pSrc, order );
1135             }
1136             static T * atomic_load_explicit( T * const * pSrc, memory_order order ) CDS_NOEXCEPT
1137             {
1138                 return platform::load_ptr( pSrc, order );
1139             }
1140             static T * atomic_load( T * volatile const * pSrc ) CDS_NOEXCEPT
1141             {
1142                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
1143             }
1144             static T * atomic_load( T * const * pSrc ) CDS_NOEXCEPT
1145             {
1146                 return atomic_load_explicit( pSrc, memory_order_seq_cst );
1147             }
1148
1149             // exchange
1150             static T * atomic_exchange_explicit( T * volatile * pDest, T * val, memory_order order ) CDS_NOEXCEPT
1151             {
1152                 return platform::exchange_ptr( pDest, val, order );
1153             }
1154             static T * atomic_exchange_explicit( T * * pDest, T * val, memory_order order ) CDS_NOEXCEPT
1155             {
1156                 return platform::exchange_ptr( pDest, val, order );
1157             }
1158             static T * atomic_exchange( T * volatile * pDest, T * val ) CDS_NOEXCEPT
1159             {
1160                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
1161             }
1162             static T * atomic_exchange( T * * pDest, T * val ) CDS_NOEXCEPT
1163             {
1164                 return atomic_exchange_explicit( pDest, val, memory_order_seq_cst );
1165             }
1166
1167             // cas
1168             static bool atomic_compare_exchange_weak_explicit( T * volatile * pDest, T * * expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1169             {
1170                 assert( expected );
1171                 return platform::cas_ptr_weak( pDest, *expected, desired, mo_success, mo_fail );
1172             }
1173             static bool atomic_compare_exchange_weak_explicit( T * * pDest, T * * expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1174             {
1175                 assert( expected );
1176                 return platform::cas_ptr_weak( pDest, *expected, desired, mo_success, mo_fail );
1177             }
1178             static bool atomic_compare_exchange_weak( T * volatile * pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1179             {
1180                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1181             }
1182             static bool atomic_compare_exchange_weak( T ** pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1183             {
1184                 return atomic_compare_exchange_weak_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1185             }
1186             static bool atomic_compare_exchange_strong_explicit( T * volatile * pDest, T ** expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1187             {
1188                 assert( expected );
1189                 return platform::cas_ptr_strong( pDest, *expected, desired, mo_success, mo_fail );
1190             }
1191             static bool atomic_compare_exchange_strong_explicit( T ** pDest, T ** expected, T * desired, memory_order mo_success, memory_order mo_fail ) CDS_NOEXCEPT
1192             {
1193                 assert( expected );
1194                 return platform::cas_ptr_strong( pDest, *expected, desired, mo_success, mo_fail );
1195             }
1196             static bool atomic_compare_exchange_strong( T * volatile * pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1197             {
1198                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1199             }
1200             static bool atomic_compare_exchange_strong( T ** pDest, T ** expected, T * desired ) CDS_NOEXCEPT
1201             {
1202                 return atomic_compare_exchange_strong_explicit( pDest, expected, desired, memory_order_seq_cst, memory_order_relaxed );
1203             }
1204         };
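             // atomic_pointer_base above supplies only the ordering operations (load/store/exchange/CAS)
             // for T *; pointer arithmetic (fetch_add / fetch_sub) is layered on top by atomic_pointer
             // and by its void specialization below.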
1205
1206         template <typename T>
1207         struct atomic_pointer: public atomic_pointer_base<T>
1208         {
1209             typedef atomic_pointer_base<T> base_class;
1210             // fetch_add
1211             static T * atomic_fetch_add_explicit(T * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1212             {
1213 #           ifdef CDS_ATOMIC_fetch_ptr_add_defined
1214                 return platform::fetch_ptr_add( pDest, val, order );
1215 #           else
1216                 T * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1217                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, cur + val, order, memory_order_relaxed ));
1218                 return cur;
1219 #           endif
1220             }
1221             static T * atomic_fetch_add_explicit(T * * pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1222             {
1223                 return atomic_fetch_add_explicit( reinterpret_cast<T * volatile *>( pDest ), val, order );
1224             }
1225             static T * atomic_fetch_add( T * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1226             {
1227                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1228             }
1229             static T * atomic_fetch_add( T ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1230             {
1231                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1232             }
1233
1234             // fetch_sub
1235             static T * atomic_fetch_sub_explicit(T * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1236             {
1237 #           ifdef CDS_ATOMIC_fetch_ptr_sub_defined
1238                 return platform::fetch_ptr_sub( pDest, val, order );
1239 #           else
1240                 T * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1241                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, cur - val, order, memory_order_relaxed ));
1242                 return cur;
1243 #           endif
1244             }
1245             static T * atomic_fetch_sub_explicit(T ** pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1246             {
1247                 return atomic_fetch_sub_explicit( reinterpret_cast<T * volatile *>( pDest ), val, order );
1248             }
1249             static T * atomic_fetch_sub( T * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1250             {
1251                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1252             }
1253             static T * atomic_fetch_sub( T ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1254             {
1255                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1256             }
1257         };
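             // Note: as with std::atomic<T *>, fetch_add / fetch_sub above displace the pointer by
             // 'val' elements of T, not bytes: the CAS fallback computes cur + val on a T *, which
             // scales by sizeof(T); the platform fetch_ptr_add / fetch_ptr_sub path is presumed to
             // follow the same convention.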
1258
1259         template <>
1260         struct atomic_pointer<void>: public atomic_pointer_base<void>
1261         {
1262             typedef atomic_pointer_base<void>   base_class;
1263
1264             // fetch_add
1265             static void * atomic_fetch_add_explicit(void * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1266             {
1267                 void * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1268                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, reinterpret_cast<char *>(cur) + val, order, memory_order_relaxed ));
1269                 return cur;
1270             }
1271             static void * atomic_fetch_add_explicit(void * * pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1272             {
1273                 return atomic_fetch_add_explicit( reinterpret_cast<void * volatile *>( pDest ), val, order );
1274             }
1275             static void * atomic_fetch_add( void * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1276             {
1277                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1278             }
1279             static void * atomic_fetch_add( void ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1280             {
1281                 return atomic_fetch_add_explicit( pDest, val, memory_order_seq_cst );
1282             }
1283
1284             // fetch_sub
1285             static void * atomic_fetch_sub_explicit(void * volatile * pDest, ptrdiff_t val, memory_order order) CDS_NOEXCEPT
1286             {
1287                 void * cur = base_class::atomic_load_explicit( pDest, memory_order_relaxed );
1288                 do {} while ( !base_class::atomic_compare_exchange_weak_explicit( pDest, &cur, reinterpret_cast<char *>(cur) - val, order, memory_order_relaxed ));
1289                 return cur;
1290             }
1291             static void * atomic_fetch_sub_explicit(void ** pDest, ptrdiff_t val , memory_order order) CDS_NOEXCEPT
1292             {
1293                 return atomic_fetch_sub_explicit( reinterpret_cast<void * volatile *>( pDest ), val, order );
1294             }
1295             static void * atomic_fetch_sub( void * volatile * pDest, ptrdiff_t val ) CDS_NOEXCEPT
1296             {
1297                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1298             }
1299             static void * atomic_fetch_sub( void ** pDest, ptrdiff_t val ) CDS_NOEXCEPT
1300             {
1301                 return atomic_fetch_sub_explicit( pDest, val, memory_order_seq_cst );
1302             }
1303         };
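             // Note: arithmetic on void * is not defined in C++, so the void specialization above
             // applies the displacement through a char * view of the stored pointer; fetch_add /
             // fetch_sub on an atomic void pointer therefore move it by 'val' bytes.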
1304
1305         template <typename T>
1306         struct atomic_integral
1307         {
1308         private:
1309             typename cds::details::aligned_type<T, sizeof(T)>::type volatile m_val;
1310             //T volatile  m_val;
1311             typedef atomic_integral_ops<T, sizeof(T)>   atomic_ops;
1312         public:
1313             typedef T   atomic_type;
1314         public:
1315             bool is_lock_free() const volatile CDS_NOEXCEPT
1316             {
1317                 return true;
1318             }
1319             bool is_lock_free() const CDS_NOEXCEPT
1320             {
1321                 return true;
1322             }
1323             void store(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1324             {
1325                 atomic_ops::atomic_store_explicit( &m_val, val, order );
1326             }
1327             void store(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1328             {
1329                 atomic_ops::atomic_store_explicit( &m_val, val, order );
1330             }
1331
1332             T load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1333             {
1334                 return atomic_ops::atomic_load_explicit( &m_val, order );
1335             }
1336             T load(memory_order order  = memory_order_seq_cst) const CDS_NOEXCEPT
1337             {
1338                 return atomic_ops::atomic_load_explicit( &m_val, order );
1339             }
1340
1341             operator T() const volatile CDS_NOEXCEPT
1342             {
1343                 return load();
1344             }
1345             operator T() const CDS_NOEXCEPT
1346             {
1347                 return load();
1348             }
1349
1350             T exchange(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1351             {
1352                 return atomic_ops::atomic_exchange_explicit( &m_val, val, order );
1353             }
1354             T exchange(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1355             {
1356                 return atomic_ops::atomic_exchange_explicit( &m_val, val, order );
1357             }
1358
1359             bool compare_exchange_weak(T& expected, T desired , memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1360             {
1361                 return atomic_ops::atomic_compare_exchange_weak_explicit( &m_val, &expected, desired, success_order, failure_order );
1362             }
1363             bool compare_exchange_weak(T& expected, T desired , memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1364             {
1365                 return atomic_ops::atomic_compare_exchange_weak_explicit( &m_val, &expected, desired, success_order, failure_order );
1366             }
1367             bool compare_exchange_strong(T& expected, T desired , memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1368             {
1369                 return atomic_ops::atomic_compare_exchange_strong_explicit( &m_val, &expected, desired, success_order, failure_order );
1370             }
1371             bool compare_exchange_strong(T& expected, T desired , memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1372             {
1373                 return atomic_ops::atomic_compare_exchange_strong_explicit( &m_val, &expected, desired, success_order, failure_order );
1374             }
1375             bool compare_exchange_weak(T& expected, T desired , memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1376             {
1377                 return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1378             }
1379             bool compare_exchange_weak(T& expected, T desired , memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1380             {
1381                 return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1382             }
1383             bool compare_exchange_strong(T& expected, T desired , memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1384             {
1385                 return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1386             }
1387             bool compare_exchange_strong(T& expected, T desired , memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1388             {
1389                 return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1390             }
1391
1392             T fetch_add(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1393             {
1394                 return atomic_ops::atomic_fetch_add_explicit( &m_val, val, order );
1395             }
1396             T fetch_add(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1397             {
1398                 return atomic_ops::atomic_fetch_add_explicit( &m_val, val, order );
1399             }
1400             T fetch_sub(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1401             {
1402                 return atomic_ops::atomic_fetch_sub_explicit( &m_val, val, order );
1403             }
1404             T fetch_sub(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1405             {
1406                 return atomic_ops::atomic_fetch_sub_explicit( &m_val, val, order );
1407             }
1408             T fetch_and(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1409             {
1410                 return atomic_ops::atomic_fetch_and_explicit( &m_val, val, order );
1411             }
1412             T fetch_and(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1413             {
1414                 return atomic_ops::atomic_fetch_and_explicit( &m_val, val, order );
1415             }
1416
1417             T fetch_or(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1418             {
1419                 return atomic_ops::atomic_fetch_or_explicit( &m_val, val, order );
1420             }
1421             T fetch_or(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1422             {
1423                 return atomic_ops::atomic_fetch_or_explicit( &m_val, val, order );
1424             }
1425             T fetch_xor(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1426             {
1427                 return atomic_ops::atomic_fetch_xor_explicit( &m_val, val, order );
1428             }
1429             T fetch_xor(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1430             {
1431                 return atomic_ops::atomic_fetch_xor_explicit( &m_val, val, order );
1432             }
1433
1434             atomic_integral() = default;
1435             CDS_CONSTEXPR atomic_integral(T val) CDS_NOEXCEPT
1436                 : m_val(val)
1437                 {}
1438
1439             atomic_integral(const atomic_integral&) = delete;
1440             atomic_integral& operator=(const atomic_integral&) = delete;
1441             atomic_integral& operator=(const atomic_integral&) volatile = delete;
1442
1443             T operator=(T val) volatile CDS_NOEXCEPT
1444             {
1445                 store(val);
1446                 return val;
1447             }
1448             T operator=(T val) CDS_NOEXCEPT
1449             {
1450                 store(val);
1451                 return val;
1452             }
1453
1454             // Post inc/dec
1455             T operator++(int) volatile CDS_NOEXCEPT
1456             {
1457                 return fetch_add( 1 );
1458             }
1459             T operator++(int) CDS_NOEXCEPT
1460             {
1461                 return fetch_add( 1 );
1462             }
1463             T operator--(int) volatile CDS_NOEXCEPT
1464             {
1465                 return fetch_sub( 1 );
1466             }
1467             T operator--(int) CDS_NOEXCEPT
1468             {
1469                 return fetch_sub( 1 );
1470             }
1471
1472             // Pre inc/dec
1473             T operator++() volatile CDS_NOEXCEPT
1474             {
1475                 return fetch_add( 1 ) + 1;
1476             }
1477             T operator++() CDS_NOEXCEPT
1478             {
1479                 return fetch_add( 1 ) + 1;
1480             }
1481             T operator--() volatile CDS_NOEXCEPT
1482             {
1483                 return fetch_sub( 1 ) - 1;
1484             }
1485             T operator--() CDS_NOEXCEPT
1486             {
1487                 return fetch_sub( 1 ) - 1;
1488             }
1489
1490             // op=
1491             T operator+=(T val) volatile CDS_NOEXCEPT
1492             {
1493                 return fetch_add( val ) + val;
1494             }
1495             T operator+=(T val) CDS_NOEXCEPT
1496             {
1497                 return fetch_add( val ) + val;
1498             }
1499             T operator-=(T val) volatile CDS_NOEXCEPT
1500             {
1501                 return fetch_sub( val ) - val;
1502             }
1503             T operator-=(T val) CDS_NOEXCEPT
1504             {
1505                 return fetch_sub( val ) - val;
1506             }
1507             T operator&=(T val) volatile CDS_NOEXCEPT
1508             {
1509                 return fetch_and( val ) & val;
1510             }
1511             T operator&=(T val) CDS_NOEXCEPT
1512             {
1513                 return fetch_and( val ) & val;
1514             }
1515             T operator|=(T val) volatile CDS_NOEXCEPT
1516             {
1517                 return fetch_or( val ) | val;
1518             }
1519             T operator|=(T val) CDS_NOEXCEPT
1520             {
1521                 return fetch_or( val ) | val;
1522             }
1523             T operator^=(T val) volatile CDS_NOEXCEPT
1524             {
1525                 return fetch_xor( val ) ^ val;
1526             }
1527             T operator^=(T val) CDS_NOEXCEPT
1528             {
1529                 return fetch_xor( val ) ^ val;
1530             }
1531         };
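             // Illustrative sketch of this interface as seen through the atomic<int> specialization
             // declared further below (example only, not part of the library):
             //
             //     cds::cxx11_atomic::atomic<int> counter( 0 );
             //     int prev = counter.fetch_add( 5, cds::cxx11_atomic::memory_order_relaxed );  // prev == 0, counter == 5
             //     int now  = counter += 3;    // operator+= returns fetch_add(3) + 3 == 8
             //     ++counter;                  // pre-increment returns fetch_add(1) + 1 == 9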
1532
1533         template <typename Type>
1534         struct select_primary_type {
1535             typedef typename details::primary_type<sizeof(Type)>::type type;
1536         };
1537         template <>
1538         struct select_primary_type<bool> {
1539             typedef bool type;
1540         };
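             // select_primary_type maps an arbitrary T to details::primary_type<sizeof(T)>::type
             // (defined elsewhere in the atomics implementation), presumably an operand type of the
             // same size that the platform primitives accept; bool is special-cased to stay bool.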
1541
1542     }   // namespace details
1543
1544     template <class T>
1545     struct atomic
1546     {
1547     private:
1548         typedef details::atomic_generic_ops<T, sizeof(T), typename details::select_primary_type<T>::type >  atomic_ops;
1549
1550         T volatile m_data;
1551     public:
1552         bool is_lock_free() const volatile CDS_NOEXCEPT
1553         {
1554             return true;
1555         }
1556         bool is_lock_free() const CDS_NOEXCEPT
1557         {
1558             return true;
1559         }
1560
1561         void store(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1562         {
1563             atomic_ops::atomic_store_explicit( &m_data, val, order );
1564         }
1565         void store(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1566         {
1567             atomic_ops::atomic_store_explicit( &m_data, val, order );
1568         }
1569
1570         T load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1571         {
1572             return atomic_ops::atomic_load_explicit( &m_data, order );
1573         }
1574         T load(memory_order order = memory_order_seq_cst) const CDS_NOEXCEPT
1575         {
1576             return atomic_ops::atomic_load_explicit( &m_data, order );
1577         }
1578
1579         operator T() const volatile CDS_NOEXCEPT
1580         {
1581             return load();
1582         }
1583         operator T() const CDS_NOEXCEPT
1584         {
1585             return load();
1586         }
1587
1588         T exchange(T val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1589         {
1590             return atomic_ops::atomic_exchange_explicit( &m_data, val, order );
1591         }
1592         T exchange(T val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1593         {
1594             return atomic_ops::atomic_exchange_explicit( &m_data, val, order );
1595         }
1596
1597         bool compare_exchange_weak(T& expected, T desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1598         {
1599             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_data, &expected, desired, success_order, failure_order );
1600         }
1601         bool compare_exchange_weak(T& expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1602         {
1603             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_data, &expected, desired, success_order, failure_order );
1604         }
1605         bool compare_exchange_strong(T& expected, T desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1606         {
1607             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_data, &expected, desired, success_order, failure_order );
1608         }
1609         bool compare_exchange_strong(T& expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1610         {
1611             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_data, &expected, desired, success_order, failure_order );
1612         }
1613         bool compare_exchange_weak(T& expected, T desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1614         {
1615             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1616         }
1617         bool compare_exchange_weak(T& expected, T desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1618         {
1619             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1620         }
1621         bool compare_exchange_strong(T& expected, T desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1622         {
1623             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1624         }
1625         bool compare_exchange_strong(T& expected, T desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1626         {
1627             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1628         }
1629
1630         atomic() = default;
1631         CDS_CONSTEXPR atomic(T val) CDS_NOEXCEPT
1632             : m_data( val )
1633             {}
1634
1635         atomic(const atomic&) = delete;
1636         atomic& operator=(const atomic&) = delete;
1637         atomic& operator=(const atomic&) volatile = delete;
1638
1639         T operator=(T val) volatile CDS_NOEXCEPT
1640         {
1641             store( val );
1642             return val;
1643         }
1644         T operator=(T val) CDS_NOEXCEPT
1645         {
1646             store( val );
1647             return val;
1648         }
1649     };
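     // Illustrative use of the generic atomic<T> above with a small trivially copyable struct
     // (example only; T must have a size supported by atomic_generic_ops):
     //
     //     struct pos { short x; short y; };
     //     cds::cxx11_atomic::atomic<pos> a( pos{ 0, 0 } );
     //     pos expected = a.load( cds::cxx11_atomic::memory_order_acquire );
     //     pos desired  = { 1, 2 };
     //     while ( !a.compare_exchange_weak( expected, desired,
     //                 cds::cxx11_atomic::memory_order_release, cds::cxx11_atomic::memory_order_relaxed ))
     //     {}   // as with std::atomic, 'expected' is refreshed with the observed value on failure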
1650
1651 #   define CDS_DECLARE_ATOMIC_INTEGRAL( _type ) \
1652     template <> \
1653     struct atomic<_type>: public details::atomic_integral<_type> \
1654     { \
1655     private: \
1656         typedef details::atomic_integral<_type>   base_class  ; \
1657     public: \
1658         atomic() = default; \
1659         atomic(_type val) CDS_NOEXCEPT : base_class(val) {} \
1660         atomic(const atomic&) = delete; \
1661         atomic& operator=(const atomic&) = delete; \
1662         atomic& operator=(const atomic&) volatile = delete; \
1663         _type operator=(_type val) volatile CDS_NOEXCEPT { return base_class::operator=(val); } \
1664         _type operator=(_type val) CDS_NOEXCEPT { return base_class::operator=(val); } \
1665     };
1666
1667     CDS_DECLARE_ATOMIC_INTEGRAL(char)
1668     CDS_DECLARE_ATOMIC_INTEGRAL(signed char)
1669     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned char)
1670     CDS_DECLARE_ATOMIC_INTEGRAL(short)
1671     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned short)
1672     CDS_DECLARE_ATOMIC_INTEGRAL(int)
1673     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned int)
1674     CDS_DECLARE_ATOMIC_INTEGRAL(long)
1675     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned long)
1676     CDS_DECLARE_ATOMIC_INTEGRAL(long long)
1677     CDS_DECLARE_ATOMIC_INTEGRAL(unsigned long long)
1678 //#if CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400
1679 //    CDS_DECLARE_ATOMIC_INTEGRAL(char16_t)
1680 //    CDS_DECLARE_ATOMIC_INTEGRAL(char32_t)
1681 //#endif
1682 //    CDS_DECLARE_ATOMIC_INTEGRAL(wchar_t)
1683
1684 #   undef CDS_DECLARE_ATOMIC_INTEGRAL
1685
1686
1687     template <typename T>
1688     class atomic<T *>
1689     {
1690     private:
1691         T * volatile m_ptr;
1692         typedef details::atomic_pointer<T>  atomic_ops;
1693     public:
1694         bool is_lock_free() const volatile CDS_NOEXCEPT
1695         {
1696             return true;
1697         }
1698         bool is_lock_free() const CDS_NOEXCEPT
1699         {
1700             return true;
1701         }
1702
1703         void store(T * val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1704         {
1705             atomic_ops::atomic_store_explicit( &m_ptr, val, order );
1706         }
1707         void store(T * val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1708         {
1709             atomic_ops::atomic_store_explicit( &m_ptr, val, order );
1710         }
1711
1712         T * load(memory_order order = memory_order_seq_cst) const volatile CDS_NOEXCEPT
1713         {
1714             return atomic_ops::atomic_load_explicit( &m_ptr, order );
1715         }
1716         T * load(memory_order order = memory_order_seq_cst) const CDS_NOEXCEPT
1717         {
1718             return atomic_ops::atomic_load_explicit( &m_ptr, order );
1719         }
1720
1721         operator T *() const volatile CDS_NOEXCEPT
1722         {
1723             return load();
1724         }
1725         operator T *() const CDS_NOEXCEPT
1726         {
1727             return load();
1728         }
1729
1730         T * exchange(T * val, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1731         {
1732             return atomic_ops::atomic_exchange_explicit( &m_ptr, val, order );
1733         }
1734         T * exchange(T * val, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1735         {
1736             return atomic_ops::atomic_exchange_explicit( &m_ptr, val, order );
1737         }
1738
1739         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1740         {
1741             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1742         }
1743         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1744         {
1745             return atomic_ops::atomic_compare_exchange_weak_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1746         }
1747         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order, memory_order failure_order) volatile CDS_NOEXCEPT
1748         {
1749             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1750         }
1751         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1752         {
1753             return atomic_ops::atomic_compare_exchange_strong_explicit( &m_ptr, &expected, desired, success_order, failure_order );
1754         }
1755         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1756         {
1757             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1758         }
1759         bool compare_exchange_weak(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1760         {
1761             return compare_exchange_weak( expected, desired, success_order, memory_order_relaxed );
1762         }
1763         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1764         {
1765             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1766         }
1767         bool compare_exchange_strong(T *& expected, T * desired, memory_order success_order = memory_order_seq_cst) CDS_NOEXCEPT
1768         {
1769             return compare_exchange_strong( expected, desired, success_order, memory_order_relaxed );
1770         }
1771
1772         T * fetch_add(ptrdiff_t offset, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1773         {
1774             return atomic_ops::atomic_fetch_add_explicit( &m_ptr, offset, order );
1775         }
1776         T * fetch_add(ptrdiff_t offset, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1777         {
1778             return atomic_ops::atomic_fetch_add_explicit( &m_ptr, offset, order );
1779         }
1780
1781         T * fetch_sub(ptrdiff_t offset, memory_order order = memory_order_seq_cst) volatile CDS_NOEXCEPT
1782         {
1783             return atomic_ops::atomic_fetch_sub_explicit( &m_ptr, offset, order );
1784         }
1785         T * fetch_sub(ptrdiff_t offset, memory_order order = memory_order_seq_cst) CDS_NOEXCEPT
1786         {
1787             return atomic_ops::atomic_fetch_sub_explicit( &m_ptr, offset, order );
1788         }
1789
1790         atomic() = default;
1791         CDS_CONSTEXPR atomic(T * val) CDS_NOEXCEPT
1792             : m_ptr( val )
1793         {}
1794
1795         atomic(const atomic&) = delete;
1796         atomic& operator=(const atomic&) = delete;
1797         atomic& operator=(const atomic&) volatile = delete;
1798
1799         T * operator=(T * val) volatile CDS_NOEXCEPT
1800         {
1801             store( val );
1802             return val;
1803         }
1804         T * operator=(T * val) CDS_NOEXCEPT
1805         {
1806             store( val );
1807             return val;
1808         }
1809     };
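     // Illustrative use of atomic<T *> above (example only, not part of the library):
     //
     //     static int buf[8];
     //     cds::cxx11_atomic::atomic<int *> cursor( buf );
     //     int * prev = cursor.fetch_add( 2, cds::cxx11_atomic::memory_order_relaxed );  // prev == buf
     //     int * cur  = cursor.load();                                                   // buf + 2
     //     cursor.compare_exchange_strong( cur, buf );                                   // rewind to buf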
1810
1811     // Atomic typedefs
1812     typedef atomic<bool>            atomic_bool;
1813     typedef atomic<char>            atomic_char;
1814     typedef atomic<signed char>     atomic_schar;
1815     typedef atomic<unsigned char>   atomic_uchar;
1816     typedef atomic<short>           atomic_short;
1817     typedef atomic<unsigned short>  atomic_ushort;
1818     typedef atomic<int>             atomic_int;
1819     typedef atomic<unsigned int>    atomic_uint;
1820     typedef atomic<long>            atomic_long;
1821     typedef atomic<unsigned long>   atomic_ulong;
1822     typedef atomic<long long>       atomic_llong;
1823     typedef atomic<unsigned long long> atomic_ullong;
1824 #if ( CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400 ) || CDS_COMPILER == CDS_COMPILER_CLANG
1825     typedef atomic<char16_t>        atomic_char16_t;
1826     typedef atomic<char32_t>        atomic_char32_t;
1827 #endif
1828     typedef atomic<wchar_t>         atomic_wchar_t;
1829
1830
1831     typedef atomic<std::int_least8_t>    atomic_int_least8_t;
1832     typedef atomic<std::uint_least8_t>   atomic_uint_least8_t;
1833     typedef atomic<std::int_least16_t>   atomic_int_least16_t;
1834     typedef atomic<std::uint_least16_t>  atomic_uint_least16_t;
1835     typedef atomic<std::int_least32_t>   atomic_int_least32_t;
1836     typedef atomic<std::uint_least32_t>  atomic_uint_least32_t;
1837     typedef atomic<std::int_least64_t>   atomic_int_least64_t;
1838     typedef atomic<std::uint_least64_t>  atomic_uint_least64_t;
1839     typedef atomic<std::int_fast8_t>     atomic_int_fast8_t;
1840     typedef atomic<std::uint_fast8_t>    atomic_uint_fast8_t;
1841     typedef atomic<std::int_fast16_t>    atomic_int_fast16_t;
1842     typedef atomic<std::uint_fast16_t>   atomic_uint_fast16_t;
1843     typedef atomic<std::int_fast32_t>    atomic_int_fast32_t;
1844     typedef atomic<std::uint_fast32_t>   atomic_uint_fast32_t;
1845     typedef atomic<std::int_fast64_t>    atomic_int_fast64_t;
1846     typedef atomic<std::uint_fast64_t>   atomic_uint_fast64_t;
1847     typedef atomic<intptr_t>             atomic_intptr_t;
1848     typedef atomic<uintptr_t>            atomic_uintptr_t;
1849     typedef atomic<size_t>               atomic_size_t;
1850     typedef atomic<ptrdiff_t>            atomic_ptrdiff_t;
1851     typedef atomic<std::intmax_t>        atomic_intmax_t;
1852     typedef atomic<std::uintmax_t>       atomic_uintmax_t;
1853
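     // The free functions below mirror the C-style std::atomic_xxx interface on top of the class
     // templates above; note that the compare-exchange variants take 'expected' by pointer.
     // Example (illustrative only):
     //
     //     cds::cxx11_atomic::atomic_int x( 0 );
     //     int expected = 0;
     //     bool ok = atomic_compare_exchange_strong( &x, &expected, 42 );   // found via ADL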
1854     template <class T>
1855     static inline bool atomic_is_lock_free(const volatile atomic<T> * p) CDS_NOEXCEPT
1856     {
1857         return p->is_lock_free();
1858     }
1859
1860     template <class T>
1861     static inline bool atomic_is_lock_free(const atomic<T> * p ) CDS_NOEXCEPT
1862     {
1863         return p->is_lock_free();
1864     }
1865
1866     /*
1867     template <class T>
1868     static inline void atomic_init(volatile atomic<T> * p, T val) CDS_NOEXCEPT
1869     {
1870         p->init( val );
1871     }
1872
1873     template <class T>
1874     static inline void atomic_init( atomic<T> * p, T val) CDS_NOEXCEPT
1875     {
1876         p->init( val );
1877     }
1878     */
1879
1880     template <class T>
1881     static inline void atomic_store(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1882     {
1883         p->store(val);
1884     }
1885     template <class T>
1886     static inline void atomic_store(atomic<T>* p, T val) CDS_NOEXCEPT
1887     {
1888         p->store( val );
1889     }
1890
1891     template <class T>
1892     static inline void atomic_store_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1893     {
1894         p->store( val, order );
1895     }
1896     template <class T>
1897     static inline void atomic_store_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1898     {
1899         p->store( val, order );
1900     }
1901
1902     template <class T>
1903     static inline T atomic_load(const volatile atomic<T>* p) CDS_NOEXCEPT
1904     {
1905         return p->load();
1906     }
1907     template <class T>
1908     static inline T atomic_load(const atomic<T>* p) CDS_NOEXCEPT
1909     {
1910         return p->load();
1911     }
1912
1913     template <class T>
1914     static inline T atomic_load_explicit(const volatile atomic<T>* p, memory_order order) CDS_NOEXCEPT
1915     {
1916         return p->load( order );
1917     }
1918     template <class T>
1919     static inline T atomic_load_explicit(const atomic<T>* p, memory_order order) CDS_NOEXCEPT
1920     {
1921         return p->load( order );
1922     }
1923
1924     template <class T>
1925     static inline T atomic_exchange(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1926     {
1927         return p->exchange( val );
1928     }
1929     template <class T>
1930     static inline T atomic_exchange(atomic<T>* p, T val ) CDS_NOEXCEPT
1931     {
1932         return p->exchange( val );
1933     }
1934
1935     template <class T>
1936     static inline T atomic_exchange_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1937     {
1938         return p->exchange( val, order );
1939     }
1940     template <class T>
1941     static inline T atomic_exchange_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
1942     {
1943         return p->exchange( val, order );
1944     }
1945
1946     template <class T>
1947     static inline bool atomic_compare_exchange_weak(volatile atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1948     {
1949         return p->compare_exchange_weak( *expected, desired );
1950     }
1951     template <class T>
1952     static inline bool atomic_compare_exchange_weak(atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1953     {
1954         return p->compare_exchange_weak( *expected, desired );
1955     }
1956
1957     template <class T>
1958     static inline bool atomic_compare_exchange_strong(volatile atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1959     {
1960         return p->compare_exchange_strong( *expected, desired );
1961     }
1962     template <class T>
1963     static inline bool atomic_compare_exchange_strong(atomic<T>* p, T* expected, T desired) CDS_NOEXCEPT
1964     {
1965         return p->compare_exchange_strong( *expected, desired );
1966     }
1967
1968     template <class T>
1969     static inline bool atomic_compare_exchange_weak_explicit(volatile atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1970     {
1971         return p->compare_exchange_weak( *expected, desired, success_order, failure_order );
1972     }
1973     template <class T>
1974     static inline bool atomic_compare_exchange_weak_explicit(atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1975     {
1976         return p->compare_exchange_weak( *expected, desired, success_order, failure_order );
1977     }
1978
1979     template <class T>
1980     static inline bool atomic_compare_exchange_strong_explicit(volatile atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1981     {
1982         return p->compare_exchange_strong( *expected, desired, success_order, failure_order );
1983     }
1984     template <class T>
1985     static inline bool atomic_compare_exchange_strong_explicit(atomic<T>* p, T* expected, T desired, memory_order success_order, memory_order failure_order) CDS_NOEXCEPT
1986     {
1987         return p->compare_exchange_strong( *expected, desired, success_order, failure_order );
1988     }
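     // Note: the compare-exchange free functions above simply dereference 'expected' and forward to
     // the member functions; as with std::atomic, *expected is intended to receive the value actually
     // observed when the exchange fails.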
1989
1990     template <class T>
1991     static inline T atomic_fetch_add(volatile atomic<T>* p, T val) CDS_NOEXCEPT
1992     {
1993         return p->fetch_add( val );
1994     }
1995     template <class T>
1996     static inline T atomic_fetch_add(atomic<T>* p, T val) CDS_NOEXCEPT
1997     {
1998         return p->fetch_add( val );
1999     }
2000     template <class T>
2001     static inline T * atomic_fetch_add(volatile atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2002     {
2003         return p->fetch_add( offset );
2004     }
2005     template <class T>
2006     static inline T * atomic_fetch_add(atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2007     {
2008         return p->fetch_add( offset );
2009     }
2010
2011     template <class T>
2012     static inline T atomic_fetch_add_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2013     {
2014         return p->fetch_add( val, order );
2015     }
2016     template <class T>
2017     static inline T atomic_fetch_add_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2018     {
2019         return p->fetch_add( val, order );
2020     }
2021     template <class T>
2022     static inline T * atomic_fetch_add_explicit(volatile atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2023     {
2024         return p->fetch_add( offset, order );
2025     }
2026     template <class T>
2027     static inline T * atomic_fetch_add_explicit(atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2028     {
2029         return p->fetch_add( offset, order );
2030     }
2031
2032     template <class T>
2033     static inline T atomic_fetch_sub(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2034     {
2035         return p->fetch_sub( val );
2036     }
2037     template <class T>
2038     static inline T atomic_fetch_sub(atomic<T>* p, T val) CDS_NOEXCEPT
2039     {
2040         return p->fetch_sub( val );
2041     }
2042     template <class T>
2043     static inline T * atomic_fetch_sub(volatile atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2044     {
2045         return p->fetch_sub( offset );
2046     }
2047     template <class T>
2048     static inline T * atomic_fetch_sub(atomic<T *>* p, ptrdiff_t offset) CDS_NOEXCEPT
2049     {
2050         return p->fetch_sub( offset );
2051     }
2052
2053     template <class T>
2054     static inline T atomic_fetch_sub_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2055     {
2056         return p->fetch_sub( val, order );
2057     }
2058     template <class T>
2059     static inline T atomic_fetch_sub_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2060     {
2061         return p->fetch_sub( val, order );
2062     }
2063     template <class T>
2064     static inline T * atomic_fetch_sub_explicit(volatile atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2065     {
2066         return p->fetch_sub( offset, order );
2067     }
2068     template <class T>
2069     static inline T * atomic_fetch_sub_explicit(atomic<T *>* p, ptrdiff_t offset, memory_order order) CDS_NOEXCEPT
2070     {
2071         return p->fetch_sub( offset, order );
2072     }
2073
2074     template <class T>
2075     static inline T atomic_fetch_and(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2076     {
2077         return p->fetch_and( val );
2078     }
2079     template <class T>
2080     static inline T atomic_fetch_and(atomic<T>* p, T val) CDS_NOEXCEPT
2081     {
2082         return p->fetch_and( val );
2083     }
2084
2085     template <class T>
2086     static inline T atomic_fetch_and_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2087     {
2088         return p->fetch_and( val, order );
2089     }
2090     template <class T>
2091     static inline T atomic_fetch_and_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2092     {
2093         return p->fetch_and( val, order );
2094     }
2095
2096     template <class T>
2097     static inline T atomic_fetch_or(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2098     {
2099         return p->fetch_or( val );
2100     }
2101     template <class T>
2102     static inline T atomic_fetch_or(atomic<T>* p, T val) CDS_NOEXCEPT
2103     {
2104         return p->fetch_or( val );
2105     }
2106
2107     template <class T>
2108     static inline T atomic_fetch_or_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2109     {
2110         return p->fetch_or( val, order );
2111     }
2112     template <class T>
2113     static inline T atomic_fetch_or_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2114     {
2115         return p->fetch_or( val, order );
2116     }
2117
2118     template <class T>
2119     static inline T atomic_fetch_xor(volatile atomic<T>* p, T val) CDS_NOEXCEPT
2120     {
2121         return p->fetch_xor( val );
2122     }
2123     template <class T>
2124     static inline T atomic_fetch_xor(atomic<T>* p, T val) CDS_NOEXCEPT
2125     {
2126         return p->fetch_xor( val );
2127     }
2128
2129     template <class T>
2130     static inline T atomic_fetch_xor_explicit(volatile atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2131     {
2132         return p->fetch_xor( val, order );
2133     }
2134     template <class T>
2135     static inline T atomic_fetch_xor_explicit(atomic<T>* p, T val, memory_order order) CDS_NOEXCEPT
2136     {
2137         return p->fetch_xor( val, order );
2138     }
2139
2140     // Atomic flag type
2141     typedef struct atomic_flag
2142     {
2143         void clear( memory_order order = memory_order_seq_cst ) volatile CDS_NOEXCEPT
2144         {
2145             assert( order != memory_order_acquire
2146                 && order != memory_order_acq_rel
2147                 && order != memory_order_consume
2148                 );
2149             platform::atomic_flag_clear( &m_Flag, order );
2150         }
2151         void clear( memory_order order = memory_order_seq_cst ) CDS_NOEXCEPT
2152         {
2153             assert( order != memory_order_acquire
2154                 && order != memory_order_acq_rel
2155                 && order != memory_order_consume
2156                 );
2157             platform::atomic_flag_clear( &m_Flag, order );
2158         }
2159
2160         bool test_and_set( memory_order order = memory_order_seq_cst ) volatile CDS_NOEXCEPT
2161         {
2162             return platform::atomic_flag_tas( &m_Flag, order );
2163         }
2164         bool test_and_set( memory_order order = memory_order_seq_cst ) CDS_NOEXCEPT
2165         {
2166             return platform::atomic_flag_tas( &m_Flag, order );
2167         }
2168
2169         atomic_flag() = default;
2170
2171         atomic_flag(const atomic_flag&) = delete;
2172         atomic_flag& operator=(const atomic_flag&) = delete;
2173         atomic_flag& operator=(const atomic_flag&) volatile = delete;
2174
2175         platform::atomic_flag_type volatile m_Flag;
2176     } atomic_flag;
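     // Illustrative spin lock built on atomic_flag above (example only, not part of the library;
     // the defaulted constructor does not necessarily zero m_Flag, so clear() the flag before first use):
     //
     //     cds::cxx11_atomic::atomic_flag lock;
     //     lock.clear();                                      // put the flag into the "unlocked" state
     //     while ( lock.test_and_set( cds::cxx11_atomic::memory_order_acquire ))
     //     {}                                                 // busy-wait for the previous owner
     //     /* ... critical section ... */
     //     lock.clear( cds::cxx11_atomic::memory_order_release );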
2177
2178     static inline bool atomic_flag_test_and_set(volatile atomic_flag* p) CDS_NOEXCEPT
2179     {
2180         return p->test_and_set();
2181     }
2182     static inline bool atomic_flag_test_and_set(atomic_flag * p) CDS_NOEXCEPT
2183     {
2184         return p->test_and_set();
2185     }
2186     static inline bool atomic_flag_test_and_set_explicit(volatile atomic_flag* p, memory_order order) CDS_NOEXCEPT
2187     {
2188         return p->test_and_set( order );
2189     }
2190     static inline bool atomic_flag_test_and_set_explicit(atomic_flag* p, memory_order order) CDS_NOEXCEPT
2191     {
2192         return p->test_and_set( order );
2193     }
2194     static inline void atomic_flag_clear(volatile atomic_flag* p) CDS_NOEXCEPT
2195     {
2196         p->clear();
2197     }
2198     static inline void atomic_flag_clear(atomic_flag* p) CDS_NOEXCEPT
2199     {
2200         p->clear();
2201     }
2202     static inline void atomic_flag_clear_explicit(volatile atomic_flag* p, memory_order order) CDS_NOEXCEPT
2203     {
2204         p->clear( order );
2205     }
2206     static inline void atomic_flag_clear_explicit(atomic_flag* p, memory_order order) CDS_NOEXCEPT
2207     {
2208         p->clear( order );
2209     }
2210
2211     // Fences
2212     static inline void atomic_thread_fence(memory_order order) CDS_NOEXCEPT
2213     {
2214         platform::thread_fence( order );
2215         CDS_COMPILER_RW_BARRIER;
2216     }
2217     static inline void atomic_signal_fence(memory_order order) CDS_NOEXCEPT
2218     {
2219         platform::signal_fence( order );
2220     }
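     // Note: atomic_thread_fence above combines the platform fence with CDS_COMPILER_RW_BARRIER so the
     // compiler also refrains from reordering across the call; atomic_signal_fence only needs to
     // constrain the compiler, which platform::signal_fence is expected to do on its own.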
2221
2222 }}  // namespace cds::cxx11_atomic
2223
2224 //@endcond
2225 #endif // #ifndef CDSLIB_COMPILER_CXX11_ATOMIC_H