// [libcds.git] tests/test-hdr/misc/cxx11_atomic_func.cpp
// Commit: Renaming cds/cxx11_atomic.h to cds/algo/atomic.h
//$$CDS-header$$

#include "cppunit/cppunit_proxy.h"

#include <cds/algo/atomic.h>

#ifndef CDS_USE_BOOST_ATOMIC
// Skip this test for Boost.atomic:
// Boost.atomic does not implement the free atomic functions.

#include "misc/cxx11_convert_memory_order.h"

namespace misc {

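    // Functional test of the free (non-member) C++11 atomic API exported by
    // <cds/algo/atomic.h>: atomic_flag, load/store, exchange, compare_exchange_weak/strong,
    // the fetch_xxx family, and thread/signal fences, exercised both with the default
    // ordering and with every explicit memory_order, on plain and volatile atomics.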
    class cxx11_atomic_func: public CppUnitMini::TestCase
    {
        template <typename AtomicFlag>
        void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
        {
            atomics::memory_order mo_clear = convert_to_store_order(order);

            f.clear( convert_to_store_order(order) );

            for ( int i = 0; i < 5; ++i ) {
                CPPUNIT_ASSERT( !atomics::atomic_flag_test_and_set_explicit( &f, order ));
                CPPUNIT_ASSERT( atomics::atomic_flag_test_and_set_explicit( &f, order ) );
                atomics::atomic_flag_clear_explicit( &f, mo_clear );
                atomics::atomic_flag_clear_explicit( &f, mo_clear );
            }
            //CPPUNIT_ASSERT( f.m_Flag == 0 );
        }

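        // Default-order atomic_flag test: test_and_set must return the previous state
        // (false right after clear, true once set); afterwards the explicit-order
        // variant above is repeated for every memory_order.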
        template <typename AtomicFlag>
        void do_test_atomic_flag( AtomicFlag& f )
        {
            f.clear();

            for ( int i = 0; i < 5; ++i ) {
                //CPPUNIT_ASSERT( f.m_Flag == 0 );
                CPPUNIT_ASSERT( !atomics::atomic_flag_test_and_set( &f ));
                //CPPUNIT_ASSERT( f.m_Flag != 0 );
                CPPUNIT_ASSERT( atomics::atomic_flag_test_and_set( &f ) );
                //CPPUNIT_ASSERT( f.m_Flag != 0 );
                atomics::atomic_flag_clear(&f);
                //CPPUNIT_ASSERT( f.m_Flag == 0 );
                atomics::atomic_flag_clear(&f);
            }
            //CPPUNIT_ASSERT( f.m_Flag == 0 );

            do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
            do_test_atomic_flag_mo( f, atomics::memory_order_consume );
            do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
            do_test_atomic_flag_mo( f, atomics::memory_order_release );
            do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
            do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
        }

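        // Core value semantics of an integral atomic via the free functions:
        // store/load round-trips, exchange returns the previous value, and
        // compare_exchange_weak/strong each succeed exactly once per step while the
        // test walks a pattern of 42 through every byte of the type.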
        template <class Atomic, typename Integral>
        void do_test_atomic_type(Atomic& a )
        {
            typedef Integral    integral_type;

            CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
            atomics::atomic_store( &a, (integral_type) 0 );
            CPPUNIT_ASSERT( a == 0 );
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );

            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = integral_type(42) << (nByte * 8);
                CPPUNIT_ASSERT( atomics::atomic_exchange( &a, n ) == 0 );
                CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );
                CPPUNIT_ASSERT( atomics::atomic_exchange( &a, (integral_type) 0 ) == n );
                CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );
            }

            integral_type prev = atomics::atomic_load( &a );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = integral_type(42) << (nByte * 8);
                integral_type expected = prev;

                CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &expected, n));
                CPPUNIT_ASSERT( expected  == prev );
                CPPUNIT_ASSERT( expected  != n );
                CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &expected, n) );
                CPPUNIT_ASSERT( expected  == n );

                prev = n;
                CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );
            }

            atomics::atomic_store( &a, (integral_type) 0 );

            prev = atomics::atomic_load( &a );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = integral_type(42) << (nByte * 8);
                integral_type expected = prev;

                CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &expected, n));
                CPPUNIT_ASSERT( expected  == prev );
                CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &expected, n));
                CPPUNIT_ASSERT( expected  == n );

                prev = n;
                CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );
            }

            CPPUNIT_ASSERT( atomics::atomic_exchange( &a, (integral_type) 0 ) == prev );
        }

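        // Extends do_test_atomic_type with the fetch_xxx family: fetch_add/fetch_sub
        // build up and then remove the per-byte pattern, and fetch_or/fetch_and/fetch_xor
        // toggle every bit, ending with the value equal to (integral_type) -1.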
        template <class Atomic, typename Integral>
        void do_test_atomic_integral( Atomic& a )
        {
            do_test_atomic_type< Atomic, Integral >( a );

            typedef Integral    integral_type;

            // fetch_xxx testing
            atomics::atomic_store( &a, (integral_type) 0 );

            // fetch_add
            for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
            {
                integral_type prev = atomics::atomic_load( &a );
                integral_type n = integral_type(42) << (nByte * 8);

                CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, n) == prev);
            }

            // fetch_sub
            for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
            {
                integral_type prev = atomics::atomic_load( &a );
                integral_type n = integral_type(42) << ((nByte - 1) * 8);

                CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, n) == prev);
            }
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );

            // fetch_or / fetch_and / fetch_xor
            for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
            {
                integral_type prev = atomics::atomic_load( &a );
                integral_type mask = integral_type(1) << nBit;   // integral_type(1): keep the shift well-defined for 64-bit types

                CPPUNIT_ASSERT( atomics::atomic_fetch_or( &a, mask ) == prev );
                prev = atomics::atomic_load( &a );
                CPPUNIT_ASSERT( ( prev & mask)  == mask);

                CPPUNIT_ASSERT( atomics::atomic_fetch_and( &a, (integral_type) ~mask ) == prev );
                prev = atomics::atomic_load( &a );
                CPPUNIT_ASSERT_EX( integral_type(prev & mask) == integral_type(0), "prev=" << std::hex << prev << ", mask=" << std::hex << mask);

                CPPUNIT_ASSERT( atomics::atomic_fetch_xor( &a, mask ) == prev );
                prev = atomics::atomic_load( &a );
                CPPUNIT_ASSERT( ( prev & mask)  == mask);
            }
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == (integral_type) -1 );
        }

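        // Same checks as do_test_atomic_type( Atomic& ) but through the *_explicit
        // functions, using load/store orders derived from the caller-supplied order.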
        template <class Atomic, typename Integral>
        void do_test_atomic_type( Atomic& a, atomics::memory_order order )
        {
            typedef Integral    integral_type;

            const atomics::memory_order oLoad = convert_to_load_order( order );
            const atomics::memory_order oStore = convert_to_store_order( order );

            CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
            atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
            CPPUNIT_ASSERT( a == 0 );
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );

            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = integral_type(42) << (nByte * 8);
                CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, n, order ) == 0 );
                CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );
                CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ) == n );
                CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );
            }

            integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = integral_type(42) << (nByte * 8);
                integral_type expected = prev;

                CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
                CPPUNIT_ASSERT( expected  == prev );
                CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
                CPPUNIT_ASSERT( expected  == n );

                prev = n;
                CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );
            }

            atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );

            prev = atomics::atomic_load_explicit( &a, oLoad );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = integral_type(42) << (nByte * 8);
                integral_type expected = prev;

                CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
                CPPUNIT_ASSERT( expected  == prev );
                CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
                CPPUNIT_ASSERT( expected  == n );

                prev = n;
                CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );
            }

            CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ) == prev );
        }

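        // Explicit-order counterpart of do_test_atomic_integral( Atomic& ): the
        // arithmetic and bitwise fetch operations via the *_explicit functions.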
        template <class Atomic, typename Integral>
        void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
        {
            do_test_atomic_type< Atomic, Integral >( a, order );
            typedef Integral    integral_type;

            const atomics::memory_order oLoad = convert_to_load_order( order );
            const atomics::memory_order oStore = convert_to_store_order( order );

            // fetch_xxx testing
            atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );

            // fetch_add
            for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
            {
                integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
                integral_type n = integral_type(42) << (nByte * 8);

                CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, n, order) == prev);
            }

            // fetch_sub
            for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
            {
                integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
                integral_type n = integral_type(42) << ((nByte - 1) * 8);

                CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, n, order ) == prev);
            }
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );

            // fetch_or / fetch_and / fetch_xor
            for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
            {
                integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
                integral_type mask = integral_type(1) << nBit;   // integral_type(1): keep the shift well-defined for 64-bit types

                CPPUNIT_ASSERT( atomics::atomic_fetch_or_explicit( &a, mask, order ) == prev );
                prev = atomics::atomic_load_explicit( &a, oLoad );
                CPPUNIT_ASSERT( ( prev & mask)  == mask);

                CPPUNIT_ASSERT( atomics::atomic_fetch_and_explicit( &a, (integral_type) ~mask, order ) == prev );
                prev = atomics::atomic_load_explicit( &a, oLoad );
                CPPUNIT_ASSERT( ( prev & mask)  == 0);

                CPPUNIT_ASSERT( atomics::atomic_fetch_xor_explicit( &a, mask, order ) == prev );
                prev = atomics::atomic_load_explicit( &a, oLoad );
                CPPUNIT_ASSERT( ( prev & mask)  == mask);
            }
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == (integral_type) -1 );
        }

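        // Driver: runs the default-order integral test, then the explicit-order test
        // for every memory_order on the same atomic object.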
        template <typename Atomic, typename Integral>
        void test_atomic_integral_(Atomic& a)
        {
            do_test_atomic_integral<Atomic, Integral >(a);

            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_consume );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
        }

        template <typename Integral>
        void test_atomic_integral()
        {
            typedef atomics::atomic<Integral>    atomic_type;
            atomic_type a[8];
            for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
                test_atomic_integral_<atomic_type, Integral>( a[i] );
            }
        }
        template <typename Integral>
        void test_atomic_integral_volatile()
        {
            typedef atomics::atomic<Integral> volatile atomic_type;
            atomic_type a[8];
            for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
                test_atomic_integral_<atomic_type, Integral>( a[i] );
            }
        }

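        // atomic<bool>: store/load, exchange and both CAS forms with default ordering;
        // the overload below repeats the sequence with explicit memory orders.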
        template <class AtomicBool>
        void do_test_atomic_bool(AtomicBool& a)
        {
            CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
            atomics::atomic_store( &a, false );
            CPPUNIT_ASSERT( a == false );
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == false );

            CPPUNIT_ASSERT( atomics::atomic_exchange( &a, true ) == false );
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );
            CPPUNIT_ASSERT( atomics::atomic_exchange( &a, false ) == true );
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == false );

            bool expected = false;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &expected, true));
            CPPUNIT_ASSERT( expected  == false );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &expected, false));
            CPPUNIT_ASSERT( expected  == true );
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );

            atomics::atomic_store( &a, false );

            expected = false;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &expected, true));
            CPPUNIT_ASSERT( expected  == false );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &expected, false));
            CPPUNIT_ASSERT( expected  == true );

            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );

            CPPUNIT_ASSERT( atomics::atomic_exchange( &a, false ) == true );
        }

        template <class AtomicBool>
        void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
        {
            const atomics::memory_order oLoad = convert_to_load_order( order );
            const atomics::memory_order oStore = convert_to_store_order( order );
            const atomics::memory_order oExchange = convert_to_exchange_order( order );

            CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
            atomics::atomic_store_explicit( &a, false, oStore );
            CPPUNIT_ASSERT( a == false );
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == false );

            CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, true, oExchange ) == false );
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );
            CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, false, oExchange ) == true );
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == false );

            bool expected = false;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
            CPPUNIT_ASSERT( expected  == false );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
            CPPUNIT_ASSERT( expected  == true );
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );

            atomics::atomic_store( &a, false );

            expected = false;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
            CPPUNIT_ASSERT( expected  == false );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
            CPPUNIT_ASSERT( expected  == true );

            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );

            CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, false, oExchange ) == true );
        }

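        // Pointer arithmetic on atomic<Integral *> with explicit orders: CAS moves the
        // pointer within arr[], fetch_add/fetch_sub step it one element at a time, and
        // the pointed-to values (arr[i] == i + 1) confirm every step.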
        template <typename Atomic, typename Integral>
        void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
        {
            typedef Integral integral_type;
            atomics::memory_order oLoad = convert_to_load_order(order);
            atomics::memory_order oStore = convert_to_store_order(order);
            integral_type *  p;

            atomics::atomic_store_explicit( &a, arr, oStore );
            CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == 1 );

            p = arr;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 0 );
            CPPUNIT_ASSERT( *p == 1 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 5 );
            CPPUNIT_ASSERT( *p == 6 );

            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 5 );
            CPPUNIT_ASSERT( *p == 6 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 3 );
            CPPUNIT_ASSERT( *p == 4 );

            CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, arr, order ) == arr + 3 );
            CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == arr );
            CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == 1 );

            for ( integral_type i = 1; i < aSize; ++i ) {
                integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
                CPPUNIT_ASSERT( *p == i );
                CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, 1, order ) == p );
                CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == i + 1 );
            }

            for ( integral_type i = aSize; i > 1; --i ) {
                integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
                CPPUNIT_ASSERT( *p == i  );
                CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, 1, order ) == p );
                CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == i - 1 );
            }
        }

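        // Default-order pointer test on atomic<Integral *> (volatile-qualified when
        // Volatile is true), followed by the explicit-order variant for every memory_order.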
        template <typename Integral, bool Volatile>
        void test_atomic_pointer_for()
        {
            typedef Integral integral_type;
            typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type    atomic_pointer;

            integral_type   arr[8];
            const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
            for ( integral_type i = 0; i < aSize; ++i ) {
                arr[size_t(i)] = i + 1;
            }

            atomic_pointer  a;
            integral_type *  p;

            atomics::atomic_store( &a, arr );
            CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == 1 );

            p = arr;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &p, arr + 5 ));
            CPPUNIT_ASSERT( p == arr + 0 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &p, arr + 3 ));
            CPPUNIT_ASSERT( p == arr + 5 );

            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &p, arr + 3 ));
            CPPUNIT_ASSERT( p == arr + 5 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &p, arr + 5 ));
            CPPUNIT_ASSERT( p == arr + 3 );

            CPPUNIT_ASSERT( atomics::atomic_exchange( &a, arr ) == arr + 3 );
            CPPUNIT_ASSERT( atomics::atomic_load( &a ) == arr );
            CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == 1 );

            for ( integral_type i = 1; i < aSize; ++i ) {
                integral_type * p = atomics::atomic_load( &a );
                CPPUNIT_ASSERT( *p == i );
                CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, 1 ) == p );
                CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == i + 1 );
            }

            for ( integral_type i = aSize; i > 1; --i ) {
                integral_type * p = atomics::atomic_load( &a );
                CPPUNIT_ASSERT( *p == i  );
                CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, 1 ) == p );
                CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == i - 1 );
            }

            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_consume );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
        }

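        // atomic<void *> with explicit orders: the same CAS / exchange / fetch_add /
        // fetch_sub walk over a char array, casting through void * as the
        // free-function interface requires.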
        template <typename Atomic>
        void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
        {
            atomics::memory_order oLoad = convert_to_load_order(order);
            atomics::memory_order oStore = convert_to_store_order(order);
            char *  p;

            atomics::atomic_store_explicit( &a, (void *) arr, oStore );
            CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == 1 );

            p = arr;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 0 );
            CPPUNIT_ASSERT( *p == 1 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 5 );
            CPPUNIT_ASSERT( *p == 6 );

            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 5 );
            CPPUNIT_ASSERT( *p == 6 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
            CPPUNIT_ASSERT( p == arr + 3 );
            CPPUNIT_ASSERT( *p == 4 );

            CPPUNIT_ASSERT( reinterpret_cast<char *>(atomics::atomic_exchange_explicit( &a, (void *) arr, order )) == arr + 3 );
            CPPUNIT_ASSERT( reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == arr );
            CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == 1 );

            for ( char i = 1; i < aSize; ++i ) {
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i );
                CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, 1, order ));
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i + 1 );
            }

            for ( char i = aSize; i > 1; --i ) {
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i  );
                CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, 1, order ));
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i - 1 );
            }
        }

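        // Default-order atomic<void *> test (volatile-qualified when Volatile is true),
        // followed by the explicit-order variant for every memory_order.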
        template <bool Volatile>
        void do_test_atomic_pointer_void()
        {
            typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type    atomic_pointer;

            char   arr[8];
            const char aSize = sizeof(arr)/sizeof(arr[0]);
            for ( char i = 0; i < aSize; ++i ) {
                arr[unsigned(i)] = i + 1;
            }

            atomic_pointer  a;
            char *  p;

            atomics::atomic_store( &a, (void *) arr );
            CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == 1 );

            p = arr;
            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 5) ));
            CPPUNIT_ASSERT( p == arr + 0 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 3) ));
            CPPUNIT_ASSERT( p == arr + 5 );

            CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 3) ));
            CPPUNIT_ASSERT( p == arr + 5 );
            CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 5) ));
            CPPUNIT_ASSERT( p == arr + 3 );

            CPPUNIT_ASSERT( reinterpret_cast<char *>( atomics::atomic_exchange( &a, (void *) arr )) == arr + 3 );
            CPPUNIT_ASSERT( reinterpret_cast<char *>( atomics::atomic_load( &a )) == arr );
            CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == 1 );

            for ( char i = 1; i < aSize; ++i ) {
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i );
                CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, 1 ));
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i + 1 );
            }

            for ( char i = aSize; i > 1; --i ) {
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i  );
                CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, 1 ));
                CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i - 1 );
            }

            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_consume );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
        }

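    // Test entry points registered in the CppUnit suite below; each one instantiates
    // the generic helpers for a concrete (and, where noted, volatile) atomic type.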
    public:
        void test_atomic_flag()
        {
            atomics::atomic_flag flags[8];
            for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
                do_test_atomic_flag( flags[i] );
        }
        void test_atomic_flag_volatile()
        {
            atomics::atomic_flag volatile flags[8];
            for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
                do_test_atomic_flag( flags[i] );
        }

        template <typename AtomicBool>
        void test_atomic_bool_()
        {
            AtomicBool a[8];
            for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
                do_test_atomic_bool( a[i] );

                do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
                do_test_atomic_bool( a[i], atomics::memory_order_consume );
                do_test_atomic_bool( a[i], atomics::memory_order_acquire );
                do_test_atomic_bool( a[i], atomics::memory_order_release );
                do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
                do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
            }
        }

        void test_atomic_bool()
        {
            test_atomic_bool_<atomics::atomic<bool> >();
        }
        void test_atomic_bool_volatile()
        {
            test_atomic_bool_<atomics::atomic<bool> volatile >();
        }

        void test_atomic_char()                 { test_atomic_integral<char>(); }
        void test_atomic_char_volatile()        { test_atomic_integral_volatile<char>(); }
        void test_atomic_signed_char()          { test_atomic_integral<signed char>(); }
        void test_atomic_signed_char_volatile() { test_atomic_integral_volatile<signed char>(); }
        void test_atomic_unsigned_char()        { test_atomic_integral<unsigned char>(); }
        void test_atomic_unsigned_char_volatile(){ test_atomic_integral_volatile<unsigned char>(); }
        void test_atomic_short_int()            { test_atomic_integral<short int>(); }
        void test_atomic_short_int_volatile()   { test_atomic_integral_volatile<short int>(); }
        void test_atomic_unsigned_short_int()   { test_atomic_integral<unsigned short int>(); }
        void test_atomic_unsigned_short_int_volatile() { test_atomic_integral_volatile<unsigned short int>(); }
        void test_atomic_int()                  { test_atomic_integral<int>(); }
        void test_atomic_int_volatile()         { test_atomic_integral_volatile<int>(); }
        void test_atomic_unsigned_int()         { test_atomic_integral<unsigned int>(); }
        void test_atomic_unsigned_int_volatile(){ test_atomic_integral_volatile<unsigned int>(); }
        void test_atomic_long()                 { test_atomic_integral<long>(); }
        void test_atomic_long_volatile()        { test_atomic_integral_volatile<long>(); }
        void test_atomic_unsigned_long()        { test_atomic_integral<unsigned long>(); }
        void test_atomic_unsigned_long_volatile() { test_atomic_integral_volatile<unsigned long>(); }
        void test_atomic_long_long()            { test_atomic_integral<long long>(); }
        void test_atomic_long_long_volatile()   { test_atomic_integral_volatile<long long>(); }
        void test_atomic_unsigned_long_long()   { test_atomic_integral<unsigned long long>(); }
        void test_atomic_unsigned_long_long_volatile() { test_atomic_integral_volatile<unsigned long long>(); }

        void test_atomic_pointer_void()         { do_test_atomic_pointer_void<false>() ;}
        void test_atomic_pointer_void_volatile(){ do_test_atomic_pointer_void<true>() ;}

        void test_atomic_pointer_char()         { test_atomic_pointer_for<char, false>() ;}
        void test_atomic_pointer_short()        { test_atomic_pointer_for<short int, false>() ;}
        void test_atomic_pointer_int()          { test_atomic_pointer_for<int, false>() ;}
        void test_atomic_pointer_long()         { test_atomic_pointer_for<long, false>() ;}
        void test_atomic_pointer_long_long()    { test_atomic_pointer_for<long long, false>() ;}

        void test_atomic_pointer_char_volatile()        { test_atomic_pointer_for<char, true>() ;}
        void test_atomic_pointer_short_volatile()       { test_atomic_pointer_for<unsigned short int, true>() ;}
        void test_atomic_pointer_int_volatile()         { test_atomic_pointer_for<int, true>() ;}
        void test_atomic_pointer_long_volatile()        { test_atomic_pointer_for<long, true>() ;}
        void test_atomic_pointer_long_long_volatile()   { test_atomic_pointer_for<long long, true>() ;}

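        // Fences are only checked for callability with every memory_order; there is
        // no single-threaded observable effect to assert on.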
        void test_atomic_fence()
        {
            atomics::atomic_thread_fence(atomics::memory_order_relaxed );
            atomics::atomic_thread_fence(atomics::memory_order_consume );
            atomics::atomic_thread_fence(atomics::memory_order_acquire );
            atomics::atomic_thread_fence(atomics::memory_order_release );
            atomics::atomic_thread_fence(atomics::memory_order_acq_rel );
            atomics::atomic_thread_fence(atomics::memory_order_seq_cst );

            atomics::atomic_signal_fence(atomics::memory_order_relaxed );
            atomics::atomic_signal_fence(atomics::memory_order_consume );
            atomics::atomic_signal_fence(atomics::memory_order_acquire );
            atomics::atomic_signal_fence(atomics::memory_order_release );
            atomics::atomic_signal_fence(atomics::memory_order_acq_rel );
            atomics::atomic_signal_fence(atomics::memory_order_seq_cst );
        }

    public:
        CPPUNIT_TEST_SUITE(cxx11_atomic_func)
            CPPUNIT_TEST( test_atomic_flag )
            CPPUNIT_TEST( test_atomic_flag_volatile )

            CPPUNIT_TEST( test_atomic_bool )
            CPPUNIT_TEST( test_atomic_char )
            CPPUNIT_TEST( test_atomic_signed_char)
            CPPUNIT_TEST( test_atomic_unsigned_char)
            CPPUNIT_TEST( test_atomic_short_int)
            CPPUNIT_TEST( test_atomic_unsigned_short_int)
            CPPUNIT_TEST( test_atomic_int)
            CPPUNIT_TEST( test_atomic_unsigned_int)
            CPPUNIT_TEST( test_atomic_long)
            CPPUNIT_TEST( test_atomic_unsigned_long)
            CPPUNIT_TEST( test_atomic_long_long)
            CPPUNIT_TEST( test_atomic_unsigned_long_long)

            CPPUNIT_TEST( test_atomic_bool_volatile )
            CPPUNIT_TEST( test_atomic_char_volatile )
            CPPUNIT_TEST( test_atomic_signed_char_volatile)
            CPPUNIT_TEST( test_atomic_unsigned_char_volatile)
            CPPUNIT_TEST( test_atomic_short_int_volatile)
            CPPUNIT_TEST( test_atomic_unsigned_short_int_volatile)
            CPPUNIT_TEST( test_atomic_int_volatile)
            CPPUNIT_TEST( test_atomic_unsigned_int_volatile)
            CPPUNIT_TEST( test_atomic_long_volatile)
            CPPUNIT_TEST( test_atomic_unsigned_long_volatile)
            CPPUNIT_TEST( test_atomic_long_long_volatile)
            CPPUNIT_TEST( test_atomic_unsigned_long_long_volatile)

            CPPUNIT_TEST( test_atomic_pointer_void)
            CPPUNIT_TEST( test_atomic_pointer_void_volatile)

            CPPUNIT_TEST( test_atomic_pointer_char)
            CPPUNIT_TEST( test_atomic_pointer_short)
            CPPUNIT_TEST( test_atomic_pointer_int)
            CPPUNIT_TEST( test_atomic_pointer_long)
            CPPUNIT_TEST( test_atomic_pointer_long_long)

            CPPUNIT_TEST( test_atomic_pointer_char_volatile)
            CPPUNIT_TEST( test_atomic_pointer_short_volatile)
            CPPUNIT_TEST( test_atomic_pointer_int_volatile)
            CPPUNIT_TEST( test_atomic_pointer_long_volatile)
            CPPUNIT_TEST( test_atomic_pointer_long_long_volatile)

            CPPUNIT_TEST( test_atomic_fence)

        CPPUNIT_TEST_SUITE_END()
    };
}   // namespace misc

CPPUNIT_TEST_SUITE_REGISTRATION(misc::cxx11_atomic_func);

#endif // #ifndef CDS_USE_BOOST_ATOMIC