// test/unit/misc/cxx11_atomic_func.cpp
/*
    This file is a part of libcds - Concurrent Data Structures library

    (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2017

    Source code repo: http://github.com/khizmax/libcds/
    Download: http://sourceforge.net/projects/libcds/files/

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are met:

    * Redistributions of source code must retain the above copyright notice, this
      list of conditions and the following disclaimer.

    * Redistributions in binary form must reproduce the above copyright notice,
      this list of conditions and the following disclaimer in the documentation
      and/or other materials provided with the distribution.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
    AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
    IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
    FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
    DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
    SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
    CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
    OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include <cds_test/ext_gtest.h>
#include <cds/algo/atomic.h>

#ifndef CDS_USE_BOOST_ATOMIC
// Skip this test when Boost.atomic is used as the atomics backend:
// Boost.atomic does not provide free-function versions of the atomic operations.

#include "cxx11_convert_memory_order.h"

#define EXPECT_ATOMIC_IS_LOCK_FREE( x ) EXPECT_TRUE( atomics::atomic_is_lock_free( &x ))

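// The fixture below exercises the free-function form of the C++11 atomic API
// (atomics::atomic_load/store/exchange, compare_exchange_weak/strong, fetch_*
// and their *_explicit variants) for atomic_flag, bool, every integral type and
// pointers, both with and without the volatile qualifier, across all memory orders.
//
// convert_to_load_order()/convert_to_store_order()/convert_to_exchange_order()
// come from cxx11_convert_memory_order.h; they are assumed to map an arbitrary
// memory order to one that is valid for the given operation (for example,
// memory_order_release is not a valid order for a plain load).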
namespace misc {

    class cxx11_atomic_func: public ::testing::Test
    {
    protected:
        template <typename AtomicFlag>
        void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
        {
            atomics::memory_order mo_clear = convert_to_store_order( order );

            f.clear( mo_clear );

            for ( int i = 0; i < 5; ++i ) {
                EXPECT_FALSE( atomics::atomic_flag_test_and_set_explicit( &f, order ));
                EXPECT_TRUE( atomics::atomic_flag_test_and_set_explicit( &f, order ));
                atomics::atomic_flag_clear_explicit( &f, mo_clear );
                atomics::atomic_flag_clear_explicit( &f, mo_clear );
            }
        }

        template <typename AtomicFlag>
        void do_test_atomic_flag( AtomicFlag& f )
        {
            f.clear();

            for ( int i = 0; i < 5; ++i ) {
                EXPECT_FALSE( atomics::atomic_flag_test_and_set( &f ));
                EXPECT_TRUE( atomics::atomic_flag_test_and_set( &f ));
                atomics::atomic_flag_clear( &f );
                atomics::atomic_flag_clear( &f );
            }

            do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
            do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
            do_test_atomic_flag_mo( f, atomics::memory_order_release );
            do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
            do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
        }

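        // Order-free checks on an atomic integral: lock-free query, store/load,
        // exchange and weak/strong compare_exchange, writing 42 shifted into each
        // byte of the value in turn. Note that the standard allows a single
        // compare_exchange_weak call to fail spuriously; the EXPECT_TRUE below
        // assumes the platform does not do so when 'expected' matches.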
        template <class Atomic, typename Integral>
        void do_test_atomic_type( Atomic& a )
        {
            typedef Integral    integral_type;

            EXPECT_ATOMIC_IS_LOCK_FREE( a );
            atomics::atomic_store( &a, (integral_type) 0 );
            EXPECT_EQ( atomics::atomic_load( &a ), integral_type( 0 ));

            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
                EXPECT_EQ( atomics::atomic_exchange( &a, n ), (integral_type) 0 );
                EXPECT_EQ( atomics::atomic_load( &a ), n );
                EXPECT_EQ( atomics::atomic_exchange( &a, (integral_type) 0 ), n );
                EXPECT_EQ( atomics::atomic_load( &a ), (integral_type) 0 );
            }

            integral_type prev = atomics::atomic_load( &a );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
                integral_type expected = prev;

                EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, &expected, n ));
                EXPECT_EQ( expected, prev );
                EXPECT_NE( expected, n );
                EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, &expected, n ));
                EXPECT_EQ( expected, n );

                prev = n;
                EXPECT_EQ( atomics::atomic_load( &a ), n );
            }

            atomics::atomic_store( &a, (integral_type) 0 );

            prev = atomics::atomic_load( &a );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
                integral_type expected = prev;

                EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, &expected, n ));
                EXPECT_EQ( expected, prev );
                EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, &expected, n ));
                EXPECT_EQ( expected, n );

                prev = n;
                EXPECT_EQ( atomics::atomic_load( &a ), n );
            }

            EXPECT_EQ( atomics::atomic_exchange( &a, (integral_type) 0 ), prev );
        }

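        // Extends do_test_atomic_type() with the RMW operations: fetch_add/fetch_sub
        // walk the per-byte 42 pattern up and back down to zero, then
        // fetch_or/fetch_and/fetch_xor toggle every bit, leaving all bits set (-1).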
        template <class Atomic, typename Integral>
        void do_test_atomic_integral( Atomic& a )
        {
            do_test_atomic_type< Atomic, Integral >( a );

            typedef Integral    integral_type;

            // fetch_xxx testing
            atomics::atomic_store( &a, (integral_type) 0 );

            // fetch_add
            for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
            {
                integral_type prev = atomics::atomic_load( &a );
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));

                EXPECT_EQ( atomics::atomic_fetch_add( &a, n ), prev );
            }

            // fetch_sub
            for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
            {
                integral_type prev = atomics::atomic_load( &a );
                integral_type n = static_cast<integral_type>( integral_type(42) << ((nByte - 1) * 8));

                EXPECT_EQ( atomics::atomic_fetch_sub( &a, n ), prev );
            }
            EXPECT_EQ( atomics::atomic_load( &a ), (integral_type) 0 );

            // fetch_or / fetch_xor / fetch_and
            for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
            {
                integral_type prev = atomics::atomic_load( &a );
                integral_type mask = static_cast<integral_type>( integral_type(1) << nBit );

                EXPECT_EQ( atomics::atomic_fetch_or( &a, mask ), prev );
                prev = atomics::atomic_load( &a );
                EXPECT_EQ( ( prev & mask ), mask );

                EXPECT_EQ( atomics::atomic_fetch_and( &a, (integral_type) ~mask ), prev );
                prev = atomics::atomic_load( &a );
                EXPECT_EQ( integral_type( prev & mask ), integral_type( 0 ));

                EXPECT_EQ( atomics::atomic_fetch_xor( &a, mask ), prev );
                prev = atomics::atomic_load( &a );
                EXPECT_EQ( ( prev & mask ), mask );
            }
            EXPECT_EQ( atomics::atomic_load( &a ), (integral_type) -1 );
        }

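        // Same checks as the order-free overload above, routed through the
        // *_explicit functions: the caller-supplied order is used as the success
        // order and memory_order_relaxed as the CAS failure order.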
        template <class Atomic, typename Integral>
        void do_test_atomic_type( Atomic& a, atomics::memory_order order )
        {
            typedef Integral    integral_type;

            const atomics::memory_order oLoad = convert_to_load_order( order );
            const atomics::memory_order oStore = convert_to_store_order( order );

            EXPECT_ATOMIC_IS_LOCK_FREE( a );
            atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
            EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), (integral_type) 0 );

            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
                EXPECT_EQ( atomics::atomic_exchange_explicit( &a, n, order ), (integral_type) 0 );
                EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), n );
                EXPECT_EQ( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ), n );
                EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), (integral_type) 0 );
            }

            integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
                integral_type expected = prev;

                EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed ));
                EXPECT_EQ( expected, prev );
                EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed ));
                EXPECT_EQ( expected, n );

                prev = n;
                EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), n );
            }

            atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );

            prev = atomics::atomic_load_explicit( &a, oLoad );
            for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
                integral_type expected = prev;

                EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed ));
                EXPECT_EQ( expected, prev );
                EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed ));
                EXPECT_EQ( expected, n );

                prev = n;
                EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), n );
            }

            EXPECT_EQ( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ), prev );
        }

        template <class Atomic, typename Integral>
        void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
        {
            do_test_atomic_type< Atomic, Integral >( a, order );

            typedef Integral    integral_type;

            const atomics::memory_order oLoad = convert_to_load_order( order );
            const atomics::memory_order oStore = convert_to_store_order( order );

            // fetch_xxx testing
            atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );

            // fetch_add
            for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
            {
                integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
                integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));

                EXPECT_EQ( atomics::atomic_fetch_add_explicit( &a, n, order ), prev );
            }

            // fetch_sub
            for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
            {
                integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
                integral_type n = static_cast<integral_type>( integral_type(42) << ((nByte - 1) * 8));

                EXPECT_EQ( atomics::atomic_fetch_sub_explicit( &a, n, order ), prev );
            }
            EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), integral_type( 0 ));

            // fetch_or / fetch_xor / fetch_and
            for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
            {
                integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
                integral_type mask = static_cast<integral_type>( integral_type(1) << nBit );

                EXPECT_EQ( atomics::atomic_fetch_or_explicit( &a, mask, order ), prev );
                prev = atomics::atomic_load_explicit( &a, oLoad );
                EXPECT_EQ( ( prev & mask ), mask );

                EXPECT_EQ( atomics::atomic_fetch_and_explicit( &a, (integral_type) ~mask, order ), prev );
                prev = atomics::atomic_load_explicit( &a, oLoad );
                EXPECT_EQ( ( prev & mask ), integral_type( 0 ));

                EXPECT_EQ( atomics::atomic_fetch_xor_explicit( &a, mask, order ), prev );
                prev = atomics::atomic_load_explicit( &a, oLoad );
                EXPECT_EQ( ( prev & mask ), mask );
            }
            EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), (integral_type) -1 );
        }

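        // Drivers: run the order-free tests first, then repeat them for every
        // memory order, over a small array of atomics (plain and volatile).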
        template <typename Atomic, typename Integral>
        void test_atomic_integral_( Atomic& a )
        {
            do_test_atomic_integral<Atomic, Integral >( a );

            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
            do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
        }

        template <typename Integral>
        void test_atomic_integral()
        {
            typedef atomics::atomic<Integral>    atomic_type;
            atomic_type a[8];
            for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
                test_atomic_integral_<atomic_type, Integral>( a[i] );
            }
        }

        template <typename Integral>
        void test_atomic_integral_volatile()
        {
            typedef atomics::atomic<Integral> volatile atomic_type;
            atomic_type a[8];
            for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
                test_atomic_integral_<atomic_type, Integral>( a[i] );
            }
        }

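        // Boolean specialization: store/load, exchange and weak/strong CAS on
        // atomics::atomic<bool>, first without and then with explicit memory orders.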
        template <class AtomicBool>
        void do_test_atomic_bool( AtomicBool& a )
        {
            EXPECT_ATOMIC_IS_LOCK_FREE( a );
            atomics::atomic_store( &a, false );
            EXPECT_FALSE( a );
            EXPECT_FALSE( atomics::atomic_load( &a ));

            EXPECT_FALSE( atomics::atomic_exchange( &a, true ));
            EXPECT_TRUE( atomics::atomic_load( &a ));
            EXPECT_TRUE( atomics::atomic_exchange( &a, false ));
            EXPECT_FALSE( atomics::atomic_load( &a ));

            bool expected = false;
            EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, &expected, true ));
            EXPECT_FALSE( expected );
            EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, &expected, false ));
            EXPECT_TRUE( expected );
            EXPECT_TRUE( atomics::atomic_load( &a ));

            atomics::atomic_store( &a, false );

            expected = false;
            EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, &expected, true ));
            EXPECT_FALSE( expected );
            EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, &expected, false ));
            EXPECT_TRUE( expected );

            EXPECT_TRUE( atomics::atomic_load( &a ));

            EXPECT_TRUE( atomics::atomic_exchange( &a, false ));
        }

        template <class AtomicBool>
        void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
        {
            const atomics::memory_order oLoad = convert_to_load_order( order );
            const atomics::memory_order oStore = convert_to_store_order( order );
            const atomics::memory_order oExchange = convert_to_exchange_order( order );

            EXPECT_ATOMIC_IS_LOCK_FREE( a );
            atomics::atomic_store_explicit( &a, false, oStore );
            EXPECT_FALSE( a );
            EXPECT_FALSE( atomics::atomic_load_explicit( &a, oLoad ));

            EXPECT_FALSE( atomics::atomic_exchange_explicit( &a, true, oExchange ));
            EXPECT_TRUE( atomics::atomic_load_explicit( &a, oLoad ));
            EXPECT_TRUE( atomics::atomic_exchange_explicit( &a, false, oExchange ));
            EXPECT_FALSE( atomics::atomic_load_explicit( &a, oLoad ));

            bool expected = false;
            EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, true, order, atomics::memory_order_relaxed ));
            EXPECT_FALSE( expected );
            EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, false, order, atomics::memory_order_relaxed ));
            EXPECT_TRUE( expected );
            EXPECT_TRUE( atomics::atomic_load_explicit( &a, oLoad ));

            atomics::atomic_store_explicit( &a, false, oStore );

            expected = false;
            EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, true, order, atomics::memory_order_relaxed ));
            EXPECT_FALSE( expected );
            EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, false, order, atomics::memory_order_relaxed ));
            EXPECT_TRUE( expected );

            EXPECT_TRUE( atomics::atomic_load_explicit( &a, oLoad ));

            EXPECT_TRUE( atomics::atomic_exchange_explicit( &a, false, oExchange ));
        }

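        // Pointer tests: for atomics::atomic<T*>, atomic_fetch_add( &a, 1 ) advances
        // the stored pointer by one element (sizeof(T) bytes), so the loops below
        // step through the array element by element and back with atomic_fetch_sub.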
        template <typename Atomic, typename Integral>
        void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
        {
            typedef Integral integral_type;
            atomics::memory_order oLoad = convert_to_load_order( order );
            atomics::memory_order oStore = convert_to_store_order( order );
            integral_type *  p;

            atomics::atomic_store_explicit( &a, arr, oStore );
            EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), 1 );

            p = arr;
            EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 0 );
            EXPECT_EQ( *p, 1 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 5 );
            EXPECT_EQ( *p, 6 );

            EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 5 );
            EXPECT_EQ( *p, 6 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 3 );
            EXPECT_EQ( *p, 4 );

            EXPECT_EQ( atomics::atomic_exchange_explicit( &a, arr, order ), arr + 3 );
            EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), arr );
            EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), 1 );

            for ( integral_type i = 1; i < aSize; ++i ) {
                p = atomics::atomic_load_explicit( &a, oLoad );
                EXPECT_EQ( *p, i );
                EXPECT_EQ( atomics::atomic_fetch_add_explicit( &a, 1, order ), p );
                EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), i + 1 );
            }

            for ( integral_type i = aSize; i > 1; --i ) {
                p = atomics::atomic_load_explicit( &a, oLoad );
                EXPECT_EQ( *p, i );
                EXPECT_EQ( atomics::atomic_fetch_sub_explicit( &a, 1, order ), p );
                EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), i - 1 );
            }
        }

        template <typename Integral, bool Volatile>
        void test_atomic_pointer_for()
        {
            typedef Integral integral_type;
            typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type    atomic_pointer;

            integral_type   arr[8];
            const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
            for ( integral_type i = 0; i < aSize; ++i ) {
                arr[static_cast<size_t>(i)] = i + 1;
            }

            atomic_pointer  a;
            integral_type *  p;

            atomics::atomic_store( &a, arr );
            EXPECT_EQ( *atomics::atomic_load( &a ), 1 );

            p = arr;
            EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, &p, arr + 5 ));
            EXPECT_EQ( p, arr + 0 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, &p, arr + 3 ));
            EXPECT_EQ( p, arr + 5 );

            EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, &p, arr + 3 ));
            EXPECT_EQ( p, arr + 5 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, &p, arr + 5 ));
            EXPECT_EQ( p, arr + 3 );

            EXPECT_EQ( atomics::atomic_exchange( &a, arr ), arr + 3 );
            EXPECT_EQ( atomics::atomic_load( &a ), arr );
            EXPECT_EQ( *atomics::atomic_load( &a ), 1 );

            for ( integral_type i = 1; i < aSize; ++i ) {
                p = atomics::atomic_load( &a );
                EXPECT_EQ( *p, i );
                EXPECT_EQ( atomics::atomic_fetch_add( &a, 1 ), p );
                EXPECT_EQ( *atomics::atomic_load( &a ), i + 1 );
            }

            for ( integral_type i = aSize; i > 1; --i ) {
                p = atomics::atomic_load( &a );
                EXPECT_EQ( *p, i );
                EXPECT_EQ( atomics::atomic_fetch_sub( &a, 1 ), p );
                EXPECT_EQ( *atomics::atomic_load( &a ), i - 1 );
            }

            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
            test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
        }

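        // atomic<void*> variant: only store/load, exchange and CAS are exercised
        // (no fetch_add/fetch_sub, since arithmetic on void* is not defined);
        // stored addresses are checked by casting back to char*.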
        template <typename Atomic>
        void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
        {
            CDS_UNUSED( aSize );

            atomics::memory_order oLoad = convert_to_load_order( order );
            atomics::memory_order oStore = convert_to_store_order( order );
            char *  p;

            atomics::atomic_store_explicit( &a, (void *) arr, oStore );
            EXPECT_EQ( *reinterpret_cast<char *>( atomics::atomic_load_explicit( &a, oLoad )), 1 );

            p = arr;
            EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 0 );
            EXPECT_EQ( *p, 1 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 5 );
            EXPECT_EQ( *p, 6 );

            EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 5 );
            EXPECT_EQ( *p, 6 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
            EXPECT_EQ( p, arr + 3 );
            EXPECT_EQ( *p, 4 );

            EXPECT_EQ( reinterpret_cast<char *>( atomics::atomic_exchange_explicit( &a, (void *) arr, order )), arr + 3 );
            EXPECT_EQ( reinterpret_cast<char *>( atomics::atomic_load_explicit( &a, oLoad )), arr );
            EXPECT_EQ( *reinterpret_cast<char *>( atomics::atomic_load_explicit( &a, oLoad )), 1 );
        }

        template <bool Volatile>
        void do_test_atomic_pointer_void()
        {
            typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type    atomic_pointer;

            char   arr[8];
            const char aSize = sizeof(arr)/sizeof(arr[0]);
            for ( char i = 0; i < aSize; ++i ) {
                arr[static_cast<size_t>(i)] = i + 1;
            }

            atomic_pointer  a;
            char *  p;

            atomics::atomic_store( &a, (void *) arr );
            EXPECT_EQ( *reinterpret_cast<char *>( atomics::atomic_load( &a )), 1 );

            p = arr;
            EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 5)));
            EXPECT_EQ( p, arr + 0 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 3)));
            EXPECT_EQ( p, arr + 5 );

            EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 3)));
            EXPECT_EQ( p, arr + 5 );
            EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 5)));
            EXPECT_EQ( p, arr + 3 );

            EXPECT_EQ( reinterpret_cast<char *>( atomics::atomic_exchange( &a, (void *) arr )), arr + 3 );
            EXPECT_EQ( reinterpret_cast<char *>( atomics::atomic_load( &a )), arr );
            EXPECT_EQ( *reinterpret_cast<char *>( atomics::atomic_load( &a )), 1 );

            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
            do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
        }

    public:
        void test_atomic_flag()
        {
            atomics::atomic_flag flags[8];
            for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
                do_test_atomic_flag( flags[i] );
        }

        void test_atomic_flag_volatile()
        {
            atomics::atomic_flag volatile flags[8];
            for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
                do_test_atomic_flag( flags[i] );
        }

        template <typename AtomicBool>
        void test_atomic_bool_()
        {
            AtomicBool a[8];
            for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
                do_test_atomic_bool( a[i] );

                do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
                do_test_atomic_bool( a[i], atomics::memory_order_acquire );
                do_test_atomic_bool( a[i], atomics::memory_order_release );
                do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
                do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
            }
        }

        void test_atomic_bool()
        {
            test_atomic_bool_<atomics::atomic<bool> >();
        }

        void test_atomic_bool_volatile()
        {
            test_atomic_bool_<atomics::atomic<bool> volatile >();
        }
    };

    TEST_F( cxx11_atomic_func, atomic_char )
    {
        test_atomic_integral<char>();
    }
    TEST_F( cxx11_atomic_func, atomic_char_volatile )
    {
        test_atomic_integral_volatile<char>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_char )
    {
        test_atomic_integral<unsigned char>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_char_volatile )
    {
        test_atomic_integral_volatile<unsigned char>();
    }
    TEST_F( cxx11_atomic_func, atomic_signed_char )
    {
        test_atomic_integral<signed char>();
    }
    TEST_F( cxx11_atomic_func, atomic_signed_char_volatile )
    {
        test_atomic_integral_volatile<signed char>();
    }
    TEST_F( cxx11_atomic_func, atomic_short_int )
    {
        test_atomic_integral<short int>();
    }
    TEST_F( cxx11_atomic_func, atomic_short_int_volatile )
    {
        test_atomic_integral_volatile<short int>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_short_int )
    {
        test_atomic_integral<unsigned short int>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_short_int_volatile )
    {
        test_atomic_integral_volatile<unsigned short int>();
    }
    TEST_F( cxx11_atomic_func, atomic_int )
    {
        test_atomic_integral<int>();
    }
    TEST_F( cxx11_atomic_func, atomic_int_volatile )
    {
        test_atomic_integral_volatile<int>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_int )
    {
        test_atomic_integral<unsigned int>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_int_volatile )
    {
        test_atomic_integral_volatile<unsigned int>();
    }
    TEST_F( cxx11_atomic_func, atomic_long )
    {
        test_atomic_integral<long>();
    }
    TEST_F( cxx11_atomic_func, atomic_long_volatile )
    {
        test_atomic_integral_volatile<long>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_long )
    {
        test_atomic_integral<unsigned long>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_long_volatile )
    {
        test_atomic_integral_volatile<unsigned long>();
    }
    TEST_F( cxx11_atomic_func, atomic_long_long )
    {
        test_atomic_integral<long long>();
    }
    TEST_F( cxx11_atomic_func, atomic_long_long_volatile )
    {
        test_atomic_integral_volatile<long long>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_long_long )
    {
        test_atomic_integral<unsigned long long>();
    }
    TEST_F( cxx11_atomic_func, atomic_unsigned_long_long_volatile )
    {
        test_atomic_integral_volatile<unsigned long long>();
    }

    TEST_F( cxx11_atomic_func, atomic_pointer_void )
    {
        do_test_atomic_pointer_void<false>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_void_volatile )
    {
        do_test_atomic_pointer_void<true>();
    }

    TEST_F( cxx11_atomic_func, atomic_pointer_char )
    {
        test_atomic_pointer_for<char, false>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_char_volatile )
    {
        test_atomic_pointer_for<char, true>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_short )
    {
        test_atomic_pointer_for<short, false>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_short_volatile )
    {
        test_atomic_pointer_for<short, true>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_int )
    {
        test_atomic_pointer_for<int, false>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_int_volatile )
    {
        test_atomic_pointer_for<int, true>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_long )
    {
        test_atomic_pointer_for<long, false>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_long_volatile )
    {
        test_atomic_pointer_for<long, true>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_long_long )
    {
        test_atomic_pointer_for<long long, false>();
    }
    TEST_F( cxx11_atomic_func, atomic_pointer_long_long_volatile )
    {
        test_atomic_pointer_for<long long, true>();
    }

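    // Fence smoke test: atomic_thread_fence/atomic_signal_fence are invoked for
    // every memory order. A single-threaded test can only verify that the calls
    // compile and execute; no ordering property is asserted.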
    TEST_F( cxx11_atomic_func, test_atomic_fence )
    {
        atomics::atomic_thread_fence( atomics::memory_order_relaxed );
        atomics::atomic_thread_fence( atomics::memory_order_acquire );
        atomics::atomic_thread_fence( atomics::memory_order_release );
        atomics::atomic_thread_fence( atomics::memory_order_acq_rel );
        atomics::atomic_thread_fence( atomics::memory_order_seq_cst );

        atomics::atomic_signal_fence( atomics::memory_order_relaxed );
        atomics::atomic_signal_fence( atomics::memory_order_acquire );
        atomics::atomic_signal_fence( atomics::memory_order_release );
        atomics::atomic_signal_fence( atomics::memory_order_acq_rel );
        atomics::atomic_signal_fence( atomics::memory_order_seq_cst );
    }
}   // namespace misc

#endif // #ifndef CDS_USE_BOOST_ATOMIC