2 This file is a part of libcds - Concurrent Data Structures library
4 (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2017
6 Source code repo: http://github.com/khizmax/libcds/
7 Download: http://sourceforge.net/projects/libcds/files/
9 Redistribution and use in source and binary forms, with or without
10 modification, are permitted provided that the following conditions are met:
12 * Redistributions of source code must retain the above copyright notice, this
13 list of conditions and the following disclaimer.
15 * Redistributions in binary form must reproduce the above copyright notice,
16 this list of conditions and the following disclaimer in the documentation
17 and/or other materials provided with the distribution.
19 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23 FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include <gtest/gtest.h>
32 #include <cds/algo/atomic.h>
34 #ifndef CDS_USE_BOOST_ATOMIC
35 // Skip this test for boost.atomic
36 // Boost.atomic has no free atomic functions implementation.
38 #include "cxx11_convert_memory_order.h"
40 #if CDS_COMPILER == CDS_COMPILER_CLANG && !defined( _LIBCPP_VERSION )
41 // CLang (at least 3.6) without libc++ has no gcc-specific __atomic_is_lock_free function
42 # define EXPECT_ATOMIC_IS_LOCK_FREE( x )
44 # define EXPECT_ATOMIC_IS_LOCK_FREE( x ) EXPECT_TRUE( atomics::atomic_is_lock_free( &x ));
50 class cxx11_atomic_func: public ::testing::Test
53 template <typename AtomicFlag>
54 void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
56 atomics::memory_order mo_clear = convert_to_store_order(order);
58 f.clear( convert_to_store_order(order));
60 for ( int i = 0; i < 5; ++i ) {
61 EXPECT_FALSE( atomics::atomic_flag_test_and_set_explicit( &f, order ));
62 EXPECT_TRUE( atomics::atomic_flag_test_and_set_explicit( &f, order ));
63 atomics::atomic_flag_clear_explicit( &f, mo_clear );
64 atomics::atomic_flag_clear_explicit( &f, mo_clear );
68 template <typename AtomicFlag>
69 void do_test_atomic_flag( AtomicFlag& f )
73 for ( int i = 0; i < 5; ++i ) {
74 EXPECT_FALSE( atomics::atomic_flag_test_and_set( &f ));
75 EXPECT_TRUE( atomics::atomic_flag_test_and_set( &f ));
76 atomics::atomic_flag_clear(&f);
77 atomics::atomic_flag_clear(&f);
80 do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
81 do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
82 do_test_atomic_flag_mo( f, atomics::memory_order_release );
83 do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
84 do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
87 template <class Atomic, typename Integral>
88 void do_test_atomic_type(Atomic& a )
90 typedef Integral integral_type;
92 EXPECT_ATOMIC_IS_LOCK_FREE( a );
93 atomics::atomic_store( &a, (integral_type) 0 );
94 EXPECT_EQ( atomics::atomic_load( &a ), integral_type( 0 ));
96 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
97 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
98 EXPECT_EQ( atomics::atomic_exchange( &a, n ), (integral_type) 0 );
99 EXPECT_EQ( atomics::atomic_load( &a ), n );
100 EXPECT_EQ( atomics::atomic_exchange( &a, (integral_type) 0 ), n );
101 EXPECT_EQ( atomics::atomic_load( &a ), (integral_type) 0 );
104 integral_type prev = atomics::atomic_load( &a );
105 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
106 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
107 integral_type expected = prev;
109 EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, &expected, n));
110 EXPECT_EQ( expected, prev );
111 EXPECT_NE( expected, n );
112 EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, &expected, n));
113 EXPECT_EQ( expected, n );
116 EXPECT_EQ( atomics::atomic_load( &a ), n );
119 atomics::atomic_store( &a, (integral_type) 0 );
121 prev = atomics::atomic_load( &a );
122 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
123 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
124 integral_type expected = prev;
126 EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, &expected, n));
127 EXPECT_EQ( expected, prev );
128 EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, &expected, n));
129 EXPECT_EQ( expected, n );
132 EXPECT_EQ( atomics::atomic_load( &a ), n );
135 EXPECT_EQ( atomics::atomic_exchange( &a, (integral_type) 0 ), prev );
138 template <class Atomic, typename Integral>
139 void do_test_atomic_integral( Atomic& a )
141 do_test_atomic_type< Atomic, Integral >( a );
143 typedef Integral integral_type;
146 atomics::atomic_store( &a, (integral_type) 0 );
149 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
151 integral_type prev = atomics::atomic_load( &a );
152 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
154 EXPECT_EQ( atomics::atomic_fetch_add( &a, n ), prev );
158 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
160 integral_type prev = atomics::atomic_load( &a );
161 integral_type n = static_cast<integral_type>( integral_type(42) << ((nByte - 1) * 8));
163 EXPECT_EQ( atomics::atomic_fetch_sub( &a, n ), prev );
165 EXPECT_EQ( atomics::atomic_load( &a ), (integral_type) 0 );
167 // fetch_or / fetc_xor / fetch_and
168 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
170 integral_type prev = atomics::atomic_load( &a );
171 integral_type mask = static_cast<integral_type>( integral_type(1) << nBit );
173 EXPECT_EQ( atomics::atomic_fetch_or( &a, mask ), prev );
174 prev = atomics::atomic_load( &a );
175 EXPECT_EQ( ( prev & mask ), mask );
177 EXPECT_EQ( atomics::atomic_fetch_and( &a, (integral_type) ~mask ), prev );
178 prev = atomics::atomic_load( &a );
179 EXPECT_EQ( integral_type(prev & mask), integral_type(0));
181 EXPECT_EQ( atomics::atomic_fetch_xor( &a, mask ), prev );
182 prev = atomics::atomic_load( &a );
183 EXPECT_EQ( ( prev & mask), mask);
185 EXPECT_EQ( atomics::atomic_load( &a ), (integral_type) -1 );
188 template <class Atomic, typename Integral>
189 void do_test_atomic_type( Atomic& a, atomics::memory_order order )
191 typedef Integral integral_type;
193 const atomics::memory_order oLoad = convert_to_load_order( order );
194 const atomics::memory_order oStore = convert_to_store_order( order );
196 EXPECT_ATOMIC_IS_LOCK_FREE( a );
197 atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
198 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), (integral_type) 0 );
200 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
201 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
202 EXPECT_EQ( atomics::atomic_exchange_explicit( &a, n, order ), (integral_type) 0 );
203 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), n );
204 EXPECT_EQ( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ), n );
205 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), (integral_type) 0 );
208 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
209 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
210 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
211 integral_type expected = prev;
213 EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
214 EXPECT_EQ( expected, prev );
215 EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
216 EXPECT_EQ( expected, n );
219 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), n );
222 atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
224 prev = atomics::atomic_load_explicit( &a, oLoad );
225 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
226 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
227 integral_type expected = prev;
229 EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
230 EXPECT_EQ( expected, prev );
231 EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
232 EXPECT_EQ( expected, n );
235 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), n );
238 EXPECT_EQ( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ), prev );
241 template <class Atomic, typename Integral>
242 void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
244 do_test_atomic_type< Atomic, Integral >( a, order );
245 typedef Integral integral_type;
247 const atomics::memory_order oLoad = convert_to_load_order( order );
248 const atomics::memory_order oStore = convert_to_store_order( order );
251 atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
254 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
256 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
257 integral_type n = static_cast<integral_type>( integral_type(42) << (nByte * 8));
259 EXPECT_EQ( atomics::atomic_fetch_add_explicit( &a, n, order), prev);
263 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
265 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
266 integral_type n = static_cast<integral_type>( integral_type(42) << ((nByte - 1) * 8));
268 EXPECT_EQ( atomics::atomic_fetch_sub_explicit( &a, n, order ), prev);
270 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), integral_type( 0 ));
272 // fetch_or / fetc_xor / fetch_and
273 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
275 integral_type prev = atomics::atomic_load_explicit( &a, oLoad ) ;;
276 integral_type mask = static_cast<integral_type>( integral_type(1) << nBit );
278 EXPECT_EQ( atomics::atomic_fetch_or_explicit( &a, mask, order ), prev );
279 prev = atomics::atomic_load_explicit( &a, oLoad );
280 EXPECT_EQ( ( prev & mask), mask);
282 EXPECT_EQ( atomics::atomic_fetch_and_explicit( &a, (integral_type) ~mask, order ), prev );
283 prev = atomics::atomic_load_explicit( &a, oLoad );
284 EXPECT_EQ( ( prev & mask), integral_type( 0 ));
286 EXPECT_EQ( atomics::atomic_fetch_xor_explicit( &a, mask, order ), prev );
287 prev = atomics::atomic_load_explicit( &a, oLoad );
288 EXPECT_EQ( ( prev & mask), mask);
290 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), (integral_type) -1 );
293 template <typename Atomic, typename Integral>
294 void test_atomic_integral_(Atomic& a)
296 do_test_atomic_integral<Atomic, Integral >(a);
298 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
299 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
300 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
301 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
302 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
305 template <typename Integral>
306 void test_atomic_integral()
308 typedef atomics::atomic<Integral> atomic_type;
310 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
311 test_atomic_integral_<atomic_type, Integral>( a[i] );
314 template <typename Integral>
315 void test_atomic_integral_volatile()
317 typedef atomics::atomic<Integral> volatile atomic_type;
319 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
320 test_atomic_integral_<atomic_type, Integral>( a[i] );
324 template <class AtomicBool>
325 void do_test_atomic_bool(AtomicBool& a)
327 EXPECT_ATOMIC_IS_LOCK_FREE( a );
328 atomics::atomic_store( &a, false );
330 EXPECT_FALSE( atomics::atomic_load( &a ));
332 EXPECT_FALSE( atomics::atomic_exchange( &a, true ));
333 EXPECT_TRUE( atomics::atomic_load( &a ));
334 EXPECT_TRUE( atomics::atomic_exchange( &a, false ));
335 EXPECT_FALSE( atomics::atomic_load( &a ));
337 bool expected = false;
338 EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, &expected, true));
339 EXPECT_FALSE( expected );
340 EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, &expected, false));
341 EXPECT_TRUE( expected );
342 EXPECT_TRUE( atomics::atomic_load( &a ));
344 atomics::atomic_store( &a, false );
347 EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, &expected, true));
348 EXPECT_FALSE( expected );
349 EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, &expected, false));
350 EXPECT_TRUE( expected );
352 EXPECT_TRUE( atomics::atomic_load( &a ));
354 EXPECT_TRUE( atomics::atomic_exchange( &a, false ));
357 template <class AtomicBool>
358 void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
360 const atomics::memory_order oLoad = convert_to_load_order( order );
361 const atomics::memory_order oStore = convert_to_store_order( order );
362 const atomics::memory_order oExchange = convert_to_exchange_order( order );
364 EXPECT_ATOMIC_IS_LOCK_FREE( a );
365 atomics::atomic_store_explicit( &a, false, oStore );
366 EXPECT_FALSE( a == false );
367 EXPECT_FALSE( atomics::atomic_load_explicit( &a, oLoad ));
369 EXPECT_FALSE( atomics::atomic_exchange_explicit( &a, true, oExchange ));
370 EXPECT_TRUE( atomics::atomic_load_explicit( &a, oLoad ));
371 EXPECT_TRUE( atomics::atomic_exchange_explicit( &a, false, oExchange ));
372 EXPECT_FALSE( atomics::atomic_load_explicit( &a, oLoad ));
374 bool expected = false;
375 EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
376 EXPECT_FALSE( expected );
377 EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
378 EXPECT_TRUE( expected );
379 EXPECT_TRUE( atomics::atomic_load_explicit( &a, oLoad ));
381 atomics::atomic_store( &a, false );
384 EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
385 EXPECT_FALSE( expected );
386 EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
387 EXPECT_TRUE( expected );
389 EXPECT_TRUE( atomics::atomic_load_explicit( &a, oLoad ));
391 EXPECT_TRUE( atomics::atomic_exchange_explicit( &a, false, oExchange ));
394 template <typename Atomic, typename Integral>
395 void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
397 typedef Integral integral_type;
398 atomics::memory_order oLoad = convert_to_load_order(order);
399 atomics::memory_order oStore = convert_to_store_order(order);
402 atomics::atomic_store_explicit( &a, arr, oStore );
403 EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), 1 );
406 EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
407 EXPECT_EQ( p, arr + 0 );
409 EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
410 EXPECT_EQ( p, arr + 5 );
413 EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
414 EXPECT_EQ( p, arr + 5 );
416 EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
417 EXPECT_EQ( p, arr + 3 );
420 EXPECT_EQ( atomics::atomic_exchange_explicit( &a, arr, order ), arr + 3 );
421 EXPECT_EQ( atomics::atomic_load_explicit( &a, oLoad ), arr );
422 EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), 1 );
424 for ( integral_type i = 1; i < aSize; ++i ) {
425 integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
427 EXPECT_EQ( atomics::atomic_fetch_add_explicit( &a, 1, order ), p );
428 EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), i + 1 );
431 for ( integral_type i = aSize; i > 1; --i ) {
432 integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
434 EXPECT_EQ( atomics::atomic_fetch_sub_explicit( &a, 1, order ), p );
435 EXPECT_EQ( *atomics::atomic_load_explicit( &a, oLoad ), i - 1 );
439 template <typename Integral, bool Volatile>
440 void test_atomic_pointer_for()
442 typedef Integral integral_type;
443 typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type atomic_pointer;
445 integral_type arr[8];
446 const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
447 for ( integral_type i = 0; i < aSize; ++i ) {
448 arr[static_cast<size_t>(i)] = i + 1;
454 atomics::atomic_store( &a, arr );
455 EXPECT_EQ( *atomics::atomic_load( &a ), 1 );
458 EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, &p, arr + 5 ));
459 EXPECT_EQ( p, arr + 0 );
460 EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, &p, arr + 3 ));
461 EXPECT_EQ( p, arr + 5 );
463 EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, &p, arr + 3 ));
464 EXPECT_EQ( p, arr + 5 );
465 EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, &p, arr + 5 ));
466 EXPECT_EQ( p, arr + 3 );
468 EXPECT_EQ( atomics::atomic_exchange( &a, arr ), arr + 3 );
469 EXPECT_EQ( atomics::atomic_load( &a ), arr );
470 EXPECT_EQ( *atomics::atomic_load( &a ), 1 );
472 for ( integral_type i = 1; i < aSize; ++i ) {
473 integral_type * p = atomics::atomic_load( &a );
475 EXPECT_EQ( atomics::atomic_fetch_add( &a, 1 ), p );
476 EXPECT_EQ( *atomics::atomic_load( &a ), i + 1 );
479 for ( integral_type i = aSize; i > 1; --i ) {
480 integral_type * p = atomics::atomic_load( &a );
482 EXPECT_EQ( atomics::atomic_fetch_sub( &a, 1 ), p );
483 EXPECT_EQ( *atomics::atomic_load( &a ), i - 1 );
486 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
487 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
488 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
489 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
490 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
494 template <typename Atomic>
495 void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
497 atomics::memory_order oLoad = convert_to_load_order(order);
498 atomics::memory_order oStore = convert_to_store_order(order);
501 atomics::atomic_store_explicit( &a, (void *) arr, oStore );
502 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )), 1 );
505 EXPECT_TRUE( atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
506 EXPECT_EQ( p, arr + 0 );
508 EXPECT_FALSE( atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
509 EXPECT_EQ( p, arr + 5 );
512 EXPECT_TRUE( atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
513 EXPECT_EQ( p, arr + 5 );
515 EXPECT_FALSE( atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
516 EXPECT_EQ( p, arr + 3 );
519 EXPECT_EQ( reinterpret_cast<char *>(atomics::atomic_exchange_explicit( &a, (void *) arr, order )), arr + 3 );
520 EXPECT_EQ( reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )), arr );
521 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )), 1 );
523 for ( char i = 1; i < aSize; ++i ) {
524 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )), i );
525 atomics::atomic_fetch_add_explicit( &a, 1, order );
526 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )), i + 1 );
529 for ( char i = aSize; i > 1; --i ) {
530 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )), i );
531 atomics::atomic_fetch_sub_explicit( &a, 1, order );
532 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )), i - 1 );
536 template <bool Volatile>
537 void do_test_atomic_pointer_void()
539 typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type atomic_pointer;
542 const char aSize = sizeof(arr)/sizeof(arr[0]);
543 for ( char i = 0; i < aSize; ++i ) {
544 arr[static_cast<size_t>(i)] = i + 1;
550 atomics::atomic_store( &a, (void *) arr );
551 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load( &a )), 1 );
554 EXPECT_TRUE( atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 5)));
555 EXPECT_EQ( p, arr + 0 );
556 EXPECT_FALSE( atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 3)));
557 EXPECT_EQ( p, arr + 5 );
559 EXPECT_TRUE( atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 3)));
560 EXPECT_EQ( p, arr + 5 );
561 EXPECT_FALSE( atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 5)));
562 EXPECT_EQ( p, arr + 3 );
564 EXPECT_EQ( reinterpret_cast<char *>( atomics::atomic_exchange( &a, (void *) arr )), arr + 3 );
565 EXPECT_EQ( reinterpret_cast<char *>( atomics::atomic_load( &a )), arr );
566 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load( &a )), 1 );
568 for ( char i = 1; i < aSize; ++i ) {
569 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load( &a )), i );
570 atomics::atomic_fetch_add( &a, 1 );
571 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load( &a )), i + 1 );
574 for ( char i = aSize; i > 1; --i ) {
575 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load( &a )), i );
576 atomics::atomic_fetch_sub( &a, 1 );
577 EXPECT_EQ( *reinterpret_cast<char *>(atomics::atomic_load( &a )), i - 1 );
580 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
581 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
582 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
583 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
584 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
588 void test_atomic_flag()
590 atomics::atomic_flag flags[8];
591 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
592 do_test_atomic_flag( flags[i] );
594 void test_atomic_flag_volatile()
596 atomics::atomic_flag volatile flags[8];
597 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
598 do_test_atomic_flag( flags[i] );
601 template <typename AtomicBool>
602 void test_atomic_bool_()
605 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
606 do_test_atomic_bool( a[i] );
608 do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
609 do_test_atomic_bool( a[i], atomics::memory_order_acquire );
610 do_test_atomic_bool( a[i], atomics::memory_order_release );
611 do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
612 do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
616 void test_atomic_bool()
618 test_atomic_bool_<atomics::atomic<bool> >();
620 void test_atomic_bool_volatile()
622 test_atomic_bool_<atomics::atomic<bool> volatile >();
626 TEST_F( cxx11_atomic_func, atomic_char )
628 test_atomic_integral<char>();
630 TEST_F( cxx11_atomic_func, atomic_char_volatile )
632 test_atomic_integral_volatile<char>();
634 TEST_F( cxx11_atomic_func, atomic_unsigned_char )
636 test_atomic_integral<unsigned char>();
638 TEST_F( cxx11_atomic_func, atomic_unsigned_char_volatile )
640 test_atomic_integral_volatile<unsigned char>();
642 TEST_F( cxx11_atomic_func, atomic_signed_char )
644 test_atomic_integral<signed char>();
646 TEST_F( cxx11_atomic_func, atomic_signed_char_volatile )
648 test_atomic_integral_volatile<signed char>();
650 TEST_F( cxx11_atomic_func, atomic_short_int )
652 test_atomic_integral<short int>();
654 TEST_F( cxx11_atomic_func, atomic_short_int_volatile )
656 test_atomic_integral_volatile<short int>();
658 TEST_F( cxx11_atomic_func, atomic_unsigned_short_int )
660 test_atomic_integral<unsigned short int>();
662 TEST_F( cxx11_atomic_func, atomic_unsigned_short_int_volatile )
664 test_atomic_integral_volatile<unsigned short int>();
666 TEST_F( cxx11_atomic_func, atomic_int )
668 test_atomic_integral<int>();
670 TEST_F( cxx11_atomic_func, atomic_int_volatile )
672 test_atomic_integral_volatile<int>();
674 TEST_F( cxx11_atomic_func, atomic_unsigned_int )
676 test_atomic_integral<unsigned int>();
678 TEST_F( cxx11_atomic_func, atomic_unsigned_int_volatile )
680 test_atomic_integral_volatile<unsigned int>();
682 TEST_F( cxx11_atomic_func, atomic_long )
684 test_atomic_integral<long>();
686 TEST_F( cxx11_atomic_func, atomic_long_volatile )
688 test_atomic_integral_volatile<long>();
690 TEST_F( cxx11_atomic_func, atomic_unsigned_long )
692 test_atomic_integral<unsigned long>();
694 TEST_F( cxx11_atomic_func, atomic_unsigned_long_volatile )
696 test_atomic_integral_volatile<unsigned long>();
698 TEST_F( cxx11_atomic_func, atomic_long_long )
700 test_atomic_integral<long long>();
702 TEST_F( cxx11_atomic_func, atomic_long_long_volatile )
704 test_atomic_integral_volatile<long long>();
706 TEST_F( cxx11_atomic_func, atomic_unsigned_long_long )
708 test_atomic_integral<unsigned long long>();
710 TEST_F( cxx11_atomic_func, atomic_unsigned_long_long_volatile )
712 test_atomic_integral_volatile<unsigned long long>();
#if !( CDS_COMPILER == CDS_COMPILER_CLANG && CDS_COMPILER_VERSION < 40000 )
// clang before 4.0 fails to compile atomic<void*> fetch_add/fetch_sub
TEST_F( cxx11_atomic_func, atomic_pointer_void )
{
    do_test_atomic_pointer_void<false>();
}

TEST_F( cxx11_atomic_func, atomic_pointer_void_volatile )
{
    do_test_atomic_pointer_void<true>();
}
#endif
727 TEST_F( cxx11_atomic_func, atomic_pointer_char )
729 test_atomic_pointer_for<char, false>();
731 TEST_F( cxx11_atomic_func, atomic_pointer_char_volatile )
733 test_atomic_pointer_for<char, true>();
735 TEST_F( cxx11_atomic_func, atomic_pointer_short )
737 test_atomic_pointer_for<short, false>();
739 TEST_F( cxx11_atomic_func, atomic_pointer_short_volatile )
741 test_atomic_pointer_for<short, true>();
743 TEST_F( cxx11_atomic_func, atomic_pointer_int )
745 test_atomic_pointer_for<int, false>();
747 TEST_F( cxx11_atomic_func, atomic_pointer_int_volatile )
749 test_atomic_pointer_for<int, true>();
751 TEST_F( cxx11_atomic_func, atomic_pointer_long )
753 test_atomic_pointer_for<long, false>();
755 TEST_F( cxx11_atomic_func, atomic_pointer_long_volatile )
757 test_atomic_pointer_for<long, true>();
759 TEST_F( cxx11_atomic_func, atomic_pointer_long_long )
761 test_atomic_pointer_for<long long, false>();
763 TEST_F( cxx11_atomic_func, atomic_pointer_long_long_volatile )
765 test_atomic_pointer_for<long long, true>();
768 TEST_F( cxx11_atomic_func, test_atomic_fence )
770 atomics::atomic_thread_fence(atomics::memory_order_relaxed );
771 atomics::atomic_thread_fence(atomics::memory_order_acquire );
772 atomics::atomic_thread_fence(atomics::memory_order_release );
773 atomics::atomic_thread_fence(atomics::memory_order_acq_rel );
774 atomics::atomic_thread_fence(atomics::memory_order_seq_cst );
776 atomics::atomic_signal_fence(atomics::memory_order_relaxed );
777 atomics::atomic_signal_fence(atomics::memory_order_acquire );
778 atomics::atomic_signal_fence(atomics::memory_order_release );
779 atomics::atomic_signal_fence(atomics::memory_order_acq_rel );
780 atomics::atomic_signal_fence(atomics::memory_order_seq_cst );
785 #endif // #ifndef CDS_USE_BOOST_ATOMIC