3 #include "cppunit/cppunit_proxy.h"
5 #include <cds/algo/atomic.h>
7 #include "misc/cxx11_convert_memory_order.h"
10 class cxx11_atomic_class: public CppUnitMini::TestCase
// Exercise atomic_flag::test_and_set() with an explicit memory order.
// The flag is expected to start clear; each iteration sets it (first call
// returns false), confirms it is set (second call returns true), and then —
// NOTE(review): extraction gap — the clear() call that presumably uses
// mo_clear (line 19 of the original file) is not visible here; confirm
// against the upstream source.
12 template <typename AtomicFlag>
13 void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
// clear() only accepts store-compatible orders, so the requested order is
// down-converted for it.
15 atomics::memory_order mo_clear = convert_to_store_order(order);
16 for ( int i = 0; i < 5; ++i ) {
17 CPPUNIT_ASSERT( !f.test_and_set( order ));
18 CPPUNIT_ASSERT( f.test_and_set( order ) );
// Exercise atomic_flag::test_and_set() with the default (seq_cst) order,
// then re-run the same pattern under every explicit memory order via
// do_test_atomic_flag_mo().
// NOTE(review): extraction gaps — the opening brace and the clear() call
// between the loop and line 34 of the original file are not visible here.
23 template <typename AtomicFlag>
24 void do_test_atomic_flag( AtomicFlag& f)
28 for ( int i = 0; i < 5; ++i ) {
29 CPPUNIT_ASSERT( !f.test_and_set());
30 CPPUNIT_ASSERT( f.test_and_set() );
// Repeat for each memory order the C++11 model defines.
34 do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
35 do_test_atomic_flag_mo( f, atomics::memory_order_consume );
36 do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
37 do_test_atomic_flag_mo( f, atomics::memory_order_release );
38 do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
39 do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
// Core exchange/CAS test for an integral atomic, using default (seq_cst)
// memory order throughout. Walks a 0x42 byte pattern through every byte
// position of the integral type so each byte lane of the atomic is touched.
42 template <class Atomic, typename Integral>
43 void do_test_atomic_type(Atomic& a)
45 typedef Integral integral_type;
47 CPPUNIT_ASSERT( a.is_lock_free() );
48 a.store( (integral_type) 0 );
49 CPPUNIT_ASSERT( a == 0 );
50 CPPUNIT_ASSERT( a.load() == 0 );
// exchange(): swap the pattern in and back out, verifying the returned
// previous value each time.
52 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
53 integral_type n = integral_type(42) << (nByte * 8);
54 CPPUNIT_ASSERT( a.exchange( n ) == 0 );
55 CPPUNIT_ASSERT( a == n );
56 CPPUNIT_ASSERT( a.exchange( (integral_type) 0 ) == n );
57 CPPUNIT_ASSERT( a.load() == 0 );
// compare_exchange_weak(): a successful CAS leaves `expected` untouched;
// a failing CAS writes the current value into `expected`.
// NOTE(review): extraction gap — the `prev = n;` update inside this loop
// (line 70 of the original file) is not visible here; without it the loop's
// first assert could not hold past the first iteration, so it presumably
// exists upstream. Confirm against the original source.
60 integral_type prev = a.load();
61 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
62 integral_type n = integral_type(42) << (nByte * 8);
63 integral_type expected = prev;
65 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, n));
66 CPPUNIT_ASSERT( expected == prev );
67 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, n));
68 CPPUNIT_ASSERT( expected == n );
71 CPPUNIT_ASSERT( a == n );
// Reset via assignment operator, then repeat with compare_exchange_strong().
74 a = (integral_type) 0;
77 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
78 integral_type n = integral_type(42) << (nByte * 8);
79 integral_type expected = prev;
81 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, n));
82 CPPUNIT_ASSERT( expected == prev );
83 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, n));
84 CPPUNIT_ASSERT( expected == n );
87 CPPUNIT_ASSERT( a.load() == n );
// Final exchange back to zero returns the last stored pattern.
90 CPPUNIT_ASSERT( a.exchange( (integral_type) 0 ) == prev );
// Arithmetic/bitwise test for an integral atomic with default (seq_cst)
// order: runs the exchange/CAS suite first, then fetch_add/fetch_sub,
// fetch_or/fetch_and/fetch_xor, and finally the compound operators
// (+=, -=, |=, &=, ^=) over every byte / bit of the integral type.
93 template <class Atomic, typename Integral>
94 void do_test_atomic_integral(Atomic& a)
96 do_test_atomic_type< Atomic, Integral >(a);
98 typedef Integral integral_type;
101 a.store( (integral_type) 0 );
// fetch_add: accumulate the 0x42 pattern byte-by-byte.
104 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
106 integral_type prev = a.load();
107 integral_type n = integral_type(42) << (nByte * 8);
109 CPPUNIT_ASSERT( a.fetch_add(n) == prev);
// fetch_sub: remove the same pattern top byte first, ending back at zero.
113 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
115 integral_type prev = a.load();
116 integral_type n = integral_type(42) << ((nByte - 1) * 8);
118 CPPUNIT_ASSERT( a.fetch_sub(n) == prev);
120 CPPUNIT_ASSERT( a.load() == 0 );
122 // fetch_or / fetch_xor / fetch_and
123 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
125 integral_type prev = a.load() ;
// BUGFIX: `1 << nBit` shifted a plain int — undefined behavior for
// nBit >= 31 and the wrong mask for 64-bit integral types. Widen to
// integral_type before shifting (matches the compound-operator loop below).
126 integral_type mask = integral_type(1) << nBit;
128 CPPUNIT_ASSERT( a.fetch_or( mask ) == prev );
130 CPPUNIT_ASSERT( ( prev & mask) == mask);
132 CPPUNIT_ASSERT( a.fetch_and( (integral_type) ~mask ) == prev );
134 CPPUNIT_ASSERT( integral_type(prev & mask) == integral_type(0));
136 CPPUNIT_ASSERT( a.fetch_xor( mask ) == prev );
138 CPPUNIT_ASSERT( integral_type( prev & mask) == mask);
// After or/and/xor over every bit, all bits end up set.
140 CPPUNIT_ASSERT( a.load() == (integral_type) -1 );
// Compound assignment operators, same byte/bit walk as above.
144 a = (integral_type) 0;
147 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
149 integral_type prev = a;
150 integral_type n = integral_type(42) << (nByte * 8);
152 CPPUNIT_ASSERT( (a += n) == (prev + n));
156 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
158 integral_type prev = a;
159 integral_type n = integral_type(42) << ((nByte - 1) * 8);
161 CPPUNIT_ASSERT( (a -= n) == prev - n );
163 CPPUNIT_ASSERT( a.load() == 0 );
166 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
168 integral_type prev = a;
169 integral_type mask = integral_type(1) << nBit;
171 CPPUNIT_ASSERT( (a |= mask ) == (prev | mask ));
173 CPPUNIT_ASSERT( ( prev & mask) == mask);
175 CPPUNIT_ASSERT( (a &= (integral_type) ~mask ) == ( prev & (integral_type) ~mask ));
177 CPPUNIT_ASSERT( ( prev & mask) == 0);
179 CPPUNIT_ASSERT( (a ^= mask ) == (prev ^ mask ));
181 CPPUNIT_ASSERT( ( prev & mask) == mask);
183 CPPUNIT_ASSERT( a == (integral_type) -1 );
// Memory-order-parameterized variant of the exchange/CAS test: the caller's
// order is used for RMW operations, with load/store orders derived from it
// (e.g. a release order is downgraded to relaxed for pure loads).
186 template <class Atomic, typename Integral>
187 void do_test_atomic_type( Atomic& a, atomics::memory_order order )
189 typedef Integral integral_type;
191 const atomics::memory_order oLoad = convert_to_load_order( order );
192 const atomics::memory_order oStore = convert_to_store_order( order );
194 CPPUNIT_ASSERT( a.is_lock_free() );
195 a.store((integral_type) 0, oStore );
196 CPPUNIT_ASSERT( a == 0 );
197 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
// exchange() under the requested order.
199 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
200 integral_type n = integral_type(42) << (nByte * 8);
201 CPPUNIT_ASSERT( a.exchange( n, order ) == 0 );
202 CPPUNIT_ASSERT( a.load( oLoad ) == n );
203 CPPUNIT_ASSERT( a.exchange( (integral_type) 0, order ) == n );
204 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
// compare_exchange_weak(): failure order is relaxed, as the standard
// requires it be no stronger than the success order.
// NOTE(review): extraction gap — the `prev = n;` loop update (original
// line 217) is not visible here; confirm against the upstream source.
207 integral_type prev = a.load( oLoad );
208 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
209 integral_type n = integral_type(42) << (nByte * 8);
210 integral_type expected = prev;
212 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, n, order, atomics::memory_order_relaxed));
213 CPPUNIT_ASSERT( expected == prev );
214 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, n, order, atomics::memory_order_relaxed));
215 CPPUNIT_ASSERT( expected == n );
218 CPPUNIT_ASSERT( a.load( oLoad ) == n );
// Reset, then the same pattern with compare_exchange_strong().
221 a.store( (integral_type) 0, oStore );
223 prev = a.load( oLoad );
224 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
225 integral_type n = integral_type(42) << (nByte * 8);
226 integral_type expected = prev;
228 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, n, order, atomics::memory_order_relaxed));
229 CPPUNIT_ASSERT( expected == prev );
230 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, n, order, atomics::memory_order_relaxed));
231 CPPUNIT_ASSERT( expected == n );
234 CPPUNIT_ASSERT( a.load( oLoad ) == n );
237 CPPUNIT_ASSERT( a.exchange( (integral_type) 0, order ) == prev );
// Memory-order-parameterized arithmetic/bitwise test: runs the ordered
// exchange/CAS suite, then fetch_add/fetch_sub and the fetch_or/fetch_and/
// fetch_xor bit walk, all under the caller-supplied order (loads and stores
// use orders derived from it).
240 template <class Atomic, typename Integral>
241 void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
243 do_test_atomic_type< Atomic, Integral >( a, order );
245 typedef Integral integral_type;
247 const atomics::memory_order oLoad = convert_to_load_order( order );
248 const atomics::memory_order oStore = convert_to_store_order( order );
251 a.store( (integral_type) 0, oStore );
// fetch_add: accumulate the 0x42 pattern byte-by-byte.
254 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
256 integral_type prev = a.load( oLoad );
257 integral_type n = integral_type(42) << (nByte * 8);
259 CPPUNIT_ASSERT( a.fetch_add( n, order) == prev);
// fetch_sub: remove the pattern top byte first, ending back at zero.
263 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
265 integral_type prev = a.load( oLoad );
266 integral_type n = integral_type(42) << ((nByte - 1) * 8);
268 CPPUNIT_ASSERT( a.fetch_sub( n, order ) == prev);
270 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
272 // fetch_or / fetch_xor / fetch_and
273 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
275 integral_type prev = a.load( oLoad ) ;
// BUGFIX: `1 << nBit` shifted a plain int — undefined behavior for
// nBit >= 31 and the wrong mask for 64-bit integral types. Widen to
// integral_type before shifting (matches the non-ordered overload's
// compound-operator loop).
276 integral_type mask = integral_type(1) << nBit;
278 CPPUNIT_ASSERT( a.fetch_or( mask, order ) == prev );
279 prev = a.load( oLoad );
280 CPPUNIT_ASSERT( ( prev & mask) == mask);
282 CPPUNIT_ASSERT( a.fetch_and( (integral_type) ~mask, order ) == prev );
283 prev = a.load( oLoad );
284 CPPUNIT_ASSERT( ( prev & mask) == 0);
286 CPPUNIT_ASSERT( a.fetch_xor( mask, order ) == prev );
287 prev = a.load( oLoad );
288 CPPUNIT_ASSERT( ( prev & mask) == mask);
// After or/and/xor over every bit, all bits end up set.
290 CPPUNIT_ASSERT( a.load( oLoad ) == (integral_type) -1 );
// Driver: run the integral test suite with the default order, then once per
// explicit memory order.
295 template <typename Atomic, typename Integral>
296 void test_atomic_integral_(Atomic& a)
298 do_test_atomic_integral<Atomic, Integral >(a);
300 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
// GCC before 4.9 miscompiles/rejects consume ordering here, so it is
// conditionally skipped.
// NOTE(review): extraction gap — the matching #endif (original line 303)
// is not visible here.
301 #if !(CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION < 40900)
302 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_consume );
304 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
305 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
306 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
307 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
// Entry point per integral type: exercises an array of atomics so different
// object alignments are covered.
// NOTE(review): extraction gap — the declaration of the array `a` (around
// original line 315) is not visible here.
310 template <typename Integral>
311 void test_atomic_integral()
313 typedef atomics::atomic<Integral> atomic_type;
316 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
317 test_atomic_integral_<atomic_type, Integral>( a[i] );
// Same as test_atomic_integral(), but through a volatile-qualified atomic
// to verify the volatile overload set of atomics::atomic.
// NOTE(review): extraction gap — the declaration of the array `a` is not
// visible here.
320 template <typename Integral>
321 void test_atomic_integral_volatile()
323 typedef atomics::atomic<Integral> volatile atomic_type;
326 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
327 test_atomic_integral_<atomic_type, Integral>( a[i] );
// Exchange/CAS test for atomic<bool> with default (seq_cst) order.
331 template <class AtomicBool>
332 void do_test_atomic_bool( AtomicBool& a )
334 CPPUNIT_ASSERT( a.is_lock_free() );
336 CPPUNIT_ASSERT( a == false );
337 CPPUNIT_ASSERT( a.load() == false );
339 CPPUNIT_ASSERT( a.exchange( true ) == false );
340 CPPUNIT_ASSERT( a.load() == true );
341 CPPUNIT_ASSERT( a.exchange( false ) == true );
342 CPPUNIT_ASSERT( a.load() == false );
// Weak CAS: success leaves `expected` alone; failure overwrites it with
// the current value.
344 bool expected = false;
345 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, true));
346 CPPUNIT_ASSERT( expected == false );
347 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, false));
348 CPPUNIT_ASSERT( expected == true );
349 CPPUNIT_ASSERT( a.load() == true );
// Strong CAS repeats the pattern.
// NOTE(review): extraction gap — the reset of `a` and `expected` between
// the weak and strong sections (original lines 350-353) is not visible.
354 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, true));
355 CPPUNIT_ASSERT( expected == false );
356 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, false));
357 CPPUNIT_ASSERT( expected == true );
359 CPPUNIT_ASSERT( a.load() == true );
361 CPPUNIT_ASSERT( a.exchange( false ) == true );
// Memory-order-parameterized atomic<bool> test; load/store/exchange orders
// are derived from the requested RMW order.
364 template <class AtomicBool>
365 void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
367 const atomics::memory_order oLoad = convert_to_load_order( order );
368 const atomics::memory_order oStore = convert_to_store_order( order );
369 const atomics::memory_order oExchange = convert_to_exchange_order( order );
371 CPPUNIT_ASSERT( a.is_lock_free() );
372 a.store( false, oStore );
373 CPPUNIT_ASSERT( a == false );
374 CPPUNIT_ASSERT( a.load( oLoad ) == false );
376 CPPUNIT_ASSERT( a.exchange( true, oExchange ) == false );
377 CPPUNIT_ASSERT( a.load( oLoad ) == true );
378 CPPUNIT_ASSERT( a.exchange( false, oExchange ) == true );
379 CPPUNIT_ASSERT( a.load( oLoad ) == false );
// Weak CAS with relaxed failure order (must not exceed the success order).
381 bool expected = false;
382 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, true, order, atomics::memory_order_relaxed));
383 CPPUNIT_ASSERT( expected == false );
384 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, false, order, atomics::memory_order_relaxed));
385 CPPUNIT_ASSERT( expected == true );
386 CPPUNIT_ASSERT( a.load( oLoad ) == true );
// Strong CAS after resetting the flag.
// NOTE(review): extraction gap — the `expected = false;` reset between
// original lines 389 and 392 is not visible here.
389 a.store( false, oStore );
392 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, true, order, atomics::memory_order_relaxed));
393 CPPUNIT_ASSERT( expected == false );
394 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, false, order, atomics::memory_order_relaxed));
395 CPPUNIT_ASSERT( expected == true );
397 CPPUNIT_ASSERT( a.load( oLoad ) == true );
399 CPPUNIT_ASSERT( a.exchange( false, oExchange ) == true );
// Memory-order-parameterized test for atomic<void*>: CAS walks the pointer
// between elements of `arr` (whose bytes hold 1..aSize so the pointee value
// identifies the position), then fetch_add/fetch_sub step the pointer one
// byte at a time across the array.
// NOTE(review): extraction gap — the declaration of local `p` (around
// original lines 412-413) is not visible here.
403 template <typename Atomic>
404 void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
406 atomics::memory_order oLoad = convert_to_load_order(order);
407 atomics::memory_order oStore = convert_to_store_order(order);
410 a.store( (void *) arr, oStore );
411 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == 1 );
414 CPPUNIT_ASSERT( a.compare_exchange_weak( p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
415 CPPUNIT_ASSERT( p == arr + 0 );
416 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 1 );
417 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
418 CPPUNIT_ASSERT( p == arr + 5 );
419 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 6 );
421 CPPUNIT_ASSERT( a.compare_exchange_strong( p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
422 CPPUNIT_ASSERT( p == arr + 5 );
423 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 6 );
424 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
425 CPPUNIT_ASSERT( p == arr + 3 );
426 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 4 );
428 CPPUNIT_ASSERT( reinterpret_cast<char *>(a.exchange( (void *) arr, order )) == arr + 3 );
429 CPPUNIT_ASSERT( reinterpret_cast<char *>(a.load( oLoad )) == arr );
430 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == 1 );
// fetch_add on atomic<void*> advances by bytes here; each step lands on
// the next array element.
432 for ( char i = 1; i < aSize; ++i ) {
433 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i );
434 CPPUNIT_ASSERT( a.fetch_add( 1, order ));
435 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i + 1 );
// And walk back down with fetch_sub.
438 for ( char i = aSize; i > 1; --i ) {
439 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i );
440 CPPUNIT_ASSERT( a.fetch_sub( 1, order ));
441 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i - 1 );
// Driver for atomic<void*> (optionally volatile-qualified): builds a char
// array holding 1..aSize, runs the default-order test inline, then the
// ordered variant for every memory order.
// NOTE(review): extraction gaps — the declarations of `arr`, the atomic `a`,
// local `p`, and the #endif matching the #if below are not visible here.
445 template <bool Volatile>
446 void do_test_atomic_pointer_void()
448 typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type atomic_pointer;
451 const char aSize = sizeof(arr)/sizeof(arr[0]);
452 for ( char i = 0; i < aSize; ++i ) {
453 arr[unsigned(i)] = i + 1;
// is_lock_free() is skipped on 32-bit GCC 4.7.0 due to the link error
// documented below.
459 #if CDS_BUILD_BITS == 32 && !( CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION == 40700 )
460 /* GCC 4.7.0 has an linktime error in 32bit x86 mode:
462 ../tests/test-hdr/misc/cxx11_atomic_class.o: In function `std::__atomic_base<void*>::is_lock_free() const':
463 /usr/local/lib/gcc/x86_64-unknown-linux-gnu/4.7.0/../../../../include/c++/4.7.0/bits/atomic_base.h:719: undefined reference to `__atomic_is_lock_free'
465 ../tests/test-hdr/misc/cxx11_atomic_class.o: In function `std::__atomic_base<void*>::is_lock_free() const volatile':
466 /usr/local/lib/gcc/x86_64-unknown-linux-gnu/4.7.0/../../../../include/c++/4.7.0/bits/atomic_base.h:723: undefined reference to `__atomic_is_lock_free'
469 CPPUNIT_ASSERT( a.is_lock_free() );
// Default-order pass: same CAS and pointer-walk pattern as
// do_test_atomic_pointer_void_ but without explicit orders.
472 a.store( (void *) arr );
473 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == 1 );
476 CPPUNIT_ASSERT( a.compare_exchange_weak( p, (void *)(arr + 5) ));
477 CPPUNIT_ASSERT( p == arr + 0 );
478 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, (void *)(arr + 3) ));
479 CPPUNIT_ASSERT( p == arr + 5 );
481 CPPUNIT_ASSERT( a.compare_exchange_strong( p, (void *)(arr + 3) ));
482 CPPUNIT_ASSERT( p == arr + 5 );
483 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, (void *)(arr + 5) ));
484 CPPUNIT_ASSERT( p == arr + 3 );
486 CPPUNIT_ASSERT( reinterpret_cast<char *>( a.exchange( (void *) arr )) == arr + 3 );
487 CPPUNIT_ASSERT( reinterpret_cast<char *>( a.load()) == arr );
488 CPPUNIT_ASSERT( *reinterpret_cast<char *>( a.load()) == 1 );
490 for ( char i = 1; i < aSize; ++i ) {
491 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i );
492 CPPUNIT_ASSERT( a.fetch_add( 1 ));
493 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i + 1 );
496 for ( char i = aSize; i > 1; --i ) {
497 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i );
498 CPPUNIT_ASSERT( a.fetch_sub( 1 ));
499 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i - 1 );
// Ordered passes, one per memory order.
502 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
503 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_consume );
504 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
505 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
506 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
507 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
// Memory-order-parameterized test for atomic<Integral*>: unlike the void*
// variant, fetch_add/fetch_sub here advance by whole elements (standard
// pointer arithmetic), and loads dereference directly.
// NOTE(review): extraction gap — the declaration of local `p` before the
// first CAS (around original line 521) is not visible here.
510 template <typename Atomic, typename Integral>
511 void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
513 typedef Integral integral_type;
514 atomics::memory_order oLoad = convert_to_load_order(order);
515 atomics::memory_order oStore = convert_to_store_order(order);
518 a.store( arr, oStore );
519 CPPUNIT_ASSERT( *a.load( oLoad ) == 1 );
522 CPPUNIT_ASSERT( a.compare_exchange_weak( p, arr + 5, order, atomics::memory_order_relaxed ));
523 CPPUNIT_ASSERT( p == arr + 0 );
524 CPPUNIT_ASSERT( *p == 1 );
525 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, arr + 3, order, atomics::memory_order_relaxed ));
526 CPPUNIT_ASSERT( p == arr + 5 );
527 CPPUNIT_ASSERT( *p == 6 );
529 CPPUNIT_ASSERT( a.compare_exchange_strong( p, arr + 3, order, atomics::memory_order_relaxed ));
530 CPPUNIT_ASSERT( p == arr + 5 );
531 CPPUNIT_ASSERT( *p == 6 );
532 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, arr + 5, order, atomics::memory_order_relaxed ));
533 CPPUNIT_ASSERT( p == arr + 3 );
534 CPPUNIT_ASSERT( *p == 4 );
536 CPPUNIT_ASSERT( a.exchange( arr, order ) == arr + 3 );
537 CPPUNIT_ASSERT( a.load( oLoad ) == arr );
538 CPPUNIT_ASSERT( *a.load( oLoad ) == 1 );
// Walk the pointer forward one element at a time.
540 for ( integral_type i = 1; i < aSize; ++i ) {
541 integral_type * p = a.load();
542 CPPUNIT_ASSERT( *p == i );
543 CPPUNIT_ASSERT( a.fetch_add( 1, order ) == p );
544 CPPUNIT_ASSERT( *a.load( oLoad ) == i + 1 );
// And back down.
547 for ( integral_type i = aSize; i > 1; --i ) {
548 integral_type * p = a.load();
549 CPPUNIT_ASSERT( *p == i );
550 CPPUNIT_ASSERT( a.fetch_sub( 1, order ) == p );
551 CPPUNIT_ASSERT( *a.load( oLoad ) == i - 1 );
// Driver for atomic<Integral*> (optionally volatile): fills arr with
// 1..aSize, runs a default-order pass inline, then the ordered variant for
// every memory order.
// NOTE(review): extraction gaps — the declaration of the atomic `a`, its
// initial store, and local `p` before the first CAS are not visible here.
555 template <typename Integral, bool Volatile>
556 void test_atomic_pointer_for()
558 typedef Integral integral_type;
559 typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type atomic_pointer;
561 integral_type arr[8];
562 const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
563 for ( integral_type i = 0; i < aSize; ++i ) {
564 arr[size_t(i)] = i + 1;
571 CPPUNIT_ASSERT( *a.load() == 1 );
574 CPPUNIT_ASSERT( a.compare_exchange_weak( p, arr + 5 ));
575 CPPUNIT_ASSERT( p == arr + 0 );
576 CPPUNIT_ASSERT( *p == 1 );
577 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, arr + 3 ));
578 CPPUNIT_ASSERT( p == arr + 5 );
579 CPPUNIT_ASSERT( *p == 6 );
581 CPPUNIT_ASSERT( a.compare_exchange_strong( p, arr + 3 ));
582 CPPUNIT_ASSERT( p == arr + 5 );
583 CPPUNIT_ASSERT( *p == 6 );
584 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, arr + 5 ));
585 CPPUNIT_ASSERT( p == arr + 3 );
586 CPPUNIT_ASSERT( *p == 4 );
588 CPPUNIT_ASSERT( a.exchange( arr ) == arr + 3 );
589 CPPUNIT_ASSERT( a.load() == arr );
590 CPPUNIT_ASSERT( *a.load() == 1 );
// Element-wise pointer walk forward; the _EX variant prints both pointers
// on failure to aid diagnosis.
592 for ( integral_type i = 1; i < aSize; ++i ) {
593 integral_type * p = a.load();
594 CPPUNIT_ASSERT( *p == i );
595 integral_type * pa = a.fetch_add( 1 );
596 CPPUNIT_ASSERT_EX( pa == p, "pa=" << ((uintptr_t) pa) << " p=" << ((uintptr_t) p) );
597 CPPUNIT_ASSERT( *a.load() == i + 1 );
600 for ( integral_type i = aSize; i > 1; --i ) {
601 integral_type * p = a.load();
602 CPPUNIT_ASSERT( *p == i );
603 CPPUNIT_ASSERT( a.fetch_sub( 1 ) == p );
604 CPPUNIT_ASSERT( *a.load() == i - 1 );
// Ordered passes, one per memory order.
607 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
608 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_consume );
609 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
610 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
611 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
612 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
// Suite entry: atomic_flag over an array of 8 flags so several alignments
// are exercised.
616 void test_atomic_flag()
618 // Array to test different alignment
620 atomics::atomic_flag flags[8];
621 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
622 do_test_atomic_flag( flags[i] );
// Suite entry: volatile-qualified atomic_flag, same alignment sweep.
625 void test_atomic_flag_volatile()
627 // Array to test different alignment
629 atomics::atomic_flag volatile flags[8];
630 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
631 do_test_atomic_flag( flags[i] );
// Driver for atomic<bool>: default-order pass plus one pass per memory
// order, over an array for alignment coverage.
// NOTE(review): extraction gap — the declaration of the array `a` (around
// original line 638-639) is not visible here.
634 template <typename AtomicBool>
635 void test_atomic_bool_()
637 // Array to test different alignment
640 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
641 do_test_atomic_bool( a[i] );
643 do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
644 do_test_atomic_bool( a[i], atomics::memory_order_consume );
645 do_test_atomic_bool( a[i], atomics::memory_order_acquire );
646 do_test_atomic_bool( a[i], atomics::memory_order_release );
647 do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
648 do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
// Suite entries: plain and volatile-qualified atomic<bool>.
652 void test_atomic_bool()
654 test_atomic_bool_< atomics::atomic<bool> >();
656 void test_atomic_bool_volatile()
658 test_atomic_bool_< atomics::atomic<bool> volatile >();
// One suite entry per built-in integral type.
661 void test_atomic_char() { test_atomic_integral<char>(); }
662 void test_atomic_signed_char() { test_atomic_integral<signed char>(); }
663 void test_atomic_unsigned_char() { test_atomic_integral<unsigned char>(); }
664 void test_atomic_short_int() { test_atomic_integral<short int>(); }
665 void test_atomic_unsigned_short_int() { test_atomic_integral<unsigned short int>(); }
666 void test_atomic_int() { test_atomic_integral<int>(); }
667 void test_atomic_unsigned_int() { test_atomic_integral<unsigned int>(); }
668 void test_atomic_long() { test_atomic_integral<long>(); }
669 void test_atomic_unsigned_long() { test_atomic_integral<unsigned long>(); }
670 void test_atomic_long_long() { test_atomic_integral<long long>(); }
671 void test_atomic_unsigned_long_long() { test_atomic_integral<unsigned long long>(); }
// Volatile-qualified counterparts of the integral suite entries above.
673 void test_atomic_char_volatile() { test_atomic_integral_volatile<char>(); }
674 void test_atomic_signed_char_volatile() { test_atomic_integral_volatile<signed char>(); }
675 void test_atomic_unsigned_char_volatile() { test_atomic_integral_volatile<unsigned char>(); }
676 void test_atomic_short_int_volatile() { test_atomic_integral_volatile<short int>(); }
677 void test_atomic_unsigned_short_int_volatile() { test_atomic_integral_volatile<unsigned short int>(); }
678 void test_atomic_int_volatile() { test_atomic_integral_volatile<int>(); }
679 void test_atomic_unsigned_int_volatile() { test_atomic_integral_volatile<unsigned int>(); }
680 void test_atomic_long_volatile() { test_atomic_integral_volatile<long>(); }
681 void test_atomic_unsigned_long_volatile() { test_atomic_integral_volatile<unsigned long>(); }
682 void test_atomic_long_long_volatile() { test_atomic_integral_volatile<long long>(); }
683 void test_atomic_unsigned_long_long_volatile() { test_atomic_integral_volatile<unsigned long long>(); }
// Suite entries for pointer atomics: void* plus typed pointers, each in a
// plain and a volatile-qualified flavor.
685 void test_atomic_pointer_void() { do_test_atomic_pointer_void<false>() ;}
686 void test_atomic_pointer_void_volatile(){ do_test_atomic_pointer_void<true>() ;}
688 void test_atomic_pointer_char() { test_atomic_pointer_for<char, false>() ;}
689 void test_atomic_pointer_short() { test_atomic_pointer_for<short int, false>() ;}
690 void test_atomic_pointer_int() { test_atomic_pointer_for<int, false>() ;}
691 void test_atomic_pointer_long() { test_atomic_pointer_for<long, false>() ;}
692 void test_atomic_pointer_long_long() { test_atomic_pointer_for<long long, false>() ;}
694 void test_atomic_pointer_char_volatile() { test_atomic_pointer_for<char, true>() ;}
// NOTE(review): the volatile short entry uses `unsigned short int` while
// the non-volatile one uses `short int` — possibly unintentional; confirm
// against upstream before "fixing".
695 void test_atomic_pointer_short_volatile() { test_atomic_pointer_for<unsigned short int, true>() ;}
696 void test_atomic_pointer_int_volatile() { test_atomic_pointer_for<int, true>() ;}
697 void test_atomic_pointer_long_volatile() { test_atomic_pointer_for<long, true>() ;}
698 void test_atomic_pointer_long_long_volatile() { test_atomic_pointer_for<long long, true>() ;}
// CppUnit suite registration: every test_* member above is listed here so
// the runner discovers it. Keep this list in sync when adding entries.
701 CPPUNIT_TEST_SUITE(cxx11_atomic_class)
702 CPPUNIT_TEST( test_atomic_flag )
703 CPPUNIT_TEST( test_atomic_flag_volatile )
705 CPPUNIT_TEST( test_atomic_bool )
706 CPPUNIT_TEST( test_atomic_char )
707 CPPUNIT_TEST( test_atomic_signed_char)
708 CPPUNIT_TEST( test_atomic_unsigned_char)
709 CPPUNIT_TEST( test_atomic_short_int)
710 CPPUNIT_TEST( test_atomic_unsigned_short_int)
711 CPPUNIT_TEST( test_atomic_int)
712 CPPUNIT_TEST( test_atomic_unsigned_int)
713 CPPUNIT_TEST( test_atomic_long)
714 CPPUNIT_TEST( test_atomic_unsigned_long)
715 CPPUNIT_TEST( test_atomic_long_long)
716 CPPUNIT_TEST( test_atomic_unsigned_long_long)
718 CPPUNIT_TEST( test_atomic_bool_volatile )
719 CPPUNIT_TEST( test_atomic_char_volatile )
720 CPPUNIT_TEST( test_atomic_signed_char_volatile)
721 CPPUNIT_TEST( test_atomic_unsigned_char_volatile)
722 CPPUNIT_TEST( test_atomic_short_int_volatile)
723 CPPUNIT_TEST( test_atomic_unsigned_short_int_volatile)
724 CPPUNIT_TEST( test_atomic_int_volatile)
725 CPPUNIT_TEST( test_atomic_unsigned_int_volatile)
726 CPPUNIT_TEST( test_atomic_long_volatile)
727 CPPUNIT_TEST( test_atomic_unsigned_long_volatile)
728 CPPUNIT_TEST( test_atomic_long_long_volatile)
729 CPPUNIT_TEST( test_atomic_unsigned_long_long_volatile)
731 CPPUNIT_TEST( test_atomic_pointer_void)
732 CPPUNIT_TEST( test_atomic_pointer_void_volatile)
734 CPPUNIT_TEST( test_atomic_pointer_char)
735 CPPUNIT_TEST( test_atomic_pointer_short)
736 CPPUNIT_TEST( test_atomic_pointer_int)
737 CPPUNIT_TEST( test_atomic_pointer_long)
738 CPPUNIT_TEST( test_atomic_pointer_long_long)
740 CPPUNIT_TEST( test_atomic_pointer_char_volatile)
741 CPPUNIT_TEST( test_atomic_pointer_short_volatile)
742 CPPUNIT_TEST( test_atomic_pointer_int_volatile)
743 CPPUNIT_TEST( test_atomic_pointer_long_volatile)
744 CPPUNIT_TEST( test_atomic_pointer_long_long_volatile)
746 CPPUNIT_TEST_SUITE_END()
// Register the suite with the global CppUnit registry (class lives in
// namespace misc, per this qualified name).
750 CPPUNIT_TEST_SUITE_REGISTRATION(misc::cxx11_atomic_class);