3 #include "cppunit/cppunit_proxy.h"
5 //#define CDS_USE_BOOST_ATOMIC
6 #include <cds/cxx11_atomic.h>
8 #include "misc/cxx11_convert_memory_order.h"
11 class cxx11_atomic_class: public CppUnitMini::TestCase
// Exercises an atomic flag with an explicit memory order: on a clear flag the
// first test_and_set() must return false, the second true.
13 template <typename AtomicFlag>
14 void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
// Clearing must use a store-compatible order derived from the requested one.
16 atomics::memory_order mo_clear = convert_to_store_order(order);
// NOTE(review): mo_clear is presumably consumed by an f.clear( mo_clear ) call
// on loop lines not visible in this listing -- confirm against the full source.
17 for ( int i = 0; i < 5; ++i ) {
18 CPPUNIT_ASSERT( !f.test_and_set( order ));
19 CPPUNIT_ASSERT( f.test_and_set( order ) );
// Checks atomic_flag with default (seq_cst) ordering, then re-runs the
// order-specific variant once per supported memory order.
24 template <typename AtomicFlag>
25 void do_test_atomic_flag( AtomicFlag& f)
29 for ( int i = 0; i < 5; ++i ) {
30 CPPUNIT_ASSERT( !f.test_and_set());
31 CPPUNIT_ASSERT( f.test_and_set() );
35 do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
36 do_test_atomic_flag_mo( f, atomics::memory_order_consume );
37 do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
38 do_test_atomic_flag_mo( f, atomics::memory_order_release );
39 do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
40 do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
// Core check for an integral atomic with default ordering: store/load,
// exchange, and weak/strong compare_exchange. The probe value (42 shifted into
// each byte position) exercises every byte of the representation.
43 template <class Atomic, typename Integral>
44 void do_test_atomic_type(Atomic& a)
46 typedef Integral integral_type;
48 CPPUNIT_ASSERT( a.is_lock_free() );
49 a.store( (integral_type) 0 );
50 CPPUNIT_ASSERT( a == 0 );
51 CPPUNIT_ASSERT( a.load() == 0 );
// exchange() must return the previous value and make the new one visible.
53 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
54 integral_type n = integral_type(42) << (nByte * 8);
55 CPPUNIT_ASSERT( a.exchange( n ) == 0 );
56 CPPUNIT_ASSERT( a == n );
57 CPPUNIT_ASSERT( a.exchange( (integral_type) 0 ) == n );
58 CPPUNIT_ASSERT( a.load() == 0 );
// compare_exchange: success leaves `expected` unchanged; failure writes the
// current value back into `expected`.
// NOTE(review): the standard allows compare_exchange_weak to fail spuriously,
// so asserting its success is strictly nonportable (fine on x86 in practice).
61 integral_type prev = a.load();
62 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
63 integral_type n = integral_type(42) << (nByte * 8);
64 integral_type expected = prev;
66 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, n));
67 CPPUNIT_ASSERT( expected == prev );
68 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, n));
69 CPPUNIT_ASSERT( expected == n );
72 CPPUNIT_ASSERT( a == n );
// NOTE(review): `prev` is presumably advanced inside the loop on lines not
// visible in this listing -- confirm against the full source.
75 a = (integral_type) 0;
// Same contract for compare_exchange_strong (no spurious failure allowed).
78 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
79 integral_type n = integral_type(42) << (nByte * 8);
80 integral_type expected = prev;
82 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, n));
83 CPPUNIT_ASSERT( expected == prev );
84 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, n));
85 CPPUNIT_ASSERT( expected == n );
88 CPPUNIT_ASSERT( a.load() == n );
91 CPPUNIT_ASSERT( a.exchange( (integral_type) 0 ) == prev );
// Extends do_test_atomic_type with the integral-only operations under default
// ordering: fetch_add/fetch_sub, fetch_or/fetch_and/fetch_xor, and the
// operator forms (+=, -=, |=, &=, ^=), probing every byte / bit position.
94 template <class Atomic, typename Integral>
95 void do_test_atomic_integral(Atomic& a)
97 do_test_atomic_type< Atomic, Integral >(a);
99 typedef Integral integral_type;
102 a.store( (integral_type) 0 );
// fetch_add returns the pre-increment value.
105 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
107 integral_type prev = a.load();
108 integral_type n = integral_type(42) << (nByte * 8);
110 CPPUNIT_ASSERT( a.fetch_add(n) == prev);
// fetch_sub walks the bytes back down to zero.
114 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
116 integral_type prev = a.load();
117 integral_type n = integral_type(42) << ((nByte - 1) * 8);
119 CPPUNIT_ASSERT( a.fetch_sub(n) == prev);
121 CPPUNIT_ASSERT( a.load() == 0 );
123 // fetch_or / fetch_xor / fetch_and
124 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
126 integral_type prev = a.load();
// BUGFIX: was `1 << nBit` -- for integral types wider than int this shifts an
// int by up to 63 bits (undefined behavior) and yields the wrong mask. Use the
// same form as the operator loop below (line 170).
127 integral_type mask = integral_type(1) << nBit;
129 CPPUNIT_ASSERT( a.fetch_or( mask ) == prev );
131 CPPUNIT_ASSERT( ( prev & mask) == mask);
133 CPPUNIT_ASSERT( a.fetch_and( (integral_type) ~mask ) == prev );
135 CPPUNIT_ASSERT( integral_type(prev & mask) == integral_type(0));
137 CPPUNIT_ASSERT( a.fetch_xor( mask ) == prev );
139 CPPUNIT_ASSERT( integral_type( prev & mask) == mask);
// The net or/and/xor sequence leaves every bit set.
141 CPPUNIT_ASSERT( a.load() == (integral_type) -1 );
// Operator forms must mirror the fetch_xxx results.
145 a = (integral_type) 0;
148 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
150 integral_type prev = a;
151 integral_type n = integral_type(42) << (nByte * 8);
153 CPPUNIT_ASSERT( (a += n) == (prev + n));
157 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
159 integral_type prev = a;
160 integral_type n = integral_type(42) << ((nByte - 1) * 8);
162 CPPUNIT_ASSERT( (a -= n) == prev - n );
164 CPPUNIT_ASSERT( a.load() == 0 );
167 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
169 integral_type prev = a;
170 integral_type mask = integral_type(1) << nBit;
172 CPPUNIT_ASSERT( (a |= mask ) == (prev | mask ));
174 CPPUNIT_ASSERT( ( prev & mask) == mask);
176 CPPUNIT_ASSERT( (a &= (integral_type) ~mask ) == ( prev & (integral_type) ~mask ));
178 CPPUNIT_ASSERT( ( prev & mask) == 0);
180 CPPUNIT_ASSERT( (a ^= mask ) == (prev ^ mask ));
182 CPPUNIT_ASSERT( ( prev & mask) == mask);
184 CPPUNIT_ASSERT( a == (integral_type) -1 );
// Memory-order-parameterized variant of do_test_atomic_type: every load/store/
// RMW uses the requested order (converted to a load- or store-compatible order
// where required), with relaxed as the CAS failure order.
187 template <class Atomic, typename Integral>
188 void do_test_atomic_type( Atomic& a, atomics::memory_order order )
190 typedef Integral integral_type;
192 const atomics::memory_order oLoad = convert_to_load_order( order );
193 const atomics::memory_order oStore = convert_to_store_order( order );
195 CPPUNIT_ASSERT( a.is_lock_free() );
196 a.store((integral_type) 0, oStore );
197 CPPUNIT_ASSERT( a == 0 );
198 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
// exchange() round-trips each byte-position probe value.
200 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
201 integral_type n = integral_type(42) << (nByte * 8);
202 CPPUNIT_ASSERT( a.exchange( n, order ) == 0 );
203 CPPUNIT_ASSERT( a.load( oLoad ) == n );
204 CPPUNIT_ASSERT( a.exchange( (integral_type) 0, order ) == n );
205 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
// Weak CAS: success keeps `expected`; failure writes the current value back.
208 integral_type prev = a.load( oLoad );
209 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
210 integral_type n = integral_type(42) << (nByte * 8);
211 integral_type expected = prev;
213 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, n, order, atomics::memory_order_relaxed));
214 CPPUNIT_ASSERT( expected == prev );
215 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, n, order, atomics::memory_order_relaxed));
216 CPPUNIT_ASSERT( expected == n );
219 CPPUNIT_ASSERT( a.load( oLoad ) == n );
222 a.store( (integral_type) 0, oStore );
// Strong CAS: same contract, no spurious failure allowed.
224 prev = a.load( oLoad );
225 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
226 integral_type n = integral_type(42) << (nByte * 8);
227 integral_type expected = prev;
229 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, n, order, atomics::memory_order_relaxed));
230 CPPUNIT_ASSERT( expected == prev );
231 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, n, order, atomics::memory_order_relaxed));
232 CPPUNIT_ASSERT( expected == n );
235 CPPUNIT_ASSERT( a.load( oLoad ) == n );
238 CPPUNIT_ASSERT( a.exchange( (integral_type) 0, order ) == prev );
// Memory-order-parameterized variant of do_test_atomic_integral: fetch_add /
// fetch_sub and the bitwise fetch_or / fetch_and / fetch_xor operations, all
// issued with the requested order.
241 template <class Atomic, typename Integral>
242 void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
244 do_test_atomic_type< Atomic, Integral >( a, order );
246 typedef Integral integral_type;
248 const atomics::memory_order oLoad = convert_to_load_order( order );
249 const atomics::memory_order oStore = convert_to_store_order( order );
252 a.store( (integral_type) 0, oStore );
// fetch_add returns the pre-increment value.
255 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
257 integral_type prev = a.load( oLoad );
258 integral_type n = integral_type(42) << (nByte * 8);
260 CPPUNIT_ASSERT( a.fetch_add( n, order) == prev);
// fetch_sub walks the bytes back down to zero.
264 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
266 integral_type prev = a.load( oLoad );
267 integral_type n = integral_type(42) << ((nByte - 1) * 8);
269 CPPUNIT_ASSERT( a.fetch_sub( n, order ) == prev);
271 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
273 // fetch_or / fetch_xor / fetch_and
274 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
276 integral_type prev = a.load( oLoad );
// BUGFIX: was `1 << nBit` -- for integral types wider than int this shifts an
// int by up to 63 bits (undefined behavior) and yields the wrong mask.
277 integral_type mask = integral_type(1) << nBit;
279 CPPUNIT_ASSERT( a.fetch_or( mask, order ) == prev );
280 prev = a.load( oLoad );
281 CPPUNIT_ASSERT( ( prev & mask) == mask);
283 CPPUNIT_ASSERT( a.fetch_and( (integral_type) ~mask, order ) == prev );
284 prev = a.load( oLoad );
285 CPPUNIT_ASSERT( ( prev & mask) == 0);
287 CPPUNIT_ASSERT( a.fetch_xor( mask, order ) == prev );
288 prev = a.load( oLoad );
289 CPPUNIT_ASSERT( ( prev & mask) == mask);
// The net or/and/xor sequence leaves every bit set.
291 CPPUNIT_ASSERT( a.load( oLoad ) == (integral_type) -1 );
// Runs the integral-atomic suite on one object: once with default ordering,
// then once per explicit memory order.
296 template <typename Atomic, typename Integral>
297 void test_atomic_integral_(Atomic& a)
299 do_test_atomic_integral<Atomic, Integral >(a);
301 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
302 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_consume );
303 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
304 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
305 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
306 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
// Instantiates the integral suite for one element type over an array of
// atomics, so elements land at different alignments.
309 template <typename Integral>
310 void test_atomic_integral()
312 typedef atomics::atomic<Integral> atomic_type;
// NOTE(review): the declaration of the array `a` (apparently atomic_type[...])
// is on a line not visible in this listing -- confirm against the full source.
315 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
316 test_atomic_integral_<atomic_type, Integral>( a[i] );
// Same as test_atomic_integral but through a volatile-qualified atomic type.
319 template <typename Integral>
320 void test_atomic_integral_volatile()
322 typedef atomics::atomic<Integral> volatile atomic_type;
// NOTE(review): the declaration of the array `a` is on a line not visible in
// this listing -- confirm against the full source.
325 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
326 test_atomic_integral_<atomic_type, Integral>( a[i] );
// atomic<bool> check with default ordering: exchange and weak/strong CAS
// flipping between false and true.
330 template <class AtomicBool>
331 void do_test_atomic_bool( AtomicBool& a )
333 CPPUNIT_ASSERT( a.is_lock_free() );
// NOTE(review): an a.store( false ) presumably precedes these asserts on a
// line not visible in this listing -- confirm.
335 CPPUNIT_ASSERT( a == false );
336 CPPUNIT_ASSERT( a.load() == false );
338 CPPUNIT_ASSERT( a.exchange( true ) == false );
339 CPPUNIT_ASSERT( a.load() == true );
340 CPPUNIT_ASSERT( a.exchange( false ) == true );
341 CPPUNIT_ASSERT( a.load() == false );
// Weak CAS: success keeps `expected`; failure writes the current value back.
343 bool expected = false;
344 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, true));
345 CPPUNIT_ASSERT( expected == false );
346 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, false));
347 CPPUNIT_ASSERT( expected == true );
348 CPPUNIT_ASSERT( a.load() == true );
// NOTE(review): lines resetting `a` and `expected` to false before the strong
// CAS block are not visible in this listing -- confirm against the full source.
353 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, true));
354 CPPUNIT_ASSERT( expected == false );
355 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, false));
356 CPPUNIT_ASSERT( expected == true );
358 CPPUNIT_ASSERT( a.load() == true );
360 CPPUNIT_ASSERT( a.exchange( false ) == true );
// Memory-order-parameterized atomic<bool> check; load/store/exchange orders
// are derived from the requested order, CAS failure order is relaxed.
363 template <class AtomicBool>
364 void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
366 const atomics::memory_order oLoad = convert_to_load_order( order );
367 const atomics::memory_order oStore = convert_to_store_order( order );
368 const atomics::memory_order oExchange = convert_to_exchange_order( order );
370 CPPUNIT_ASSERT( a.is_lock_free() );
371 a.store( false, oStore );
372 CPPUNIT_ASSERT( a == false );
373 CPPUNIT_ASSERT( a.load( oLoad ) == false );
375 CPPUNIT_ASSERT( a.exchange( true, oExchange ) == false );
376 CPPUNIT_ASSERT( a.load( oLoad ) == true );
377 CPPUNIT_ASSERT( a.exchange( false, oExchange ) == true );
378 CPPUNIT_ASSERT( a.load( oLoad ) == false );
// Weak CAS: success keeps `expected`; failure writes the current value back.
380 bool expected = false;
381 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, true, order, atomics::memory_order_relaxed));
382 CPPUNIT_ASSERT( expected == false );
383 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, false, order, atomics::memory_order_relaxed));
384 CPPUNIT_ASSERT( expected == true );
385 CPPUNIT_ASSERT( a.load( oLoad ) == true );
388 a.store( false, oStore );
// NOTE(review): `expected` is presumably reset to false on a line not visible
// in this listing before the strong CAS below -- confirm.
391 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, true, order, atomics::memory_order_relaxed));
392 CPPUNIT_ASSERT( expected == false );
393 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, false, order, atomics::memory_order_relaxed));
394 CPPUNIT_ASSERT( expected == true );
396 CPPUNIT_ASSERT( a.load( oLoad ) == true );
398 CPPUNIT_ASSERT( a.exchange( false, oExchange ) == true );
// atomic<void*> check under an explicit memory order, using a char array as
// the pointed-to storage. arr[i] holds i+1, so dereferencing the loaded
// pointer verifies which element the atomic currently points at.
402 template <typename Atomic>
403 void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
405 atomics::memory_order oLoad = convert_to_load_order(order);
406 atomics::memory_order oStore = convert_to_store_order(order);
409 a.store( (void *) arr, oStore );
410 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == 1 );
// NOTE(review): the declaration of `p` (presumably void * p = a.load(...)) is
// on a line not visible in this listing -- confirm against the full source.
413 CPPUNIT_ASSERT( a.compare_exchange_weak( p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
414 CPPUNIT_ASSERT( p == arr + 0 );
415 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 1 );
416 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
417 CPPUNIT_ASSERT( p == arr + 5 );
418 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 6 );
420 CPPUNIT_ASSERT( a.compare_exchange_strong( p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
421 CPPUNIT_ASSERT( p == arr + 5 );
422 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 6 );
423 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
424 CPPUNIT_ASSERT( p == arr + 3 );
425 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 4 );
427 CPPUNIT_ASSERT( reinterpret_cast<char *>(a.exchange( (void *) arr, order )) == arr + 3 );
428 CPPUNIT_ASSERT( reinterpret_cast<char *>(a.load( oLoad )) == arr );
429 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == 1 );
// fetch_add / fetch_sub on atomic<void*> step the pointer one byte at a time.
431 for ( char i = 1; i < aSize; ++i ) {
432 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i );
433 CPPUNIT_ASSERT( a.fetch_add( 1, order ));
434 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i + 1 );
437 for ( char i = aSize; i > 1; --i ) {
438 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i );
439 CPPUNIT_ASSERT( a.fetch_sub( 1, order ));
440 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i - 1 );
// Driver for the void*-atomic tests: builds the backing char array, runs the
// default-order checks inline, then the order-specific helper for each order.
// Volatile selects a volatile-qualified atomic via the add_volatile trait.
444 template <bool Volatile>
445 void do_test_atomic_pointer_void()
447 typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type atomic_pointer;
// NOTE(review): the declarations of `arr` (a char array) and of the atomic
// object `a` are on lines not visible in this listing -- confirm.
450 const char aSize = sizeof(arr)/sizeof(arr[0]);
451 for ( char i = 0; i < aSize; ++i ) {
452 arr[unsigned(i)] = i + 1;
// is_lock_free() is skipped for this GCC 4.7.0 32-bit link bug (see below).
458 #if CDS_BUILD_BITS == 32 && !( CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION == 40700 )
459 /* GCC 4.7.0 has an linktime error in 32bit x86 mode:
461 ../tests/test-hdr/misc/cxx11_atomic_class.o: In function `std::__atomic_base<void*>::is_lock_free() const':
462 /usr/local/lib/gcc/x86_64-unknown-linux-gnu/4.7.0/../../../../include/c++/4.7.0/bits/atomic_base.h:719: undefined reference to `__atomic_is_lock_free'
464 ../tests/test-hdr/misc/cxx11_atomic_class.o: In function `std::__atomic_base<void*>::is_lock_free() const volatile':
465 /usr/local/lib/gcc/x86_64-unknown-linux-gnu/4.7.0/../../../../include/c++/4.7.0/bits/atomic_base.h:723: undefined reference to `__atomic_is_lock_free'
468 CPPUNIT_ASSERT( a.is_lock_free() );
471 a.store( (void *) arr );
472 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == 1 );
// Weak/strong CAS with default ordering, mirroring the _mo helper.
475 CPPUNIT_ASSERT( a.compare_exchange_weak( p, (void *)(arr + 5) ));
476 CPPUNIT_ASSERT( p == arr + 0 );
477 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, (void *)(arr + 3) ));
478 CPPUNIT_ASSERT( p == arr + 5 );
480 CPPUNIT_ASSERT( a.compare_exchange_strong( p, (void *)(arr + 3) ));
481 CPPUNIT_ASSERT( p == arr + 5 );
482 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, (void *)(arr + 5) ));
483 CPPUNIT_ASSERT( p == arr + 3 );
485 CPPUNIT_ASSERT( reinterpret_cast<char *>( a.exchange( (void *) arr )) == arr + 3 );
486 CPPUNIT_ASSERT( reinterpret_cast<char *>( a.load()) == arr );
487 CPPUNIT_ASSERT( *reinterpret_cast<char *>( a.load()) == 1 );
// fetch_add / fetch_sub step the void* one byte at a time across the array.
489 for ( char i = 1; i < aSize; ++i ) {
490 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i );
491 CPPUNIT_ASSERT( a.fetch_add( 1 ));
492 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i + 1 );
495 for ( char i = aSize; i > 1; --i ) {
496 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i );
497 CPPUNIT_ASSERT( a.fetch_sub( 1 ));
498 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i - 1 );
501 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
502 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_consume );
503 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
504 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
505 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
506 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
// Typed-pointer atomic check under an explicit memory order. Unlike the void*
// variant, fetch_add/fetch_sub here advance by whole elements (pointer
// arithmetic), and loads dereference directly without casts.
509 template <typename Atomic, typename Integral>
510 void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
512 typedef Integral integral_type;
513 atomics::memory_order oLoad = convert_to_load_order(order);
514 atomics::memory_order oStore = convert_to_store_order(order);
517 a.store( arr, oStore );
518 CPPUNIT_ASSERT( *a.load( oLoad ) == 1 );
// NOTE(review): the declaration of `p` (presumably integral_type * p =
// a.load(...)) is on a line not visible in this listing -- confirm.
521 CPPUNIT_ASSERT( a.compare_exchange_weak( p, arr + 5, order, atomics::memory_order_relaxed ));
522 CPPUNIT_ASSERT( p == arr + 0 );
523 CPPUNIT_ASSERT( *p == 1 );
524 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, arr + 3, order, atomics::memory_order_relaxed ));
525 CPPUNIT_ASSERT( p == arr + 5 );
526 CPPUNIT_ASSERT( *p == 6 );
528 CPPUNIT_ASSERT( a.compare_exchange_strong( p, arr + 3, order, atomics::memory_order_relaxed ));
529 CPPUNIT_ASSERT( p == arr + 5 );
530 CPPUNIT_ASSERT( *p == 6 );
531 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, arr + 5, order, atomics::memory_order_relaxed ));
532 CPPUNIT_ASSERT( p == arr + 3 );
533 CPPUNIT_ASSERT( *p == 4 );
535 CPPUNIT_ASSERT( a.exchange( arr, order ) == arr + 3 );
536 CPPUNIT_ASSERT( a.load( oLoad ) == arr );
537 CPPUNIT_ASSERT( *a.load( oLoad ) == 1 );
// fetch_add / fetch_sub must return the pre-op pointer and step one element.
539 for ( integral_type i = 1; i < aSize; ++i ) {
540 integral_type * p = a.load();
541 CPPUNIT_ASSERT( *p == i );
542 CPPUNIT_ASSERT( a.fetch_add( 1, order ) == p );
543 CPPUNIT_ASSERT( *a.load( oLoad ) == i + 1 );
546 for ( integral_type i = aSize; i > 1; --i ) {
547 integral_type * p = a.load();
548 CPPUNIT_ASSERT( *p == i );
549 CPPUNIT_ASSERT( a.fetch_sub( 1, order ) == p );
550 CPPUNIT_ASSERT( *a.load( oLoad ) == i - 1 );
// Driver for typed-pointer atomic tests: fills arr[i] = i+1, runs the
// default-order checks inline, then the order-specific helper for each order.
// Volatile selects a volatile-qualified atomic via the add_volatile trait.
554 template <typename Integral, bool Volatile>
555 void test_atomic_pointer_for()
557 typedef Integral integral_type;
558 typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type atomic_pointer;
560 integral_type arr[8];
561 const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
562 for ( integral_type i = 0; i < aSize; ++i ) {
563 arr[size_t(i)] = i + 1;
// NOTE(review): the declarations of the atomic `a` and of `p`, and the initial
// a.store( arr ), are on lines not visible in this listing -- confirm.
570 CPPUNIT_ASSERT( *a.load() == 1 );
573 CPPUNIT_ASSERT( a.compare_exchange_weak( p, arr + 5 ));
574 CPPUNIT_ASSERT( p == arr + 0 );
575 CPPUNIT_ASSERT( *p == 1 );
576 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, arr + 3 ));
577 CPPUNIT_ASSERT( p == arr + 5 );
578 CPPUNIT_ASSERT( *p == 6 );
580 CPPUNIT_ASSERT( a.compare_exchange_strong( p, arr + 3 ));
581 CPPUNIT_ASSERT( p == arr + 5 );
582 CPPUNIT_ASSERT( *p == 6 );
583 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, arr + 5 ));
584 CPPUNIT_ASSERT( p == arr + 3 );
585 CPPUNIT_ASSERT( *p == 4 );
587 CPPUNIT_ASSERT( a.exchange( arr ) == arr + 3 );
588 CPPUNIT_ASSERT( a.load() == arr );
589 CPPUNIT_ASSERT( *a.load() == 1 );
// fetch_add / fetch_sub advance the typed pointer one element at a time.
591 for ( integral_type i = 1; i < aSize; ++i ) {
592 integral_type * p = a.load();
593 CPPUNIT_ASSERT( *p == i );
594 integral_type * pa = a.fetch_add( 1 );
595 CPPUNIT_ASSERT_EX( pa == p, "pa=" << ((uintptr_t) pa) << " p=" << ((uintptr_t) p) );
596 CPPUNIT_ASSERT( *a.load() == i + 1 );
599 for ( integral_type i = aSize; i > 1; --i ) {
600 integral_type * p = a.load();
601 CPPUNIT_ASSERT( *p == i );
602 CPPUNIT_ASSERT( a.fetch_sub( 1 ) == p );
603 CPPUNIT_ASSERT( *a.load() == i - 1 );
606 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
607 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_consume );
608 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
609 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
610 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
611 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
// Entry point: runs the atomic_flag suite over an array of flags so elements
// land at different alignments.
615 void test_atomic_flag()
617 // Array to test different alignment
619 atomics::atomic_flag flags[8];
620 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
621 do_test_atomic_flag( flags[i] );
// Entry point: same as test_atomic_flag but through volatile-qualified flags.
624 void test_atomic_flag_volatile()
626 // Array to test different alignment
628 atomics::atomic_flag volatile flags[8];
629 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
630 do_test_atomic_flag( flags[i] );
// Runs the atomic<bool> suite (default order + each explicit order) over an
// array of atomics to cover different alignments.
633 template <typename AtomicBool>
634 void test_atomic_bool_()
636 // Array to test different alignment
// NOTE(review): the declaration of the array `a` (AtomicBool[...]) is on a
// line not visible in this listing -- confirm against the full source.
639 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
640 do_test_atomic_bool( a[i] );
642 do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
643 do_test_atomic_bool( a[i], atomics::memory_order_consume );
644 do_test_atomic_bool( a[i], atomics::memory_order_acquire );
645 do_test_atomic_bool( a[i], atomics::memory_order_release );
646 do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
647 do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
// Entry points instantiating the bool suite for plain and volatile atomics.
651 void test_atomic_bool()
653 test_atomic_bool_< atomics::atomic<bool> >();
655 void test_atomic_bool_volatile()
657 test_atomic_bool_< atomics::atomic<bool> volatile >();
// One-line entry points: integral suite for each built-in integer type.
660 void test_atomic_char() { test_atomic_integral<char>(); }
661 void test_atomic_signed_char() { test_atomic_integral<signed char>(); }
662 void test_atomic_unsigned_char() { test_atomic_integral<unsigned char>(); }
663 void test_atomic_short_int() { test_atomic_integral<short int>(); }
664 void test_atomic_unsigned_short_int() { test_atomic_integral<unsigned short int>(); }
665 void test_atomic_int() { test_atomic_integral<int>(); }
666 void test_atomic_unsigned_int() { test_atomic_integral<unsigned int>(); }
667 void test_atomic_long() { test_atomic_integral<long>(); }
668 void test_atomic_unsigned_long() { test_atomic_integral<unsigned long>(); }
669 void test_atomic_long_long() { test_atomic_integral<long long>(); }
670 void test_atomic_unsigned_long_long() { test_atomic_integral<unsigned long long>(); }
// One-line entry points: volatile integral suite for each built-in integer type.
672 void test_atomic_char_volatile() { test_atomic_integral_volatile<char>(); }
673 void test_atomic_signed_char_volatile() { test_atomic_integral_volatile<signed char>(); }
674 void test_atomic_unsigned_char_volatile() { test_atomic_integral_volatile<unsigned char>(); }
675 void test_atomic_short_int_volatile() { test_atomic_integral_volatile<short int>(); }
676 void test_atomic_unsigned_short_int_volatile() { test_atomic_integral_volatile<unsigned short int>(); }
677 void test_atomic_int_volatile() { test_atomic_integral_volatile<int>(); }
678 void test_atomic_unsigned_int_volatile() { test_atomic_integral_volatile<unsigned int>(); }
679 void test_atomic_long_volatile() { test_atomic_integral_volatile<long>(); }
680 void test_atomic_unsigned_long_volatile() { test_atomic_integral_volatile<unsigned long>(); }
681 void test_atomic_long_long_volatile() { test_atomic_integral_volatile<long long>(); }
682 void test_atomic_unsigned_long_long_volatile() { test_atomic_integral_volatile<unsigned long long>(); }
// One-line entry points: pointer-atomic suite for void* and typed pointers,
// in plain and volatile flavors.
684 void test_atomic_pointer_void() { do_test_atomic_pointer_void<false>() ;}
685 void test_atomic_pointer_void_volatile(){ do_test_atomic_pointer_void<true>() ;}
687 void test_atomic_pointer_char() { test_atomic_pointer_for<char, false>() ;}
688 void test_atomic_pointer_short() { test_atomic_pointer_for<short int, false>() ;}
689 void test_atomic_pointer_int() { test_atomic_pointer_for<int, false>() ;}
690 void test_atomic_pointer_long() { test_atomic_pointer_for<long, false>() ;}
691 void test_atomic_pointer_long_long() { test_atomic_pointer_for<long long, false>() ;}
693 void test_atomic_pointer_char_volatile() { test_atomic_pointer_for<char, true>() ;}
// NOTE(review): the volatile "short" wrapper below instantiates
// <unsigned short int> while the non-volatile one uses <short int>; this looks
// like a copy-paste inconsistency -- confirm which instantiation was intended.
694 void test_atomic_pointer_short_volatile() { test_atomic_pointer_for<unsigned short int, true>() ;}
695 void test_atomic_pointer_int_volatile() { test_atomic_pointer_for<int, true>() ;}
696 void test_atomic_pointer_long_volatile() { test_atomic_pointer_for<long, true>() ;}
697 void test_atomic_pointer_long_long_volatile() { test_atomic_pointer_for<long long, true>() ;}
// CppUnit wiring: registers every entry point above in the test suite, then
// registers the suite itself (the enclosing namespace `misc` braces are on
// lines not visible in this listing).
700 CPPUNIT_TEST_SUITE(cxx11_atomic_class)
701 CPPUNIT_TEST( test_atomic_flag )
702 CPPUNIT_TEST( test_atomic_flag_volatile )
704 CPPUNIT_TEST( test_atomic_bool )
705 CPPUNIT_TEST( test_atomic_char )
706 CPPUNIT_TEST( test_atomic_signed_char)
707 CPPUNIT_TEST( test_atomic_unsigned_char)
708 CPPUNIT_TEST( test_atomic_short_int)
709 CPPUNIT_TEST( test_atomic_unsigned_short_int)
710 CPPUNIT_TEST( test_atomic_int)
711 CPPUNIT_TEST( test_atomic_unsigned_int)
712 CPPUNIT_TEST( test_atomic_long)
713 CPPUNIT_TEST( test_atomic_unsigned_long)
714 CPPUNIT_TEST( test_atomic_long_long)
715 CPPUNIT_TEST( test_atomic_unsigned_long_long)
717 CPPUNIT_TEST( test_atomic_bool_volatile )
718 CPPUNIT_TEST( test_atomic_char_volatile )
719 CPPUNIT_TEST( test_atomic_signed_char_volatile)
720 CPPUNIT_TEST( test_atomic_unsigned_char_volatile)
721 CPPUNIT_TEST( test_atomic_short_int_volatile)
722 CPPUNIT_TEST( test_atomic_unsigned_short_int_volatile)
723 CPPUNIT_TEST( test_atomic_int_volatile)
724 CPPUNIT_TEST( test_atomic_unsigned_int_volatile)
725 CPPUNIT_TEST( test_atomic_long_volatile)
726 CPPUNIT_TEST( test_atomic_unsigned_long_volatile)
727 CPPUNIT_TEST( test_atomic_long_long_volatile)
728 CPPUNIT_TEST( test_atomic_unsigned_long_long_volatile)
730 CPPUNIT_TEST( test_atomic_pointer_void)
731 CPPUNIT_TEST( test_atomic_pointer_void_volatile)
733 CPPUNIT_TEST( test_atomic_pointer_char)
734 CPPUNIT_TEST( test_atomic_pointer_short)
735 CPPUNIT_TEST( test_atomic_pointer_int)
736 CPPUNIT_TEST( test_atomic_pointer_long)
737 CPPUNIT_TEST( test_atomic_pointer_long_long)
739 CPPUNIT_TEST( test_atomic_pointer_char_volatile)
740 CPPUNIT_TEST( test_atomic_pointer_short_volatile)
741 CPPUNIT_TEST( test_atomic_pointer_int_volatile)
742 CPPUNIT_TEST( test_atomic_pointer_long_volatile)
743 CPPUNIT_TEST( test_atomic_pointer_long_long_volatile)
745 CPPUNIT_TEST_SUITE_END()
749 CPPUNIT_TEST_SUITE_REGISTRATION(misc::cxx11_atomic_class);