2 This file is a part of libcds - Concurrent Data Structures library
4 (C) Copyright Maxim Khizhinsky (libcds.dev@gmail.com) 2006-2016
6 Source code repo: http://github.com/khizmax/libcds/
7 Download: http://sourceforge.net/projects/libcds/files/
9 Redistribution and use in source and binary forms, with or without
10 modification, are permitted provided that the following conditions are met:
12 * Redistributions of source code must retain the above copyright notice, this
13 list of conditions and the following disclaimer.
15 * Redistributions in binary form must reproduce the above copyright notice,
16 this list of conditions and the following disclaimer in the documentation
17 and/or other materials provided with the distribution.
19 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23 FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "cppunit/cppunit_proxy.h"
33 #include <cds/algo/atomic.h>
35 #include "misc/cxx11_convert_memory_order.h"
38 class cxx11_atomic_class: public CppUnitMini::TestCase
40 template <typename AtomicFlag>
41 void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
43 atomics::memory_order mo_clear = convert_to_store_order(order);
44 for ( int i = 0; i < 5; ++i ) {
45 CPPUNIT_ASSERT( !f.test_and_set( order ));
46 CPPUNIT_ASSERT( f.test_and_set( order ) );
51 template <typename AtomicFlag>
52 void do_test_atomic_flag( AtomicFlag& f)
56 for ( int i = 0; i < 5; ++i ) {
57 CPPUNIT_ASSERT( !f.test_and_set());
58 CPPUNIT_ASSERT( f.test_and_set() );
62 do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
63 //do_test_atomic_flag_mo( f, atomics::memory_order_consume );
64 do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
65 do_test_atomic_flag_mo( f, atomics::memory_order_release );
66 do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
67 do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
template <class Atomic, typename Integral>
// Checks store/load, exchange and weak/strong CAS on an integral atomic,
// walking a 42 pattern through each byte of the value. Default ordering.
void do_test_atomic_type( Atomic& a )
{
    typedef Integral integral_type;

    CPPUNIT_ASSERT( a.is_lock_free() );
    a.store( (integral_type) 0 );
    CPPUNIT_ASSERT( a == 0 );
    CPPUNIT_ASSERT( a.load() == 0 );

    // exchange: move the pattern through every byte position
    for ( size_t byteIdx = 0; byteIdx < sizeof(Integral); ++byteIdx ) {
        integral_type pattern = integral_type(42) << (byteIdx * 8);
        CPPUNIT_ASSERT( a.exchange( pattern ) == 0 );
        CPPUNIT_ASSERT( a == pattern );
        CPPUNIT_ASSERT( a.exchange( (integral_type) 0 ) == pattern );
        CPPUNIT_ASSERT( a.load() == 0 );
    }

    // compare_exchange_weak: success updates the atomic, failure updates expected
    integral_type prev = a.load();
    for ( size_t byteIdx = 0; byteIdx < sizeof(Integral); ++byteIdx ) {
        integral_type pattern = integral_type(42) << (byteIdx * 8);
        integral_type expected = prev;

        CPPUNIT_ASSERT( a.compare_exchange_weak( expected, pattern ));
        CPPUNIT_ASSERT( expected == prev );
        CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, pattern ));
        CPPUNIT_ASSERT( expected == pattern );

        prev = pattern;
        CPPUNIT_ASSERT( a == pattern );
    }

    a = (integral_type) 0;

    // compare_exchange_strong: same protocol, no spurious failure allowed
    prev = a.load();
    for ( size_t byteIdx = 0; byteIdx < sizeof(Integral); ++byteIdx ) {
        integral_type pattern = integral_type(42) << (byteIdx * 8);
        integral_type expected = prev;

        CPPUNIT_ASSERT( a.compare_exchange_strong( expected, pattern ));
        CPPUNIT_ASSERT( expected == prev );
        CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, pattern ));
        CPPUNIT_ASSERT( expected == pattern );

        prev = pattern;
        CPPUNIT_ASSERT( a.load() == pattern );
    }

    CPPUNIT_ASSERT( a.exchange( (integral_type) 0 ) == prev );
}
121 template <class Atomic, typename Integral>
122 void do_test_atomic_integral(Atomic& a)
124 do_test_atomic_type< Atomic, Integral >(a);
126 typedef Integral integral_type;
129 a.store( (integral_type) 0 );
132 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
134 integral_type prev = a.load();
135 integral_type n = integral_type(42) << (nByte * 8);
137 CPPUNIT_ASSERT( a.fetch_add(n) == prev);
141 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
143 integral_type prev = a.load();
144 integral_type n = integral_type(42) << ((nByte - 1) * 8);
146 CPPUNIT_ASSERT( a.fetch_sub(n) == prev);
148 CPPUNIT_ASSERT( a.load() == 0 );
150 // fetch_or / fetc_xor / fetch_and
151 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
153 integral_type prev = a.load() ;;
154 integral_type mask = 1 << nBit;
156 CPPUNIT_ASSERT( a.fetch_or( mask ) == prev );
158 CPPUNIT_ASSERT( ( prev & mask) == mask);
160 CPPUNIT_ASSERT( a.fetch_and( (integral_type) ~mask ) == prev );
162 CPPUNIT_ASSERT( integral_type(prev & mask) == integral_type(0));
164 CPPUNIT_ASSERT( a.fetch_xor( mask ) == prev );
166 CPPUNIT_ASSERT( integral_type( prev & mask) == mask);
168 CPPUNIT_ASSERT( a.load() == (integral_type) -1 );
172 a = (integral_type) 0;
175 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
177 integral_type prev = a;
178 integral_type n = integral_type(42) << (nByte * 8);
180 CPPUNIT_ASSERT( (a += n) == (prev + n));
184 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
186 integral_type prev = a;
187 integral_type n = integral_type(42) << ((nByte - 1) * 8);
189 CPPUNIT_ASSERT( (a -= n) == prev - n );
191 CPPUNIT_ASSERT( a.load() == 0 );
194 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
196 integral_type prev = a;
197 integral_type mask = integral_type(1) << nBit;
199 CPPUNIT_ASSERT( (a |= mask ) == (prev | mask ));
201 CPPUNIT_ASSERT( ( prev & mask) == mask);
203 CPPUNIT_ASSERT( (a &= (integral_type) ~mask ) == ( prev & (integral_type) ~mask ));
205 CPPUNIT_ASSERT( ( prev & mask) == 0);
207 CPPUNIT_ASSERT( (a ^= mask ) == (prev ^ mask ));
209 CPPUNIT_ASSERT( ( prev & mask) == mask);
211 CPPUNIT_ASSERT( a == (integral_type) -1 );
214 template <class Atomic, typename Integral>
215 void do_test_atomic_type( Atomic& a, atomics::memory_order order )
217 typedef Integral integral_type;
219 const atomics::memory_order oLoad = convert_to_load_order( order );
220 const atomics::memory_order oStore = convert_to_store_order( order );
222 CPPUNIT_ASSERT( a.is_lock_free() );
223 a.store((integral_type) 0, oStore );
224 CPPUNIT_ASSERT( a == 0 );
225 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
227 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
228 integral_type n = integral_type(42) << (nByte * 8);
229 CPPUNIT_ASSERT( a.exchange( n, order ) == 0 );
230 CPPUNIT_ASSERT( a.load( oLoad ) == n );
231 CPPUNIT_ASSERT( a.exchange( (integral_type) 0, order ) == n );
232 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
235 integral_type prev = a.load( oLoad );
236 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
237 integral_type n = integral_type(42) << (nByte * 8);
238 integral_type expected = prev;
240 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, n, order, atomics::memory_order_relaxed));
241 CPPUNIT_ASSERT( expected == prev );
242 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, n, order, atomics::memory_order_relaxed));
243 CPPUNIT_ASSERT( expected == n );
246 CPPUNIT_ASSERT( a.load( oLoad ) == n );
249 a.store( (integral_type) 0, oStore );
251 prev = a.load( oLoad );
252 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
253 integral_type n = integral_type(42) << (nByte * 8);
254 integral_type expected = prev;
256 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, n, order, atomics::memory_order_relaxed));
257 CPPUNIT_ASSERT( expected == prev );
258 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, n, order, atomics::memory_order_relaxed));
259 CPPUNIT_ASSERT( expected == n );
262 CPPUNIT_ASSERT( a.load( oLoad ) == n );
265 CPPUNIT_ASSERT( a.exchange( (integral_type) 0, order ) == prev );
268 template <class Atomic, typename Integral>
269 void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
271 do_test_atomic_type< Atomic, Integral >( a, order );
273 typedef Integral integral_type;
275 const atomics::memory_order oLoad = convert_to_load_order( order );
276 const atomics::memory_order oStore = convert_to_store_order( order );
279 a.store( (integral_type) 0, oStore );
282 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
284 integral_type prev = a.load( oLoad );
285 integral_type n = integral_type(42) << (nByte * 8);
287 CPPUNIT_ASSERT( a.fetch_add( n, order) == prev);
291 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
293 integral_type prev = a.load( oLoad );
294 integral_type n = integral_type(42) << ((nByte - 1) * 8);
296 CPPUNIT_ASSERT( a.fetch_sub( n, order ) == prev);
298 CPPUNIT_ASSERT( a.load( oLoad ) == 0 );
300 // fetch_or / fetc_xor / fetch_and
301 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
303 integral_type prev = a.load( oLoad ) ;;
304 integral_type mask = 1 << nBit;
306 CPPUNIT_ASSERT( a.fetch_or( mask, order ) == prev );
307 prev = a.load( oLoad );
308 CPPUNIT_ASSERT( ( prev & mask) == mask);
310 CPPUNIT_ASSERT( a.fetch_and( (integral_type) ~mask, order ) == prev );
311 prev = a.load( oLoad );
312 CPPUNIT_ASSERT( ( prev & mask) == 0);
314 CPPUNIT_ASSERT( a.fetch_xor( mask, order ) == prev );
315 prev = a.load( oLoad );
316 CPPUNIT_ASSERT( ( prev & mask) == mask);
318 CPPUNIT_ASSERT( a.load( oLoad ) == (integral_type) -1 );
323 template <typename Atomic, typename Integral>
324 void test_atomic_integral_(Atomic& a)
326 do_test_atomic_integral<Atomic, Integral >(a);
328 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
329 //#if !(CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION < 40900)
330 // do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_consume );
332 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
333 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
334 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
335 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
338 template <typename Integral>
339 void test_atomic_integral()
341 typedef atomics::atomic<Integral> atomic_type;
344 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
345 test_atomic_integral_<atomic_type, Integral>( a[i] );
348 template <typename Integral>
349 void test_atomic_integral_volatile()
351 typedef atomics::atomic<Integral> volatile atomic_type;
354 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
355 test_atomic_integral_<atomic_type, Integral>( a[i] );
359 template <class AtomicBool>
360 void do_test_atomic_bool( AtomicBool& a )
362 CPPUNIT_ASSERT( a.is_lock_free() );
364 CPPUNIT_ASSERT( a == false );
365 CPPUNIT_ASSERT( a.load() == false );
367 CPPUNIT_ASSERT( a.exchange( true ) == false );
368 CPPUNIT_ASSERT( a.load() == true );
369 CPPUNIT_ASSERT( a.exchange( false ) == true );
370 CPPUNIT_ASSERT( a.load() == false );
372 bool expected = false;
373 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, true));
374 CPPUNIT_ASSERT( expected == false );
375 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, false));
376 CPPUNIT_ASSERT( expected == true );
377 CPPUNIT_ASSERT( a.load() == true );
382 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, true));
383 CPPUNIT_ASSERT( expected == false );
384 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, false));
385 CPPUNIT_ASSERT( expected == true );
387 CPPUNIT_ASSERT( a.load() == true );
389 CPPUNIT_ASSERT( a.exchange( false ) == true );
392 template <class AtomicBool>
393 void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
395 const atomics::memory_order oLoad = convert_to_load_order( order );
396 const atomics::memory_order oStore = convert_to_store_order( order );
397 const atomics::memory_order oExchange = convert_to_exchange_order( order );
399 CPPUNIT_ASSERT( a.is_lock_free() );
400 a.store( false, oStore );
401 CPPUNIT_ASSERT( a == false );
402 CPPUNIT_ASSERT( a.load( oLoad ) == false );
404 CPPUNIT_ASSERT( a.exchange( true, oExchange ) == false );
405 CPPUNIT_ASSERT( a.load( oLoad ) == true );
406 CPPUNIT_ASSERT( a.exchange( false, oExchange ) == true );
407 CPPUNIT_ASSERT( a.load( oLoad ) == false );
409 bool expected = false;
410 CPPUNIT_ASSERT( a.compare_exchange_weak( expected, true, order, atomics::memory_order_relaxed));
411 CPPUNIT_ASSERT( expected == false );
412 CPPUNIT_ASSERT( !a.compare_exchange_weak( expected, false, order, atomics::memory_order_relaxed));
413 CPPUNIT_ASSERT( expected == true );
414 CPPUNIT_ASSERT( a.load( oLoad ) == true );
417 a.store( false, oStore );
420 CPPUNIT_ASSERT( a.compare_exchange_strong( expected, true, order, atomics::memory_order_relaxed));
421 CPPUNIT_ASSERT( expected == false );
422 CPPUNIT_ASSERT( !a.compare_exchange_strong( expected, false, order, atomics::memory_order_relaxed));
423 CPPUNIT_ASSERT( expected == true );
425 CPPUNIT_ASSERT( a.load( oLoad ) == true );
427 CPPUNIT_ASSERT( a.exchange( false, oExchange ) == true );
431 template <typename Atomic>
432 void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
434 atomics::memory_order oLoad = convert_to_load_order(order);
435 atomics::memory_order oStore = convert_to_store_order(order);
438 a.store( (void *) arr, oStore );
439 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == 1 );
442 CPPUNIT_ASSERT( a.compare_exchange_weak( p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
443 CPPUNIT_ASSERT( p == arr + 0 );
444 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 1 );
445 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
446 CPPUNIT_ASSERT( p == arr + 5 );
447 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 6 );
449 CPPUNIT_ASSERT( a.compare_exchange_strong( p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
450 CPPUNIT_ASSERT( p == arr + 5 );
451 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 6 );
452 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
453 CPPUNIT_ASSERT( p == arr + 3 );
454 CPPUNIT_ASSERT( *reinterpret_cast<char *>(p) == 4 );
456 CPPUNIT_ASSERT( reinterpret_cast<char *>(a.exchange( (void *) arr, order )) == arr + 3 );
457 CPPUNIT_ASSERT( reinterpret_cast<char *>(a.load( oLoad )) == arr );
458 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == 1 );
460 for ( char i = 1; i < aSize; ++i ) {
461 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i );
462 CPPUNIT_ASSERT( a.fetch_add( 1, order ));
463 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i + 1 );
466 for ( char i = aSize; i > 1; --i ) {
467 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i );
468 CPPUNIT_ASSERT( a.fetch_sub( 1, order ));
469 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load( oLoad )) == i - 1 );
473 template <bool Volatile>
474 void do_test_atomic_pointer_void()
476 typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type atomic_pointer;
479 const char aSize = sizeof(arr)/sizeof(arr[0]);
480 for ( char i = 0; i < aSize; ++i ) {
481 arr[unsigned(i)] = i + 1;
487 #if CDS_BUILD_BITS == 32 && !( CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION == 40700 )
488 /* GCC 4.7.0 has an linktime error in 32bit x86 mode:
490 ../tests/test-hdr/misc/cxx11_atomic_class.o: In function `std::__atomic_base<void*>::is_lock_free() const':
491 /usr/local/lib/gcc/x86_64-unknown-linux-gnu/4.7.0/../../../../include/c++/4.7.0/bits/atomic_base.h:719: undefined reference to `__atomic_is_lock_free'
493 ../tests/test-hdr/misc/cxx11_atomic_class.o: In function `std::__atomic_base<void*>::is_lock_free() const volatile':
494 /usr/local/lib/gcc/x86_64-unknown-linux-gnu/4.7.0/../../../../include/c++/4.7.0/bits/atomic_base.h:723: undefined reference to `__atomic_is_lock_free'
497 CPPUNIT_ASSERT( a.is_lock_free() );
500 a.store( (void *) arr );
501 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == 1 );
504 CPPUNIT_ASSERT( a.compare_exchange_weak( p, (void *)(arr + 5) ));
505 CPPUNIT_ASSERT( p == arr + 0 );
506 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, (void *)(arr + 3) ));
507 CPPUNIT_ASSERT( p == arr + 5 );
509 CPPUNIT_ASSERT( a.compare_exchange_strong( p, (void *)(arr + 3) ));
510 CPPUNIT_ASSERT( p == arr + 5 );
511 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, (void *)(arr + 5) ));
512 CPPUNIT_ASSERT( p == arr + 3 );
514 CPPUNIT_ASSERT( reinterpret_cast<char *>( a.exchange( (void *) arr )) == arr + 3 );
515 CPPUNIT_ASSERT( reinterpret_cast<char *>( a.load()) == arr );
516 CPPUNIT_ASSERT( *reinterpret_cast<char *>( a.load()) == 1 );
518 for ( char i = 1; i < aSize; ++i ) {
519 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i );
520 CPPUNIT_ASSERT( a.fetch_add( 1 ));
521 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i + 1 );
524 for ( char i = aSize; i > 1; --i ) {
525 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i );
526 CPPUNIT_ASSERT( a.fetch_sub( 1 ));
527 CPPUNIT_ASSERT( *reinterpret_cast<char *>(a.load()) == i - 1 );
530 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
531 //do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_consume );
532 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
533 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
534 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
535 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
538 template <typename Atomic, typename Integral>
539 void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
541 typedef Integral integral_type;
542 atomics::memory_order oLoad = convert_to_load_order(order);
543 atomics::memory_order oStore = convert_to_store_order(order);
546 a.store( arr, oStore );
547 CPPUNIT_ASSERT( *a.load( oLoad ) == 1 );
550 CPPUNIT_ASSERT( a.compare_exchange_weak( p, arr + 5, order, atomics::memory_order_relaxed ));
551 CPPUNIT_ASSERT( p == arr + 0 );
552 CPPUNIT_ASSERT( *p == 1 );
553 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, arr + 3, order, atomics::memory_order_relaxed ));
554 CPPUNIT_ASSERT( p == arr + 5 );
555 CPPUNIT_ASSERT( *p == 6 );
557 CPPUNIT_ASSERT( a.compare_exchange_strong( p, arr + 3, order, atomics::memory_order_relaxed ));
558 CPPUNIT_ASSERT( p == arr + 5 );
559 CPPUNIT_ASSERT( *p == 6 );
560 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, arr + 5, order, atomics::memory_order_relaxed ));
561 CPPUNIT_ASSERT( p == arr + 3 );
562 CPPUNIT_ASSERT( *p == 4 );
564 CPPUNIT_ASSERT( a.exchange( arr, order ) == arr + 3 );
565 CPPUNIT_ASSERT( a.load( oLoad ) == arr );
566 CPPUNIT_ASSERT( *a.load( oLoad ) == 1 );
568 for ( integral_type i = 1; i < aSize; ++i ) {
569 integral_type * p = a.load();
570 CPPUNIT_ASSERT( *p == i );
571 CPPUNIT_ASSERT( a.fetch_add( 1, order ) == p );
572 CPPUNIT_ASSERT( *a.load( oLoad ) == i + 1 );
575 for ( integral_type i = aSize; i > 1; --i ) {
576 integral_type * p = a.load();
577 CPPUNIT_ASSERT( *p == i );
578 CPPUNIT_ASSERT( a.fetch_sub( 1, order ) == p );
579 CPPUNIT_ASSERT( *a.load( oLoad ) == i - 1 );
583 template <typename Integral, bool Volatile>
584 void test_atomic_pointer_for()
586 typedef Integral integral_type;
587 typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type atomic_pointer;
589 integral_type arr[8];
590 const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
591 for ( integral_type i = 0; i < aSize; ++i ) {
592 arr[size_t(i)] = i + 1;
599 CPPUNIT_ASSERT( *a.load() == 1 );
602 CPPUNIT_ASSERT( a.compare_exchange_weak( p, arr + 5 ));
603 CPPUNIT_ASSERT( p == arr + 0 );
604 CPPUNIT_ASSERT( *p == 1 );
605 CPPUNIT_ASSERT( !a.compare_exchange_weak( p, arr + 3 ));
606 CPPUNIT_ASSERT( p == arr + 5 );
607 CPPUNIT_ASSERT( *p == 6 );
609 CPPUNIT_ASSERT( a.compare_exchange_strong( p, arr + 3 ));
610 CPPUNIT_ASSERT( p == arr + 5 );
611 CPPUNIT_ASSERT( *p == 6 );
612 CPPUNIT_ASSERT( !a.compare_exchange_strong( p, arr + 5 ));
613 CPPUNIT_ASSERT( p == arr + 3 );
614 CPPUNIT_ASSERT( *p == 4 );
616 CPPUNIT_ASSERT( a.exchange( arr ) == arr + 3 );
617 CPPUNIT_ASSERT( a.load() == arr );
618 CPPUNIT_ASSERT( *a.load() == 1 );
620 for ( integral_type i = 1; i < aSize; ++i ) {
621 integral_type * p = a.load();
622 CPPUNIT_ASSERT( *p == i );
623 integral_type * pa = a.fetch_add( 1 );
624 CPPUNIT_ASSERT_EX( pa == p, "pa=" << ((uintptr_t) pa) << " p=" << ((uintptr_t) p) );
625 CPPUNIT_ASSERT( *a.load() == i + 1 );
628 for ( integral_type i = aSize; i > 1; --i ) {
629 integral_type * p = a.load();
630 CPPUNIT_ASSERT( *p == i );
631 CPPUNIT_ASSERT( a.fetch_sub( 1 ) == p );
632 CPPUNIT_ASSERT( *a.load() == i - 1 );
635 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
636 //test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_consume );
637 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
638 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
639 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
640 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
644 void test_atomic_flag()
646 // Array to test different alignment
648 atomics::atomic_flag flags[8];
649 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
650 do_test_atomic_flag( flags[i] );
653 void test_atomic_flag_volatile()
655 // Array to test different alignment
657 atomics::atomic_flag volatile flags[8];
658 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
659 do_test_atomic_flag( flags[i] );
662 template <typename AtomicBool>
663 void test_atomic_bool_()
665 // Array to test different alignment
668 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
669 do_test_atomic_bool( a[i] );
671 do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
672 //do_test_atomic_bool( a[i], atomics::memory_order_consume );
673 do_test_atomic_bool( a[i], atomics::memory_order_acquire );
674 do_test_atomic_bool( a[i], atomics::memory_order_release );
675 do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
676 do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
680 void test_atomic_bool()
682 test_atomic_bool_< atomics::atomic<bool> >();
684 void test_atomic_bool_volatile()
686 test_atomic_bool_< atomics::atomic<bool> volatile >();
689 void test_atomic_char() { test_atomic_integral<char>(); }
690 void test_atomic_signed_char() { test_atomic_integral<signed char>(); }
691 void test_atomic_unsigned_char() { test_atomic_integral<unsigned char>(); }
692 void test_atomic_short_int() { test_atomic_integral<short int>(); }
693 void test_atomic_unsigned_short_int() { test_atomic_integral<unsigned short int>(); }
694 void test_atomic_int() { test_atomic_integral<int>(); }
695 void test_atomic_unsigned_int() { test_atomic_integral<unsigned int>(); }
696 void test_atomic_long() { test_atomic_integral<long>(); }
697 void test_atomic_unsigned_long() { test_atomic_integral<unsigned long>(); }
698 void test_atomic_long_long() { test_atomic_integral<long long>(); }
699 void test_atomic_unsigned_long_long() { test_atomic_integral<unsigned long long>(); }
701 void test_atomic_char_volatile() { test_atomic_integral_volatile<char>(); }
702 void test_atomic_signed_char_volatile() { test_atomic_integral_volatile<signed char>(); }
703 void test_atomic_unsigned_char_volatile() { test_atomic_integral_volatile<unsigned char>(); }
704 void test_atomic_short_int_volatile() { test_atomic_integral_volatile<short int>(); }
705 void test_atomic_unsigned_short_int_volatile() { test_atomic_integral_volatile<unsigned short int>(); }
706 void test_atomic_int_volatile() { test_atomic_integral_volatile<int>(); }
707 void test_atomic_unsigned_int_volatile() { test_atomic_integral_volatile<unsigned int>(); }
708 void test_atomic_long_volatile() { test_atomic_integral_volatile<long>(); }
709 void test_atomic_unsigned_long_volatile() { test_atomic_integral_volatile<unsigned long>(); }
710 void test_atomic_long_long_volatile() { test_atomic_integral_volatile<long long>(); }
711 void test_atomic_unsigned_long_long_volatile() { test_atomic_integral_volatile<unsigned long long>(); }
713 void test_atomic_pointer_void() { do_test_atomic_pointer_void<false>() ;}
714 void test_atomic_pointer_void_volatile(){ do_test_atomic_pointer_void<true>() ;}
716 void test_atomic_pointer_char() { test_atomic_pointer_for<char, false>() ;}
717 void test_atomic_pointer_short() { test_atomic_pointer_for<short int, false>() ;}
718 void test_atomic_pointer_int() { test_atomic_pointer_for<int, false>() ;}
719 void test_atomic_pointer_long() { test_atomic_pointer_for<long, false>() ;}
720 void test_atomic_pointer_long_long() { test_atomic_pointer_for<long long, false>() ;}
722 void test_atomic_pointer_char_volatile() { test_atomic_pointer_for<char, true>() ;}
723 void test_atomic_pointer_short_volatile() { test_atomic_pointer_for<unsigned short int, true>() ;}
724 void test_atomic_pointer_int_volatile() { test_atomic_pointer_for<int, true>() ;}
725 void test_atomic_pointer_long_volatile() { test_atomic_pointer_for<long, true>() ;}
726 void test_atomic_pointer_long_long_volatile() { test_atomic_pointer_for<long long, true>() ;}
729 CPPUNIT_TEST_SUITE(cxx11_atomic_class)
730 CPPUNIT_TEST( test_atomic_flag )
731 CPPUNIT_TEST( test_atomic_flag_volatile )
733 CPPUNIT_TEST( test_atomic_bool )
734 CPPUNIT_TEST( test_atomic_char )
735 CPPUNIT_TEST( test_atomic_signed_char)
736 CPPUNIT_TEST( test_atomic_unsigned_char)
737 CPPUNIT_TEST( test_atomic_short_int)
738 CPPUNIT_TEST( test_atomic_unsigned_short_int)
739 CPPUNIT_TEST( test_atomic_int)
740 CPPUNIT_TEST( test_atomic_unsigned_int)
741 CPPUNIT_TEST( test_atomic_long)
742 CPPUNIT_TEST( test_atomic_unsigned_long)
743 CPPUNIT_TEST( test_atomic_long_long)
744 CPPUNIT_TEST( test_atomic_unsigned_long_long)
746 CPPUNIT_TEST( test_atomic_bool_volatile )
747 CPPUNIT_TEST( test_atomic_char_volatile )
748 CPPUNIT_TEST( test_atomic_signed_char_volatile)
749 CPPUNIT_TEST( test_atomic_unsigned_char_volatile)
750 CPPUNIT_TEST( test_atomic_short_int_volatile)
751 CPPUNIT_TEST( test_atomic_unsigned_short_int_volatile)
752 CPPUNIT_TEST( test_atomic_int_volatile)
753 CPPUNIT_TEST( test_atomic_unsigned_int_volatile)
754 CPPUNIT_TEST( test_atomic_long_volatile)
755 CPPUNIT_TEST( test_atomic_unsigned_long_volatile)
756 CPPUNIT_TEST( test_atomic_long_long_volatile)
757 CPPUNIT_TEST( test_atomic_unsigned_long_long_volatile)
759 CPPUNIT_TEST( test_atomic_pointer_void)
760 CPPUNIT_TEST( test_atomic_pointer_void_volatile)
762 CPPUNIT_TEST( test_atomic_pointer_char)
763 CPPUNIT_TEST( test_atomic_pointer_short)
764 CPPUNIT_TEST( test_atomic_pointer_int)
765 CPPUNIT_TEST( test_atomic_pointer_long)
766 CPPUNIT_TEST( test_atomic_pointer_long_long)
768 CPPUNIT_TEST( test_atomic_pointer_char_volatile)
769 CPPUNIT_TEST( test_atomic_pointer_short_volatile)
770 CPPUNIT_TEST( test_atomic_pointer_int_volatile)
771 CPPUNIT_TEST( test_atomic_pointer_long_volatile)
772 CPPUNIT_TEST( test_atomic_pointer_long_long_volatile)
774 CPPUNIT_TEST_SUITE_END()
778 CPPUNIT_TEST_SUITE_REGISTRATION(misc::cxx11_atomic_class);