3 #include "cppunit/cppunit_proxy.h"
5 #include <cds/cxx11_atomic.h>
7 #ifndef CDS_USE_BOOST_ATOMIC
8 // Skip this test for boost.atomic
9 // Boost.atomic has no free atomic functions implementation.
11 #include "misc/cxx11_convert_memory_order.h"
// Test fixture exercising the C++11 *free* atomic functions
// (atomics::atomic_load/store/exchange/..., declared in cds/cxx11_atomic.h).
class cxx11_atomic_func: public CppUnitMini::TestCase
17 template <typename AtomicFlag>
18 void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
20 atomics::memory_order mo_clear = convert_to_store_order(order);
22 f.clear( convert_to_store_order(order) );
24 for ( int i = 0; i < 5; ++i ) {
25 CPPUNIT_ASSERT( !atomics::atomic_flag_test_and_set_explicit( &f, order ));
26 CPPUNIT_ASSERT( atomics::atomic_flag_test_and_set_explicit( &f, order ) );
27 atomics::atomic_flag_clear_explicit( &f, mo_clear );
28 atomics::atomic_flag_clear_explicit( &f, mo_clear );
30 //CPPUNIT_ASSERT( f.m_Flag == 0 );
// Exercises the atomic_flag free functions with the default (seq_cst) order,
// then re-runs the explicit-order variant for every memory order.
template <typename AtomicFlag>
void do_test_atomic_flag( AtomicFlag& f )
// NOTE(review): this excerpt appears to omit an initial "f.clear();" here -
// verify against the full file; the first assertion below relies on it
for ( int i = 0; i < 5; ++i ) {
    //CPPUNIT_ASSERT( f.m_Flag == 0 );
    // flag is clear: first test-and-set returns the previous value (false)
    CPPUNIT_ASSERT( !atomics::atomic_flag_test_and_set( &f ));
    //CPPUNIT_ASSERT( f.m_Flag != 0 );
    // second test-and-set sees the flag already set
    CPPUNIT_ASSERT( atomics::atomic_flag_test_and_set( &f ) );
    //CPPUNIT_ASSERT( f.m_Flag != 0 );
    // double clear is harmless - flag stays clear
    atomics::atomic_flag_clear(&f);
    //CPPUNIT_ASSERT( f.m_Flag == 0 );
    atomics::atomic_flag_clear(&f);
//CPPUNIT_ASSERT( f.m_Flag == 0 );

// repeat the whole protocol for every explicit memory order
do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
do_test_atomic_flag_mo( f, atomics::memory_order_consume );
do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
do_test_atomic_flag_mo( f, atomics::memory_order_release );
do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
// Generic test of the free atomic functions with default (seq_cst) order:
// lock-free check, store/load, exchange, and both weak and strong
// compare-exchange, walking the value 42 through each byte of the type.
template <class Atomic, typename Integral>
void do_test_atomic_type(Atomic& a )
typedef Integral integral_type;

// start from a known value
CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
atomics::atomic_store( &a, (integral_type) 0 );
CPPUNIT_ASSERT( a == 0 );
CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );

// exchange returns the previous value
for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
    integral_type n = integral_type(42) << (nByte * 8);
    CPPUNIT_ASSERT( atomics::atomic_exchange( &a, n ) == 0 );
    CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );
    CPPUNIT_ASSERT( atomics::atomic_exchange( &a, (integral_type) 0 ) == n );
    CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );

// weak CAS: success leaves 'expected' untouched; failure writes the
// actual value into 'expected'
integral_type prev = atomics::atomic_load( &a );
for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
    integral_type n = integral_type(42) << (nByte * 8);
    integral_type expected = prev;

    CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &expected, n));
    CPPUNIT_ASSERT( expected == prev );
    CPPUNIT_ASSERT( expected != n );
    CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &expected, n) );
    CPPUNIT_ASSERT( expected == n );

    // NOTE(review): this excerpt appears to omit "prev = n;" before the next
    // iteration - verify against the full file (the loop logic requires it)
    CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );

atomics::atomic_store( &a, (integral_type) 0 );

// strong CAS: same protocol as the weak form
prev = atomics::atomic_load( &a );
for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
    integral_type n = integral_type(42) << (nByte * 8);
    integral_type expected = prev;

    CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &expected, n));
    CPPUNIT_ASSERT( expected == prev );
    CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &expected, n));
    CPPUNIT_ASSERT( expected == n );

    // NOTE(review): as above, "prev = n;" appears to be omitted here
    CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );

// 'prev' tracked the last stored value, so the final exchange sees it
CPPUNIT_ASSERT( atomics::atomic_exchange( &a, (integral_type) 0 ) == prev );
110 template <class Atomic, typename Integral>
111 void do_test_atomic_integral( Atomic& a )
113 do_test_atomic_type< Atomic, Integral >( a );
115 typedef Integral integral_type;
118 atomics::atomic_store( &a, (integral_type) 0 );
121 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
123 integral_type prev = atomics::atomic_load( &a );
124 integral_type n = integral_type(42) << (nByte * 8);
126 CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, n) == prev);
130 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
132 integral_type prev = atomics::atomic_load( &a );
133 integral_type n = integral_type(42) << ((nByte - 1) * 8);
135 CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, n) == prev);
137 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );
139 // fetch_or / fetc_xor / fetch_and
140 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
142 integral_type prev = atomics::atomic_load( &a );
143 integral_type mask = 1 << nBit;
145 CPPUNIT_ASSERT( atomics::atomic_fetch_or( &a, mask ) == prev );
146 prev = atomics::atomic_load( &a );
147 CPPUNIT_ASSERT( ( prev & mask) == mask);
149 CPPUNIT_ASSERT( atomics::atomic_fetch_and( &a, (integral_type) ~mask ) == prev );
150 prev = atomics::atomic_load( &a );
151 CPPUNIT_ASSERT_EX( integral_type(prev & mask) == integral_type(0), "prev=" << std::hex << prev << ", mask=" << std::hex << mask);
153 CPPUNIT_ASSERT( atomics::atomic_fetch_xor( &a, mask ) == prev );
154 prev = atomics::atomic_load( &a );
155 CPPUNIT_ASSERT( ( prev & mask) == mask);
157 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == (integral_type) -1 );
// Same generic test as do_test_atomic_type(Atomic&) but using the *_explicit
// free functions with the given memory order (load/store orders derived via
// convert_to_load_order / convert_to_store_order).
template <class Atomic, typename Integral>
void do_test_atomic_type( Atomic& a, atomics::memory_order order )
typedef Integral integral_type;

const atomics::memory_order oLoad = convert_to_load_order( order );
const atomics::memory_order oStore = convert_to_store_order( order );

// start from a known value
CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
CPPUNIT_ASSERT( a == 0 );
CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );

// exchange returns the previous value
for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
    integral_type n = integral_type(42) << (nByte * 8);
    CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, n, order ) == 0 );
    CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );
    CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ) == n );
    CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );

// weak CAS: failure writes the actual value into 'expected'
integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
    integral_type n = integral_type(42) << (nByte * 8);
    integral_type expected = prev;

    CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
    CPPUNIT_ASSERT( expected == prev );
    CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
    CPPUNIT_ASSERT( expected == n );

    // NOTE(review): this excerpt appears to omit "prev = n;" before the next
    // iteration - verify against the full file (the loop logic requires it)
    CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );

atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );

// strong CAS: same protocol as the weak form
prev = atomics::atomic_load_explicit( &a, oLoad );
for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
    integral_type n = integral_type(42) << (nByte * 8);
    integral_type expected = prev;

    CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
    CPPUNIT_ASSERT( expected == prev );
    CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
    CPPUNIT_ASSERT( expected == n );

    // NOTE(review): as above, "prev = n;" appears to be omitted here
    CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );

// 'prev' tracked the last stored value, so the final exchange sees it
CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ) == prev );
214 template <class Atomic, typename Integral>
215 void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
217 do_test_atomic_type< Atomic, Integral >( a, order );
218 typedef Integral integral_type;
220 const atomics::memory_order oLoad = convert_to_load_order( order );
221 const atomics::memory_order oStore = convert_to_store_order( order );
224 atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
227 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
229 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
230 integral_type n = integral_type(42) << (nByte * 8);
232 CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, n, order) == prev);
236 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
238 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
239 integral_type n = integral_type(42) << ((nByte - 1) * 8);
241 CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, n, order ) == prev);
243 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );
245 // fetch_or / fetc_xor / fetch_and
246 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
248 integral_type prev = atomics::atomic_load_explicit( &a, oLoad ) ;;
249 integral_type mask = 1 << nBit;
251 CPPUNIT_ASSERT( atomics::atomic_fetch_or_explicit( &a, mask, order ) == prev );
252 prev = atomics::atomic_load_explicit( &a, oLoad );
253 CPPUNIT_ASSERT( ( prev & mask) == mask);
255 CPPUNIT_ASSERT( atomics::atomic_fetch_and_explicit( &a, (integral_type) ~mask, order ) == prev );
256 prev = atomics::atomic_load_explicit( &a, oLoad );
257 CPPUNIT_ASSERT( ( prev & mask) == 0);
259 CPPUNIT_ASSERT( atomics::atomic_fetch_xor_explicit( &a, mask, order ) == prev );
260 prev = atomics::atomic_load_explicit( &a, oLoad );
261 CPPUNIT_ASSERT( ( prev & mask) == mask);
263 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == (integral_type) -1 );
266 template <typename Atomic, typename Integral>
267 void test_atomic_integral_(Atomic& a)
269 do_test_atomic_integral<Atomic, Integral >(a);
271 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
272 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_consume );
273 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
274 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
275 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
276 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
// Entry point for one integral type: runs the suite on an array of
// non-volatile atomic<Integral>.
template <typename Integral>
void test_atomic_integral()
typedef atomics::atomic<Integral> atomic_type;
// NOTE(review): this excerpt appears to omit the declaration of the array
// 'a' (presumably "atomic_type a[8];") - verify against the full file
for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
    test_atomic_integral_<atomic_type, Integral>( a[i] );
// Entry point for one integral type: runs the suite on an array of
// volatile atomic<Integral>.
template <typename Integral>
void test_atomic_integral_volatile()
typedef atomics::atomic<Integral> volatile atomic_type;
// NOTE(review): this excerpt appears to omit the declaration of the array
// 'a' (presumably "atomic_type a[8];") - verify against the full file
for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
    test_atomic_integral_<atomic_type, Integral>( a[i] );
// Tests the free atomic functions on atomic<bool> with default (seq_cst)
// order: store/load, exchange, weak and strong compare-exchange.
template <class AtomicBool>
void do_test_atomic_bool(AtomicBool& a)
CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
atomics::atomic_store( &a, false );
CPPUNIT_ASSERT( a == false );
CPPUNIT_ASSERT( atomics::atomic_load( &a ) == false );

// exchange returns the previous value
CPPUNIT_ASSERT( atomics::atomic_exchange( &a, true ) == false );
CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );
CPPUNIT_ASSERT( atomics::atomic_exchange( &a, false ) == true );
CPPUNIT_ASSERT( atomics::atomic_load( &a ) == false );

// weak CAS: success leaves 'expected' untouched; failure stores the actual value
bool expected = false;
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &expected, true));
CPPUNIT_ASSERT( expected == false );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &expected, false));
CPPUNIT_ASSERT( expected == true );
CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );

atomics::atomic_store( &a, false );

// strong CAS: same protocol
// NOTE(review): this excerpt appears to omit "expected = false;" here -
// verify against the full file (the next assertion relies on it)
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &expected, true));
CPPUNIT_ASSERT( expected == false );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &expected, false));
CPPUNIT_ASSERT( expected == true );

CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );

CPPUNIT_ASSERT( atomics::atomic_exchange( &a, false ) == true );
// Tests the *_explicit free atomic functions on atomic<bool> with the given
// memory order (load/store/exchange orders derived via convert_* helpers).
template <class AtomicBool>
void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
const atomics::memory_order oLoad = convert_to_load_order( order );
const atomics::memory_order oStore = convert_to_store_order( order );
const atomics::memory_order oExchange = convert_to_exchange_order( order );

CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
atomics::atomic_store_explicit( &a, false, oStore );
CPPUNIT_ASSERT( a == false );
CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == false );

// exchange returns the previous value
CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, true, oExchange ) == false );
CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );
CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, false, oExchange ) == true );
CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == false );

// weak CAS: failure stores the actual value into 'expected'
bool expected = false;
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
CPPUNIT_ASSERT( expected == false );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
CPPUNIT_ASSERT( expected == true );
CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );

// NOTE(review): inconsistency - this is the only non-explicit call in this
// explicit-order test; presumably atomic_store_explicit(&a, false, oStore)
// was intended. Behavior is equivalent (seq_cst is at least as strong).
atomics::atomic_store( &a, false );

// strong CAS: same protocol
// NOTE(review): this excerpt appears to omit "expected = false;" here -
// verify against the full file
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
CPPUNIT_ASSERT( expected == false );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
CPPUNIT_ASSERT( expected == true );

CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );

CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, false, oExchange ) == true );
// Pointer tests for atomic<Integral *> with an explicit memory order: CAS
// moves the pointer within arr, and fetch_add/fetch_sub step it one element
// at a time. arr[i] holds i+1, so *p identifies the current position.
template <typename Atomic, typename Integral>
void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
typedef Integral integral_type;
atomics::memory_order oLoad = convert_to_load_order(order);
atomics::memory_order oStore = convert_to_store_order(order);

atomics::atomic_store_explicit( &a, arr, oStore );
CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == 1 );

// NOTE(review): this excerpt appears to omit the declaration
// "integral_type * p = arr;" used below - verify against the full file
// weak CAS: success keeps p; failure rewrites p with the current pointer
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 0 );
CPPUNIT_ASSERT( *p == 1 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 5 );
CPPUNIT_ASSERT( *p == 6 );

// strong CAS: same protocol
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 5 );
CPPUNIT_ASSERT( *p == 6 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 3 );
CPPUNIT_ASSERT( *p == 4 );

CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, arr, order ) == arr + 3 );
CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == arr );
CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == 1 );

// fetch_add walks forward one element per step
for ( integral_type i = 1; i < aSize; ++i ) {
    integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
    CPPUNIT_ASSERT( *p == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, 1, order ) == p );
    CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == i + 1 );

// fetch_sub walks back to the start
for ( integral_type i = aSize; i > 1; --i ) {
    integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
    CPPUNIT_ASSERT( *p == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, 1, order ) == p );
    CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == i - 1 );
// Builds arr (arr[i] == i+1) and runs the pointer tests on an
// atomic<Integral *> (volatile-qualified when Volatile is true): first with
// the implicit seq_cst order, then per explicit order via
// test_atomic_pointer_for_.
template <typename Integral, bool Volatile>
void test_atomic_pointer_for()
typedef Integral integral_type;
typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type atomic_pointer;

integral_type arr[8];
const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
for ( integral_type i = 0; i < aSize; ++i ) {
    arr[size_t(i)] = i + 1;

// NOTE(review): this excerpt appears to omit the declarations of 'a'
// (atomic_pointer) and "integral_type * p = arr;" - verify against the
// full file
atomics::atomic_store( &a, arr );
CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == 1 );

// weak CAS: success keeps p; failure rewrites p with the current pointer
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &p, arr + 5 ));
CPPUNIT_ASSERT( p == arr + 0 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &p, arr + 3 ));
CPPUNIT_ASSERT( p == arr + 5 );

// strong CAS: same protocol
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &p, arr + 3 ));
CPPUNIT_ASSERT( p == arr + 5 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &p, arr + 5 ));
CPPUNIT_ASSERT( p == arr + 3 );

CPPUNIT_ASSERT( atomics::atomic_exchange( &a, arr ) == arr + 3 );
CPPUNIT_ASSERT( atomics::atomic_load( &a ) == arr );
CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == 1 );

// fetch_add walks forward one element per step
for ( integral_type i = 1; i < aSize; ++i ) {
    integral_type * p = atomics::atomic_load( &a );
    CPPUNIT_ASSERT( *p == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, 1 ) == p );
    CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == i + 1 );

// fetch_sub walks back to the start
for ( integral_type i = aSize; i > 1; --i ) {
    integral_type * p = atomics::atomic_load( &a );
    CPPUNIT_ASSERT( *p == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, 1 ) == p );
    CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == i - 1 );

// repeat with every explicit memory order
test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_consume );
test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
// Pointer tests for atomic<void *> with an explicit memory order. Loaded
// pointers are cast back to char * to check the position (arr[i] == i+1);
// fetch_add/fetch_sub on void * advance by one byte here.
template <typename Atomic>
void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
atomics::memory_order oLoad = convert_to_load_order(order);
atomics::memory_order oStore = convert_to_store_order(order);

atomics::atomic_store_explicit( &a, (void *) arr, oStore );
CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == 1 );

// NOTE(review): this excerpt appears to omit "char * p = arr;" used below -
// verify against the full file
// weak CAS: success keeps p; failure rewrites p with the current pointer
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 0 );
CPPUNIT_ASSERT( *p == 1 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 5 );
CPPUNIT_ASSERT( *p == 6 );

// strong CAS: same protocol
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 5 );
CPPUNIT_ASSERT( *p == 6 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
CPPUNIT_ASSERT( p == arr + 3 );
CPPUNIT_ASSERT( *p == 4 );

CPPUNIT_ASSERT( reinterpret_cast<char *>(atomics::atomic_exchange_explicit( &a, (void *) arr, order )) == arr + 3 );
CPPUNIT_ASSERT( reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == arr );
CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == 1 );

// fetch_add walks forward one byte per step
for ( char i = 1; i < aSize; ++i ) {
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, 1, order ));
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i + 1 );

// fetch_sub walks back to the start
for ( char i = aSize; i > 1; --i ) {
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, 1, order ));
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i - 1 );
// Builds a char array (arr[i] == i+1) and runs the atomic<void *> tests
// (volatile-qualified when Volatile is true): first with implicit seq_cst
// order, then per explicit order via do_test_atomic_pointer_void_.
template <bool Volatile>
void do_test_atomic_pointer_void()
typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type atomic_pointer;
// NOTE(review): this excerpt appears to omit the declarations of 'arr'
// (presumably "char arr[8];"), 'a' (atomic_pointer) and "char * p = arr;" -
// verify against the full file
const char aSize = sizeof(arr)/sizeof(arr[0]);
for ( char i = 0; i < aSize; ++i ) {
    arr[unsigned(i)] = i + 1;

atomics::atomic_store( &a, (void *) arr );
CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == 1 );

// weak CAS: success keeps p; failure rewrites p with the current pointer
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 5) ));
CPPUNIT_ASSERT( p == arr + 0 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 3) ));
CPPUNIT_ASSERT( p == arr + 5 );

// strong CAS: same protocol
CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 3) ));
CPPUNIT_ASSERT( p == arr + 5 );
CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 5) ));
CPPUNIT_ASSERT( p == arr + 3 );

CPPUNIT_ASSERT( reinterpret_cast<char *>( atomics::atomic_exchange( &a, (void *) arr )) == arr + 3 );
CPPUNIT_ASSERT( reinterpret_cast<char *>( atomics::atomic_load( &a )) == arr );
CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == 1 );

// fetch_add walks forward one byte per step
for ( char i = 1; i < aSize; ++i ) {
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, 1 ));
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i + 1 );

// fetch_sub walks back to the start
for ( char i = aSize; i > 1; --i ) {
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i );
    CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, 1 ));
    CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i - 1 );

// repeat with every explicit memory order
do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_consume );
do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
564 void test_atomic_flag()
566 atomics::atomic_flag flags[8];
567 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
568 do_test_atomic_flag( flags[i] );
570 void test_atomic_flag_volatile()
572 atomics::atomic_flag volatile flags[8];
573 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
574 do_test_atomic_flag( flags[i] );
// Runs the bool tests over an array of AtomicBool: implicit order first,
// then every explicit memory order.
template <typename AtomicBool>
void test_atomic_bool_()
// NOTE(review): this excerpt appears to omit the declaration of the array
// 'a' (presumably "AtomicBool a[8];") - verify against the full file
for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
    do_test_atomic_bool( a[i] );

    do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
    do_test_atomic_bool( a[i], atomics::memory_order_consume );
    do_test_atomic_bool( a[i], atomics::memory_order_acquire );
    do_test_atomic_bool( a[i], atomics::memory_order_release );
    do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
    do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
593 void test_atomic_bool()
595 test_atomic_bool_<atomics::atomic<bool> >();
597 void test_atomic_bool_volatile()
599 test_atomic_bool_<atomics::atomic<bool> volatile >();
// Per-type entry points: each forwards to the templated integral suite,
// in plain and volatile flavours.
void test_atomic_char() { test_atomic_integral<char>(); }
void test_atomic_char_volatile() { test_atomic_integral_volatile<char>(); }
void test_atomic_signed_char() { test_atomic_integral<signed char>(); }
void test_atomic_signed_char_volatile() { test_atomic_integral_volatile<signed char>(); }
void test_atomic_unsigned_char() { test_atomic_integral<unsigned char>(); }
void test_atomic_unsigned_char_volatile(){ test_atomic_integral_volatile<unsigned char>(); }
void test_atomic_short_int() { test_atomic_integral<short int>(); }
void test_atomic_short_int_volatile() { test_atomic_integral_volatile<short int>(); }
void test_atomic_unsigned_short_int() { test_atomic_integral<unsigned short int>(); }
void test_atomic_unsigned_short_int_volatile() { test_atomic_integral_volatile<unsigned short int>(); }
void test_atomic_int() { test_atomic_integral<int>(); }
void test_atomic_int_volatile() { test_atomic_integral_volatile<int>(); }
void test_atomic_unsigned_int() { test_atomic_integral<unsigned int>(); }
void test_atomic_unsigned_int_volatile(){ test_atomic_integral_volatile<unsigned int>(); }
void test_atomic_long() { test_atomic_integral<long>(); }
void test_atomic_long_volatile() { test_atomic_integral_volatile<long>(); }
void test_atomic_unsigned_long() { test_atomic_integral<unsigned long>(); }
void test_atomic_unsigned_long_volatile() { test_atomic_integral_volatile<unsigned long>(); }
void test_atomic_long_long() { test_atomic_integral<long long>(); }
void test_atomic_long_long_volatile() { test_atomic_integral_volatile<long long>(); }
void test_atomic_unsigned_long_long() { test_atomic_integral<unsigned long long>(); }
void test_atomic_unsigned_long_long_volatile() { test_atomic_integral_volatile<unsigned long long>(); }
// Pointer entry points: void * and typed-pointer suites, plain and volatile.
void test_atomic_pointer_void() { do_test_atomic_pointer_void<false>() ;}
void test_atomic_pointer_void_volatile(){ do_test_atomic_pointer_void<true>() ;}

void test_atomic_pointer_char() { test_atomic_pointer_for<char, false>() ;}
void test_atomic_pointer_short() { test_atomic_pointer_for<short int, false>() ;}
void test_atomic_pointer_int() { test_atomic_pointer_for<int, false>() ;}
void test_atomic_pointer_long() { test_atomic_pointer_for<long, false>() ;}
void test_atomic_pointer_long_long() { test_atomic_pointer_for<long long, false>() ;}

void test_atomic_pointer_char_volatile() { test_atomic_pointer_for<char, true>() ;}
// NOTE(review): inconsistency - the volatile "short" wrapper tests
// unsigned short int while the non-volatile one tests short int
void test_atomic_pointer_short_volatile() { test_atomic_pointer_for<unsigned short int, true>() ;}
void test_atomic_pointer_int_volatile() { test_atomic_pointer_for<int, true>() ;}
void test_atomic_pointer_long_volatile() { test_atomic_pointer_for<long, true>() ;}
void test_atomic_pointer_long_long_volatile() { test_atomic_pointer_for<long long, true>() ;}
640 void test_atomic_fence()
642 atomics::atomic_thread_fence(atomics::memory_order_relaxed );
643 atomics::atomic_thread_fence(atomics::memory_order_consume );
644 atomics::atomic_thread_fence(atomics::memory_order_acquire );
645 atomics::atomic_thread_fence(atomics::memory_order_release );
646 atomics::atomic_thread_fence(atomics::memory_order_acq_rel );
647 atomics::atomic_thread_fence(atomics::memory_order_seq_cst );
649 atomics::atomic_signal_fence(atomics::memory_order_relaxed );
650 atomics::atomic_signal_fence(atomics::memory_order_consume );
651 atomics::atomic_signal_fence(atomics::memory_order_acquire );
652 atomics::atomic_signal_fence(atomics::memory_order_release );
653 atomics::atomic_signal_fence(atomics::memory_order_acq_rel );
654 atomics::atomic_signal_fence(atomics::memory_order_seq_cst );
// CppUnitMini suite declaration - the list mirrors the test methods above.
CPPUNIT_TEST_SUITE(cxx11_atomic_func)
    // atomic_flag
    CPPUNIT_TEST( test_atomic_flag )
    CPPUNIT_TEST( test_atomic_flag_volatile )

    // bool and integral types, non-volatile
    CPPUNIT_TEST( test_atomic_bool )
    CPPUNIT_TEST( test_atomic_char )
    CPPUNIT_TEST( test_atomic_signed_char)
    CPPUNIT_TEST( test_atomic_unsigned_char)
    CPPUNIT_TEST( test_atomic_short_int)
    CPPUNIT_TEST( test_atomic_unsigned_short_int)
    CPPUNIT_TEST( test_atomic_int)
    CPPUNIT_TEST( test_atomic_unsigned_int)
    CPPUNIT_TEST( test_atomic_long)
    CPPUNIT_TEST( test_atomic_unsigned_long)
    CPPUNIT_TEST( test_atomic_long_long)
    CPPUNIT_TEST( test_atomic_unsigned_long_long)

    // bool and integral types, volatile
    CPPUNIT_TEST( test_atomic_bool_volatile )
    CPPUNIT_TEST( test_atomic_char_volatile )
    CPPUNIT_TEST( test_atomic_signed_char_volatile)
    CPPUNIT_TEST( test_atomic_unsigned_char_volatile)
    CPPUNIT_TEST( test_atomic_short_int_volatile)
    CPPUNIT_TEST( test_atomic_unsigned_short_int_volatile)
    CPPUNIT_TEST( test_atomic_int_volatile)
    CPPUNIT_TEST( test_atomic_unsigned_int_volatile)
    CPPUNIT_TEST( test_atomic_long_volatile)
    CPPUNIT_TEST( test_atomic_unsigned_long_volatile)
    CPPUNIT_TEST( test_atomic_long_long_volatile)
    CPPUNIT_TEST( test_atomic_unsigned_long_long_volatile)

    // pointers
    CPPUNIT_TEST( test_atomic_pointer_void)
    CPPUNIT_TEST( test_atomic_pointer_void_volatile)

    CPPUNIT_TEST( test_atomic_pointer_char)
    CPPUNIT_TEST( test_atomic_pointer_short)
    CPPUNIT_TEST( test_atomic_pointer_int)
    CPPUNIT_TEST( test_atomic_pointer_long)
    CPPUNIT_TEST( test_atomic_pointer_long_long)

    CPPUNIT_TEST( test_atomic_pointer_char_volatile)
    CPPUNIT_TEST( test_atomic_pointer_short_volatile)
    CPPUNIT_TEST( test_atomic_pointer_int_volatile)
    CPPUNIT_TEST( test_atomic_pointer_long_volatile)
    CPPUNIT_TEST( test_atomic_pointer_long_long_volatile)

    // fences
    CPPUNIT_TEST( test_atomic_fence)

CPPUNIT_TEST_SUITE_END()
// Register the fixture with the CppUnitMini runner (class lives in namespace misc).
CPPUNIT_TEST_SUITE_REGISTRATION(misc::cxx11_atomic_func);
711 #endif // #ifndef CDS_USE_BOOST_ATOMIC