3 #include "cppunit/cppunit_proxy.h"
5 #include <cds/cxx11_atomic.h>
7 #ifndef CDS_USE_BOOST_ATOMIC
8 // Skip this test for boost.atomic
9 // Boost.atomic has no free atomic functions implementation.
11 #include "misc/cxx11_convert_memory_order.h"
15 class cxx11_atomic_func: public CppUnitMini::TestCase
17 template <typename AtomicFlag>
18 void do_test_atomic_flag_mo( AtomicFlag& f, atomics::memory_order order )
20 atomics::memory_order mo_clear = convert_to_store_order(order);
22 f.clear( convert_to_store_order(order) );
24 for ( int i = 0; i < 5; ++i ) {
25 CPPUNIT_ASSERT( !atomics::atomic_flag_test_and_set_explicit( &f, order ));
26 CPPUNIT_ASSERT( atomics::atomic_flag_test_and_set_explicit( &f, order ) );
27 atomics::atomic_flag_clear_explicit( &f, mo_clear );
28 atomics::atomic_flag_clear_explicit( &f, mo_clear );
30 //CPPUNIT_ASSERT( f.m_Flag == 0 );
33 template <typename AtomicFlag>
// Tests the order-less atomic_flag free functions: test-and-set must return
// false on a clear flag and true on an already-set flag; clear is idempotent.
// NOTE(review): original lines 35-37 are elided from this view — presumably
// "{" and an initial f.clear(); confirm against the full source.
34 void do_test_atomic_flag( AtomicFlag& f )
38 for ( int i = 0; i < 5; ++i ) {
39 //CPPUNIT_ASSERT( f.m_Flag == 0 );
40 CPPUNIT_ASSERT( !atomics::atomic_flag_test_and_set( &f ));
41 //CPPUNIT_ASSERT( f.m_Flag != 0 );
42 CPPUNIT_ASSERT( atomics::atomic_flag_test_and_set( &f ) );
43 //CPPUNIT_ASSERT( f.m_Flag != 0 );
44 atomics::atomic_flag_clear(&f);
45 //CPPUNIT_ASSERT( f.m_Flag == 0 );
46 atomics::atomic_flag_clear(&f);
48 //CPPUNIT_ASSERT( f.m_Flag == 0 );
// After the order-less pass, repeat the test with every explicit memory order.
50 do_test_atomic_flag_mo( f, atomics::memory_order_relaxed );
51 do_test_atomic_flag_mo( f, atomics::memory_order_consume );
52 do_test_atomic_flag_mo( f, atomics::memory_order_acquire );
53 do_test_atomic_flag_mo( f, atomics::memory_order_release );
54 do_test_atomic_flag_mo( f, atomics::memory_order_acq_rel );
55 do_test_atomic_flag_mo( f, atomics::memory_order_seq_cst );
58 template <class Atomic, typename Integral>
// Tests the order-less free functions (store/load/exchange/CAS) on an atomic
// integral: a "42" pattern is walked through each byte of the type.
59 void do_test_atomic_type(Atomic& a )
61 typedef Integral integral_type;
63 CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
64 atomics::atomic_store( &a, (integral_type) 0 );
65 CPPUNIT_ASSERT( a == 0 );
66 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );
// exchange: returned value is the previous content
68 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
69 integral_type n = integral_type(42) << (nByte * 8);
70 CPPUNIT_ASSERT( atomics::atomic_exchange( &a, n ) == 0 );
71 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );
72 CPPUNIT_ASSERT( atomics::atomic_exchange( &a, (integral_type) 0 ) == n );
73 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );
// weak CAS: succeeds when expected matches, leaves expected untouched;
// a failing CAS loads the current value into expected.
76 integral_type prev = atomics::atomic_load( &a );
77 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
78 integral_type n = integral_type(42) << (nByte * 8);
79 integral_type expected = prev;
81 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &expected, n));
82 CPPUNIT_ASSERT( expected == prev );
83 CPPUNIT_ASSERT( expected != n );
84 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &expected, n) );
85 CPPUNIT_ASSERT( expected == n );
// NOTE(review): original lines 86-87 are elided from this view; the final
// exchange assert below implies the loop updates prev to n — confirm.
88 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );
// strong CAS: same protocol, no spurious failure allowed
91 atomics::atomic_store( &a, (integral_type) 0 );
93 prev = atomics::atomic_load( &a );
94 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
95 integral_type n = integral_type(42) << (nByte * 8);
96 integral_type expected = prev;
98 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &expected, n));
99 CPPUNIT_ASSERT( expected == prev );
100 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &expected, n));
101 CPPUNIT_ASSERT( expected == n );
// NOTE(review): original lines 102-103 elided — presumably prev = n;
104 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == n );
107 CPPUNIT_ASSERT( atomics::atomic_exchange( &a, (integral_type) 0 ) == prev );
110 template <class Atomic, typename Integral>
111 void do_test_atomic_integral( Atomic& a )
113 do_test_atomic_type< Atomic, Integral >( a );
115 typedef Integral integral_type;
118 atomics::atomic_store( &a, (integral_type) 0 );
121 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
123 integral_type prev = atomics::atomic_load( &a );
124 integral_type n = integral_type(42) << (nByte * 8);
126 CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, n) == prev);
130 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
132 integral_type prev = atomics::atomic_load( &a );
133 integral_type n = integral_type(42) << ((nByte - 1) * 8);
135 CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, n) == prev);
137 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == 0 );
139 // fetch_or / fetc_xor / fetch_and
140 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
142 integral_type prev = atomics::atomic_load( &a );
143 integral_type mask = 1 << nBit;
145 CPPUNIT_ASSERT( atomics::atomic_fetch_or( &a, mask ) == prev );
146 prev = atomics::atomic_load( &a );
147 CPPUNIT_ASSERT( ( prev & mask) == mask);
149 CPPUNIT_ASSERT( atomics::atomic_fetch_and( &a, (integral_type) ~mask ) == prev );
150 prev = atomics::atomic_load( &a );
151 CPPUNIT_ASSERT_EX( integral_type(prev & mask) == integral_type(0), "prev=" << std::hex << prev << ", mask=" << std::hex << mask);
153 CPPUNIT_ASSERT( atomics::atomic_fetch_xor( &a, mask ) == prev );
154 prev = atomics::atomic_load( &a );
155 CPPUNIT_ASSERT( ( prev & mask) == mask);
157 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == (integral_type) -1 );
160 template <class Atomic, typename Integral>
// Same protocol as the order-less do_test_atomic_type, but through the
// *_explicit free functions with the supplied memory order (load/store
// orders derived via convert_to_load_order / convert_to_store_order).
161 void do_test_atomic_type( Atomic& a, atomics::memory_order order )
163 typedef Integral integral_type;
165 const atomics::memory_order oLoad = convert_to_load_order( order );
166 const atomics::memory_order oStore = convert_to_store_order( order );
168 CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
169 atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
170 CPPUNIT_ASSERT( a == 0 );
171 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );
// exchange_explicit returns the previous content
173 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
174 integral_type n = integral_type(42) << (nByte * 8);
175 CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, n, order ) == 0 );
176 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );
177 CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ) == n );
178 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );
// weak CAS with relaxed failure order
181 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
182 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
183 integral_type n = integral_type(42) << (nByte * 8);
184 integral_type expected = prev;
186 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
187 CPPUNIT_ASSERT( expected == prev );
188 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
189 CPPUNIT_ASSERT( expected == n );
// NOTE(review): original lines 190-191 are elided from this view; the final
// exchange assert below implies the loop updates prev to n — confirm.
192 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );
// strong CAS with relaxed failure order
195 atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
197 prev = atomics::atomic_load_explicit( &a, oLoad );
198 for ( size_t nByte = 0; nByte < sizeof(Integral); ++nByte ) {
199 integral_type n = integral_type(42) << (nByte * 8);
200 integral_type expected = prev;
202 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
203 CPPUNIT_ASSERT( expected == prev );
204 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &expected, n, order, atomics::memory_order_relaxed));
205 CPPUNIT_ASSERT( expected == n );
// NOTE(review): original lines 206-207 elided — presumably prev = n;
208 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == n );
211 CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, (integral_type) 0, order ) == prev );
214 template <class Atomic, typename Integral>
215 void do_test_atomic_integral( Atomic& a, atomics::memory_order order )
217 do_test_atomic_type< Atomic, Integral >( a, order );
218 typedef Integral integral_type;
220 const atomics::memory_order oLoad = convert_to_load_order( order );
221 const atomics::memory_order oStore = convert_to_store_order( order );
224 atomics::atomic_store_explicit( &a, (integral_type) 0, oStore );
227 for ( size_t nByte = 0; nByte < sizeof(integral_type); ++nByte )
229 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
230 integral_type n = integral_type(42) << (nByte * 8);
232 CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, n, order) == prev);
236 for ( size_t nByte = sizeof(integral_type); nByte > 0; --nByte )
238 integral_type prev = atomics::atomic_load_explicit( &a, oLoad );
239 integral_type n = integral_type(42) << ((nByte - 1) * 8);
241 CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, n, order ) == prev);
243 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == 0 );
245 // fetch_or / fetc_xor / fetch_and
246 for ( size_t nBit = 0; nBit < sizeof(integral_type) * 8; ++nBit )
248 integral_type prev = atomics::atomic_load_explicit( &a, oLoad ) ;;
249 integral_type mask = 1 << nBit;
251 CPPUNIT_ASSERT( atomics::atomic_fetch_or_explicit( &a, mask, order ) == prev );
252 prev = atomics::atomic_load_explicit( &a, oLoad );
253 CPPUNIT_ASSERT( ( prev & mask) == mask);
255 CPPUNIT_ASSERT( atomics::atomic_fetch_and_explicit( &a, (integral_type) ~mask, order ) == prev );
256 prev = atomics::atomic_load_explicit( &a, oLoad );
257 CPPUNIT_ASSERT( ( prev & mask) == 0);
259 CPPUNIT_ASSERT( atomics::atomic_fetch_xor_explicit( &a, mask, order ) == prev );
260 prev = atomics::atomic_load_explicit( &a, oLoad );
261 CPPUNIT_ASSERT( ( prev & mask) == mask);
263 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == (integral_type) -1 );
266 template <typename Atomic, typename Integral>
267 void test_atomic_integral_(Atomic& a)
269 do_test_atomic_integral<Atomic, Integral >(a);
271 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_relaxed );
272 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_consume );
273 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acquire );
274 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_release );
275 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_acq_rel );
276 do_test_atomic_integral<Atomic, Integral >( a, atomics::memory_order_seq_cst );
279 template <typename Integral>
// Entry point for one integral type: runs the full suite over an array of
// atomic objects.
// NOTE(review): original line 283 is elided from this view — presumably the
// declaration of the array "a" (e.g. atomic_type a[8];); confirm.
280 void test_atomic_integral()
282 typedef atomics::atomic<Integral> atomic_type;
284 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
285 test_atomic_integral_<atomic_type, Integral>( a[i] );
288 template <typename Integral>
// Volatile-qualified flavor of test_atomic_integral.
// NOTE(review): original line 292 is elided — presumably the array
// declaration (e.g. atomic_type a[8];); confirm against the full source.
289 void test_atomic_integral_volatile()
291 typedef atomics::atomic<Integral> volatile atomic_type;
293 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
294 test_atomic_integral_<atomic_type, Integral>( a[i] );
298 template <class AtomicBool>
// Tests the order-less free functions on atomic<bool>:
// store/load, exchange, weak and strong compare-exchange.
299 void do_test_atomic_bool(AtomicBool& a)
301 CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
302 atomics::atomic_store( &a, false );
303 CPPUNIT_ASSERT( a == false );
304 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == false );
// exchange returns the previous value
306 CPPUNIT_ASSERT( atomics::atomic_exchange( &a, true ) == false );
307 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );
308 CPPUNIT_ASSERT( atomics::atomic_exchange( &a, false ) == true );
309 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == false );
// weak CAS: success leaves expected alone, failure loads the current value
311 bool expected = false;
312 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &expected, true));
313 CPPUNIT_ASSERT( expected == false );
314 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &expected, false));
315 CPPUNIT_ASSERT( expected == true );
316 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );
318 atomics::atomic_store( &a, false );
// NOTE(review): original lines 319-320 are elided from this view — the
// assert on line 322 implies expected is reset to false here; confirm.
321 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &expected, true));
322 CPPUNIT_ASSERT( expected == false );
323 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &expected, false));
324 CPPUNIT_ASSERT( expected == true );
326 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == true );
328 CPPUNIT_ASSERT( atomics::atomic_exchange( &a, false ) == true );
331 template <class AtomicBool>
// Explicit-memory-order counterpart of do_test_atomic_bool: same protocol
// through the *_explicit free functions; CAS failure order is relaxed.
332 void do_test_atomic_bool( AtomicBool& a, atomics::memory_order order )
334 const atomics::memory_order oLoad = convert_to_load_order( order );
335 const atomics::memory_order oStore = convert_to_store_order( order );
337 CPPUNIT_ASSERT( atomics::atomic_is_lock_free( &a ) );
338 atomics::atomic_store_explicit( &a, false, oStore );
339 CPPUNIT_ASSERT( a == false );
340 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == false );
342 CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, true, order ) == false );
343 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );
344 CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, false, order ) == true );
345 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == false );
347 bool expected = false;
348 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
349 CPPUNIT_ASSERT( expected == false );
350 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
351 CPPUNIT_ASSERT( expected == true );
352 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );
354 atomics::atomic_store( &a, false );
// NOTE(review): original lines 355-356 are elided from this view — the
// assert on line 358 implies expected is reset to false here; confirm.
357 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &expected, true, order, atomics::memory_order_relaxed));
358 CPPUNIT_ASSERT( expected == false );
359 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &expected, false, order, atomics::memory_order_relaxed));
360 CPPUNIT_ASSERT( expected == true );
362 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == true );
364 CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, false, order ) == true );
367 template <typename Atomic, typename Integral>
// Tests the *_explicit free functions on atomic<Integral*>: store/load,
// weak/strong CAS, exchange, and pointer fetch_add/fetch_sub stepping
// through the caller-supplied array arr (arr[i] == i + 1).
368 void test_atomic_pointer_for_( Atomic& a, Integral * arr, Integral aSize, atomics::memory_order order )
370 typedef Integral integral_type;
371 atomics::memory_order oLoad = convert_to_load_order(order);
372 atomics::memory_order oStore = convert_to_store_order(order);
375 atomics::atomic_store_explicit( &a, arr, oStore );
376 CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == 1 );
// NOTE(review): original lines 377-378 are elided from this view — the
// assert on line 380 implies p is initialized to arr here; confirm.
379 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
380 CPPUNIT_ASSERT( p == arr + 0 );
381 CPPUNIT_ASSERT( *p == 1 );
382 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
383 CPPUNIT_ASSERT( p == arr + 5 );
384 CPPUNIT_ASSERT( *p == 6 );
386 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 3, order, atomics::memory_order_relaxed ));
387 CPPUNIT_ASSERT( p == arr + 5 );
388 CPPUNIT_ASSERT( *p == 6 );
389 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, &p, arr + 5, order, atomics::memory_order_relaxed ));
390 CPPUNIT_ASSERT( p == arr + 3 );
391 CPPUNIT_ASSERT( *p == 4 );
393 CPPUNIT_ASSERT( atomics::atomic_exchange_explicit( &a, arr, order ) == arr + 3 );
394 CPPUNIT_ASSERT( atomics::atomic_load_explicit( &a, oLoad ) == arr );
395 CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == 1 );
// pointer fetch_add walks forward one element at a time...
397 for ( integral_type i = 1; i < aSize; ++i ) {
398 integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
399 CPPUNIT_ASSERT( *p == i );
400 CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, 1, order ) == p );
401 CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == i + 1 );
// ...and fetch_sub walks back to the start
404 for ( integral_type i = aSize; i > 1; --i ) {
405 integral_type * p = atomics::atomic_load_explicit( &a, oLoad );
406 CPPUNIT_ASSERT( *p == i );
407 CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, 1, order ) == p );
408 CPPUNIT_ASSERT( *atomics::atomic_load_explicit( &a, oLoad ) == i - 1 );
412 template <typename Integral, bool Volatile>
// Entry point for atomic<Integral*> (optionally volatile via add_volatile):
// runs the order-less free-function suite over an 8-element array whose
// values are arr[i] == i + 1, then the explicit-order suite for every order.
413 void test_atomic_pointer_for()
415 typedef Integral integral_type;
416 typedef typename add_volatile<atomics::atomic< integral_type *>, Volatile>::type atomic_pointer;
418 integral_type arr[8];
419 const integral_type aSize = sizeof(arr)/sizeof(arr[0]);
420 for ( integral_type i = 0; i < aSize; ++i ) {
421 arr[size_t(i)] = i + 1;
// NOTE(review): original lines 422-426 are elided from this view — they
// presumably close the loop and declare the atomic object "a" and the
// CAS-expected pointer "p"; confirm against the full source.
427 atomics::atomic_store( &a, arr );
428 CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == 1 );
431 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, &p, arr + 5 ));
432 CPPUNIT_ASSERT( p == arr + 0 );
433 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, &p, arr + 3 ));
434 CPPUNIT_ASSERT( p == arr + 5 );
436 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, &p, arr + 3 ));
437 CPPUNIT_ASSERT( p == arr + 5 );
438 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, &p, arr + 5 ));
439 CPPUNIT_ASSERT( p == arr + 3 );
441 CPPUNIT_ASSERT( atomics::atomic_exchange( &a, arr ) == arr + 3 );
442 CPPUNIT_ASSERT( atomics::atomic_load( &a ) == arr );
443 CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == 1 );
// pointer fetch_add walks forward one element at a time...
445 for ( integral_type i = 1; i < aSize; ++i ) {
446 integral_type * p = atomics::atomic_load( &a );
447 CPPUNIT_ASSERT( *p == i );
448 CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, 1 ) == p );
449 CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == i + 1 );
// ...and fetch_sub walks back to the start
452 for ( integral_type i = aSize; i > 1; --i ) {
453 integral_type * p = atomics::atomic_load( &a );
454 CPPUNIT_ASSERT( *p == i );
455 CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, 1 ) == p );
456 CPPUNIT_ASSERT( *atomics::atomic_load( &a ) == i - 1 );
// repeat with the *_explicit functions for every memory order
459 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_relaxed );
460 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_consume );
461 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acquire );
462 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_release );
463 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_acq_rel );
464 test_atomic_pointer_for_( a, arr, aSize, atomics::memory_order_seq_cst );
468 template <typename Atomic>
// Tests the *_explicit free functions on atomic<void*>, using a char array
// as the backing store (arr[i] == i + 1). For void* atomics, fetch_add/
// fetch_sub step by bytes.
469 void do_test_atomic_pointer_void_( Atomic& a, char * arr, char aSize, atomics::memory_order order )
471 atomics::memory_order oLoad = convert_to_load_order(order);
472 atomics::memory_order oStore = convert_to_store_order(order);
475 atomics::atomic_store_explicit( &a, (void *) arr, oStore );
476 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == 1 );
// NOTE(review): original lines 477-478 are elided from this view — the
// assert on line 480 implies p is initialized to arr here; confirm.
479 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
480 CPPUNIT_ASSERT( p == arr + 0 );
481 CPPUNIT_ASSERT( *p == 1 );
482 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
483 CPPUNIT_ASSERT( p == arr + 5 );
484 CPPUNIT_ASSERT( *p == 6 );
486 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 3), order, atomics::memory_order_relaxed ));
487 CPPUNIT_ASSERT( p == arr + 5 );
488 CPPUNIT_ASSERT( *p == 6 );
489 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong_explicit( &a, (void **) &p, (void *)(arr + 5), order, atomics::memory_order_relaxed ));
490 CPPUNIT_ASSERT( p == arr + 3 );
491 CPPUNIT_ASSERT( *p == 4 );
493 CPPUNIT_ASSERT( reinterpret_cast<char *>(atomics::atomic_exchange_explicit( &a, (void *) arr, order )) == arr + 3 );
494 CPPUNIT_ASSERT( reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == arr );
495 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == 1 );
// byte-wise fetch_add forward...
497 for ( char i = 1; i < aSize; ++i ) {
498 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i );
499 CPPUNIT_ASSERT( atomics::atomic_fetch_add_explicit( &a, 1, order ));
500 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i + 1 );
// ...and byte-wise fetch_sub back
503 for ( char i = aSize; i > 1; --i ) {
504 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i );
505 CPPUNIT_ASSERT( atomics::atomic_fetch_sub_explicit( &a, 1, order ));
506 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load_explicit( &a, oLoad )) == i - 1 );
510 template <bool Volatile>
// Entry point for atomic<void*> (optionally volatile): order-less free
// functions first, then the explicit-order suite for every memory order.
511 void do_test_atomic_pointer_void()
513 typedef typename add_volatile<atomics::atomic< void *>, Volatile>::type atomic_pointer;
// NOTE(review): original lines 514-515 are elided from this view —
// presumably the declaration of the char array "arr"; confirm.
516 const char aSize = sizeof(arr)/sizeof(arr[0]);
517 for ( char i = 0; i < aSize; ++i ) {
518 arr[unsigned(i)] = i + 1;
// NOTE(review): original lines 519-523 elided — presumably close the loop
// and declare the atomic object "a" and the expected pointer "p"; confirm.
524 atomics::atomic_store( &a, (void *) arr );
525 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == 1 );
528 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 5) ));
529 CPPUNIT_ASSERT( p == arr + 0 );
530 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_weak( &a, (void **) &p, (void *)(arr + 3) ));
531 CPPUNIT_ASSERT( p == arr + 5 );
533 CPPUNIT_ASSERT( atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 3) ));
534 CPPUNIT_ASSERT( p == arr + 5 );
535 CPPUNIT_ASSERT( !atomics::atomic_compare_exchange_strong( &a, (void **) &p, (void *)(arr + 5) ));
536 CPPUNIT_ASSERT( p == arr + 3 );
538 CPPUNIT_ASSERT( reinterpret_cast<char *>( atomics::atomic_exchange( &a, (void *) arr )) == arr + 3 );
539 CPPUNIT_ASSERT( reinterpret_cast<char *>( atomics::atomic_load( &a )) == arr );
540 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == 1 );
// byte-wise fetch_add forward...
542 for ( char i = 1; i < aSize; ++i ) {
543 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i );
544 CPPUNIT_ASSERT( atomics::atomic_fetch_add( &a, 1 ));
545 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i + 1 );
// ...and byte-wise fetch_sub back
548 for ( char i = aSize; i > 1; --i ) {
549 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i );
550 CPPUNIT_ASSERT( atomics::atomic_fetch_sub( &a, 1 ));
551 CPPUNIT_ASSERT( *reinterpret_cast<char *>(atomics::atomic_load( &a )) == i - 1 );
// repeat with the *_explicit functions for every memory order
554 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_relaxed );
555 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_consume );
556 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acquire );
557 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_release );
558 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_acq_rel );
559 do_test_atomic_pointer_void_( a, arr, aSize, atomics::memory_order_seq_cst );
563 void test_atomic_flag()
565 atomics::atomic_flag flags[8];
566 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
567 do_test_atomic_flag( flags[i] );
569 void test_atomic_flag_volatile()
571 atomics::atomic_flag volatile flags[8];
572 for ( size_t i = 0; i < sizeof(flags)/sizeof(flags[0]); ++i )
573 do_test_atomic_flag( flags[i] );
576 template <typename AtomicBool>
// Driver for atomic<bool>: order-less suite first, then every memory order.
// NOTE(review): original lines 578-579 are elided from this view —
// presumably "{" and the array declaration (e.g. AtomicBool a[8];); confirm.
577 void test_atomic_bool_()
580 for ( size_t i = 0; i < sizeof(a)/sizeof(a[0]); ++i ) {
581 do_test_atomic_bool( a[i] );
583 do_test_atomic_bool( a[i], atomics::memory_order_relaxed );
584 do_test_atomic_bool( a[i], atomics::memory_order_consume );
585 do_test_atomic_bool( a[i], atomics::memory_order_acquire );
586 do_test_atomic_bool( a[i], atomics::memory_order_release );
587 do_test_atomic_bool( a[i], atomics::memory_order_acq_rel );
588 do_test_atomic_bool( a[i], atomics::memory_order_seq_cst );
// CppUnit entries for atomic<bool>, non-volatile and volatile flavors.
592 void test_atomic_bool()
594 test_atomic_bool_<atomics::atomic<bool> >();
596 void test_atomic_bool_volatile()
598 test_atomic_bool_<atomics::atomic<bool> volatile >();
// One-line dispatchers: map every builtin integral type onto the generic
// integral test, in non-volatile and volatile flavors.
601 void test_atomic_char() { test_atomic_integral<char>(); }
602 void test_atomic_char_volatile() { test_atomic_integral_volatile<char>(); }
603 void test_atomic_signed_char() { test_atomic_integral<signed char>(); }
604 void test_atomic_signed_char_volatile() { test_atomic_integral_volatile<signed char>(); }
605 void test_atomic_unsigned_char() { test_atomic_integral<unsigned char>(); }
606 void test_atomic_unsigned_char_volatile(){ test_atomic_integral_volatile<unsigned char>(); }
607 void test_atomic_short_int() { test_atomic_integral<short int>(); }
608 void test_atomic_short_int_volatile() { test_atomic_integral_volatile<short int>(); }
609 void test_atomic_unsigned_short_int() { test_atomic_integral<unsigned short int>(); }
610 void test_atomic_unsigned_short_int_volatile() { test_atomic_integral_volatile<unsigned short int>(); }
611 void test_atomic_int() { test_atomic_integral<int>(); }
612 void test_atomic_int_volatile() { test_atomic_integral_volatile<int>(); }
613 void test_atomic_unsigned_int() { test_atomic_integral<unsigned int>(); }
614 void test_atomic_unsigned_int_volatile(){ test_atomic_integral_volatile<unsigned int>(); }
615 void test_atomic_long() { test_atomic_integral<long>(); }
616 void test_atomic_long_volatile() { test_atomic_integral_volatile<long>(); }
617 void test_atomic_unsigned_long() { test_atomic_integral<unsigned long>(); }
618 void test_atomic_unsigned_long_volatile() { test_atomic_integral_volatile<unsigned long>(); }
619 void test_atomic_long_long() { test_atomic_integral<long long>(); }
620 void test_atomic_long_long_volatile() { test_atomic_integral_volatile<long long>(); }
621 void test_atomic_unsigned_long_long() { test_atomic_integral<unsigned long long>(); }
622 void test_atomic_unsigned_long_long_volatile() { test_atomic_integral_volatile<unsigned long long>(); }
623 //#if CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400
624 // void test_atomic_char16_t() { test_atomic_integral<char16_t>(); }
625 // void test_atomic_char16_t_volatile() { test_atomic_integral_volatile<char16_t>(); }
626 // void test_atomic_char32_t() { test_atomic_integral<char32_t>(); }
627 // void test_atomic_char32_t_volatile() { test_atomic_integral_volatile<char32_t>(); }
629 // void test_atomic_wchar_t()
631 //#if CDS_OS_TYPE != CDS_OS_HPUX
632 // test_atomic_integral<wchar_t>();
635 // void test_atomic_wchar_t_volatile()
637 //#if CDS_OS_TYPE != CDS_OS_HPUX
638 // test_atomic_integral_volatile<wchar_t>();
// Dispatchers for the pointer tests (void* and typed pointers), in
// non-volatile and volatile flavors.
// NOTE(review): the volatile "short" variant tests unsigned short int while
// the non-volatile one tests short int — confirm whether intentional.
642 void test_atomic_pointer_void() { do_test_atomic_pointer_void<false>() ;}
643 void test_atomic_pointer_void_volatile(){ do_test_atomic_pointer_void<true>() ;}
645 void test_atomic_pointer_char() { test_atomic_pointer_for<char, false>() ;}
646 void test_atomic_pointer_short() { test_atomic_pointer_for<short int, false>() ;}
647 void test_atomic_pointer_int() { test_atomic_pointer_for<int, false>() ;}
648 void test_atomic_pointer_long() { test_atomic_pointer_for<long, false>() ;}
649 void test_atomic_pointer_long_long() { test_atomic_pointer_for<long long, false>() ;}
651 void test_atomic_pointer_char_volatile() { test_atomic_pointer_for<char, true>() ;}
652 void test_atomic_pointer_short_volatile() { test_atomic_pointer_for<unsigned short int, true>() ;}
653 void test_atomic_pointer_int_volatile() { test_atomic_pointer_for<int, true>() ;}
654 void test_atomic_pointer_long_volatile() { test_atomic_pointer_for<long, true>() ;}
655 void test_atomic_pointer_long_long_volatile() { test_atomic_pointer_for<long long, true>() ;}
657 void test_atomic_fence()
659 atomics::atomic_thread_fence(atomics::memory_order_relaxed );
660 atomics::atomic_thread_fence(atomics::memory_order_consume );
661 atomics::atomic_thread_fence(atomics::memory_order_acquire );
662 atomics::atomic_thread_fence(atomics::memory_order_release );
663 atomics::atomic_thread_fence(atomics::memory_order_acq_rel );
664 atomics::atomic_thread_fence(atomics::memory_order_seq_cst );
666 atomics::atomic_signal_fence(atomics::memory_order_relaxed );
667 atomics::atomic_signal_fence(atomics::memory_order_consume );
668 atomics::atomic_signal_fence(atomics::memory_order_acquire );
669 atomics::atomic_signal_fence(atomics::memory_order_release );
670 atomics::atomic_signal_fence(atomics::memory_order_acq_rel );
671 atomics::atomic_signal_fence(atomics::memory_order_seq_cst );
// CppUnit suite definition: registers every test method declared above.
// The commented-out entries (char16_t/char32_t/wchar_t) mirror the
// commented-out test methods earlier in the file.
675 CPPUNIT_TEST_SUITE(cxx11_atomic_func)
676 CPPUNIT_TEST( test_atomic_flag )
677 CPPUNIT_TEST( test_atomic_flag_volatile )
679 CPPUNIT_TEST( test_atomic_bool )
680 CPPUNIT_TEST( test_atomic_char )
681 CPPUNIT_TEST( test_atomic_signed_char)
682 CPPUNIT_TEST( test_atomic_unsigned_char)
683 CPPUNIT_TEST( test_atomic_short_int)
684 CPPUNIT_TEST( test_atomic_unsigned_short_int)
685 CPPUNIT_TEST( test_atomic_int)
686 CPPUNIT_TEST( test_atomic_unsigned_int)
687 CPPUNIT_TEST( test_atomic_long)
688 CPPUNIT_TEST( test_atomic_unsigned_long)
689 CPPUNIT_TEST( test_atomic_long_long)
690 CPPUNIT_TEST( test_atomic_unsigned_long_long)
691 //#if CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400
692 //        CPPUNIT_TEST( test_atomic_char16_t )
693 //        CPPUNIT_TEST( test_atomic_char32_t )
695 //        CPPUNIT_TEST( test_atomic_wchar_t)
697 CPPUNIT_TEST( test_atomic_bool_volatile )
698 CPPUNIT_TEST( test_atomic_char_volatile )
699 CPPUNIT_TEST( test_atomic_signed_char_volatile)
700 CPPUNIT_TEST( test_atomic_unsigned_char_volatile)
701 CPPUNIT_TEST( test_atomic_short_int_volatile)
702 CPPUNIT_TEST( test_atomic_unsigned_short_int_volatile)
703 CPPUNIT_TEST( test_atomic_int_volatile)
704 CPPUNIT_TEST( test_atomic_unsigned_int_volatile)
705 CPPUNIT_TEST( test_atomic_long_volatile)
706 CPPUNIT_TEST( test_atomic_unsigned_long_volatile)
707 CPPUNIT_TEST( test_atomic_long_long_volatile)
708 CPPUNIT_TEST( test_atomic_unsigned_long_long_volatile)
709 //#if CDS_COMPILER == CDS_COMPILER_GCC && CDS_COMPILER_VERSION >= 40400
710 //        CPPUNIT_TEST( test_atomic_char16_t_volatile )
711 //        CPPUNIT_TEST( test_atomic_char32_t_volatile )
713 //        CPPUNIT_TEST( test_atomic_wchar_t_volatile)
715 CPPUNIT_TEST( test_atomic_pointer_void)
716 CPPUNIT_TEST( test_atomic_pointer_void_volatile)
718 CPPUNIT_TEST( test_atomic_pointer_char)
719 CPPUNIT_TEST( test_atomic_pointer_short)
720 CPPUNIT_TEST( test_atomic_pointer_int)
721 CPPUNIT_TEST( test_atomic_pointer_long)
722 CPPUNIT_TEST( test_atomic_pointer_long_long)
724 CPPUNIT_TEST( test_atomic_pointer_char_volatile)
725 CPPUNIT_TEST( test_atomic_pointer_short_volatile)
726 CPPUNIT_TEST( test_atomic_pointer_int_volatile)
727 CPPUNIT_TEST( test_atomic_pointer_long_volatile)
728 CPPUNIT_TEST( test_atomic_pointer_long_long_volatile)
730 CPPUNIT_TEST( test_atomic_fence)
732 CPPUNIT_TEST_SUITE_END()
// Register the fixture with the global CppUnit test registry. This whole
// file is skipped when Boost.atomic is used (no free atomic functions there).
736 CPPUNIT_TEST_SUITE_REGISTRATION(misc::cxx11_atomic_func);
738 #endif // #ifndef CDS_USE_BOOST_ATOMIC