// CPP0X( feature ): guards C++0x-only syntax ("= default", "= delete",
// constexpr constructors) so the header still parses with C++03 compilers.
// In this configuration it expands its argument to nothing.
#define CPP0X( feature )
// Memory-ordering constraints for atomic operations.  This is the
// pre-C++11 (N2427) model: five orderings, no memory_order_consume.
typedef enum memory_order {
    memory_order_relaxed, memory_order_acquire, memory_order_release,
    memory_order_acq_rel, memory_order_seq_cst
} memory_order;
20 typedef struct atomic_flag
23 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
24 void clear( memory_order = memory_order_seq_cst ) volatile;
25 void fence( memory_order ) const volatile;
27 CPP0X( atomic_flag() = default; )
28 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
29 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
// Static initializer for atomic_flag: starts in the clear (false) state.
#define ATOMIC_FLAG_INIT { false }
42 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
43 extern bool atomic_flag_test_and_set_explicit
44 ( volatile atomic_flag*, memory_order );
45 extern void atomic_flag_clear( volatile atomic_flag* );
46 extern void atomic_flag_clear_explicit
47 ( volatile atomic_flag*, memory_order );
48 extern void atomic_flag_fence
49 ( const volatile atomic_flag*, memory_order );
50 extern void __atomic_flag_wait__
51 ( volatile atomic_flag* );
52 extern void __atomic_flag_wait_explicit__
53 ( volatile atomic_flag*, memory_order );
54 extern volatile atomic_flag* __atomic_flag_for_address__
55 ( const volatile void* __z__ )
56 __attribute__((const));
64 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
65 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
67 inline void atomic_flag::clear( memory_order __x__ ) volatile
68 { atomic_flag_clear_explicit( this, __x__ ); }
70 inline void atomic_flag::fence( memory_order __x__ ) const volatile
71 { atomic_flag_fence( this, __x__ ); }
// Emulated atomic load (GNU statement expression): acquire the flag that
// guards this address, read __f__, release the flag, yield the value read.
#define _ATOMIC_LOAD_( __a__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
   volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
   __atomic_flag_wait_explicit__( __g__, __x__ ); \
   __typeof__((__a__)->__f__) __r__ = *__p__; \
   atomic_flag_clear_explicit( __g__, __x__ ); \
   __r__; })
// Emulated atomic store: evaluate the new value once, write it under the
// per-address flag lock, and yield the stored value.
#define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
   __typeof__(__m__) __v__ = (__m__); \
   volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
   __atomic_flag_wait_explicit__( __g__, __x__ ); \
   *__p__ = __v__; \
   atomic_flag_clear_explicit( __g__, __x__ ); \
   __v__; })
// Emulated read-modify-write: __o__ is a compound-assignment operator
// token (+=, -=, &=, |=, ^=).  Applies it under the flag lock and yields
// the value read BEFORE modification (fetch_* semantics).
#define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
   __typeof__(__m__) __v__ = (__m__); \
   volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
   __atomic_flag_wait_explicit__( __g__, __x__ ); \
   __typeof__((__a__)->__f__) __r__ = *__p__; \
   *__p__ __o__ __v__; \
   atomic_flag_clear_explicit( __g__, __x__ ); \
   __r__; })
// Emulated compare-and-swap.  __e__ points at the expected value: on
// success the new value __m__ is stored and true is yielded; on failure
// the observed value is written back through __e__ and false is yielded.
#define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
   __typeof__(__e__) __q__ = (__e__); \
   __typeof__(__m__) __v__ = (__m__); \
   bool __r__; \
   volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
   __atomic_flag_wait_explicit__( __g__, __x__ ); \
   __typeof__((__a__)->__f__) __t__ = *__p__; \
   if ( __t__ == *__q__ ) { *__p__ = __v__; __r__ = true; } \
   else { *__q__ = __t__; __r__ = false; } \
   atomic_flag_clear_explicit( __g__, __x__ ); \
   __r__; })
// Emulated per-object fence: forwards to atomic_flag_fence on the flag
// that guards this object's address.
#define _ATOMIC_FENCE_( __a__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
   volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
   atomic_flag_fence( __g__, __x__ ); \
   })
// Lock-free property: 0 = never lock-free, which is accurate for this
// flag/spinlock emulation.  (The proposal also allows 1 = sometimes and
// 2 = always.)
#define ATOMIC_INTEGRAL_LOCK_FREE 0
#define ATOMIC_ADDRESS_LOCK_FREE 0
127 typedef struct atomic_bool
130 bool is_lock_free() const volatile;
131 void store( bool, memory_order = memory_order_seq_cst ) volatile;
132 bool load( memory_order = memory_order_seq_cst ) volatile;
133 bool swap( bool, memory_order = memory_order_seq_cst ) volatile;
134 bool compare_swap ( bool&, bool, memory_order, memory_order ) volatile;
135 bool compare_swap ( bool&, bool,
136 memory_order = memory_order_seq_cst) volatile;
137 void fence( memory_order ) const volatile;
139 CPP0X( atomic_bool() = delete; )
140 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
141 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
142 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
144 bool operator =( bool __v__ ) volatile
145 { store( __v__ ); return __v__; }
147 friend void atomic_store_explicit( volatile atomic_bool*, bool,
149 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
150 friend bool atomic_swap_explicit( volatile atomic_bool*, bool,
152 friend bool atomic_compare_swap_explicit( volatile atomic_bool*, bool*, bool,
153 memory_order, memory_order );
154 friend void atomic_fence( const volatile atomic_bool*, memory_order );
162 typedef struct atomic_address
165 bool is_lock_free() const volatile;
166 void store( void*, memory_order = memory_order_seq_cst ) volatile;
167 void* load( memory_order = memory_order_seq_cst ) volatile;
168 void* swap( void*, memory_order = memory_order_seq_cst ) volatile;
169 bool compare_swap( void*&, void*, memory_order, memory_order ) volatile;
170 bool compare_swap( void*&, void*,
171 memory_order = memory_order_seq_cst ) volatile;
172 void fence( memory_order ) const volatile;
173 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
174 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
176 CPP0X( atomic_address() = default; )
177 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
178 CPP0X( atomic_address( const atomic_address& ) = delete; )
179 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
181 void* operator =( void* __v__ ) volatile
182 { store( __v__ ); return __v__; }
184 void* operator +=( ptrdiff_t __v__ ) volatile
185 { return fetch_add( __v__ ); }
187 void* operator -=( ptrdiff_t __v__ ) volatile
188 { return fetch_sub( __v__ ); }
190 friend void atomic_store_explicit( volatile atomic_address*, void*,
192 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
193 friend void* atomic_swap_explicit( volatile atomic_address*, void*,
195 friend bool atomic_compare_swap_explicit( volatile atomic_address*,
196 void**, void*, memory_order, memory_order );
197 friend void atomic_fence( const volatile atomic_address*, memory_order );
198 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
200 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
209 typedef struct atomic_char
212 bool is_lock_free() const volatile;
214 memory_order = memory_order_seq_cst ) volatile;
215 char load( memory_order = memory_order_seq_cst ) volatile;
217 memory_order = memory_order_seq_cst ) volatile;
218 bool compare_swap( char&, char,
219 memory_order, memory_order ) volatile;
220 bool compare_swap( char&, char,
221 memory_order = memory_order_seq_cst ) volatile;
222 void fence( memory_order ) const volatile;
223 char fetch_add( char,
224 memory_order = memory_order_seq_cst ) volatile;
225 char fetch_sub( char,
226 memory_order = memory_order_seq_cst ) volatile;
227 char fetch_and( char,
228 memory_order = memory_order_seq_cst ) volatile;
230 memory_order = memory_order_seq_cst ) volatile;
231 char fetch_xor( char,
232 memory_order = memory_order_seq_cst ) volatile;
234 CPP0X( atomic_char() = default; )
235 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
236 CPP0X( atomic_char( const atomic_char& ) = delete; )
237 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
239 char operator =( char __v__ ) volatile
240 { store( __v__ ); return __v__; }
242 char operator ++( int ) volatile
243 { return fetch_add( 1 ); }
245 char operator --( int ) volatile
246 { return fetch_sub( 1 ); }
248 char operator ++() volatile
249 { return fetch_add( 1 ) + 1; }
251 char operator --() volatile
252 { return fetch_sub( 1 ) - 1; }
254 char operator +=( char __v__ ) volatile
255 { return fetch_add( __v__ ) + __v__; }
257 char operator -=( char __v__ ) volatile
258 { return fetch_sub( __v__ ) - __v__; }
260 char operator &=( char __v__ ) volatile
261 { return fetch_and( __v__ ) & __v__; }
263 char operator |=( char __v__ ) volatile
264 { return fetch_or( __v__ ) | __v__; }
266 char operator ^=( char __v__ ) volatile
267 { return fetch_xor( __v__ ) ^ __v__; }
269 friend void atomic_store_explicit( volatile atomic_char*, char,
271 friend char atomic_load_explicit( volatile atomic_char*,
273 friend char atomic_swap_explicit( volatile atomic_char*,
274 char, memory_order );
275 friend bool atomic_compare_swap_explicit( volatile atomic_char*,
276 char*, char, memory_order, memory_order );
277 friend void atomic_fence( const volatile atomic_char*, memory_order );
278 friend char atomic_fetch_add_explicit( volatile atomic_char*,
279 char, memory_order );
280 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
281 char, memory_order );
282 friend char atomic_fetch_and_explicit( volatile atomic_char*,
283 char, memory_order );
284 friend char atomic_fetch_or_explicit( volatile atomic_char*,
285 char, memory_order );
286 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
287 char, memory_order );
295 typedef struct atomic_schar
298 bool is_lock_free() const volatile;
299 void store( signed char,
300 memory_order = memory_order_seq_cst ) volatile;
301 signed char load( memory_order = memory_order_seq_cst ) volatile;
302 signed char swap( signed char,
303 memory_order = memory_order_seq_cst ) volatile;
304 bool compare_swap( signed char&, signed char,
305 memory_order, memory_order ) volatile;
306 bool compare_swap( signed char&, signed char,
307 memory_order = memory_order_seq_cst ) volatile;
308 void fence( memory_order ) const volatile;
309 signed char fetch_add( signed char,
310 memory_order = memory_order_seq_cst ) volatile;
311 signed char fetch_sub( signed char,
312 memory_order = memory_order_seq_cst ) volatile;
313 signed char fetch_and( signed char,
314 memory_order = memory_order_seq_cst ) volatile;
315 signed char fetch_or( signed char,
316 memory_order = memory_order_seq_cst ) volatile;
317 signed char fetch_xor( signed char,
318 memory_order = memory_order_seq_cst ) volatile;
320 CPP0X( atomic_schar() = default; )
321 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
322 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
323 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
325 signed char operator =( signed char __v__ ) volatile
326 { store( __v__ ); return __v__; }
328 signed char operator ++( int ) volatile
329 { return fetch_add( 1 ); }
331 signed char operator --( int ) volatile
332 { return fetch_sub( 1 ); }
334 signed char operator ++() volatile
335 { return fetch_add( 1 ) + 1; }
337 signed char operator --() volatile
338 { return fetch_sub( 1 ) - 1; }
340 signed char operator +=( signed char __v__ ) volatile
341 { return fetch_add( __v__ ) + __v__; }
343 signed char operator -=( signed char __v__ ) volatile
344 { return fetch_sub( __v__ ) - __v__; }
346 signed char operator &=( signed char __v__ ) volatile
347 { return fetch_and( __v__ ) & __v__; }
349 signed char operator |=( signed char __v__ ) volatile
350 { return fetch_or( __v__ ) | __v__; }
352 signed char operator ^=( signed char __v__ ) volatile
353 { return fetch_xor( __v__ ) ^ __v__; }
355 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
357 friend signed char atomic_load_explicit( volatile atomic_schar*,
359 friend signed char atomic_swap_explicit( volatile atomic_schar*,
360 signed char, memory_order );
361 friend bool atomic_compare_swap_explicit( volatile atomic_schar*,
362 signed char*, signed char, memory_order, memory_order );
363 friend void atomic_fence( const volatile atomic_schar*, memory_order );
364 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
365 signed char, memory_order );
366 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
367 signed char, memory_order );
368 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
369 signed char, memory_order );
370 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
371 signed char, memory_order );
372 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
373 signed char, memory_order );
381 typedef struct atomic_uchar
384 bool is_lock_free() const volatile;
385 void store( unsigned char,
386 memory_order = memory_order_seq_cst ) volatile;
387 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
388 unsigned char swap( unsigned char,
389 memory_order = memory_order_seq_cst ) volatile;
390 bool compare_swap( unsigned char&, unsigned char,
391 memory_order, memory_order ) volatile;
392 bool compare_swap( unsigned char&, unsigned char,
393 memory_order = memory_order_seq_cst ) volatile;
394 void fence( memory_order ) const volatile;
395 unsigned char fetch_add( unsigned char,
396 memory_order = memory_order_seq_cst ) volatile;
397 unsigned char fetch_sub( unsigned char,
398 memory_order = memory_order_seq_cst ) volatile;
399 unsigned char fetch_and( unsigned char,
400 memory_order = memory_order_seq_cst ) volatile;
401 unsigned char fetch_or( unsigned char,
402 memory_order = memory_order_seq_cst ) volatile;
403 unsigned char fetch_xor( unsigned char,
404 memory_order = memory_order_seq_cst ) volatile;
406 CPP0X( atomic_uchar() = default; )
407 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
408 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
409 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
411 unsigned char operator =( unsigned char __v__ ) volatile
412 { store( __v__ ); return __v__; }
414 unsigned char operator ++( int ) volatile
415 { return fetch_add( 1 ); }
417 unsigned char operator --( int ) volatile
418 { return fetch_sub( 1 ); }
420 unsigned char operator ++() volatile
421 { return fetch_add( 1 ) + 1; }
423 unsigned char operator --() volatile
424 { return fetch_sub( 1 ) - 1; }
426 unsigned char operator +=( unsigned char __v__ ) volatile
427 { return fetch_add( __v__ ) + __v__; }
429 unsigned char operator -=( unsigned char __v__ ) volatile
430 { return fetch_sub( __v__ ) - __v__; }
432 unsigned char operator &=( unsigned char __v__ ) volatile
433 { return fetch_and( __v__ ) & __v__; }
435 unsigned char operator |=( unsigned char __v__ ) volatile
436 { return fetch_or( __v__ ) | __v__; }
438 unsigned char operator ^=( unsigned char __v__ ) volatile
439 { return fetch_xor( __v__ ) ^ __v__; }
441 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
443 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
445 friend unsigned char atomic_swap_explicit( volatile atomic_uchar*,
446 unsigned char, memory_order );
447 friend bool atomic_compare_swap_explicit( volatile atomic_uchar*,
448 unsigned char*, unsigned char, memory_order, memory_order );
449 friend void atomic_fence( const volatile atomic_uchar*, memory_order );
450 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
451 unsigned char, memory_order );
452 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
453 unsigned char, memory_order );
454 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
455 unsigned char, memory_order );
456 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
457 unsigned char, memory_order );
458 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
459 unsigned char, memory_order );
467 typedef struct atomic_short
470 bool is_lock_free() const volatile;
472 memory_order = memory_order_seq_cst ) volatile;
473 short load( memory_order = memory_order_seq_cst ) volatile;
475 memory_order = memory_order_seq_cst ) volatile;
476 bool compare_swap( short&, short,
477 memory_order, memory_order ) volatile;
478 bool compare_swap( short&, short,
479 memory_order = memory_order_seq_cst ) volatile;
480 void fence( memory_order ) const volatile;
481 short fetch_add( short,
482 memory_order = memory_order_seq_cst ) volatile;
483 short fetch_sub( short,
484 memory_order = memory_order_seq_cst ) volatile;
485 short fetch_and( short,
486 memory_order = memory_order_seq_cst ) volatile;
487 short fetch_or( short,
488 memory_order = memory_order_seq_cst ) volatile;
489 short fetch_xor( short,
490 memory_order = memory_order_seq_cst ) volatile;
492 CPP0X( atomic_short() = default; )
493 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
494 CPP0X( atomic_short( const atomic_short& ) = delete; )
495 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
497 short operator =( short __v__ ) volatile
498 { store( __v__ ); return __v__; }
500 short operator ++( int ) volatile
501 { return fetch_add( 1 ); }
503 short operator --( int ) volatile
504 { return fetch_sub( 1 ); }
506 short operator ++() volatile
507 { return fetch_add( 1 ) + 1; }
509 short operator --() volatile
510 { return fetch_sub( 1 ) - 1; }
512 short operator +=( short __v__ ) volatile
513 { return fetch_add( __v__ ) + __v__; }
515 short operator -=( short __v__ ) volatile
516 { return fetch_sub( __v__ ) - __v__; }
518 short operator &=( short __v__ ) volatile
519 { return fetch_and( __v__ ) & __v__; }
521 short operator |=( short __v__ ) volatile
522 { return fetch_or( __v__ ) | __v__; }
524 short operator ^=( short __v__ ) volatile
525 { return fetch_xor( __v__ ) ^ __v__; }
527 friend void atomic_store_explicit( volatile atomic_short*, short,
529 friend short atomic_load_explicit( volatile atomic_short*,
531 friend short atomic_swap_explicit( volatile atomic_short*,
532 short, memory_order );
533 friend bool atomic_compare_swap_explicit( volatile atomic_short*,
534 short*, short, memory_order, memory_order );
535 friend void atomic_fence( const volatile atomic_short*, memory_order );
536 friend short atomic_fetch_add_explicit( volatile atomic_short*,
537 short, memory_order );
538 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
539 short, memory_order );
540 friend short atomic_fetch_and_explicit( volatile atomic_short*,
541 short, memory_order );
542 friend short atomic_fetch_or_explicit( volatile atomic_short*,
543 short, memory_order );
544 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
545 short, memory_order );
553 typedef struct atomic_ushort
556 bool is_lock_free() const volatile;
557 void store( unsigned short,
558 memory_order = memory_order_seq_cst ) volatile;
559 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
560 unsigned short swap( unsigned short,
561 memory_order = memory_order_seq_cst ) volatile;
562 bool compare_swap( unsigned short&, unsigned short,
563 memory_order, memory_order ) volatile;
564 bool compare_swap( unsigned short&, unsigned short,
565 memory_order = memory_order_seq_cst ) volatile;
566 void fence( memory_order ) const volatile;
567 unsigned short fetch_add( unsigned short,
568 memory_order = memory_order_seq_cst ) volatile;
569 unsigned short fetch_sub( unsigned short,
570 memory_order = memory_order_seq_cst ) volatile;
571 unsigned short fetch_and( unsigned short,
572 memory_order = memory_order_seq_cst ) volatile;
573 unsigned short fetch_or( unsigned short,
574 memory_order = memory_order_seq_cst ) volatile;
575 unsigned short fetch_xor( unsigned short,
576 memory_order = memory_order_seq_cst ) volatile;
578 CPP0X( atomic_ushort() = default; )
579 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
580 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
581 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
583 unsigned short operator =( unsigned short __v__ ) volatile
584 { store( __v__ ); return __v__; }
586 unsigned short operator ++( int ) volatile
587 { return fetch_add( 1 ); }
589 unsigned short operator --( int ) volatile
590 { return fetch_sub( 1 ); }
592 unsigned short operator ++() volatile
593 { return fetch_add( 1 ) + 1; }
595 unsigned short operator --() volatile
596 { return fetch_sub( 1 ) - 1; }
598 unsigned short operator +=( unsigned short __v__ ) volatile
599 { return fetch_add( __v__ ) + __v__; }
601 unsigned short operator -=( unsigned short __v__ ) volatile
602 { return fetch_sub( __v__ ) - __v__; }
604 unsigned short operator &=( unsigned short __v__ ) volatile
605 { return fetch_and( __v__ ) & __v__; }
607 unsigned short operator |=( unsigned short __v__ ) volatile
608 { return fetch_or( __v__ ) | __v__; }
610 unsigned short operator ^=( unsigned short __v__ ) volatile
611 { return fetch_xor( __v__ ) ^ __v__; }
613 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
615 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
617 friend unsigned short atomic_swap_explicit( volatile atomic_ushort*,
618 unsigned short, memory_order );
619 friend bool atomic_compare_swap_explicit( volatile atomic_ushort*,
620 unsigned short*, unsigned short, memory_order, memory_order );
621 friend void atomic_fence( const volatile atomic_ushort*, memory_order );
622 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
623 unsigned short, memory_order );
624 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
625 unsigned short, memory_order );
626 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
627 unsigned short, memory_order );
628 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
629 unsigned short, memory_order );
630 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
631 unsigned short, memory_order );
635 unsigned short __f__;
639 typedef struct atomic_int
642 bool is_lock_free() const volatile;
644 memory_order = memory_order_seq_cst ) volatile;
645 int load( memory_order = memory_order_seq_cst ) volatile;
647 memory_order = memory_order_seq_cst ) volatile;
648 bool compare_swap( int&, int,
649 memory_order, memory_order ) volatile;
650 bool compare_swap( int&, int,
651 memory_order = memory_order_seq_cst ) volatile;
652 void fence( memory_order ) const volatile;
654 memory_order = memory_order_seq_cst ) volatile;
656 memory_order = memory_order_seq_cst ) volatile;
658 memory_order = memory_order_seq_cst ) volatile;
660 memory_order = memory_order_seq_cst ) volatile;
662 memory_order = memory_order_seq_cst ) volatile;
664 CPP0X( atomic_int() = default; )
665 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
666 CPP0X( atomic_int( const atomic_int& ) = delete; )
667 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
669 int operator =( int __v__ ) volatile
670 { store( __v__ ); return __v__; }
672 int operator ++( int ) volatile
673 { return fetch_add( 1 ); }
675 int operator --( int ) volatile
676 { return fetch_sub( 1 ); }
678 int operator ++() volatile
679 { return fetch_add( 1 ) + 1; }
681 int operator --() volatile
682 { return fetch_sub( 1 ) - 1; }
684 int operator +=( int __v__ ) volatile
685 { return fetch_add( __v__ ) + __v__; }
687 int operator -=( int __v__ ) volatile
688 { return fetch_sub( __v__ ) - __v__; }
690 int operator &=( int __v__ ) volatile
691 { return fetch_and( __v__ ) & __v__; }
693 int operator |=( int __v__ ) volatile
694 { return fetch_or( __v__ ) | __v__; }
696 int operator ^=( int __v__ ) volatile
697 { return fetch_xor( __v__ ) ^ __v__; }
699 friend void atomic_store_explicit( volatile atomic_int*, int,
701 friend int atomic_load_explicit( volatile atomic_int*,
703 friend int atomic_swap_explicit( volatile atomic_int*,
705 friend bool atomic_compare_swap_explicit( volatile atomic_int*,
706 int*, int, memory_order, memory_order );
707 friend void atomic_fence( const volatile atomic_int*, memory_order );
708 friend int atomic_fetch_add_explicit( volatile atomic_int*,
710 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
712 friend int atomic_fetch_and_explicit( volatile atomic_int*,
714 friend int atomic_fetch_or_explicit( volatile atomic_int*,
716 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
725 typedef struct atomic_uint
728 bool is_lock_free() const volatile;
729 void store( unsigned int,
730 memory_order = memory_order_seq_cst ) volatile;
731 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
732 unsigned int swap( unsigned int,
733 memory_order = memory_order_seq_cst ) volatile;
734 bool compare_swap( unsigned int&, unsigned int,
735 memory_order, memory_order ) volatile;
736 bool compare_swap( unsigned int&, unsigned int,
737 memory_order = memory_order_seq_cst ) volatile;
738 void fence( memory_order ) const volatile;
739 unsigned int fetch_add( unsigned int,
740 memory_order = memory_order_seq_cst ) volatile;
741 unsigned int fetch_sub( unsigned int,
742 memory_order = memory_order_seq_cst ) volatile;
743 unsigned int fetch_and( unsigned int,
744 memory_order = memory_order_seq_cst ) volatile;
745 unsigned int fetch_or( unsigned int,
746 memory_order = memory_order_seq_cst ) volatile;
747 unsigned int fetch_xor( unsigned int,
748 memory_order = memory_order_seq_cst ) volatile;
750 CPP0X( atomic_uint() = default; )
751 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
752 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
753 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
755 unsigned int operator =( unsigned int __v__ ) volatile
756 { store( __v__ ); return __v__; }
758 unsigned int operator ++( int ) volatile
759 { return fetch_add( 1 ); }
761 unsigned int operator --( int ) volatile
762 { return fetch_sub( 1 ); }
764 unsigned int operator ++() volatile
765 { return fetch_add( 1 ) + 1; }
767 unsigned int operator --() volatile
768 { return fetch_sub( 1 ) - 1; }
770 unsigned int operator +=( unsigned int __v__ ) volatile
771 { return fetch_add( __v__ ) + __v__; }
773 unsigned int operator -=( unsigned int __v__ ) volatile
774 { return fetch_sub( __v__ ) - __v__; }
776 unsigned int operator &=( unsigned int __v__ ) volatile
777 { return fetch_and( __v__ ) & __v__; }
779 unsigned int operator |=( unsigned int __v__ ) volatile
780 { return fetch_or( __v__ ) | __v__; }
782 unsigned int operator ^=( unsigned int __v__ ) volatile
783 { return fetch_xor( __v__ ) ^ __v__; }
785 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
787 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
789 friend unsigned int atomic_swap_explicit( volatile atomic_uint*,
790 unsigned int, memory_order );
791 friend bool atomic_compare_swap_explicit( volatile atomic_uint*,
792 unsigned int*, unsigned int, memory_order, memory_order );
793 friend void atomic_fence( const volatile atomic_uint*, memory_order );
794 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
795 unsigned int, memory_order );
796 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
797 unsigned int, memory_order );
798 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
799 unsigned int, memory_order );
800 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
801 unsigned int, memory_order );
802 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
803 unsigned int, memory_order );
811 typedef struct atomic_long
814 bool is_lock_free() const volatile;
816 memory_order = memory_order_seq_cst ) volatile;
817 long load( memory_order = memory_order_seq_cst ) volatile;
819 memory_order = memory_order_seq_cst ) volatile;
820 bool compare_swap( long&, long,
821 memory_order, memory_order ) volatile;
822 bool compare_swap( long&, long,
823 memory_order = memory_order_seq_cst ) volatile;
824 void fence( memory_order ) const volatile;
825 long fetch_add( long,
826 memory_order = memory_order_seq_cst ) volatile;
827 long fetch_sub( long,
828 memory_order = memory_order_seq_cst ) volatile;
829 long fetch_and( long,
830 memory_order = memory_order_seq_cst ) volatile;
832 memory_order = memory_order_seq_cst ) volatile;
833 long fetch_xor( long,
834 memory_order = memory_order_seq_cst ) volatile;
836 CPP0X( atomic_long() = default; )
837 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
838 CPP0X( atomic_long( const atomic_long& ) = delete; )
839 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
841 long operator =( long __v__ ) volatile
842 { store( __v__ ); return __v__; }
844 long operator ++( int ) volatile
845 { return fetch_add( 1 ); }
847 long operator --( int ) volatile
848 { return fetch_sub( 1 ); }
850 long operator ++() volatile
851 { return fetch_add( 1 ) + 1; }
853 long operator --() volatile
854 { return fetch_sub( 1 ) - 1; }
856 long operator +=( long __v__ ) volatile
857 { return fetch_add( __v__ ) + __v__; }
859 long operator -=( long __v__ ) volatile
860 { return fetch_sub( __v__ ) - __v__; }
862 long operator &=( long __v__ ) volatile
863 { return fetch_and( __v__ ) & __v__; }
865 long operator |=( long __v__ ) volatile
866 { return fetch_or( __v__ ) | __v__; }
868 long operator ^=( long __v__ ) volatile
869 { return fetch_xor( __v__ ) ^ __v__; }
871 friend void atomic_store_explicit( volatile atomic_long*, long,
873 friend long atomic_load_explicit( volatile atomic_long*,
875 friend long atomic_swap_explicit( volatile atomic_long*,
876 long, memory_order );
877 friend bool atomic_compare_swap_explicit( volatile atomic_long*,
878 long*, long, memory_order, memory_order );
879 friend void atomic_fence( const volatile atomic_long*, memory_order );
880 friend long atomic_fetch_add_explicit( volatile atomic_long*,
881 long, memory_order );
882 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
883 long, memory_order );
884 friend long atomic_fetch_and_explicit( volatile atomic_long*,
885 long, memory_order );
886 friend long atomic_fetch_or_explicit( volatile atomic_long*,
887 long, memory_order );
888 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
889 long, memory_order );
897 typedef struct atomic_ulong
900 bool is_lock_free() const volatile;
901 void store( unsigned long,
902 memory_order = memory_order_seq_cst ) volatile;
903 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
904 unsigned long swap( unsigned long,
905 memory_order = memory_order_seq_cst ) volatile;
906 bool compare_swap( unsigned long&, unsigned long,
907 memory_order, memory_order ) volatile;
908 bool compare_swap( unsigned long&, unsigned long,
909 memory_order = memory_order_seq_cst ) volatile;
910 void fence( memory_order ) const volatile;
911 unsigned long fetch_add( unsigned long,
912 memory_order = memory_order_seq_cst ) volatile;
913 unsigned long fetch_sub( unsigned long,
914 memory_order = memory_order_seq_cst ) volatile;
915 unsigned long fetch_and( unsigned long,
916 memory_order = memory_order_seq_cst ) volatile;
917 unsigned long fetch_or( unsigned long,
918 memory_order = memory_order_seq_cst ) volatile;
919 unsigned long fetch_xor( unsigned long,
920 memory_order = memory_order_seq_cst ) volatile;
922 CPP0X( atomic_ulong() = default; )
923 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
924 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
925 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
927 unsigned long operator =( unsigned long __v__ ) volatile
928 { store( __v__ ); return __v__; }
930 unsigned long operator ++( int ) volatile
931 { return fetch_add( 1 ); }
933 unsigned long operator --( int ) volatile
934 { return fetch_sub( 1 ); }
936 unsigned long operator ++() volatile
937 { return fetch_add( 1 ) + 1; }
939 unsigned long operator --() volatile
940 { return fetch_sub( 1 ) - 1; }
942 unsigned long operator +=( unsigned long __v__ ) volatile
943 { return fetch_add( __v__ ) + __v__; }
945 unsigned long operator -=( unsigned long __v__ ) volatile
946 { return fetch_sub( __v__ ) - __v__; }
948 unsigned long operator &=( unsigned long __v__ ) volatile
949 { return fetch_and( __v__ ) & __v__; }
951 unsigned long operator |=( unsigned long __v__ ) volatile
952 { return fetch_or( __v__ ) | __v__; }
954 unsigned long operator ^=( unsigned long __v__ ) volatile
955 { return fetch_xor( __v__ ) ^ __v__; }
957 friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
959 friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
961 friend unsigned long atomic_swap_explicit( volatile atomic_ulong*,
962 unsigned long, memory_order );
963 friend bool atomic_compare_swap_explicit( volatile atomic_ulong*,
964 unsigned long*, unsigned long, memory_order, memory_order );
965 friend void atomic_fence( const volatile atomic_ulong*, memory_order );
966 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
967 unsigned long, memory_order );
968 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
969 unsigned long, memory_order );
970 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
971 unsigned long, memory_order );
972 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
973 unsigned long, memory_order );
974 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
975 unsigned long, memory_order );
// Atomic integral type for `long long` (draft C++0x atomics listing).
// NOTE(review): the opening brace, the `__f__` data member, and the closing
// `} atomic_llong;` fall on elided listing lines (984-985, 1062-1068).
983 typedef struct atomic_llong
986 bool is_lock_free() const volatile;
987 void store( long long,
988 memory_order = memory_order_seq_cst ) volatile;
989 long long load( memory_order = memory_order_seq_cst ) volatile;
990 long long swap( long long,
991 memory_order = memory_order_seq_cst ) volatile;
// Two compare_swap overloads: separate success/failure orderings, or a
// single ordering defaulted to seq_cst.
992 bool compare_swap( long long&, long long,
993 memory_order, memory_order ) volatile;
994 bool compare_swap( long long&, long long,
995 memory_order = memory_order_seq_cst ) volatile;
996 void fence( memory_order ) const volatile;
// Read-modify-write operations; each returns the PREVIOUS value (see the
// operator definitions below, which re-derive the new value from it).
997 long long fetch_add( long long,
998 memory_order = memory_order_seq_cst ) volatile;
999 long long fetch_sub( long long,
1000 memory_order = memory_order_seq_cst ) volatile;
1001 long long fetch_and( long long,
1002 memory_order = memory_order_seq_cst ) volatile;
1003 long long fetch_or( long long,
1004 memory_order = memory_order_seq_cst ) volatile;
1005 long long fetch_xor( long long,
1006 memory_order = memory_order_seq_cst ) volatile;
// CPP0X(...) expands only under C++0x (defined empty at the file head):
// defaulted/constexpr construction, deleted copy operations.
1008 CPP0X( atomic_llong() = default; )
1009 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1010 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1011 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
// Assignment from the value type performs a (default seq_cst) store and
// yields the stored value.
1013 long long operator =( long long __v__ ) volatile
1014 { store( __v__ ); return __v__; }
// Post-increment/decrement return the old value; pre- forms return the new.
1016 long long operator ++( int ) volatile
1017 { return fetch_add( 1 ); }
1019 long long operator --( int ) volatile
1020 { return fetch_sub( 1 ); }
1022 long long operator ++() volatile
1023 { return fetch_add( 1 ) + 1; }
1025 long long operator --() volatile
1026 { return fetch_sub( 1 ) - 1; }
// Compound assignments recompute the updated value from fetch_xxx's result.
1028 long long operator +=( long long __v__ ) volatile
1029 { return fetch_add( __v__ ) + __v__; }
1031 long long operator -=( long long __v__ ) volatile
1032 { return fetch_sub( __v__ ) - __v__; }
1034 long long operator &=( long long __v__ ) volatile
1035 { return fetch_and( __v__ ) & __v__; }
1037 long long operator |=( long long __v__ ) volatile
1038 { return fetch_or( __v__ ) | __v__; }
1040 long long operator ^=( long long __v__ ) volatile
1041 { return fetch_xor( __v__ ) ^ __v__; }
// The C-style free-function API is befriended so it may access the
// representation directly.
// NOTE(review): continuation lines 1044 and 1046 of the first two friend
// declarations are elided in this listing.
1043 friend void atomic_store_explicit( volatile atomic_llong*, long long,
1045 friend long long atomic_load_explicit( volatile atomic_llong*,
1047 friend long long atomic_swap_explicit( volatile atomic_llong*,
1048 long long, memory_order );
1049 friend bool atomic_compare_swap_explicit( volatile atomic_llong*,
1050 long long*, long long, memory_order, memory_order );
1051 friend void atomic_fence( const volatile atomic_llong*, memory_order );
1052 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1053 long long, memory_order );
1054 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1055 long long, memory_order );
1056 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1057 long long, memory_order );
1058 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1059 long long, memory_order );
1060 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1061 long long, memory_order );
// Atomic integral type for `unsigned long long`; same member set as
// atomic_llong above. NOTE(review): opening brace (listing line ~1070) and
// the closing `} atomic_ullong;` are on elided lines; `__f__` is visible
// at line 1151.
1069 typedef struct atomic_ullong
1072 bool is_lock_free() const volatile;
1073 void store( unsigned long long,
1074 memory_order = memory_order_seq_cst ) volatile;
1075 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1076 unsigned long long swap( unsigned long long,
1077 memory_order = memory_order_seq_cst ) volatile;
// compare_swap: (success, failure) orderings, or one ordering (seq_cst).
1078 bool compare_swap( unsigned long long&, unsigned long long,
1079 memory_order, memory_order ) volatile;
1080 bool compare_swap( unsigned long long&, unsigned long long,
1081 memory_order = memory_order_seq_cst ) volatile;
1082 void fence( memory_order ) const volatile;
// fetch_xxx return the value held BEFORE the modification.
1083 unsigned long long fetch_add( unsigned long long,
1084 memory_order = memory_order_seq_cst ) volatile;
1085 unsigned long long fetch_sub( unsigned long long,
1086 memory_order = memory_order_seq_cst ) volatile;
1087 unsigned long long fetch_and( unsigned long long,
1088 memory_order = memory_order_seq_cst ) volatile;
1089 unsigned long long fetch_or( unsigned long long,
1090 memory_order = memory_order_seq_cst ) volatile;
1091 unsigned long long fetch_xor( unsigned long long,
1092 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control (CPP0X is empty otherwise).
1094 CPP0X( atomic_ullong() = default; )
1095 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1096 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1097 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
// Value assignment = seq_cst store, returning the stored value.
1099 unsigned long long operator =( unsigned long long __v__ ) volatile
1100 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms return the new value.
1102 unsigned long long operator ++( int ) volatile
1103 { return fetch_add( 1 ); }
1105 unsigned long long operator --( int ) volatile
1106 { return fetch_sub( 1 ); }
1108 unsigned long long operator ++() volatile
1109 { return fetch_add( 1 ) + 1; }
1111 unsigned long long operator --() volatile
1112 { return fetch_sub( 1 ) - 1; }
1114 unsigned long long operator +=( unsigned long long __v__ ) volatile
1115 { return fetch_add( __v__ ) + __v__; }
1117 unsigned long long operator -=( unsigned long long __v__ ) volatile
1118 { return fetch_sub( __v__ ) - __v__; }
1120 unsigned long long operator &=( unsigned long long __v__ ) volatile
1121 { return fetch_and( __v__ ) & __v__; }
1123 unsigned long long operator |=( unsigned long long __v__ ) volatile
1124 { return fetch_or( __v__ ) | __v__; }
1126 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1127 { return fetch_xor( __v__ ) ^ __v__; }
// Befriended free-function API. NOTE(review): continuation lines 1130 and
// 1132 of the first two friend declarations are elided in this listing.
1129 friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1131 friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1133 friend unsigned long long atomic_swap_explicit( volatile atomic_ullong*,
1134 unsigned long long, memory_order );
1135 friend bool atomic_compare_swap_explicit( volatile atomic_ullong*,
1136 unsigned long long*, unsigned long long, memory_order, memory_order );
1137 friend void atomic_fence( const volatile atomic_ullong*, memory_order );
1138 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1139 unsigned long long, memory_order );
1140 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1141 unsigned long long, memory_order );
1142 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1143 unsigned long long, memory_order );
1144 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1145 unsigned long long, memory_order );
1146 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1147 unsigned long long, memory_order );
// Underlying representation (direct access reserved for the friends above).
1151 unsigned long long __f__;
// Fixed-width aliases mirroring <cstdint>: least-width types...
1155 typedef atomic_schar atomic_int_least8_t;
1156 typedef atomic_uchar atomic_uint_least8_t;
1157 typedef atomic_short atomic_int_least16_t;
1158 typedef atomic_ushort atomic_uint_least16_t;
1159 typedef atomic_int atomic_int_least32_t;
1160 typedef atomic_uint atomic_uint_least32_t;
1161 typedef atomic_llong atomic_int_least64_t;
1162 typedef atomic_ullong atomic_uint_least64_t;
// ...fast-width types (same mappings as the least-width set here)...
1164 typedef atomic_schar atomic_int_fast8_t;
1165 typedef atomic_uchar atomic_uint_fast8_t;
1166 typedef atomic_short atomic_int_fast16_t;
1167 typedef atomic_ushort atomic_uint_fast16_t;
1168 typedef atomic_int atomic_int_fast32_t;
1169 typedef atomic_uint atomic_uint_fast32_t;
1170 typedef atomic_llong atomic_int_fast64_t;
1171 typedef atomic_ullong atomic_uint_fast64_t;
// ...and pointer/size/max aliases. NOTE(review): these assume intptr_t,
// ssize_t, size_t and ptrdiff_t fit in (unsigned) long — platform-specific;
// confirm for the targets this header supports.
1173 typedef atomic_long atomic_intptr_t;
1174 typedef atomic_ulong atomic_uintptr_t;
1176 typedef atomic_long atomic_ssize_t;
1177 typedef atomic_ulong atomic_size_t;
1179 typedef atomic_long atomic_ptrdiff_t;
1181 typedef atomic_llong atomic_intmax_t;
1182 typedef atomic_ullong atomic_uintmax_t;
// Atomic type for wchar_t; same member pattern as the integral atomics above.
// NOTE(review): opening brace, `__f__` member, and closing `} atomic_wchar_t;`
// are on elided listing lines (1189-1190, 1266-1274).
1188 typedef struct atomic_wchar_t
1191 bool is_lock_free() const volatile;
1192 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1193 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1194 wchar_t swap( wchar_t,
1195 memory_order = memory_order_seq_cst ) volatile;
// compare_swap: (success, failure) orderings, or one ordering (seq_cst).
1196 bool compare_swap( wchar_t&, wchar_t,
1197 memory_order, memory_order ) volatile;
1198 bool compare_swap( wchar_t&, wchar_t,
1199 memory_order = memory_order_seq_cst ) volatile;
1200 void fence( memory_order ) const volatile;
// fetch_xxx return the PREVIOUS value.
1201 wchar_t fetch_add( wchar_t,
1202 memory_order = memory_order_seq_cst ) volatile;
1203 wchar_t fetch_sub( wchar_t,
1204 memory_order = memory_order_seq_cst ) volatile;
1205 wchar_t fetch_and( wchar_t,
1206 memory_order = memory_order_seq_cst ) volatile;
1207 wchar_t fetch_or( wchar_t,
1208 memory_order = memory_order_seq_cst ) volatile;
1209 wchar_t fetch_xor( wchar_t,
1210 memory_order = memory_order_seq_cst ) volatile;
// C++0x-only construction/copy control.
1212 CPP0X( atomic_wchar_t() = default; )
1213 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1214 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1215 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
// Value assignment = seq_cst store, returning the stored value.
1217 wchar_t operator =( wchar_t __v__ ) volatile
1218 { store( __v__ ); return __v__; }
// Post-forms return the old value; pre-forms return the new value.
1220 wchar_t operator ++( int ) volatile
1221 { return fetch_add( 1 ); }
1223 wchar_t operator --( int ) volatile
1224 { return fetch_sub( 1 ); }
1226 wchar_t operator ++() volatile
1227 { return fetch_add( 1 ) + 1; }
1229 wchar_t operator --() volatile
1230 { return fetch_sub( 1 ) - 1; }
1232 wchar_t operator +=( wchar_t __v__ ) volatile
1233 { return fetch_add( __v__ ) + __v__; }
1235 wchar_t operator -=( wchar_t __v__ ) volatile
1236 { return fetch_sub( __v__ ) - __v__; }
1238 wchar_t operator &=( wchar_t __v__ ) volatile
1239 { return fetch_and( __v__ ) & __v__; }
1241 wchar_t operator |=( wchar_t __v__ ) volatile
1242 { return fetch_or( __v__ ) | __v__; }
1244 wchar_t operator ^=( wchar_t __v__ ) volatile
1245 { return fetch_xor( __v__ ) ^ __v__; }
// Befriended free-function API. NOTE(review): continuation lines 1248 and
// 1250 of the first two friend declarations are elided in this listing.
1247 friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1249 friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1251 friend wchar_t atomic_swap_explicit( volatile atomic_wchar_t*,
1252 wchar_t, memory_order );
1253 friend bool atomic_compare_swap_explicit( volatile atomic_wchar_t*,
1254 wchar_t*, wchar_t, memory_order, memory_order );
1255 friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
1256 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1257 wchar_t, memory_order );
1258 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1259 wchar_t, memory_order );
1260 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1261 wchar_t, memory_order );
1262 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1263 wchar_t, memory_order );
1264 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1265 wchar_t, memory_order );
// char16_t/char32_t aliases built on the least-width atomics.
1275 typedef atomic_int_least16_t atomic_char16_t;
1276 typedef atomic_int_least32_t atomic_char32_t;
// NOTE(review): this typedef of atomic_wchar_t would conflict with the
// struct atomic_wchar_t defined above — presumably the two sit in mutually
// exclusive preprocessor branches on elided listing lines; confirm against
// the full header.
1277 typedef atomic_int_least32_t atomic_wchar_t;
// Primary template: generic atomic<T> with load/store/swap/compare_swap
// but no arithmetic (arithmetic lives in the integral/pointer forms).
// NOTE(review): the `struct atomic {` header and the T __f__ member are on
// elided listing lines (1285-1288 and after 1303).
1284 template< typename T >
1289 bool is_lock_free() const volatile;
1290 void store( T, memory_order = memory_order_seq_cst ) volatile;
1291 T load( memory_order = memory_order_seq_cst ) volatile;
1292 T swap( T __v__, memory_order = memory_order_seq_cst ) volatile;
// compare_swap: (success, failure) orderings, or one ordering (seq_cst).
1293 bool compare_swap( T&, T, memory_order, memory_order ) volatile;
1294 bool compare_swap( T&, T, memory_order = memory_order_seq_cst ) volatile;
1295 void fence( memory_order ) const volatile;
// C++0x-only construction/copy control; value ctor is explicit here,
// unlike the named integral atomics above.
1297 CPP0X( atomic() = default; )
1298 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1299 CPP0X( atomic( const atomic& ) = delete; )
1300 atomic& operator =( const atomic& ) CPP0X(=delete);
// Value assignment = seq_cst store, returning the stored value.
1302 T operator =( T __v__ ) volatile
1303 { store( __v__ ); return __v__; }
// Partial specialization for pointers, layered over atomic_address and
// re-declaring the operations with T* types. Braces elided in this listing.
1314 template<typename T> struct atomic< T* > : atomic_address
1316 T* load( memory_order = memory_order_seq_cst ) volatile;
1317 T* swap( T*, memory_order = memory_order_seq_cst ) volatile;
1318 bool compare_swap( T*&, T*, memory_order, memory_order ) volatile;
1319 bool compare_swap( T*&, T*,
1320 memory_order = memory_order_seq_cst ) volatile;
// Pointer arithmetic fetches are in units of ptrdiff_t.
1321 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1322 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1324 CPP0X( atomic() = default; )
// NOTE(review): this constructor takes `T __v__` but forwards it to
// atomic_address (whose ctor takes a pointer) — looks like it should be
// `T* __v__`; confirm against the published proposal text.
1325 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1326 CPP0X( atomic( const atomic& ) = delete; )
1327 atomic& operator =( const atomic& ) CPP0X(=delete);
// Pointer assignment = seq_cst store, returning the stored pointer.
1329 T* operator =( T* __v__ ) volatile
1330 { store( __v__ ); return __v__; }
// Post-forms return the old pointer; pre-forms return the new pointer.
1332 T* operator ++( int ) volatile
1333 { return fetch_add( 1 ); }
1335 T* operator --( int ) volatile
1336 { return fetch_sub( 1 ); }
1338 T* operator ++() volatile
1339 { return fetch_add( 1 ) + 1; }
1341 T* operator --() volatile
1342 { return fetch_sub( 1 ) - 1; }
// NOTE(review): += / -= take `T* __v__` and then add/subtract a pointer,
// whereas fetch_add/fetch_sub above take ptrdiff_t — these signatures look
// like they should take ptrdiff_t; confirm against the proposal.
1344 T* operator +=( T* __v__ ) volatile
1345 { return fetch_add( __v__ ) + __v__; }
1347 T* operator -=( T* __v__ ) volatile
1348 { return fetch_sub( __v__ ) - __v__; }
// atomic<bool>: thin specialization over atomic_bool; only construction,
// copy control, and value assignment are added here (braces elided in this
// listing, as for all the specializations below).
1356 template<> struct atomic< bool > : atomic_bool
1358 CPP0X( atomic() = default; )
1359 CPP0X( constexpr explicit atomic( bool __v__ )
1360 : atomic_bool( __v__ ) { } )
1361 CPP0X( atomic( const atomic& ) = delete; )
1362 atomic& operator =( const atomic& ) CPP0X(=delete);
// Value assignment = seq_cst store (base default), returning the value.
1364 bool operator =( bool __v__ ) volatile
1365 { store( __v__ ); return __v__; }
// atomic<void*>: thin specialization over atomic_address.
1369 template<> struct atomic< void* > : atomic_address
1371 CPP0X( atomic() = default; )
1372 CPP0X( constexpr explicit atomic( void* __v__ )
1373 : atomic_address( __v__ ) { } )
1374 CPP0X( atomic( const atomic& ) = delete; )
1375 atomic& operator =( const atomic& ) CPP0X(=delete);
1377 void* operator =( void* __v__ ) volatile
1378 { store( __v__ ); return __v__; }
// atomic<char>: thin specialization over atomic_char.
1382 template<> struct atomic< char > : atomic_char
1384 CPP0X( atomic() = default; )
1385 CPP0X( constexpr explicit atomic( char __v__ )
1386 : atomic_char( __v__ ) { } )
1387 CPP0X( atomic( const atomic& ) = delete; )
1388 atomic& operator =( const atomic& ) CPP0X(=delete);
1390 char operator =( char __v__ ) volatile
1391 { store( __v__ ); return __v__; }
// atomic<signed char>: thin specialization over atomic_schar.
1395 template<> struct atomic< signed char > : atomic_schar
1397 CPP0X( atomic() = default; )
1398 CPP0X( constexpr explicit atomic( signed char __v__ )
1399 : atomic_schar( __v__ ) { } )
1400 CPP0X( atomic( const atomic& ) = delete; )
1401 atomic& operator =( const atomic& ) CPP0X(=delete);
1403 signed char operator =( signed char __v__ ) volatile
1404 { store( __v__ ); return __v__; }
// atomic<unsigned char>: thin specialization over atomic_uchar.
1408 template<> struct atomic< unsigned char > : atomic_uchar
1410 CPP0X( atomic() = default; )
1411 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1412 : atomic_uchar( __v__ ) { } )
1413 CPP0X( atomic( const atomic& ) = delete; )
1414 atomic& operator =( const atomic& ) CPP0X(=delete);
1416 unsigned char operator =( unsigned char __v__ ) volatile
1417 { store( __v__ ); return __v__; }
// atomic<short>: thin specialization over atomic_short.
1421 template<> struct atomic< short > : atomic_short
1423 CPP0X( atomic() = default; )
1424 CPP0X( constexpr explicit atomic( short __v__ )
1425 : atomic_short( __v__ ) { } )
1426 CPP0X( atomic( const atomic& ) = delete; )
1427 atomic& operator =( const atomic& ) CPP0X(=delete);
1429 short operator =( short __v__ ) volatile
1430 { store( __v__ ); return __v__; }
// atomic<unsigned short>: thin specialization over atomic_ushort.
1434 template<> struct atomic< unsigned short > : atomic_ushort
1436 CPP0X( atomic() = default; )
1437 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1438 : atomic_ushort( __v__ ) { } )
1439 CPP0X( atomic( const atomic& ) = delete; )
1440 atomic& operator =( const atomic& ) CPP0X(=delete);
1442 unsigned short operator =( unsigned short __v__ ) volatile
1443 { store( __v__ ); return __v__; }
// atomic<int>: thin specialization over atomic_int.
1447 template<> struct atomic< int > : atomic_int
1449 CPP0X( atomic() = default; )
1450 CPP0X( constexpr explicit atomic( int __v__ )
1451 : atomic_int( __v__ ) { } )
1452 CPP0X( atomic( const atomic& ) = delete; )
1453 atomic& operator =( const atomic& ) CPP0X(=delete);
1455 int operator =( int __v__ ) volatile
1456 { store( __v__ ); return __v__; }
// atomic<unsigned int>: thin specialization over atomic_uint.
1460 template<> struct atomic< unsigned int > : atomic_uint
1462 CPP0X( atomic() = default; )
1463 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1464 : atomic_uint( __v__ ) { } )
1465 CPP0X( atomic( const atomic& ) = delete; )
1466 atomic& operator =( const atomic& ) CPP0X(=delete);
1468 unsigned int operator =( unsigned int __v__ ) volatile
1469 { store( __v__ ); return __v__; }
// atomic<long>: thin specialization over atomic_long.
1473 template<> struct atomic< long > : atomic_long
1475 CPP0X( atomic() = default; )
1476 CPP0X( constexpr explicit atomic( long __v__ )
1477 : atomic_long( __v__ ) { } )
1478 CPP0X( atomic( const atomic& ) = delete; )
1479 atomic& operator =( const atomic& ) CPP0X(=delete);
1481 long operator =( long __v__ ) volatile
1482 { store( __v__ ); return __v__; }
// atomic<unsigned long>: thin specialization over atomic_ulong.
1486 template<> struct atomic< unsigned long > : atomic_ulong
1488 CPP0X( atomic() = default; )
1489 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1490 : atomic_ulong( __v__ ) { } )
1491 CPP0X( atomic( const atomic& ) = delete; )
1492 atomic& operator =( const atomic& ) CPP0X(=delete);
1494 unsigned long operator =( unsigned long __v__ ) volatile
1495 { store( __v__ ); return __v__; }
// atomic<long long>: thin specialization over atomic_llong.
1499 template<> struct atomic< long long > : atomic_llong
1501 CPP0X( atomic() = default; )
1502 CPP0X( constexpr explicit atomic( long long __v__ )
1503 : atomic_llong( __v__ ) { } )
1504 CPP0X( atomic( const atomic& ) = delete; )
1505 atomic& operator =( const atomic& ) CPP0X(=delete);
1507 long long operator =( long long __v__ ) volatile
1508 { store( __v__ ); return __v__; }
// atomic<unsigned long long>: thin specialization over atomic_ullong.
1512 template<> struct atomic< unsigned long long > : atomic_ullong
1514 CPP0X( atomic() = default; )
1515 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1516 : atomic_ullong( __v__ ) { } )
1517 CPP0X( atomic( const atomic& ) = delete; )
1518 atomic& operator =( const atomic& ) CPP0X(=delete);
1520 unsigned long long operator =( unsigned long long __v__ ) volatile
1521 { store( __v__ ); return __v__; }
// atomic<wchar_t>: thin specialization over atomic_wchar_t.
1525 template<> struct atomic< wchar_t > : atomic_wchar_t
1527 CPP0X( atomic() = default; )
1528 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1529 : atomic_wchar_t( __v__ ) { } )
1530 CPP0X( atomic( const atomic& ) = delete; )
1531 atomic& operator =( const atomic& ) CPP0X(=delete);
1533 wchar_t operator =( wchar_t __v__ ) volatile
1534 { store( __v__ ); return __v__; }
// Free-function operations on atomic_bool, implemented with the
// statement-expression macros (_ATOMIC_LOAD_/_ATOMIC_STORE_/...) defined at
// the head of this file. The non-_explicit forms default to seq_cst.
// NOTE(review): the body of atomic_is_lock_free (listing line 1545) is
// elided here.
1544 inline bool atomic_is_lock_free( const volatile atomic_bool* __a__ )
1547 inline bool atomic_load_explicit
1548 ( volatile atomic_bool* __a__, memory_order __x__ )
1549 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1551 inline bool atomic_load( volatile atomic_bool* __a__ )
1552 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1554 inline void atomic_store_explicit
1555 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1556 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1558 inline void atomic_store
1559 ( volatile atomic_bool* __a__, bool __m__ )
1560 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
// swap is expressed as a read-modify-write with plain assignment.
1562 inline bool atomic_swap_explicit
1563 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1564 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1566 inline bool atomic_swap
1567 ( volatile atomic_bool* __a__, bool __m__ )
1568 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): the failure ordering __y__ is accepted but never passed to
// _ATOMIC_CMPSWP_ — only the success ordering __x__ is used. Confirm this
// is intentional in the macro's contract.
1570 inline bool atomic_compare_swap_explicit
1571 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1572 memory_order __x__, memory_order __y__ )
1573 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1575 inline bool atomic_compare_swap
1576 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1577 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1578 memory_order_seq_cst, memory_order_seq_cst ); }
1580 inline void atomic_fence
1581 ( const volatile atomic_bool* __a__, memory_order __x__ )
1582 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function operations on atomic_address (void* payload); same macro-
// based pattern as the atomic_bool set above. is_lock_free body elided.
1585 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1588 inline void* atomic_load_explicit
1589 ( volatile atomic_address* __a__, memory_order __x__ )
1590 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1592 inline void* atomic_load( volatile atomic_address* __a__ )
1593 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1595 inline void atomic_store_explicit
1596 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1597 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1599 inline void atomic_store
1600 ( volatile atomic_address* __a__, void* __m__ )
1601 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1603 inline void* atomic_swap_explicit
1604 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1605 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1607 inline void* atomic_swap
1608 ( volatile atomic_address* __a__, void* __m__ )
1609 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (see atomic_bool section).
1611 inline bool atomic_compare_swap_explicit
1612 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1613 memory_order __x__, memory_order __y__ )
1614 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1616 inline bool atomic_compare_swap
1617 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1618 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1619 memory_order_seq_cst, memory_order_seq_cst ); }
1621 inline void atomic_fence
1622 ( const volatile atomic_address* __a__, memory_order __x__ )
1623 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function operations on atomic_char; same macro-based pattern as the
// atomic_bool set above. is_lock_free body elided in this listing.
1626 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1629 inline char atomic_load_explicit
1630 ( volatile atomic_char* __a__, memory_order __x__ )
1631 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1633 inline char atomic_load( volatile atomic_char* __a__ )
1634 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1636 inline void atomic_store_explicit
1637 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1638 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1640 inline void atomic_store
1641 ( volatile atomic_char* __a__, char __m__ )
1642 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1644 inline char atomic_swap_explicit
1645 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1646 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1648 inline char atomic_swap
1649 ( volatile atomic_char* __a__, char __m__ )
1650 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (see atomic_bool section).
1652 inline bool atomic_compare_swap_explicit
1653 ( volatile atomic_char* __a__, char* __e__, char __m__,
1654 memory_order __x__, memory_order __y__ )
1655 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1657 inline bool atomic_compare_swap
1658 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1659 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1660 memory_order_seq_cst, memory_order_seq_cst ); }
1662 inline void atomic_fence
1663 ( const volatile atomic_char* __a__, memory_order __x__ )
1664 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function operations on atomic_schar; same macro-based pattern as the
// atomic_bool set above. is_lock_free body elided in this listing.
1667 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1670 inline signed char atomic_load_explicit
1671 ( volatile atomic_schar* __a__, memory_order __x__ )
1672 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1674 inline signed char atomic_load( volatile atomic_schar* __a__ )
1675 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1677 inline void atomic_store_explicit
1678 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1679 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1681 inline void atomic_store
1682 ( volatile atomic_schar* __a__, signed char __m__ )
1683 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1685 inline signed char atomic_swap_explicit
1686 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1687 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1689 inline signed char atomic_swap
1690 ( volatile atomic_schar* __a__, signed char __m__ )
1691 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (see atomic_bool section).
1693 inline bool atomic_compare_swap_explicit
1694 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1695 memory_order __x__, memory_order __y__ )
1696 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1698 inline bool atomic_compare_swap
1699 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1700 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1701 memory_order_seq_cst, memory_order_seq_cst ); }
1703 inline void atomic_fence
1704 ( const volatile atomic_schar* __a__, memory_order __x__ )
1705 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function operations on atomic_uchar; same macro-based pattern as the
// atomic_bool set above. is_lock_free body elided in this listing.
1708 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1711 inline unsigned char atomic_load_explicit
1712 ( volatile atomic_uchar* __a__, memory_order __x__ )
1713 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1715 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1716 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1718 inline void atomic_store_explicit
1719 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1720 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1722 inline void atomic_store
1723 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1724 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1726 inline unsigned char atomic_swap_explicit
1727 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1728 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1730 inline unsigned char atomic_swap
1731 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1732 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (see atomic_bool section).
1734 inline bool atomic_compare_swap_explicit
1735 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1736 memory_order __x__, memory_order __y__ )
1737 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1739 inline bool atomic_compare_swap
1740 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1741 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1742 memory_order_seq_cst, memory_order_seq_cst ); }
1744 inline void atomic_fence
1745 ( const volatile atomic_uchar* __a__, memory_order __x__ )
1746 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function operations on atomic_short; same macro-based pattern as the
// atomic_bool set above. is_lock_free body elided in this listing.
1749 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1752 inline short atomic_load_explicit
1753 ( volatile atomic_short* __a__, memory_order __x__ )
1754 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1756 inline short atomic_load( volatile atomic_short* __a__ )
1757 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1759 inline void atomic_store_explicit
1760 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1761 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1763 inline void atomic_store
1764 ( volatile atomic_short* __a__, short __m__ )
1765 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1767 inline short atomic_swap_explicit
1768 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1769 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1771 inline short atomic_swap
1772 ( volatile atomic_short* __a__, short __m__ )
1773 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (see atomic_bool section).
1775 inline bool atomic_compare_swap_explicit
1776 ( volatile atomic_short* __a__, short* __e__, short __m__,
1777 memory_order __x__, memory_order __y__ )
1778 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1780 inline bool atomic_compare_swap
1781 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1782 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1783 memory_order_seq_cst, memory_order_seq_cst ); }
1785 inline void atomic_fence
1786 ( const volatile atomic_short* __a__, memory_order __x__ )
1787 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function operations on atomic_ushort; same macro-based pattern as the
// atomic_bool set above. is_lock_free body elided in this listing.
1790 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1793 inline unsigned short atomic_load_explicit
1794 ( volatile atomic_ushort* __a__, memory_order __x__ )
1795 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1797 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1798 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1800 inline void atomic_store_explicit
1801 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1802 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1804 inline void atomic_store
1805 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1806 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1808 inline unsigned short atomic_swap_explicit
1809 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1810 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1812 inline unsigned short atomic_swap
1813 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1814 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (see atomic_bool section).
1816 inline bool atomic_compare_swap_explicit
1817 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1818 memory_order __x__, memory_order __y__ )
1819 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1821 inline bool atomic_compare_swap
1822 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1823 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1824 memory_order_seq_cst, memory_order_seq_cst ); }
1826 inline void atomic_fence
1827 ( const volatile atomic_ushort* __a__, memory_order __x__ )
1828 { _ATOMIC_FENCE_( __a__, __x__ ); }
// Free-function operations on atomic_int; same macro-based pattern as the
// atomic_bool set above. is_lock_free body elided in this listing.
1831 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1834 inline int atomic_load_explicit
1835 ( volatile atomic_int* __a__, memory_order __x__ )
1836 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1838 inline int atomic_load( volatile atomic_int* __a__ )
1839 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1841 inline void atomic_store_explicit
1842 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1843 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1845 inline void atomic_store
1846 ( volatile atomic_int* __a__, int __m__ )
1847 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1849 inline int atomic_swap_explicit
1850 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1851 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1853 inline int atomic_swap
1854 ( volatile atomic_int* __a__, int __m__ )
1855 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
// NOTE(review): failure ordering __y__ is unused (see atomic_bool section).
1857 inline bool atomic_compare_swap_explicit
1858 ( volatile atomic_int* __a__, int* __e__, int __m__,
1859 memory_order __x__, memory_order __y__ )
1860 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1862 inline bool atomic_compare_swap
1863 ( volatile atomic_int* __a__, int* __e__, int __m__ )
1864 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1865 memory_order_seq_cst, memory_order_seq_cst ); }
1867 inline void atomic_fence
1868 ( const volatile atomic_int* __a__, memory_order __x__ )
1869 { _ATOMIC_FENCE_( __a__, __x__ ); }
1872 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
1875 inline unsigned int atomic_load_explicit
1876 ( volatile atomic_uint* __a__, memory_order __x__ )
1877 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1879 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
1880 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1882 inline void atomic_store_explicit
1883 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1884 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1886 inline void atomic_store
1887 ( volatile atomic_uint* __a__, unsigned int __m__ )
1888 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1890 inline unsigned int atomic_swap_explicit
1891 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1892 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1894 inline unsigned int atomic_swap
1895 ( volatile atomic_uint* __a__, unsigned int __m__ )
1896 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1898 inline bool atomic_compare_swap_explicit
1899 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
1900 memory_order __x__, memory_order __y__ )
1901 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1903 inline bool atomic_compare_swap
1904 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
1905 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1906 memory_order_seq_cst, memory_order_seq_cst ); }
1908 inline void atomic_fence
1909 ( const volatile atomic_uint* __a__, memory_order __x__ )
1910 { _ATOMIC_FENCE_( __a__, __x__ ); }
1913 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
1916 inline long atomic_load_explicit
1917 ( volatile atomic_long* __a__, memory_order __x__ )
1918 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1920 inline long atomic_load( volatile atomic_long* __a__ )
1921 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1923 inline void atomic_store_explicit
1924 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1925 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1927 inline void atomic_store
1928 ( volatile atomic_long* __a__, long __m__ )
1929 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1931 inline long atomic_swap_explicit
1932 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
1933 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1935 inline long atomic_swap
1936 ( volatile atomic_long* __a__, long __m__ )
1937 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1939 inline bool atomic_compare_swap_explicit
1940 ( volatile atomic_long* __a__, long* __e__, long __m__,
1941 memory_order __x__, memory_order __y__ )
1942 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1944 inline bool atomic_compare_swap
1945 ( volatile atomic_long* __a__, long* __e__, long __m__ )
1946 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1947 memory_order_seq_cst, memory_order_seq_cst ); }
1949 inline void atomic_fence
1950 ( const volatile atomic_long* __a__, memory_order __x__ )
1951 { _ATOMIC_FENCE_( __a__, __x__ ); }
1954 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
1957 inline unsigned long atomic_load_explicit
1958 ( volatile atomic_ulong* __a__, memory_order __x__ )
1959 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1961 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
1962 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1964 inline void atomic_store_explicit
1965 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1966 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1968 inline void atomic_store
1969 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1970 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1972 inline unsigned long atomic_swap_explicit
1973 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
1974 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1976 inline unsigned long atomic_swap
1977 ( volatile atomic_ulong* __a__, unsigned long __m__ )
1978 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
1980 inline bool atomic_compare_swap_explicit
1981 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
1982 memory_order __x__, memory_order __y__ )
1983 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1985 inline bool atomic_compare_swap
1986 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
1987 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
1988 memory_order_seq_cst, memory_order_seq_cst ); }
1990 inline void atomic_fence
1991 ( const volatile atomic_ulong* __a__, memory_order __x__ )
1992 { _ATOMIC_FENCE_( __a__, __x__ ); }
1995 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
1998 inline long long atomic_load_explicit
1999 ( volatile atomic_llong* __a__, memory_order __x__ )
2000 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2002 inline long long atomic_load( volatile atomic_llong* __a__ )
2003 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2005 inline void atomic_store_explicit
2006 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2007 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2009 inline void atomic_store
2010 ( volatile atomic_llong* __a__, long long __m__ )
2011 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2013 inline long long atomic_swap_explicit
2014 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2015 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2017 inline long long atomic_swap
2018 ( volatile atomic_llong* __a__, long long __m__ )
2019 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2021 inline bool atomic_compare_swap_explicit
2022 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2023 memory_order __x__, memory_order __y__ )
2024 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2026 inline bool atomic_compare_swap
2027 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2028 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2029 memory_order_seq_cst, memory_order_seq_cst ); }
2031 inline void atomic_fence
2032 ( const volatile atomic_llong* __a__, memory_order __x__ )
2033 { _ATOMIC_FENCE_( __a__, __x__ ); }
2036 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2039 inline unsigned long long atomic_load_explicit
2040 ( volatile atomic_ullong* __a__, memory_order __x__ )
2041 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2043 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2044 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2046 inline void atomic_store_explicit
2047 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2048 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2050 inline void atomic_store
2051 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2052 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2054 inline unsigned long long atomic_swap_explicit
2055 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2056 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2058 inline unsigned long long atomic_swap
2059 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2060 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2062 inline bool atomic_compare_swap_explicit
2063 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2064 memory_order __x__, memory_order __y__ )
2065 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2067 inline bool atomic_compare_swap
2068 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2069 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2070 memory_order_seq_cst, memory_order_seq_cst ); }
2072 inline void atomic_fence
2073 ( const volatile atomic_ullong* __a__, memory_order __x__ )
2074 { _ATOMIC_FENCE_( __a__, __x__ ); }
2077 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2080 inline wchar_t atomic_load_explicit
2081 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2082 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2084 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2085 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2087 inline void atomic_store_explicit
2088 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2089 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2091 inline void atomic_store
2092 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2093 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2095 inline wchar_t atomic_swap_explicit
2096 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2097 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2099 inline wchar_t atomic_swap
2100 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2101 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
2103 inline bool atomic_compare_swap_explicit
2104 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2105 memory_order __x__, memory_order __y__ )
2106 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2108 inline bool atomic_compare_swap
2109 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2110 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
2111 memory_order_seq_cst, memory_order_seq_cst ); }
2113 inline void atomic_fence
2114 ( const volatile atomic_wchar_t* __a__, memory_order __x__ )
2115 { _ATOMIC_FENCE_( __a__, __x__ ); }
2118 inline void* atomic_fetch_add_explicit
2119 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2120 { void* volatile* __p__ = &((__a__)->__f__);
2121 volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ );
2122 __atomic_flag_wait_explicit__( __g__, __x__ );
2123 void* __r__ = *__p__;
2124 *__p__ = (void*)((char*)(*__p__) + __m__);
2125 atomic_flag_clear_explicit( __g__, __x__ );
2128 inline void* atomic_fetch_add
2129 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2130 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2133 inline void* atomic_fetch_sub_explicit
2134 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2135 { void* volatile* __p__ = &((__a__)->__f__);
2136 volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ );
2137 __atomic_flag_wait_explicit__( __g__, __x__ );
2138 void* __r__ = *__p__;
2139 *__p__ = (void*)((char*)(*__p__) - __m__);
2140 atomic_flag_clear_explicit( __g__, __x__ );
2143 inline void* atomic_fetch_sub
2144 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2145 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2148 inline char atomic_fetch_add_explicit
2149 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2150 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2152 inline char atomic_fetch_add
2153 ( volatile atomic_char* __a__, char __m__ )
2154 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2157 inline char atomic_fetch_sub_explicit
2158 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2159 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2161 inline char atomic_fetch_sub
2162 ( volatile atomic_char* __a__, char __m__ )
2163 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2166 inline char atomic_fetch_and_explicit
2167 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2168 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2170 inline char atomic_fetch_and
2171 ( volatile atomic_char* __a__, char __m__ )
2172 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2175 inline char atomic_fetch_or_explicit
2176 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2177 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2179 inline char atomic_fetch_or
2180 ( volatile atomic_char* __a__, char __m__ )
2181 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2184 inline char atomic_fetch_xor_explicit
2185 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2186 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2188 inline char atomic_fetch_xor
2189 ( volatile atomic_char* __a__, char __m__ )
2190 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2193 inline signed char atomic_fetch_add_explicit
2194 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2195 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2197 inline signed char atomic_fetch_add
2198 ( volatile atomic_schar* __a__, signed char __m__ )
2199 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2202 inline signed char atomic_fetch_sub_explicit
2203 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2204 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2206 inline signed char atomic_fetch_sub
2207 ( volatile atomic_schar* __a__, signed char __m__ )
2208 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2211 inline signed char atomic_fetch_and_explicit
2212 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2213 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2215 inline signed char atomic_fetch_and
2216 ( volatile atomic_schar* __a__, signed char __m__ )
2217 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2220 inline signed char atomic_fetch_or_explicit
2221 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2222 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2224 inline signed char atomic_fetch_or
2225 ( volatile atomic_schar* __a__, signed char __m__ )
2226 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2229 inline signed char atomic_fetch_xor_explicit
2230 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2231 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2233 inline signed char atomic_fetch_xor
2234 ( volatile atomic_schar* __a__, signed char __m__ )
2235 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2238 inline unsigned char atomic_fetch_add_explicit
2239 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2240 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2242 inline unsigned char atomic_fetch_add
2243 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2244 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2247 inline unsigned char atomic_fetch_sub_explicit
2248 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2249 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2251 inline unsigned char atomic_fetch_sub
2252 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2253 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2256 inline unsigned char atomic_fetch_and_explicit
2257 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2258 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2260 inline unsigned char atomic_fetch_and
2261 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2262 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2265 inline unsigned char atomic_fetch_or_explicit
2266 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2267 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2269 inline unsigned char atomic_fetch_or
2270 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2271 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2274 inline unsigned char atomic_fetch_xor_explicit
2275 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2276 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2278 inline unsigned char atomic_fetch_xor
2279 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2280 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2283 inline short atomic_fetch_add_explicit
2284 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2285 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2287 inline short atomic_fetch_add
2288 ( volatile atomic_short* __a__, short __m__ )
2289 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2292 inline short atomic_fetch_sub_explicit
2293 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2294 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2296 inline short atomic_fetch_sub
2297 ( volatile atomic_short* __a__, short __m__ )
2298 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2301 inline short atomic_fetch_and_explicit
2302 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2303 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2305 inline short atomic_fetch_and
2306 ( volatile atomic_short* __a__, short __m__ )
2307 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2310 inline short atomic_fetch_or_explicit
2311 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2312 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2314 inline short atomic_fetch_or
2315 ( volatile atomic_short* __a__, short __m__ )
2316 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2319 inline short atomic_fetch_xor_explicit
2320 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2321 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2323 inline short atomic_fetch_xor
2324 ( volatile atomic_short* __a__, short __m__ )
2325 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2328 inline unsigned short atomic_fetch_add_explicit
2329 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2330 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2332 inline unsigned short atomic_fetch_add
2333 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2334 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2337 inline unsigned short atomic_fetch_sub_explicit
2338 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2339 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2341 inline unsigned short atomic_fetch_sub
2342 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2343 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2346 inline unsigned short atomic_fetch_and_explicit
2347 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2348 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2350 inline unsigned short atomic_fetch_and
2351 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2352 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2355 inline unsigned short atomic_fetch_or_explicit
2356 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2357 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2359 inline unsigned short atomic_fetch_or
2360 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2361 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2364 inline unsigned short atomic_fetch_xor_explicit
2365 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2366 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2368 inline unsigned short atomic_fetch_xor
2369 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2370 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2373 inline int atomic_fetch_add_explicit
2374 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2375 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2377 inline int atomic_fetch_add
2378 ( volatile atomic_int* __a__, int __m__ )
2379 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2382 inline int atomic_fetch_sub_explicit
2383 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2384 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2386 inline int atomic_fetch_sub
2387 ( volatile atomic_int* __a__, int __m__ )
2388 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2391 inline int atomic_fetch_and_explicit
2392 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2393 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2395 inline int atomic_fetch_and
2396 ( volatile atomic_int* __a__, int __m__ )
2397 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2400 inline int atomic_fetch_or_explicit
2401 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2402 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2404 inline int atomic_fetch_or
2405 ( volatile atomic_int* __a__, int __m__ )
2406 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2409 inline int atomic_fetch_xor_explicit
2410 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2411 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2413 inline int atomic_fetch_xor
2414 ( volatile atomic_int* __a__, int __m__ )
2415 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2418 inline unsigned int atomic_fetch_add_explicit
2419 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2420 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2422 inline unsigned int atomic_fetch_add
2423 ( volatile atomic_uint* __a__, unsigned int __m__ )
2424 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2427 inline unsigned int atomic_fetch_sub_explicit
2428 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2429 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2431 inline unsigned int atomic_fetch_sub
2432 ( volatile atomic_uint* __a__, unsigned int __m__ )
2433 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2436 inline unsigned int atomic_fetch_and_explicit
2437 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2438 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2440 inline unsigned int atomic_fetch_and
2441 ( volatile atomic_uint* __a__, unsigned int __m__ )
2442 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2445 inline unsigned int atomic_fetch_or_explicit
2446 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2447 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2449 inline unsigned int atomic_fetch_or
2450 ( volatile atomic_uint* __a__, unsigned int __m__ )
2451 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2454 inline unsigned int atomic_fetch_xor_explicit
2455 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2456 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2458 inline unsigned int atomic_fetch_xor
2459 ( volatile atomic_uint* __a__, unsigned int __m__ )
2460 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2463 inline long atomic_fetch_add_explicit
2464 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2465 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2467 inline long atomic_fetch_add
2468 ( volatile atomic_long* __a__, long __m__ )
2469 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2472 inline long atomic_fetch_sub_explicit
2473 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2474 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2476 inline long atomic_fetch_sub
2477 ( volatile atomic_long* __a__, long __m__ )
2478 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2481 inline long atomic_fetch_and_explicit
2482 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2483 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2485 inline long atomic_fetch_and
2486 ( volatile atomic_long* __a__, long __m__ )
2487 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2490 inline long atomic_fetch_or_explicit
2491 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2492 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2494 inline long atomic_fetch_or
2495 ( volatile atomic_long* __a__, long __m__ )
2496 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2499 inline long atomic_fetch_xor_explicit
2500 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2501 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2503 inline long atomic_fetch_xor
2504 ( volatile atomic_long* __a__, long __m__ )
2505 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2508 inline unsigned long atomic_fetch_add_explicit
2509 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2510 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2512 inline unsigned long atomic_fetch_add
2513 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2514 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2517 inline unsigned long atomic_fetch_sub_explicit
2518 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2519 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2521 inline unsigned long atomic_fetch_sub
2522 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2523 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2526 inline unsigned long atomic_fetch_and_explicit
2527 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2528 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2530 inline unsigned long atomic_fetch_and
2531 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2532 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2535 inline unsigned long atomic_fetch_or_explicit
2536 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2537 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2539 inline unsigned long atomic_fetch_or
2540 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2541 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2544 inline unsigned long atomic_fetch_xor_explicit
2545 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2546 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2548 inline unsigned long atomic_fetch_xor
2549 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2550 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2553 inline long long atomic_fetch_add_explicit
2554 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2555 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2557 inline long long atomic_fetch_add
2558 ( volatile atomic_llong* __a__, long long __m__ )
2559 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2562 inline long long atomic_fetch_sub_explicit
2563 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2564 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2566 inline long long atomic_fetch_sub
2567 ( volatile atomic_llong* __a__, long long __m__ )
2568 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2571 inline long long atomic_fetch_and_explicit
2572 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2573 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2575 inline long long atomic_fetch_and
2576 ( volatile atomic_llong* __a__, long long __m__ )
2577 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2580 inline long long atomic_fetch_or_explicit
2581 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2582 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2584 inline long long atomic_fetch_or
2585 ( volatile atomic_llong* __a__, long long __m__ )
2586 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2589 inline long long atomic_fetch_xor_explicit
2590 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2591 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2593 inline long long atomic_fetch_xor
2594 ( volatile atomic_llong* __a__, long long __m__ )
2595 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2598 inline unsigned long long atomic_fetch_add_explicit
2599 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2600 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2602 inline unsigned long long atomic_fetch_add
2603 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2604 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2607 inline unsigned long long atomic_fetch_sub_explicit
2608 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2609 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2611 inline unsigned long long atomic_fetch_sub
2612 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2613 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2616 inline unsigned long long atomic_fetch_and_explicit
2617 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2618 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2620 inline unsigned long long atomic_fetch_and
2621 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2622 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2625 inline unsigned long long atomic_fetch_or_explicit
2626 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2627 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2629 inline unsigned long long atomic_fetch_or
2630 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2631 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2634 inline unsigned long long atomic_fetch_xor_explicit
2635 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2636 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2638 inline unsigned long long atomic_fetch_xor
2639 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2640 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2643 inline wchar_t atomic_fetch_add_explicit
2644 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2645 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2647 inline wchar_t atomic_fetch_add
2648 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2649 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2652 inline wchar_t atomic_fetch_sub_explicit
2653 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2654 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2656 inline wchar_t atomic_fetch_sub
2657 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2658 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2661 inline wchar_t atomic_fetch_and_explicit
2662 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2663 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2665 inline wchar_t atomic_fetch_and
2666 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2667 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2670 inline wchar_t atomic_fetch_or_explicit
2671 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2672 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2674 inline wchar_t atomic_fetch_or
2675 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2676 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2679 inline wchar_t atomic_fetch_xor_explicit
2680 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2681 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2683 inline wchar_t atomic_fetch_xor
2684 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2685 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2691 #define atomic_is_lock_free( __a__ ) \
// Type-generic operations on the C-style atomic types.  These are
// macros rather than functions so that one definition serves every
// atomic struct via the _ATOMIC_* statement-expression implementations.
// The non-_explicit forms default to memory_order_seq_cst.

#define atomic_load( __a__ ) \
_ATOMIC_LOAD_( __a__, memory_order_seq_cst )

#define atomic_load_explicit( __a__, __x__ ) \
_ATOMIC_LOAD_( __a__, __x__ )

#define atomic_store( __a__, __m__ ) \
_ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )

#define atomic_store_explicit( __a__, __m__, __x__ ) \
_ATOMIC_STORE_( __a__, __m__, __x__ )

// swap is expressed as a plain assignment inside _ATOMIC_MODIFY_.
#define atomic_swap( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )

#define atomic_swap_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, =, __m__, __x__ )

#define atomic_compare_swap( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )

// NOTE(review): the failure order __y__ is accepted but not forwarded —
// _ATOMIC_CMPSWP_ takes a single order.  Confirm this is intentional
// (the lock-based emulation may not distinguish the two orders).
#define atomic_compare_swap_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )

#define atomic_fence( __a__, __x__ ) \
({ _ATOMIC_FENCE_( __a__, __x__ ); })
// Type-generic read-modify-write macros.  Each expands to
// _ATOMIC_MODIFY_ with the matching compound-assignment operator and
// yields the previous value; the non-_explicit forms default to
// memory_order_seq_cst.

#define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )

#define atomic_fetch_add( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )

#define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )

#define atomic_fetch_sub( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )

#define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )

#define atomic_fetch_and( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )

#define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )

#define atomic_fetch_or( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )

#define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )

#define atomic_fetch_xor( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
2763 inline bool atomic_bool::is_lock_free() const volatile
// atomic_bool member functions: thin forwarders to the C-style
// *_explicit free operations (macros above), passing `this` as the
// atomic object.  The same pattern is repeated for every atomic
// integral type in this header.

inline void atomic_bool::store
( bool __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline bool atomic_bool::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline bool atomic_bool::swap
( bool __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: derives the failure order from the success
// order (acq_rel -> acquire, release -> relaxed; otherwise unchanged).
inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// `return` of the atomic_fence statement-expression in a void function;
// relies on the GNU statement-expression extension used throughout.
inline void atomic_bool::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2794 inline bool atomic_address::is_lock_free() const volatile
// atomic_address member functions: void*-valued forwarders to the
// C-style *_explicit operations; same pattern as the other atomic
// types in this header.

inline void atomic_address::store
( void* __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline void* atomic_address::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline void* atomic_address::swap
( void* __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_address::compare_swap
( void*& __e__, void* __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_address::compare_swap
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_address::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2825 inline bool atomic_char::is_lock_free() const volatile
// atomic_char member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_char::store
( char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline char atomic_char::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline char atomic_char::swap
( char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_char::compare_swap
( char& __e__, char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_char::compare_swap
( char& __e__, char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_char::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2856 inline bool atomic_schar::is_lock_free() const volatile
// atomic_schar member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_schar::store
( signed char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline signed char atomic_schar::swap
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_schar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2887 inline bool atomic_uchar::is_lock_free() const volatile
// atomic_uchar member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_uchar::store
( unsigned char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned char atomic_uchar::swap
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uchar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2918 inline bool atomic_short::is_lock_free() const volatile
// atomic_short member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_short::store
( short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline short atomic_short::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline short atomic_short::swap
( short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_short::compare_swap
( short& __e__, short __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_short::compare_swap
( short& __e__, short __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_short::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2949 inline bool atomic_ushort::is_lock_free() const volatile
// atomic_ushort member functions: same forwarding pattern used for
// every atomic integral type in this header.

inline void atomic_ushort::store
( unsigned short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned short atomic_ushort::swap
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ushort::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
2980 inline bool atomic_int::is_lock_free() const volatile
// atomic_int member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_int::store
( int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline int atomic_int::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline int atomic_int::swap
( int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_int::compare_swap
( int& __e__, int __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_int::compare_swap
( int& __e__, int __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_int::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3011 inline bool atomic_uint::is_lock_free() const volatile
// atomic_uint member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_uint::store
( unsigned int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned int atomic_uint::swap
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uint::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3042 inline bool atomic_long::is_lock_free() const volatile
// atomic_long member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_long::store
( long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long atomic_long::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long atomic_long::swap
( long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_long::compare_swap
( long& __e__, long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_long::compare_swap
( long& __e__, long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_long::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3073 inline bool atomic_ulong::is_lock_free() const volatile
// atomic_ulong member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_ulong::store
( unsigned long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long atomic_ulong::swap
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ulong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3104 inline bool atomic_llong::is_lock_free() const volatile
// atomic_llong member functions: same forwarding pattern used for every
// atomic integral type in this header.

inline void atomic_llong::store
( long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long long atomic_llong::swap
( long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_llong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3135 inline bool atomic_ullong::is_lock_free() const volatile
// atomic_ullong member functions: same forwarding pattern used for
// every atomic integral type in this header.

inline void atomic_ullong::store
( unsigned long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long long atomic_ullong::swap
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ullong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3166 inline bool atomic_wchar_t::is_lock_free() const volatile
// atomic_wchar_t member functions: same forwarding pattern used for
// every atomic integral type in this header.

inline void atomic_wchar_t::store
( wchar_t __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline wchar_t atomic_wchar_t::swap
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order overload: failure order derived from the success order.
inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_wchar_t::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
3197 template< typename T >
3198 inline bool atomic<T>::is_lock_free() const volatile
// Generic atomic<T> member functions: implemented directly on the
// _ATOMIC_* statement-expression macros (the C-style free functions are
// only declared for the builtin types).

template< typename T >
inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
{ _ATOMIC_STORE_( this, __v__, __x__ ); }

template< typename T >
inline T atomic<T>::load( memory_order __x__ ) volatile
{ return _ATOMIC_LOAD_( this, __x__ ); }

template< typename T >
inline T atomic<T>::swap( T __v__, memory_order __x__ ) volatile
{ return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }

// NOTE(review): the failure order __y__ is accepted but not used —
// _ATOMIC_CMPSWP_ takes a single order.  Confirm this is intended.
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-order overload: failure order derived from the success order
// (acq_rel -> acquire, release -> relaxed; otherwise unchanged).
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__ ) volatile
{ return compare_swap( __r__, __v__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
// fetch_add / fetch_sub (and, for the integral types, fetch_and /
// fetch_or / fetch_xor) member functions.  Each forwards to the
// corresponding atomic_fetch_*_explicit free function/macro and yields
// the previous value.  atomic_address arithmetic is in raw bytes
// (ptrdiff_t offsets); the scaled pointer arithmetic lives in
// atomic<T*> further below.

inline void* atomic_address::fetch_add
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline void* atomic_address::fetch_sub
( ptrdiff_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_add
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_sub
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_and
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_or
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline char atomic_char::fetch_xor
( char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_add
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_sub
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_and
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_or
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::fetch_xor
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_add
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_sub
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_and
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_or
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::fetch_xor
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_add
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_sub
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_and
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_or
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline short atomic_short::fetch_xor
( short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_add
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_sub
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_and
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_or
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::fetch_xor
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_add
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_sub
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_and
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_or
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline int atomic_int::fetch_xor
( int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_add
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_sub
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_and
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_or
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::fetch_xor
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_add
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_sub
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_and
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_or
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long atomic_long::fetch_xor
( long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_add
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_sub
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_and
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_or
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::fetch_xor
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_add
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_sub
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_and
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_or
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::fetch_xor
( long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_add
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_sub
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_and
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_or
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::fetch_xor
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_add
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_sub
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_and
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_and_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_or
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_or_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::fetch_xor
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic<T*> member functions: layered on the untyped atomic_address
// base, with static_casts restoring the T* type.

template< typename T >
T* atomic<T*>::load( memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::load( __x__ ) ); }

template< typename T >
T* atomic<T*>::swap( T* __v__, memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::swap( __v__, __x__ ) ); }

// Reinterprets the T*& expected value as void*& so atomic_address can
// update it in place on failure.
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
{ return atomic_address::compare_swap( *reinterpret_cast<void**>( &__r__ ),
static_cast<void*>( __v__ ), __x__, __y__ ); }
//{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-order overload: failure order derived from the success order.
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__ ) volatile
{ return compare_swap( __r__, __v__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Pointer arithmetic: scales the element count by sizeof(T) before
// forwarding to the byte-based atomic_address fetch operations.
template< typename T >
T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }

template< typename T >
T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }