4 * @brief Common header for C11/C++11 atomics
6 * Note that some features are unavailable, as they require support from a true
10 #ifndef __IMPATOMIC_H__
11 #define __IMPATOMIC_H__
13 #include "memoryorder.h"
14 #include "cmodelint.h"
/* Compatibility shim: in this configuration CPP0X discards its argument,
 * stripping the C++0x/C++11-only tokens (=default, =delete, constexpr
 * constructors) that are passed through it below. */
#define CPP0X( feature )
/* Test-and-set flag: the minimal atomic primitive.
 * C++ member operations; the underlying bool field is declared further
 * down in this struct. */
typedef struct atomic_flag
    /* Atomically sets the flag; returns the value it held before. */
    bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
    /* Atomically clears the flag. */
    void clear( memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_flag() = default; )
    CPP0X( atomic_flag( const atomic_flag& ) = delete; )  /* non-copyable */
    atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);

/* Initializer for the single bool field: flag starts clear. */
#define ATOMIC_FLAG_INIT { false }
/* C interface for atomic_flag.  The _explicit variants take a
 * memory_order argument; the plain forms use memory_order_seq_cst. */
extern bool atomic_flag_test_and_set( volatile atomic_flag* );
extern bool atomic_flag_test_and_set_explicit
( volatile atomic_flag*, memory_order );
extern void atomic_flag_clear( volatile atomic_flag* );
extern void atomic_flag_clear_explicit
( volatile atomic_flag*, memory_order );
/* Model-checker helpers; presumably block until the flag is cleared --
 * confirm against the implementation file. */
extern void __atomic_flag_wait__
( volatile atomic_flag* );
extern void __atomic_flag_wait_explicit__
( volatile atomic_flag*, memory_order );
62 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
63 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
65 inline void atomic_flag::clear( memory_order __x__ ) volatile
66 { atomic_flag_clear_explicit( this, __x__ ); }
72 The remainder of the example implementation uses the following
73 macros. These macros exploit GNU extensions for value-returning
74 blocks (AKA statement expressions) and __typeof__.
76 The macros rely on data fields of atomic structs being named __f__.
77 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
78 __g__=flag, __m__=modified, __o__=operation, __r__=result,
79 __p__=pointer to field, __v__=value (for single evaluation),
__x__=memory-ordering (first ordering argument), and __y__=memory-ordering
(second ordering argument, where a macro takes two).
/* Atomic load: reads field __f__ of atomic struct __a__ through the model
 * checker (model_read_action) with memory order __x__.  GNU statement
 * expression; its value is the loaded field value. */
#define _ATOMIC_LOAD_( __a__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
/* Atomic store: evaluates __m__ once and writes it to field __f__ of __a__
 * via model_write_action with order __x__.  Values are funneled through
 * uint64_t, so the field type is assumed to fit in 64 bits. */
#define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__(__m__) __v__ = (__m__); \
model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
__v__ = __v__; /* Silence clang (-Wunused-value) */ \
/* Non-atomic initialization: evaluates __m__ once and records it for field
 * __f__ of __a__ via model_init_action (no memory-order argument). */
#define _ATOMIC_INIT_( __a__, __m__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__(__m__) __v__ = (__m__); \
model_init_action((void *) __p__, (uint64_t) __v__); \
__v__ = __v__; /* Silence clang (-Wunused-value) */ \
/* Atomic read-modify-write: reads the current value (model_rmwr_action),
 * applies the compound-assignment operator token __o__ (e.g. +=, &=) with
 * operand __v__ to a copy, and commits the result (model_rmw_action).
 * The fetched old value is kept in __old__, giving fetch_<op> semantics. */
#define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
__typeof__(__m__) __v__ = (__m__); \
__typeof__((__a__)->__f__) __copy__= __old__; \
__copy__ __o__ __v__; \
model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
__old__ = __old__; /* Silence clang (-Wunused-value) */ \
/* No spurious failure for now: the weak form is an alias of the strong
 * form, so compare_exchange_weak never fails spuriously in this model. */
#define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_

/* Atomic compare-exchange on field __f__ of __a__: reads the current value
 * (model_rmwr_action); if it equals the expected value *__q__, commits the
 * new value __v__ (model_rmw_action) and yields true; otherwise abandons
 * the RMW (model_rmwc_action), writes the observed value back through the
 * expected pointer, and yields false. */
#define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
__typeof__(__e__) __q__ = (__e__); \
__typeof__(__m__) __v__ = (__m__); \
__typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
if (__t__ == * __q__ ) {; \
model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
/* Memory fence with ordering __x__, routed through the model checker. */
#define _ATOMIC_FENCE_( __x__ ) \
({ model_fence_action(__x__);})
/* Lock-free property macros, using the standard encoding:
 * 0 = never lock-free, 1 = sometimes lock-free, 2 = always lock-free. */
#define ATOMIC_CHAR_LOCK_FREE 1
#define ATOMIC_CHAR16_T_LOCK_FREE 1
#define ATOMIC_CHAR32_T_LOCK_FREE 1
#define ATOMIC_WCHAR_T_LOCK_FREE 1
#define ATOMIC_SHORT_LOCK_FREE 1
#define ATOMIC_INT_LOCK_FREE 1
#define ATOMIC_LONG_LOCK_FREE 1
#define ATOMIC_LLONG_LOCK_FREE 1
#define ATOMIC_ADDRESS_LOCK_FREE 1
/* Atomic bool: C++11-style member API plus C-compatible free-function
 * friends (the _explicit forms declared as friends below). */
typedef struct atomic_bool
    bool is_lock_free() const volatile;
    void store( bool, memory_order = memory_order_seq_cst ) volatile;
    bool load( memory_order = memory_order_seq_cst ) volatile;
    bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
    /* Four-argument forms take separate success/failure orderings; the
     * two-argument forms default both to seq_cst. */
    bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
    bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
    bool compare_exchange_weak ( bool&, bool,
    memory_order = memory_order_seq_cst) volatile;
    bool compare_exchange_strong ( bool&, bool,
    memory_order = memory_order_seq_cst) volatile;

    /* NOTE(review): default constructor is deleted here, while the other
     * atomic_* types in this header default theirs -- confirm this
     * asymmetry is intentional. */
    CPP0X( atomic_bool() = delete; )
    CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
    CPP0X( atomic_bool( const atomic_bool& ) = delete; )
    atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);

    /* Assignment from bool performs a seq_cst store and returns the
     * assigned value. */
    bool operator =( bool __v__ ) volatile
    { store( __v__ ); return __v__; }

    friend void atomic_store_explicit( volatile atomic_bool*, bool,
    friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
    friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
    memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
    memory_order, memory_order );
/* Atomic void* (address) type: pointer store/load/exchange/CAS plus
 * byte-offset arithmetic via ptrdiff_t fetch_add/fetch_sub. */
typedef struct atomic_address
    bool is_lock_free() const volatile;
    void store( void*, memory_order = memory_order_seq_cst ) volatile;
    void* load( memory_order = memory_order_seq_cst ) volatile;
    void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
    bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
    bool compare_exchange_weak( void*&, void*,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( void*&, void*,
    memory_order = memory_order_seq_cst ) volatile;
    void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
    void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_address() = default; )
    CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_address( const atomic_address& ) = delete; )
    atomic_address& operator =( const atomic_address & ) CPP0X(=delete);

    /* Assignment stores and returns the assigned pointer. */
    void* operator =( void* __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* NOTE(review): += and -= return the fetch_* result, i.e. the OLD
     * pointer value, unlike the integral atomic_* types below whose
     * compound assignments return the new value -- confirm intended. */
    void* operator +=( ptrdiff_t __v__ ) volatile
    { return fetch_add( __v__ ); }

    void* operator -=( ptrdiff_t __v__ ) volatile
    { return fetch_sub( __v__ ); }

    friend void atomic_store_explicit( volatile atomic_address*, void*,
    friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
    friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
    void**, void*, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
    void**, void*, memory_order, memory_order );
    friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
    friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
/* Atomic char: full integral atomic API (store/load/exchange/CAS and the
 * five fetch_<op> operations) plus operator sugar over them. */
typedef struct atomic_char
    bool is_lock_free() const volatile;
    memory_order = memory_order_seq_cst ) volatile;
    char load( memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( char&, char,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( char&, char,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( char&, char,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( char&, char,
    memory_order = memory_order_seq_cst ) volatile;
    char fetch_add( char,
    memory_order = memory_order_seq_cst ) volatile;
    char fetch_sub( char,
    memory_order = memory_order_seq_cst ) volatile;
    char fetch_and( char,
    memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    char fetch_xor( char,
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_char() = default; )
    CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_char( const atomic_char& ) = delete; )
    atomic_char& operator =( const atomic_char& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    char operator =( char __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-increment/decrement return the OLD value (fetch_* result);
     * pre-forms and compound assignments return the newly stored value,
     * reconstructed from the fetched old value. */
    char operator ++( int ) volatile
    { return fetch_add( 1 ); }

    char operator --( int ) volatile
    { return fetch_sub( 1 ); }

    char operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    char operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    char operator +=( char __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    char operator -=( char __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    char operator &=( char __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    char operator |=( char __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    char operator ^=( char __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_char*, char,
    friend char atomic_load_explicit( volatile atomic_char*,
    friend char atomic_exchange_explicit( volatile atomic_char*,
    char, memory_order );
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
    char*, char, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
    char*, char, memory_order, memory_order );
    friend char atomic_fetch_add_explicit( volatile atomic_char*,
    char, memory_order );
    friend char atomic_fetch_sub_explicit( volatile atomic_char*,
    char, memory_order );
    friend char atomic_fetch_and_explicit( volatile atomic_char*,
    char, memory_order );
    friend char atomic_fetch_or_explicit( volatile atomic_char*,
    char, memory_order );
    friend char atomic_fetch_xor_explicit( volatile atomic_char*,
    char, memory_order );
/* Atomic signed char: same shape as atomic_char with signed char as the
 * value type. */
typedef struct atomic_schar
    bool is_lock_free() const volatile;
    void store( signed char,
    memory_order = memory_order_seq_cst ) volatile;
    signed char load( memory_order = memory_order_seq_cst ) volatile;
    signed char exchange( signed char,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( signed char&, signed char,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( signed char&, signed char,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( signed char&, signed char,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( signed char&, signed char,
    memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_add( signed char,
    memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_sub( signed char,
    memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_and( signed char,
    memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_or( signed char,
    memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_xor( signed char,
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_schar() = default; )
    CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_schar( const atomic_schar& ) = delete; )
    atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    signed char operator =( signed char __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the old value; pre-forms and compound
     * assignments return the newly stored value. */
    signed char operator ++( int ) volatile
    { return fetch_add( 1 ); }

    signed char operator --( int ) volatile
    { return fetch_sub( 1 ); }

    signed char operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    signed char operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    signed char operator +=( signed char __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    signed char operator -=( signed char __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    signed char operator &=( signed char __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    signed char operator |=( signed char __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    signed char operator ^=( signed char __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_schar*, signed char,
    friend signed char atomic_load_explicit( volatile atomic_schar*,
    friend signed char atomic_exchange_explicit( volatile atomic_schar*,
    signed char, memory_order );
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
    signed char*, signed char, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
    signed char*, signed char, memory_order, memory_order );
    friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
    signed char, memory_order );
    friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
    signed char, memory_order );
    friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
    signed char, memory_order );
    friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
    signed char, memory_order );
    friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
    signed char, memory_order );
/* Atomic unsigned char: same shape as atomic_char with unsigned char as
 * the value type. */
typedef struct atomic_uchar
    bool is_lock_free() const volatile;
    void store( unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned char load( memory_order = memory_order_seq_cst ) volatile;
    unsigned char exchange( unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( unsigned char&, unsigned char,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( unsigned char&, unsigned char,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( unsigned char&, unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( unsigned char&, unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_add( unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_sub( unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_and( unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_or( unsigned char,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_xor( unsigned char,
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_uchar() = default; )
    CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
    atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    unsigned char operator =( unsigned char __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the old value; pre-forms and compound
     * assignments return the newly stored value. */
    unsigned char operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned char operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned char operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned char operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    unsigned char operator +=( unsigned char __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned char operator -=( unsigned char __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned char operator &=( unsigned char __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned char operator |=( unsigned char __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned char operator ^=( unsigned char __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
    friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
    friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
    unsigned char, memory_order );
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
    unsigned char*, unsigned char, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
    unsigned char*, unsigned char, memory_order, memory_order );
    friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
    unsigned char, memory_order );
    friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
    unsigned char, memory_order );
    friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
    unsigned char, memory_order );
    friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
    unsigned char, memory_order );
    friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
    unsigned char, memory_order );
/* Atomic short: same shape as atomic_char with short as the value type. */
typedef struct atomic_short
    bool is_lock_free() const volatile;
    memory_order = memory_order_seq_cst ) volatile;
    short load( memory_order = memory_order_seq_cst ) volatile;
    short exchange( short,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( short&, short,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( short&, short,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( short&, short,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( short&, short,
    memory_order = memory_order_seq_cst ) volatile;
    short fetch_add( short,
    memory_order = memory_order_seq_cst ) volatile;
    short fetch_sub( short,
    memory_order = memory_order_seq_cst ) volatile;
    short fetch_and( short,
    memory_order = memory_order_seq_cst ) volatile;
    short fetch_or( short,
    memory_order = memory_order_seq_cst ) volatile;
    short fetch_xor( short,
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_short() = default; )
    CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_short( const atomic_short& ) = delete; )
    atomic_short& operator =( const atomic_short& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    short operator =( short __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the old value; pre-forms and compound
     * assignments return the newly stored value. */
    short operator ++( int ) volatile
    { return fetch_add( 1 ); }

    short operator --( int ) volatile
    { return fetch_sub( 1 ); }

    short operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    short operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    short operator +=( short __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    short operator -=( short __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    short operator &=( short __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    short operator |=( short __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    short operator ^=( short __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_short*, short,
    friend short atomic_load_explicit( volatile atomic_short*,
    friend short atomic_exchange_explicit( volatile atomic_short*,
    short, memory_order );
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
    short*, short, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
    short*, short, memory_order, memory_order );
    friend short atomic_fetch_add_explicit( volatile atomic_short*,
    short, memory_order );
    friend short atomic_fetch_sub_explicit( volatile atomic_short*,
    short, memory_order );
    friend short atomic_fetch_and_explicit( volatile atomic_short*,
    short, memory_order );
    friend short atomic_fetch_or_explicit( volatile atomic_short*,
    short, memory_order );
    friend short atomic_fetch_xor_explicit( volatile atomic_short*,
    short, memory_order );
/* Atomic unsigned short: same shape as atomic_char with unsigned short as
 * the value type. */
typedef struct atomic_ushort
    bool is_lock_free() const volatile;
    void store( unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned short load( memory_order = memory_order_seq_cst ) volatile;
    unsigned short exchange( unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( unsigned short&, unsigned short,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( unsigned short&, unsigned short,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( unsigned short&, unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( unsigned short&, unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_add( unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_sub( unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_and( unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_or( unsigned short,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_xor( unsigned short,
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_ushort() = default; )
    CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
    atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    unsigned short operator =( unsigned short __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the old value; pre-forms and compound
     * assignments return the newly stored value. */
    unsigned short operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned short operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned short operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned short operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    unsigned short operator +=( unsigned short __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned short operator -=( unsigned short __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned short operator &=( unsigned short __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned short operator |=( unsigned short __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned short operator ^=( unsigned short __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
    friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
    friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
    unsigned short, memory_order );
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
    unsigned short*, unsigned short, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
    unsigned short*, unsigned short, memory_order, memory_order );
    friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
    unsigned short, memory_order );
    friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
    unsigned short, memory_order );
    friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
    unsigned short, memory_order );
    friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
    unsigned short, memory_order );
    friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
    unsigned short, memory_order );

    /* Underlying stored value (the __f__ field the _ATOMIC_* macros use). */
    unsigned short __f__;
/* Atomic int: same shape as atomic_char with int as the value type. */
typedef struct atomic_int
    bool is_lock_free() const volatile;
    memory_order = memory_order_seq_cst ) volatile;
    int load( memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( int&, int,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( int&, int,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( int&, int,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( int&, int,
    memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_int() = default; )
    CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_int( const atomic_int& ) = delete; )
    atomic_int& operator =( const atomic_int& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    int operator =( int __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the old value; pre-forms and compound
     * assignments return the newly stored value. */
    int operator ++( int ) volatile
    { return fetch_add( 1 ); }

    int operator --( int ) volatile
    { return fetch_sub( 1 ); }

    int operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    int operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    int operator +=( int __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    int operator -=( int __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    int operator &=( int __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    int operator |=( int __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    int operator ^=( int __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_int*, int,
    friend int atomic_load_explicit( volatile atomic_int*,
    friend int atomic_exchange_explicit( volatile atomic_int*,
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
    int*, int, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
    int*, int, memory_order, memory_order );
    friend int atomic_fetch_add_explicit( volatile atomic_int*,
    friend int atomic_fetch_sub_explicit( volatile atomic_int*,
    friend int atomic_fetch_and_explicit( volatile atomic_int*,
    friend int atomic_fetch_or_explicit( volatile atomic_int*,
    friend int atomic_fetch_xor_explicit( volatile atomic_int*,
/* Atomic unsigned int: same shape as atomic_char with unsigned int as the
 * value type. */
typedef struct atomic_uint
    bool is_lock_free() const volatile;
    void store( unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned int load( memory_order = memory_order_seq_cst ) volatile;
    unsigned int exchange( unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( unsigned int&, unsigned int,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( unsigned int&, unsigned int,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( unsigned int&, unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( unsigned int&, unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_add( unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_sub( unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_and( unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_or( unsigned int,
    memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_xor( unsigned int,
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_uint() = default; )
    CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_uint( const atomic_uint& ) = delete; )
    atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    unsigned int operator =( unsigned int __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the old value; pre-forms and compound
     * assignments return the newly stored value. */
    unsigned int operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned int operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned int operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned int operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    unsigned int operator +=( unsigned int __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned int operator -=( unsigned int __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned int operator &=( unsigned int __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned int operator |=( unsigned int __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned int operator ^=( unsigned int __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
    friend unsigned int atomic_load_explicit( volatile atomic_uint*,
    friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
    unsigned int, memory_order );
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
    unsigned int*, unsigned int, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
    unsigned int*, unsigned int, memory_order, memory_order );
    friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
    unsigned int, memory_order );
    friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
    unsigned int, memory_order );
    friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
    unsigned int, memory_order );
    friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
    unsigned int, memory_order );
    friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
    unsigned int, memory_order );
/* Atomic long: same shape as atomic_char with long as the value type. */
typedef struct atomic_long
    bool is_lock_free() const volatile;
    memory_order = memory_order_seq_cst ) volatile;
    long load( memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_weak( long&, long,
    memory_order, memory_order ) volatile;
    bool compare_exchange_strong( long&, long,
    memory_order, memory_order ) volatile;
    bool compare_exchange_weak( long&, long,
    memory_order = memory_order_seq_cst ) volatile;
    bool compare_exchange_strong( long&, long,
    memory_order = memory_order_seq_cst ) volatile;
    long fetch_add( long,
    memory_order = memory_order_seq_cst ) volatile;
    long fetch_sub( long,
    memory_order = memory_order_seq_cst ) volatile;
    long fetch_and( long,
    memory_order = memory_order_seq_cst ) volatile;
    memory_order = memory_order_seq_cst ) volatile;
    long fetch_xor( long,
    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_long() = default; )
    CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_long( const atomic_long& ) = delete; )
    atomic_long& operator =( const atomic_long& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    long operator =( long __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the old value; pre-forms and compound
     * assignments return the newly stored value. */
    long operator ++( int ) volatile
    { return fetch_add( 1 ); }

    long operator --( int ) volatile
    { return fetch_sub( 1 ); }

    long operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    long operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    long operator +=( long __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    long operator -=( long __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    long operator &=( long __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    long operator |=( long __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    long operator ^=( long __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    friend void atomic_store_explicit( volatile atomic_long*, long,
    friend long atomic_load_explicit( volatile atomic_long*,
    friend long atomic_exchange_explicit( volatile atomic_long*,
    long, memory_order );
    friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
    long*, long, memory_order, memory_order );
    friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
    long*, long, memory_order, memory_order );
    friend long atomic_fetch_add_explicit( volatile atomic_long*,
    long, memory_order );
    friend long atomic_fetch_sub_explicit( volatile atomic_long*,
    long, memory_order );
    friend long atomic_fetch_and_explicit( volatile atomic_long*,
    long, memory_order );
    friend long atomic_fetch_or_explicit( volatile atomic_long*,
    long, memory_order );
    friend long atomic_fetch_xor_explicit( volatile atomic_long*,
    long, memory_order );
/* atomic_ulong: model-checker atomic wrapper for `unsigned long`.
 * Same shape as atomic_long above: member ops + operator sugar over the
 * fetch_* RMW primitives, with the C free functions declared as friends.
 * NOTE(review): extract is line-sampled — struct braces and the closing
 * `} atomic_ulong;` are not visible here. */
949 typedef struct atomic_ulong
952     bool is_lock_free() const volatile;
953     void store( unsigned long,
954                 memory_order = memory_order_seq_cst ) volatile;
955     unsigned long load( memory_order = memory_order_seq_cst ) volatile;
956     unsigned long exchange( unsigned long,
957                    memory_order = memory_order_seq_cst ) volatile;
/* CAS: explicit success/failure orderings, or seq_cst defaults. */
958     bool compare_exchange_weak( unsigned long&, unsigned long,
959                         memory_order, memory_order ) volatile;
960     bool compare_exchange_strong( unsigned long&, unsigned long,
961                           memory_order, memory_order ) volatile;
962     bool compare_exchange_weak( unsigned long&, unsigned long,
963                         memory_order = memory_order_seq_cst ) volatile;
964     bool compare_exchange_strong( unsigned long&, unsigned long,
965                           memory_order = memory_order_seq_cst ) volatile;
966     unsigned long fetch_add( unsigned long,
967                       memory_order = memory_order_seq_cst ) volatile;
968     unsigned long fetch_sub( unsigned long,
969                       memory_order = memory_order_seq_cst ) volatile;
970     unsigned long fetch_and( unsigned long,
971                       memory_order = memory_order_seq_cst ) volatile;
972     unsigned long fetch_or( unsigned long,
973                      memory_order = memory_order_seq_cst ) volatile;
974     unsigned long fetch_xor( unsigned long,
975                       memory_order = memory_order_seq_cst ) volatile;
/* C++11-only special members via the CPP0X macro. */
977     CPP0X( atomic_ulong() = default; )
978     CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
979     CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
980     atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
/* Raw-value assignment: store, then return the stored value. */
982     unsigned long operator =( unsigned long __v__ ) volatile
983     { store( __v__ ); return __v__; }
/* Post-forms return the OLD value; pre-forms the NEW value. */
985     unsigned long operator ++( int ) volatile
986     { return fetch_add( 1 ); }
988     unsigned long operator --( int ) volatile
989     { return fetch_sub( 1 ); }
991     unsigned long operator ++() volatile
992     { return fetch_add( 1 ) + 1; }
994     unsigned long operator --() volatile
995     { return fetch_sub( 1 ) - 1; }
997     unsigned long operator +=( unsigned long __v__ ) volatile
998     { return fetch_add( __v__ ) + __v__; }
1000     unsigned long operator -=( unsigned long __v__ ) volatile
1001     { return fetch_sub( __v__ ) - __v__; }
1003     unsigned long operator &=( unsigned long __v__ ) volatile
1004     { return fetch_and( __v__ ) & __v__; }
1006     unsigned long operator |=( unsigned long __v__ ) volatile
1007     { return fetch_or( __v__ ) | __v__; }
1009     unsigned long operator ^=( unsigned long __v__ ) volatile
1010     { return fetch_xor( __v__ ) ^ __v__; }
/* C free-function API (defined later) gets field access via friendship. */
1012     friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
1014     friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
1016     friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1017                                           unsigned long, memory_order );
1018     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1019                       unsigned long*, unsigned long, memory_order, memory_order );
1020     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1021                       unsigned long*, unsigned long, memory_order, memory_order );
1022     friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1023                                            unsigned long, memory_order );
1024     friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1025                                            unsigned long, memory_order );
1026     friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1027                                            unsigned long, memory_order );
1028     friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1029                                           unsigned long, memory_order );
1030     friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1031                                            unsigned long, memory_order );
/* Underlying storage (the __f__ field the _ATOMIC_* macros expect). */
1035     unsigned long __f__;
/* atomic_llong: model-checker atomic wrapper for `long long`.
 * Same pattern as atomic_long/atomic_ulong above.
 * NOTE(review): extract is line-sampled — struct braces, the data field
 * and the closing `} atomic_llong;` are not visible here. */
1039 typedef struct atomic_llong
1042     bool is_lock_free() const volatile;
1043     void store( long long,
1044                 memory_order = memory_order_seq_cst ) volatile;
1045     long long load( memory_order = memory_order_seq_cst ) volatile;
1046     long long exchange( long long,
1047                    memory_order = memory_order_seq_cst ) volatile;
/* CAS: explicit success/failure orderings, or seq_cst defaults. */
1048     bool compare_exchange_weak( long long&, long long,
1049                         memory_order, memory_order ) volatile;
1050     bool compare_exchange_strong( long long&, long long,
1051                           memory_order, memory_order ) volatile;
1052     bool compare_exchange_weak( long long&, long long,
1053                         memory_order = memory_order_seq_cst ) volatile;
1054     bool compare_exchange_strong( long long&, long long,
1055                           memory_order = memory_order_seq_cst ) volatile;
1056     long long fetch_add( long long,
1057                       memory_order = memory_order_seq_cst ) volatile;
1058     long long fetch_sub( long long,
1059                       memory_order = memory_order_seq_cst ) volatile;
1060     long long fetch_and( long long,
1061                       memory_order = memory_order_seq_cst ) volatile;
1062     long long fetch_or( long long,
1063                      memory_order = memory_order_seq_cst ) volatile;
1064     long long fetch_xor( long long,
1065                       memory_order = memory_order_seq_cst ) volatile;
/* C++11-only special members via the CPP0X macro. */
1067     CPP0X( atomic_llong() = default; )
1068     CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1069     CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1070     atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
/* Raw-value assignment: store, then return the stored value. */
1072     long long operator =( long long __v__ ) volatile
1073     { store( __v__ ); return __v__; }
/* Post-forms return the OLD value; pre-forms the NEW value. */
1075     long long operator ++( int ) volatile
1076     { return fetch_add( 1 ); }
1078     long long operator --( int ) volatile
1079     { return fetch_sub( 1 ); }
1081     long long operator ++() volatile
1082     { return fetch_add( 1 ) + 1; }
1084     long long operator --() volatile
1085     { return fetch_sub( 1 ) - 1; }
1087     long long operator +=( long long __v__ ) volatile
1088     { return fetch_add( __v__ ) + __v__; }
1090     long long operator -=( long long __v__ ) volatile
1091     { return fetch_sub( __v__ ) - __v__; }
1093     long long operator &=( long long __v__ ) volatile
1094     { return fetch_and( __v__ ) & __v__; }
1096     long long operator |=( long long __v__ ) volatile
1097     { return fetch_or( __v__ ) | __v__; }
1099     long long operator ^=( long long __v__ ) volatile
1100     { return fetch_xor( __v__ ) ^ __v__; }
/* C free-function API (defined later) gets field access via friendship. */
1102     friend void atomic_store_explicit( volatile atomic_llong*, long long,
1104     friend long long atomic_load_explicit( volatile atomic_llong*,
1106     friend long long atomic_exchange_explicit( volatile atomic_llong*,
1107                                           long long, memory_order );
1108     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1109                       long long*, long long, memory_order, memory_order );
1110     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1111                       long long*, long long, memory_order, memory_order );
1112     friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1113                                            long long, memory_order );
1114     friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1115                                            long long, memory_order );
1116     friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1117                                            long long, memory_order );
1118     friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1119                                           long long, memory_order );
1120     friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1121                                            long long, memory_order );
/* atomic_ullong: model-checker atomic wrapper for `unsigned long long`.
 * Same pattern as the integral atomics above.
 * NOTE(review): extract is line-sampled — struct braces and the closing
 * `} atomic_ullong;` are not visible here. */
1129 typedef struct atomic_ullong
1132     bool is_lock_free() const volatile;
1133     void store( unsigned long long,
1134                 memory_order = memory_order_seq_cst ) volatile;
1135     unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1136     unsigned long long exchange( unsigned long long,
1137                    memory_order = memory_order_seq_cst ) volatile;
/* CAS: explicit success/failure orderings, or seq_cst defaults. */
1138     bool compare_exchange_weak( unsigned long long&, unsigned long long,
1139                         memory_order, memory_order ) volatile;
1140     bool compare_exchange_strong( unsigned long long&, unsigned long long,
1141                           memory_order, memory_order ) volatile;
1142     bool compare_exchange_weak( unsigned long long&, unsigned long long,
1143                         memory_order = memory_order_seq_cst ) volatile;
1144     bool compare_exchange_strong( unsigned long long&, unsigned long long,
1145                           memory_order = memory_order_seq_cst ) volatile;
1146     unsigned long long fetch_add( unsigned long long,
1147                       memory_order = memory_order_seq_cst ) volatile;
1148     unsigned long long fetch_sub( unsigned long long,
1149                       memory_order = memory_order_seq_cst ) volatile;
1150     unsigned long long fetch_and( unsigned long long,
1151                       memory_order = memory_order_seq_cst ) volatile;
1152     unsigned long long fetch_or( unsigned long long,
1153                      memory_order = memory_order_seq_cst ) volatile;
1154     unsigned long long fetch_xor( unsigned long long,
1155                       memory_order = memory_order_seq_cst ) volatile;
/* C++11-only special members via the CPP0X macro. */
1157     CPP0X( atomic_ullong() = default; )
1158     CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1159     CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1160     atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
/* Raw-value assignment: store, then return the stored value. */
1162     unsigned long long operator =( unsigned long long __v__ ) volatile
1163     { store( __v__ ); return __v__; }
/* Post-forms return the OLD value; pre-forms the NEW value. */
1165     unsigned long long operator ++( int ) volatile
1166     { return fetch_add( 1 ); }
1168     unsigned long long operator --( int ) volatile
1169     { return fetch_sub( 1 ); }
1171     unsigned long long operator ++() volatile
1172     { return fetch_add( 1 ) + 1; }
1174     unsigned long long operator --() volatile
1175     { return fetch_sub( 1 ) - 1; }
1177     unsigned long long operator +=( unsigned long long __v__ ) volatile
1178     { return fetch_add( __v__ ) + __v__; }
1180     unsigned long long operator -=( unsigned long long __v__ ) volatile
1181     { return fetch_sub( __v__ ) - __v__; }
1183     unsigned long long operator &=( unsigned long long __v__ ) volatile
1184     { return fetch_and( __v__ ) & __v__; }
1186     unsigned long long operator |=( unsigned long long __v__ ) volatile
1187     { return fetch_or( __v__ ) | __v__; }
1189     unsigned long long operator ^=( unsigned long long __v__ ) volatile
1190     { return fetch_xor( __v__ ) ^ __v__; }
/* C free-function API (defined later) gets field access via friendship. */
1192     friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1194     friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1196     friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1197                                           unsigned long long, memory_order );
1198     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1199                       unsigned long long*, unsigned long long, memory_order, memory_order );
1200     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1201                       unsigned long long*, unsigned long long, memory_order, memory_order );
1202     friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1203                                            unsigned long long, memory_order );
1204     friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1205                                            unsigned long long, memory_order );
1206     friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1207                                            unsigned long long, memory_order );
1208     friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1209                                           unsigned long long, memory_order );
1210     friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1211                                            unsigned long long, memory_order );
/* Underlying storage (the __f__ field the _ATOMIC_* macros expect). */
1215     unsigned long long __f__;
/* Width-named atomic typedefs, mirroring <stdint.h>'s least/fast families.
 * Each maps onto the fundamental atomic struct of the matching size. */
1219 typedef atomic_schar atomic_int_least8_t;
1220 typedef atomic_uchar atomic_uint_least8_t;
1221 typedef atomic_short atomic_int_least16_t;
1222 typedef atomic_ushort atomic_uint_least16_t;
1223 typedef atomic_int atomic_int_least32_t;
1224 typedef atomic_uint atomic_uint_least32_t;
1225 typedef atomic_llong atomic_int_least64_t;
1226 typedef atomic_ullong atomic_uint_least64_t;
1228 typedef atomic_schar atomic_int_fast8_t;
1229 typedef atomic_uchar atomic_uint_fast8_t;
1230 typedef atomic_short atomic_int_fast16_t;
1231 typedef atomic_ushort atomic_uint_fast16_t;
1232 typedef atomic_int atomic_int_fast32_t;
1233 typedef atomic_uint atomic_uint_fast32_t;
1234 typedef atomic_llong atomic_int_fast64_t;
1235 typedef atomic_ullong atomic_uint_fast64_t;
/* Pointer-sized and size-related typedefs.
 * NOTE(review): these assume `long` is pointer-sized (LP64); on LLP64
 * targets (e.g. 64-bit Windows) intptr_t/size_t are not `long` — confirm
 * the supported platforms before relying on these mappings. */
1237 typedef atomic_long atomic_intptr_t;
1238 typedef atomic_ulong atomic_uintptr_t;
1240 typedef atomic_long atomic_ssize_t;
1241 typedef atomic_ulong atomic_size_t;
1243 typedef atomic_long atomic_ptrdiff_t;
1245 typedef atomic_llong atomic_intmax_t;
1246 typedef atomic_ullong atomic_uintmax_t;
/* atomic_wchar_t: model-checker atomic wrapper for `wchar_t`, same pattern
 * as the integral atomics above.
 * NOTE(review): extract is line-sampled — struct braces, data field and
 * closing `} atomic_wchar_t;` are not visible here.  A competing typedef
 * `typedef atomic_int_least32_t atomic_wchar_t;` appears further below;
 * presumably the two are separated by preprocessor conditionals in the
 * missing lines — verify against the full header. */
1252 typedef struct atomic_wchar_t
1255     bool is_lock_free() const volatile;
1256     void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1257     wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1258     wchar_t exchange( wchar_t,
1259                       memory_order = memory_order_seq_cst ) volatile;
/* CAS: explicit success/failure orderings, or seq_cst defaults. */
1260     bool compare_exchange_weak( wchar_t&, wchar_t,
1261                         memory_order, memory_order ) volatile;
1262     bool compare_exchange_strong( wchar_t&, wchar_t,
1263                           memory_order, memory_order ) volatile;
1264     bool compare_exchange_weak( wchar_t&, wchar_t,
1265                         memory_order = memory_order_seq_cst ) volatile;
1266     bool compare_exchange_strong( wchar_t&, wchar_t,
1267                           memory_order = memory_order_seq_cst ) volatile;
1268     wchar_t fetch_add( wchar_t,
1269                        memory_order = memory_order_seq_cst ) volatile;
1270     wchar_t fetch_sub( wchar_t,
1271                        memory_order = memory_order_seq_cst ) volatile;
1272     wchar_t fetch_and( wchar_t,
1273                        memory_order = memory_order_seq_cst ) volatile;
1274     wchar_t fetch_or( wchar_t,
1275                       memory_order = memory_order_seq_cst ) volatile;
1276     wchar_t fetch_xor( wchar_t,
1277                        memory_order = memory_order_seq_cst ) volatile;
/* C++11-only special members via the CPP0X macro. */
1279     CPP0X( atomic_wchar_t() = default; )
1280     CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1281     CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1282     atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
/* Raw-value assignment: store, then return the stored value. */
1284     wchar_t operator =( wchar_t __v__ ) volatile
1285     { store( __v__ ); return __v__; }
/* Post-forms return the OLD value; pre-forms the NEW value. */
1287     wchar_t operator ++( int ) volatile
1288     { return fetch_add( 1 ); }
1290     wchar_t operator --( int ) volatile
1291     { return fetch_sub( 1 ); }
1293     wchar_t operator ++() volatile
1294     { return fetch_add( 1 ) + 1; }
1296     wchar_t operator --() volatile
1297     { return fetch_sub( 1 ) - 1; }
1299     wchar_t operator +=( wchar_t __v__ ) volatile
1300     { return fetch_add( __v__ ) + __v__; }
1302     wchar_t operator -=( wchar_t __v__ ) volatile
1303     { return fetch_sub( __v__ ) - __v__; }
1305     wchar_t operator &=( wchar_t __v__ ) volatile
1306     { return fetch_and( __v__ ) & __v__; }
1308     wchar_t operator |=( wchar_t __v__ ) volatile
1309     { return fetch_or( __v__ ) | __v__; }
1311     wchar_t operator ^=( wchar_t __v__ ) volatile
1312     { return fetch_xor( __v__ ) ^ __v__; }
/* C free-function API (defined later) gets field access via friendship. */
1314     friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1316     friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1318     friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1319                                              wchar_t, memory_order );
1320     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1321                     wchar_t*, wchar_t, memory_order, memory_order );
1322     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1323                     wchar_t*, wchar_t, memory_order, memory_order );
1324     friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1325                                               wchar_t, memory_order );
1326     friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1327                                               wchar_t, memory_order );
1328     friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1329                                               wchar_t, memory_order );
1330     friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1331                                              wchar_t, memory_order );
1332     friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1333                                               wchar_t, memory_order );
/* Character atomics as width typedefs (char16_t/char32_t map to the
 * least-width atomics above).
 * NOTE(review): `atomic_wchar_t` here collides with the struct of the
 * same name above — presumably this is the alternative branch of a
 * preprocessor conditional whose #if/#else lines are missing from this
 * extract; confirm before editing. */
1343 typedef atomic_int_least16_t atomic_char16_t;
1344 typedef atomic_int_least32_t atomic_char32_t;
1345 typedef atomic_int_least32_t atomic_wchar_t;
/* Primary template atomic<T>: the generic subset of the interface —
 * load/store/exchange/CAS only (no arithmetic fetch ops, which exist only
 * on the integral/pointer forms above).
 * NOTE(review): extract is line-sampled — the `struct atomic` header line,
 * braces, and the T __f__ field are not visible here. */
1352 template< typename T >
1357     bool is_lock_free() const volatile;
1358     void store( T, memory_order = memory_order_seq_cst ) volatile;
1359     T load( memory_order = memory_order_seq_cst ) volatile;
1360     T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1361     bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1362     bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1363     bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1364     bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
/* C++11-only special members; value ctor is explicit. */
1366     CPP0X( atomic() = default; )
1367     CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1368     CPP0X( atomic( const atomic& ) = delete; )
1369     atomic& operator =( const atomic& ) CPP0X(=delete);
/* Raw-value assignment: store, then return the stored value. */
1371     T operator =( T __v__ ) volatile
1372     { store( __v__ ); return __v__; }
/* Partial specialization atomic<T*>: typed-pointer atomic layered over
 * atomic_address.  load/exchange/CAS are re-declared with T* types;
 * fetch_add/fetch_sub take a ptrdiff_t element offset.
 * NOTE(review): extract is line-sampled — braces and closing `};` are not
 * visible here. */
1383 template<typename T> struct atomic< T* > : atomic_address
1385     T* load( memory_order = memory_order_seq_cst ) volatile;
1386     T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1387     bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1388     bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1389     bool compare_exchange_weak( T*&, T*,
1390                         memory_order = memory_order_seq_cst ) volatile;
1391     bool compare_exchange_strong( T*&, T*,
1392                           memory_order = memory_order_seq_cst ) volatile;
1393     T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1394     T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1396     CPP0X( atomic() = default; )
1397     CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1398     CPP0X( atomic( const atomic& ) = delete; )
1399     atomic& operator =( const atomic& ) CPP0X(=delete);
/* Raw-pointer assignment: store, then return the stored pointer. */
1401     T* operator =( T* __v__ ) volatile
1402     { store( __v__ ); return __v__; }
/* Post-forms return the OLD pointer; pre-forms the NEW pointer. */
1404     T* operator ++( int ) volatile
1405     { return fetch_add( 1 ); }
1407     T* operator --( int ) volatile
1408     { return fetch_sub( 1 ); }
1410     T* operator ++() volatile
1411     { return fetch_add( 1 ) + 1; }
1413     T* operator --() volatile
1414     { return fetch_sub( 1 ) - 1; }
/* NOTE(review): suspected defect — these compound operators take `T*`
 * where fetch_add/fetch_sub (declared above) take `ptrdiff_t`, and
 * `fetch_sub(v) - v` on two pointers yields ptrdiff_t, not T*.  Upstream
 * versions of this header use `ptrdiff_t __v__` here; cannot fix safely
 * from this gapped extract — verify against the full file. */
1416     T* operator +=( T* __v__ ) volatile
1417     { return fetch_add( __v__ ) + __v__; }
1419     T* operator -=( T* __v__ ) volatile
1420     { return fetch_sub( __v__ ) - __v__; }
/* Full specializations wiring atomic<X> onto the corresponding atomic_X
 * base struct.  Each re-exposes (under C++0x, via CPP0X) the default
 * constructor, an explicit value constructor forwarding to the base, and
 * deleted copy operations, plus assignment from the raw value which
 * stores and returns the stored value.
 * NOTE(review): extract is line-sampled — each specialization's braces
 * and closing `};` are not visible here. */
1428 template<> struct atomic< bool > : atomic_bool
1430     CPP0X( atomic() = default; )
1431     CPP0X( constexpr explicit atomic( bool __v__ )
1432     : atomic_bool( __v__ ) { } )
1433     CPP0X( atomic( const atomic& ) = delete; )
1434     atomic& operator =( const atomic& ) CPP0X(=delete);
1436     bool operator =( bool __v__ ) volatile
1437     { store( __v__ ); return __v__; }
1441 template<> struct atomic< void* > : atomic_address
1443     CPP0X( atomic() = default; )
1444     CPP0X( constexpr explicit atomic( void* __v__ )
1445     : atomic_address( __v__ ) { } )
1446     CPP0X( atomic( const atomic& ) = delete; )
1447     atomic& operator =( const atomic& ) CPP0X(=delete);
1449     void* operator =( void* __v__ ) volatile
1450     { store( __v__ ); return __v__; }
1454 template<> struct atomic< char > : atomic_char
1456     CPP0X( atomic() = default; )
1457     CPP0X( constexpr explicit atomic( char __v__ )
1458     : atomic_char( __v__ ) { } )
1459     CPP0X( atomic( const atomic& ) = delete; )
1460     atomic& operator =( const atomic& ) CPP0X(=delete);
1462     char operator =( char __v__ ) volatile
1463     { store( __v__ ); return __v__; }
1467 template<> struct atomic< signed char > : atomic_schar
1469     CPP0X( atomic() = default; )
1470     CPP0X( constexpr explicit atomic( signed char __v__ )
1471     : atomic_schar( __v__ ) { } )
1472     CPP0X( atomic( const atomic& ) = delete; )
1473     atomic& operator =( const atomic& ) CPP0X(=delete);
1475     signed char operator =( signed char __v__ ) volatile
1476     { store( __v__ ); return __v__; }
1480 template<> struct atomic< unsigned char > : atomic_uchar
1482     CPP0X( atomic() = default; )
1483     CPP0X( constexpr explicit atomic( unsigned char __v__ )
1484     : atomic_uchar( __v__ ) { } )
1485     CPP0X( atomic( const atomic& ) = delete; )
1486     atomic& operator =( const atomic& ) CPP0X(=delete);
1488     unsigned char operator =( unsigned char __v__ ) volatile
1489     { store( __v__ ); return __v__; }
1493 template<> struct atomic< short > : atomic_short
1495     CPP0X( atomic() = default; )
1496     CPP0X( constexpr explicit atomic( short __v__ )
1497     : atomic_short( __v__ ) { } )
1498     CPP0X( atomic( const atomic& ) = delete; )
1499     atomic& operator =( const atomic& ) CPP0X(=delete);
1501     short operator =( short __v__ ) volatile
1502     { store( __v__ ); return __v__; }
1506 template<> struct atomic< unsigned short > : atomic_ushort
1508     CPP0X( atomic() = default; )
1509     CPP0X( constexpr explicit atomic( unsigned short __v__ )
1510     : atomic_ushort( __v__ ) { } )
1511     CPP0X( atomic( const atomic& ) = delete; )
1512     atomic& operator =( const atomic& ) CPP0X(=delete);
1514     unsigned short operator =( unsigned short __v__ ) volatile
1515     { store( __v__ ); return __v__; }
1519 template<> struct atomic< int > : atomic_int
1521     CPP0X( atomic() = default; )
1522     CPP0X( constexpr explicit atomic( int __v__ )
1523     : atomic_int( __v__ ) { } )
1524     CPP0X( atomic( const atomic& ) = delete; )
1525     atomic& operator =( const atomic& ) CPP0X(=delete);
1527     int operator =( int __v__ ) volatile
1528     { store( __v__ ); return __v__; }
1532 template<> struct atomic< unsigned int > : atomic_uint
1534     CPP0X( atomic() = default; )
1535     CPP0X( constexpr explicit atomic( unsigned int __v__ )
1536     : atomic_uint( __v__ ) { } )
1537     CPP0X( atomic( const atomic& ) = delete; )
1538     atomic& operator =( const atomic& ) CPP0X(=delete);
1540     unsigned int operator =( unsigned int __v__ ) volatile
1541     { store( __v__ ); return __v__; }
1545 template<> struct atomic< long > : atomic_long
1547     CPP0X( atomic() = default; )
1548     CPP0X( constexpr explicit atomic( long __v__ )
1549     : atomic_long( __v__ ) { } )
1550     CPP0X( atomic( const atomic& ) = delete; )
1551     atomic& operator =( const atomic& ) CPP0X(=delete);
1553     long operator =( long __v__ ) volatile
1554     { store( __v__ ); return __v__; }
1558 template<> struct atomic< unsigned long > : atomic_ulong
1560     CPP0X( atomic() = default; )
1561     CPP0X( constexpr explicit atomic( unsigned long __v__ )
1562     : atomic_ulong( __v__ ) { } )
1563     CPP0X( atomic( const atomic& ) = delete; )
1564     atomic& operator =( const atomic& ) CPP0X(=delete);
1566     unsigned long operator =( unsigned long __v__ ) volatile
1567     { store( __v__ ); return __v__; }
1571 template<> struct atomic< long long > : atomic_llong
1573     CPP0X( atomic() = default; )
1574     CPP0X( constexpr explicit atomic( long long __v__ )
1575     : atomic_llong( __v__ ) { } )
1576     CPP0X( atomic( const atomic& ) = delete; )
1577     atomic& operator =( const atomic& ) CPP0X(=delete);
1579     long long operator =( long long __v__ ) volatile
1580     { store( __v__ ); return __v__; }
1584 template<> struct atomic< unsigned long long > : atomic_ullong
1586     CPP0X( atomic() = default; )
1587     CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1588     : atomic_ullong( __v__ ) { } )
1589     CPP0X( atomic( const atomic& ) = delete; )
1590     atomic& operator =( const atomic& ) CPP0X(=delete);
1592     unsigned long long operator =( unsigned long long __v__ ) volatile
1593     { store( __v__ ); return __v__; }
1597 template<> struct atomic< wchar_t > : atomic_wchar_t
1599     CPP0X( atomic() = default; )
1600     CPP0X( constexpr explicit atomic( wchar_t __v__ )
1601     : atomic_wchar_t( __v__ ) { } )
1602     CPP0X( atomic( const atomic& ) = delete; )
1603     atomic& operator =( const atomic& ) CPP0X(=delete);
1605     wchar_t operator =( wchar_t __v__ ) volatile
1606     { store( __v__ ); return __v__; }
/* atomic_bool free-function API.  The *_explicit forms expand the
 * _ATOMIC_* helper macros (declared earlier in this header; _ATOMIC_LOAD_
 * funnels into model_read_action) and the non-explicit forms simply
 * default the ordering to memory_order_seq_cst.
 * NOTE(review): the CAS *_explicit functions receive a failure ordering
 * __y__ but pass only __x__ to the macro — the failure order is ignored;
 * presumably intentional for the model checker, but verify.  The body of
 * atomic_is_lock_free is not visible in this extract. */
1616 inline bool atomic_is_lock_free
1617 ( const volatile atomic_bool* __a__ )
1620 inline bool atomic_load_explicit
1621 ( volatile atomic_bool* __a__, memory_order __x__ )
1622 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1624 inline bool atomic_load
1625 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
/* Non-atomic initialization of the underlying field. */
1627 inline void atomic_init
1628 ( volatile atomic_bool* __a__, bool __m__ )
1629 { _ATOMIC_INIT_( __a__, __m__ ); }
1631 inline void atomic_store_explicit
1632 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1633 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1635 inline void atomic_store
1636 ( volatile atomic_bool* __a__, bool __m__ )
1637 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
/* Exchange is a plain-assignment RMW through _ATOMIC_MODIFY_. */
1639 inline bool atomic_exchange_explicit
1640 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1641 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1643 inline bool atomic_exchange
1644 ( volatile atomic_bool* __a__, bool __m__ )
1645 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1647 inline bool atomic_compare_exchange_weak_explicit
1648 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1649   memory_order __x__, memory_order __y__ )
1650 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1652 inline bool atomic_compare_exchange_strong_explicit
1653 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1654   memory_order __x__, memory_order __y__ )
1655 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1657 inline bool atomic_compare_exchange_weak
1658 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1659 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1660                                                 memory_order_seq_cst, memory_order_seq_cst ); }
1662 inline bool atomic_compare_exchange_strong
1663 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1664 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1665                                                   memory_order_seq_cst, memory_order_seq_cst ); }
/* atomic_address free-function API: untyped void* atomics; CAS takes the
 * expected value through a void**.  Same macro plumbing and seq_cst
 * defaulting as the atomic_bool section above.
 * NOTE(review): the failure ordering __y__ is ignored by the CAS macros,
 * and the body of atomic_is_lock_free is not visible in this extract. */
1668 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1671 inline void* atomic_load_explicit
1672 ( volatile atomic_address* __a__, memory_order __x__ )
1673 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1675 inline void* atomic_load( volatile atomic_address* __a__ )
1676 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
/* Non-atomic initialization of the underlying field. */
1678 inline void atomic_init
1679 ( volatile atomic_address* __a__, void* __m__ )
1680 { _ATOMIC_INIT_( __a__, __m__ ); }
1682 inline void atomic_store_explicit
1683 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1684 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1686 inline void atomic_store
1687 ( volatile atomic_address* __a__, void* __m__ )
1688 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1690 inline void* atomic_exchange_explicit
1691 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1692 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1694 inline void* atomic_exchange
1695 ( volatile atomic_address* __a__, void* __m__ )
1696 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1698 inline bool atomic_compare_exchange_weak_explicit
1699 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1700   memory_order __x__, memory_order __y__ )
1701 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1703 inline bool atomic_compare_exchange_strong_explicit
1704 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1705   memory_order __x__, memory_order __y__ )
1706 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1708 inline bool atomic_compare_exchange_weak
1709 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1710 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1711                                                 memory_order_seq_cst, memory_order_seq_cst ); }
1713 inline bool atomic_compare_exchange_strong
1714 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1715 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1716                                                   memory_order_seq_cst, memory_order_seq_cst ); }
/* atomic_char free-function API — same macro plumbing and seq_cst
 * defaulting as the sections above.
 * NOTE(review): CAS failure ordering __y__ is ignored; the body of
 * atomic_is_lock_free is not visible in this extract. */
1719 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1722 inline char atomic_load_explicit
1723 ( volatile atomic_char* __a__, memory_order __x__ )
1724 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1726 inline char atomic_load( volatile atomic_char* __a__ )
1727 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
/* Non-atomic initialization of the underlying field. */
1729 inline void atomic_init
1730 ( volatile atomic_char* __a__, char __m__ )
1731 { _ATOMIC_INIT_( __a__, __m__ ); }
1733 inline void atomic_store_explicit
1734 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1735 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1737 inline void atomic_store
1738 ( volatile atomic_char* __a__, char __m__ )
1739 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1741 inline char atomic_exchange_explicit
1742 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1743 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1745 inline char atomic_exchange
1746 ( volatile atomic_char* __a__, char __m__ )
1747 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1749 inline bool atomic_compare_exchange_weak_explicit
1750 ( volatile atomic_char* __a__, char* __e__, char __m__,
1751   memory_order __x__, memory_order __y__ )
1752 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1754 inline bool atomic_compare_exchange_strong_explicit
1755 ( volatile atomic_char* __a__, char* __e__, char __m__,
1756   memory_order __x__, memory_order __y__ )
1757 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1759 inline bool atomic_compare_exchange_weak
1760 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1761 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1762                                                 memory_order_seq_cst, memory_order_seq_cst ); }
1764 inline bool atomic_compare_exchange_strong
1765 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1766 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1767                                                   memory_order_seq_cst, memory_order_seq_cst ); }
/* atomic_schar free-function API — same macro plumbing and seq_cst
 * defaulting as the sections above.
 * NOTE(review): CAS failure ordering __y__ is ignored; the body of
 * atomic_is_lock_free is not visible in this extract. */
1770 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1773 inline signed char atomic_load_explicit
1774 ( volatile atomic_schar* __a__, memory_order __x__ )
1775 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1777 inline signed char atomic_load( volatile atomic_schar* __a__ )
1778 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
/* Non-atomic initialization of the underlying field. */
1780 inline void atomic_init
1781 ( volatile atomic_schar* __a__, signed char __m__ )
1782 { _ATOMIC_INIT_( __a__, __m__ ); }
1784 inline void atomic_store_explicit
1785 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1786 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1788 inline void atomic_store
1789 ( volatile atomic_schar* __a__, signed char __m__ )
1790 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1792 inline signed char atomic_exchange_explicit
1793 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1794 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1796 inline signed char atomic_exchange
1797 ( volatile atomic_schar* __a__, signed char __m__ )
1798 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1800 inline bool atomic_compare_exchange_weak_explicit
1801 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1802   memory_order __x__, memory_order __y__ )
1803 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1805 inline bool atomic_compare_exchange_strong_explicit
1806 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1807   memory_order __x__, memory_order __y__ )
1808 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1810 inline bool atomic_compare_exchange_weak
1811 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1812 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1813                                                 memory_order_seq_cst, memory_order_seq_cst ); }
1815 inline bool atomic_compare_exchange_strong
1816 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1817 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1818                                                   memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_uchar operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
1821 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1824 inline unsigned char atomic_load_explicit
1825 ( volatile atomic_uchar* __a__, memory_order __x__ )
1826 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1828 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1829 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1831 inline void atomic_init
1832 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1833 { _ATOMIC_INIT_( __a__, __m__ ); }
1835 inline void atomic_store_explicit
1836 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1837 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1839 inline void atomic_store
1840 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1841 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1843 inline unsigned char atomic_exchange_explicit
1844 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1845 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1847 inline unsigned char atomic_exchange
1848 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1849 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
1851 inline bool atomic_compare_exchange_weak_explicit
1852 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1853 memory_order __x__, memory_order __y__ )
1854 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1856 inline bool atomic_compare_exchange_strong_explicit
1857 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1858 memory_order __x__, memory_order __y__ )
1859 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1861 inline bool atomic_compare_exchange_weak
1862 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1863 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1864 memory_order_seq_cst, memory_order_seq_cst ); }
1866 inline bool atomic_compare_exchange_strong
1867 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1868 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1869 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_short operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
1872 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1875 inline short atomic_load_explicit
1876 ( volatile atomic_short* __a__, memory_order __x__ )
1877 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1879 inline short atomic_load( volatile atomic_short* __a__ )
1880 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1882 inline void atomic_init
1883 ( volatile atomic_short* __a__, short __m__ )
1884 { _ATOMIC_INIT_( __a__, __m__ ); }
1886 inline void atomic_store_explicit
1887 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1888 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1890 inline void atomic_store
1891 ( volatile atomic_short* __a__, short __m__ )
1892 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1894 inline short atomic_exchange_explicit
1895 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1896 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1898 inline short atomic_exchange
1899 ( volatile atomic_short* __a__, short __m__ )
1900 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
1902 inline bool atomic_compare_exchange_weak_explicit
1903 ( volatile atomic_short* __a__, short* __e__, short __m__,
1904 memory_order __x__, memory_order __y__ )
1905 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1907 inline bool atomic_compare_exchange_strong_explicit
1908 ( volatile atomic_short* __a__, short* __e__, short __m__,
1909 memory_order __x__, memory_order __y__ )
1910 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1912 inline bool atomic_compare_exchange_weak
1913 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1914 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1915 memory_order_seq_cst, memory_order_seq_cst ); }
1917 inline bool atomic_compare_exchange_strong
1918 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1919 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1920 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_ushort operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
1923 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1926 inline unsigned short atomic_load_explicit
1927 ( volatile atomic_ushort* __a__, memory_order __x__ )
1928 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1930 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1931 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1933 inline void atomic_init
1934 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1935 { _ATOMIC_INIT_( __a__, __m__ ); }
1937 inline void atomic_store_explicit
1938 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1939 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1941 inline void atomic_store
1942 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1943 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1945 inline unsigned short atomic_exchange_explicit
1946 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1947 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1949 inline unsigned short atomic_exchange
1950 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1951 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
1953 inline bool atomic_compare_exchange_weak_explicit
1954 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1955 memory_order __x__, memory_order __y__ )
1956 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1958 inline bool atomic_compare_exchange_strong_explicit
1959 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1960 memory_order __x__, memory_order __y__ )
1961 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1963 inline bool atomic_compare_exchange_weak
1964 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1965 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1966 memory_order_seq_cst, memory_order_seq_cst ); }
1968 inline bool atomic_compare_exchange_strong
1969 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1970 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1971 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_int operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
1974 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1977 inline int atomic_load_explicit
1978 ( volatile atomic_int* __a__, memory_order __x__ )
1979 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1981 inline int atomic_load( volatile atomic_int* __a__ )
1982 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1984 inline void atomic_init
1985 ( volatile atomic_int* __a__, int __m__ )
1986 { _ATOMIC_INIT_( __a__, __m__ ); }
1988 inline void atomic_store_explicit
1989 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1990 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1992 inline void atomic_store
1993 ( volatile atomic_int* __a__, int __m__ )
1994 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1996 inline int atomic_exchange_explicit
1997 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1998 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2000 inline int atomic_exchange
2001 ( volatile atomic_int* __a__, int __m__ )
2002 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
2004 inline bool atomic_compare_exchange_weak_explicit
2005 ( volatile atomic_int* __a__, int* __e__, int __m__,
2006 memory_order __x__, memory_order __y__ )
2007 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2009 inline bool atomic_compare_exchange_strong_explicit
2010 ( volatile atomic_int* __a__, int* __e__, int __m__,
2011 memory_order __x__, memory_order __y__ )
2012 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2014 inline bool atomic_compare_exchange_weak
2015 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2016 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2017 memory_order_seq_cst, memory_order_seq_cst ); }
2019 inline bool atomic_compare_exchange_strong
2020 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2021 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2022 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_uint operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
2025 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
2028 inline unsigned int atomic_load_explicit
2029 ( volatile atomic_uint* __a__, memory_order __x__ )
2030 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2032 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
2033 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2035 inline void atomic_init
2036 ( volatile atomic_uint* __a__, unsigned int __m__ )
2037 { _ATOMIC_INIT_( __a__, __m__ ); }
2039 inline void atomic_store_explicit
2040 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2041 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2043 inline void atomic_store
2044 ( volatile atomic_uint* __a__, unsigned int __m__ )
2045 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2047 inline unsigned int atomic_exchange_explicit
2048 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2049 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2051 inline unsigned int atomic_exchange
2052 ( volatile atomic_uint* __a__, unsigned int __m__ )
2053 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
2055 inline bool atomic_compare_exchange_weak_explicit
2056 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2057 memory_order __x__, memory_order __y__ )
2058 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2060 inline bool atomic_compare_exchange_strong_explicit
2061 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2062 memory_order __x__, memory_order __y__ )
2063 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2065 inline bool atomic_compare_exchange_weak
2066 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2067 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2068 memory_order_seq_cst, memory_order_seq_cst ); }
2070 inline bool atomic_compare_exchange_strong
2071 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2072 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2073 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_long operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
2076 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
2079 inline long atomic_load_explicit
2080 ( volatile atomic_long* __a__, memory_order __x__ )
2081 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2083 inline long atomic_load( volatile atomic_long* __a__ )
2084 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2086 inline void atomic_init
2087 ( volatile atomic_long* __a__, long __m__ )
2088 { _ATOMIC_INIT_( __a__, __m__ ); }
2090 inline void atomic_store_explicit
2091 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2092 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2094 inline void atomic_store
2095 ( volatile atomic_long* __a__, long __m__ )
2096 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2098 inline long atomic_exchange_explicit
2099 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2100 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2102 inline long atomic_exchange
2103 ( volatile atomic_long* __a__, long __m__ )
2104 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
2106 inline bool atomic_compare_exchange_weak_explicit
2107 ( volatile atomic_long* __a__, long* __e__, long __m__,
2108 memory_order __x__, memory_order __y__ )
2109 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2111 inline bool atomic_compare_exchange_strong_explicit
2112 ( volatile atomic_long* __a__, long* __e__, long __m__,
2113 memory_order __x__, memory_order __y__ )
2114 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2116 inline bool atomic_compare_exchange_weak
2117 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2118 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2119 memory_order_seq_cst, memory_order_seq_cst ); }
2121 inline bool atomic_compare_exchange_strong
2122 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2123 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2124 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_ulong operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
2127 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
2130 inline unsigned long atomic_load_explicit
2131 ( volatile atomic_ulong* __a__, memory_order __x__ )
2132 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2134 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
2135 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2137 inline void atomic_init
2138 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2139 { _ATOMIC_INIT_( __a__, __m__ ); }
2141 inline void atomic_store_explicit
2142 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2143 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2145 inline void atomic_store
2146 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2147 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2149 inline unsigned long atomic_exchange_explicit
2150 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2151 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2153 inline unsigned long atomic_exchange
2154 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2155 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
2157 inline bool atomic_compare_exchange_weak_explicit
2158 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2159 memory_order __x__, memory_order __y__ )
2160 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2162 inline bool atomic_compare_exchange_strong_explicit
2163 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2164 memory_order __x__, memory_order __y__ )
2165 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2167 inline bool atomic_compare_exchange_weak
2168 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2169 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2170 memory_order_seq_cst, memory_order_seq_cst ); }
2172 inline bool atomic_compare_exchange_strong
2173 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2174 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2175 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_llong operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
2178 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2181 inline long long atomic_load_explicit
2182 ( volatile atomic_llong* __a__, memory_order __x__ )
2183 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2185 inline long long atomic_load( volatile atomic_llong* __a__ )
2186 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2188 inline void atomic_init
2189 ( volatile atomic_llong* __a__, long long __m__ )
2190 { _ATOMIC_INIT_( __a__, __m__ ); }
2192 inline void atomic_store_explicit
2193 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2194 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2196 inline void atomic_store
2197 ( volatile atomic_llong* __a__, long long __m__ )
2198 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2200 inline long long atomic_exchange_explicit
2201 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2202 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2204 inline long long atomic_exchange
2205 ( volatile atomic_llong* __a__, long long __m__ )
2206 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
2208 inline bool atomic_compare_exchange_weak_explicit
2209 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2210 memory_order __x__, memory_order __y__ )
2211 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2213 inline bool atomic_compare_exchange_strong_explicit
2214 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2215 memory_order __x__, memory_order __y__ )
2216 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2218 inline bool atomic_compare_exchange_weak
2219 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2220 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2221 memory_order_seq_cst, memory_order_seq_cst ); }
2223 inline bool atomic_compare_exchange_strong
2224 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2225 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2226 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_ullong operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
2229 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2232 inline unsigned long long atomic_load_explicit
2233 ( volatile atomic_ullong* __a__, memory_order __x__ )
2234 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2236 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2237 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2239 inline void atomic_init
2240 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2241 { _ATOMIC_INIT_( __a__, __m__ ); }
2243 inline void atomic_store_explicit
2244 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2245 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2247 inline void atomic_store
2248 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2249 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2251 inline unsigned long long atomic_exchange_explicit
2252 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2253 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2255 inline unsigned long long atomic_exchange
2256 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2257 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
2259 inline bool atomic_compare_exchange_weak_explicit
2260 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2261 memory_order __x__, memory_order __y__ )
2262 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2264 inline bool atomic_compare_exchange_strong_explicit
2265 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2266 memory_order __x__, memory_order __y__ )
2267 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2269 inline bool atomic_compare_exchange_weak
2270 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2271 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2272 memory_order_seq_cst, memory_order_seq_cst ); }
2274 inline bool atomic_compare_exchange_strong
2275 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2276 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2277 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_wchar_t operations: is_lock_free, load, store, init, exchange and
 * compare-exchange; _explicit forms take a memory_order, the rest default
 * to memory_order_seq_cst.
 * NOTE(review): atomic_is_lock_free below has no body in this excerpt —
 * presumably lost in extraction; confirm against the full header.
 */
2280 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2283 inline wchar_t atomic_load_explicit
2284 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2285 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2287 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2288 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2290 inline void atomic_init
2291 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2292 { _ATOMIC_INIT_( __a__, __m__ ); }
2294 inline void atomic_store_explicit
2295 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2296 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2298 inline void atomic_store
2299 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2300 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2302 inline wchar_t atomic_exchange_explicit
2303 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2304 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2306 inline wchar_t atomic_exchange
2307 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2308 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
/* NOTE(review): the failure ordering __y__ is accepted but never
 * forwarded — the CAS macros below receive only __x__. */
2310 inline bool atomic_compare_exchange_weak_explicit
2311 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2312 memory_order __x__, memory_order __y__ )
2313 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2315 inline bool atomic_compare_exchange_strong_explicit
2316 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2317 memory_order __x__, memory_order __y__ )
2318 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2320 inline bool atomic_compare_exchange_weak
2321 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2322 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2323 memory_order_seq_cst, memory_order_seq_cst ); }
2325 inline bool atomic_compare_exchange_strong
2326 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2327 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2328 memory_order_seq_cst, memory_order_seq_cst ); }
/*
 * atomic_address fetch_add / fetch_sub: read-modify-write on a pointer
 * value. The offset __m__ is a ptrdiff_t applied through a (char *) cast,
 * i.e. in units of bytes. The RMW is split into model_rmwr_action (read
 * part) followed by model_rmw_action (write part) so the model checker
 * observes both halves.
 * NOTE(review): this excerpt appears truncated — fetch_add_explicit is
 * missing its opening brace after the signature, and both functions are
 * missing their closing "return __old__; }" lines; confirm against the
 * full header before editing.
 */
2331 inline void* atomic_fetch_add_explicit
2332 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2334 volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__);
2335 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__);
2336 __typeof__((__a__)->__f__) __copy__= __old__;
2337 __copy__ = (void *) (((char *)__copy__) + __m__);
2338 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);
2342 inline void* atomic_fetch_add
2343 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2344 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2347 inline void* atomic_fetch_sub_explicit
2348 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2349 { volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__);
2350 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__);
2351 __typeof__((__a__)->__f__) __copy__= __old__;
2352 __copy__ = (void *) (((char *)__copy__) - __m__);
2353 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);
2357 inline void* atomic_fetch_sub
2358 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2359 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
/*
 * atomic_char fetch-and-modify operations (add, sub, and, or, xor).
 * Each _explicit form applies the compound operator via _ATOMIC_MODIFY_
 * and the non-_explicit form delegates with memory_order_seq_cst.
 */
2361 inline char atomic_fetch_add_explicit
2362 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2363 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2365 inline char atomic_fetch_add
2366 ( volatile atomic_char* __a__, char __m__ )
2367 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2370 inline char atomic_fetch_sub_explicit
2371 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2372 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2374 inline char atomic_fetch_sub
2375 ( volatile atomic_char* __a__, char __m__ )
2376 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2379 inline char atomic_fetch_and_explicit
2380 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2381 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2383 inline char atomic_fetch_and
2384 ( volatile atomic_char* __a__, char __m__ )
2385 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2388 inline char atomic_fetch_or_explicit
2389 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2390 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2392 inline char atomic_fetch_or
2393 ( volatile atomic_char* __a__, char __m__ )
2394 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2397 inline char atomic_fetch_xor_explicit
2398 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2399 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2401 inline char atomic_fetch_xor
2402 ( volatile atomic_char* __a__, char __m__ )
2403 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/*
 * atomic_schar fetch-and-modify operations (add, sub, and, or, xor).
 * Each _explicit form applies the compound operator via _ATOMIC_MODIFY_
 * and the non-_explicit form delegates with memory_order_seq_cst.
 */
2406 inline signed char atomic_fetch_add_explicit
2407 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2408 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2410 inline signed char atomic_fetch_add
2411 ( volatile atomic_schar* __a__, signed char __m__ )
2412 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2415 inline signed char atomic_fetch_sub_explicit
2416 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2417 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2419 inline signed char atomic_fetch_sub
2420 ( volatile atomic_schar* __a__, signed char __m__ )
2421 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2424 inline signed char atomic_fetch_and_explicit
2425 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2426 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2428 inline signed char atomic_fetch_and
2429 ( volatile atomic_schar* __a__, signed char __m__ )
2430 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2433 inline signed char atomic_fetch_or_explicit
2434 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2435 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2437 inline signed char atomic_fetch_or
2438 ( volatile atomic_schar* __a__, signed char __m__ )
2439 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2442 inline signed char atomic_fetch_xor_explicit
2443 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2444 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2446 inline signed char atomic_fetch_xor
2447 ( volatile atomic_schar* __a__, signed char __m__ )
2448 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/*
 * atomic_uchar fetch-and-modify operations (add, sub, and, or, xor).
 * Each _explicit form applies the compound operator via _ATOMIC_MODIFY_
 * and the non-_explicit form delegates with memory_order_seq_cst.
 */
2451 inline unsigned char atomic_fetch_add_explicit
2452 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2453 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2455 inline unsigned char atomic_fetch_add
2456 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2457 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2460 inline unsigned char atomic_fetch_sub_explicit
2461 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2462 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2464 inline unsigned char atomic_fetch_sub
2465 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2466 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2469 inline unsigned char atomic_fetch_and_explicit
2470 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2471 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2473 inline unsigned char atomic_fetch_and
2474 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2475 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2478 inline unsigned char atomic_fetch_or_explicit
2479 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2480 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2482 inline unsigned char atomic_fetch_or
2483 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2484 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2487 inline unsigned char atomic_fetch_xor_explicit
2488 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2489 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2491 inline unsigned char atomic_fetch_xor
2492 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2493 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/*
 * atomic_short fetch-and-modify operations (add, sub, and, or, xor).
 * Each _explicit form applies the compound operator via _ATOMIC_MODIFY_
 * and the non-_explicit form delegates with memory_order_seq_cst.
 */
2496 inline short atomic_fetch_add_explicit
2497 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2498 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2500 inline short atomic_fetch_add
2501 ( volatile atomic_short* __a__, short __m__ )
2502 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2505 inline short atomic_fetch_sub_explicit
2506 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2507 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2509 inline short atomic_fetch_sub
2510 ( volatile atomic_short* __a__, short __m__ )
2511 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2514 inline short atomic_fetch_and_explicit
2515 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2516 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2518 inline short atomic_fetch_and
2519 ( volatile atomic_short* __a__, short __m__ )
2520 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2523 inline short atomic_fetch_or_explicit
2524 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2525 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2527 inline short atomic_fetch_or
2528 ( volatile atomic_short* __a__, short __m__ )
2529 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2532 inline short atomic_fetch_xor_explicit
2533 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2534 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2536 inline short atomic_fetch_xor
2537 ( volatile atomic_short* __a__, short __m__ )
2538 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
/*
 * atomic_ushort fetch-and-modify operations (add, sub, and, or, xor).
 * Each _explicit form applies the compound operator via _ATOMIC_MODIFY_
 * and the non-_explicit form delegates with memory_order_seq_cst.
 */
2541 inline unsigned short atomic_fetch_add_explicit
2542 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2543 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2545 inline unsigned short atomic_fetch_add
2546 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2547 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2550 inline unsigned short atomic_fetch_sub_explicit
2551 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2552 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2554 inline unsigned short atomic_fetch_sub
2555 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2556 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2559 inline unsigned short atomic_fetch_and_explicit
2560 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2561 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2563 inline unsigned short atomic_fetch_and
2564 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2565 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2568 inline unsigned short atomic_fetch_or_explicit
2569 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2570 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2572 inline unsigned short atomic_fetch_or
2573 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2574 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2577 inline unsigned short atomic_fetch_xor_explicit
2578 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2579 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2581 inline unsigned short atomic_fetch_xor
2582 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2583 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2586 inline int atomic_fetch_add_explicit
2587 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2588 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2590 inline int atomic_fetch_add
2591 ( volatile atomic_int* __a__, int __m__ )
2592 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2595 inline int atomic_fetch_sub_explicit
2596 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2597 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2599 inline int atomic_fetch_sub
2600 ( volatile atomic_int* __a__, int __m__ )
2601 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2604 inline int atomic_fetch_and_explicit
2605 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2606 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2608 inline int atomic_fetch_and
2609 ( volatile atomic_int* __a__, int __m__ )
2610 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2613 inline int atomic_fetch_or_explicit
2614 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2615 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2617 inline int atomic_fetch_or
2618 ( volatile atomic_int* __a__, int __m__ )
2619 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2622 inline int atomic_fetch_xor_explicit
2623 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2624 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2626 inline int atomic_fetch_xor
2627 ( volatile atomic_int* __a__, int __m__ )
2628 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2631 inline unsigned int atomic_fetch_add_explicit
2632 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2633 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2635 inline unsigned int atomic_fetch_add
2636 ( volatile atomic_uint* __a__, unsigned int __m__ )
2637 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2640 inline unsigned int atomic_fetch_sub_explicit
2641 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2642 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2644 inline unsigned int atomic_fetch_sub
2645 ( volatile atomic_uint* __a__, unsigned int __m__ )
2646 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2649 inline unsigned int atomic_fetch_and_explicit
2650 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2651 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2653 inline unsigned int atomic_fetch_and
2654 ( volatile atomic_uint* __a__, unsigned int __m__ )
2655 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2658 inline unsigned int atomic_fetch_or_explicit
2659 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2660 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2662 inline unsigned int atomic_fetch_or
2663 ( volatile atomic_uint* __a__, unsigned int __m__ )
2664 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2667 inline unsigned int atomic_fetch_xor_explicit
2668 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2669 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2671 inline unsigned int atomic_fetch_xor
2672 ( volatile atomic_uint* __a__, unsigned int __m__ )
2673 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2676 inline long atomic_fetch_add_explicit
2677 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2678 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2680 inline long atomic_fetch_add
2681 ( volatile atomic_long* __a__, long __m__ )
2682 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2685 inline long atomic_fetch_sub_explicit
2686 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2687 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2689 inline long atomic_fetch_sub
2690 ( volatile atomic_long* __a__, long __m__ )
2691 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2694 inline long atomic_fetch_and_explicit
2695 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2696 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2698 inline long atomic_fetch_and
2699 ( volatile atomic_long* __a__, long __m__ )
2700 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2703 inline long atomic_fetch_or_explicit
2704 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2705 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2707 inline long atomic_fetch_or
2708 ( volatile atomic_long* __a__, long __m__ )
2709 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2712 inline long atomic_fetch_xor_explicit
2713 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2714 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2716 inline long atomic_fetch_xor
2717 ( volatile atomic_long* __a__, long __m__ )
2718 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2721 inline unsigned long atomic_fetch_add_explicit
2722 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2723 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2725 inline unsigned long atomic_fetch_add
2726 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2727 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2730 inline unsigned long atomic_fetch_sub_explicit
2731 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2732 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2734 inline unsigned long atomic_fetch_sub
2735 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2736 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2739 inline unsigned long atomic_fetch_and_explicit
2740 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2741 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2743 inline unsigned long atomic_fetch_and
2744 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2745 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2748 inline unsigned long atomic_fetch_or_explicit
2749 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2750 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2752 inline unsigned long atomic_fetch_or
2753 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2754 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2757 inline unsigned long atomic_fetch_xor_explicit
2758 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2759 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2761 inline unsigned long atomic_fetch_xor
2762 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2763 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2766 inline long long atomic_fetch_add_explicit
2767 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2768 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2770 inline long long atomic_fetch_add
2771 ( volatile atomic_llong* __a__, long long __m__ )
2772 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2775 inline long long atomic_fetch_sub_explicit
2776 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2777 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2779 inline long long atomic_fetch_sub
2780 ( volatile atomic_llong* __a__, long long __m__ )
2781 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2784 inline long long atomic_fetch_and_explicit
2785 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2786 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2788 inline long long atomic_fetch_and
2789 ( volatile atomic_llong* __a__, long long __m__ )
2790 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2793 inline long long atomic_fetch_or_explicit
2794 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2795 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2797 inline long long atomic_fetch_or
2798 ( volatile atomic_llong* __a__, long long __m__ )
2799 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2802 inline long long atomic_fetch_xor_explicit
2803 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2804 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2806 inline long long atomic_fetch_xor
2807 ( volatile atomic_llong* __a__, long long __m__ )
2808 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2811 inline unsigned long long atomic_fetch_add_explicit
2812 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2813 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2815 inline unsigned long long atomic_fetch_add
2816 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2817 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2820 inline unsigned long long atomic_fetch_sub_explicit
2821 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2822 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2824 inline unsigned long long atomic_fetch_sub
2825 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2826 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2829 inline unsigned long long atomic_fetch_and_explicit
2830 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2831 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2833 inline unsigned long long atomic_fetch_and
2834 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2835 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2838 inline unsigned long long atomic_fetch_or_explicit
2839 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2840 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2842 inline unsigned long long atomic_fetch_or
2843 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2844 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2847 inline unsigned long long atomic_fetch_xor_explicit
2848 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2849 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2851 inline unsigned long long atomic_fetch_xor
2852 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2853 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2856 inline wchar_t atomic_fetch_add_explicit
2857 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2858 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2860 inline wchar_t atomic_fetch_add
2861 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2862 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2865 inline wchar_t atomic_fetch_sub_explicit
2866 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2867 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2869 inline wchar_t atomic_fetch_sub
2870 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2871 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2874 inline wchar_t atomic_fetch_and_explicit
2875 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2876 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2878 inline wchar_t atomic_fetch_and
2879 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2880 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2883 inline wchar_t atomic_fetch_or_explicit
2884 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2885 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2887 inline wchar_t atomic_fetch_or
2888 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2889 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2892 inline wchar_t atomic_fetch_xor_explicit
2893 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2894 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2896 inline wchar_t atomic_fetch_xor
2897 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2898 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2904 #define atomic_is_lock_free( __a__ ) \
2907 #define atomic_load( __a__ ) \
2908 _ATOMIC_LOAD_( __a__, memory_order_seq_cst )
2910 #define atomic_load_explicit( __a__, __x__ ) \
2911 _ATOMIC_LOAD_( __a__, __x__ )
2913 #define atomic_init( __a__, __m__ ) \
2914 _ATOMIC_INIT_( __a__, __m__ )
2916 #define atomic_store( __a__, __m__ ) \
2917 _ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )
2919 #define atomic_store_explicit( __a__, __m__, __x__ ) \
2920 _ATOMIC_STORE_( __a__, __m__, __x__ )
2922 #define atomic_exchange( __a__, __m__ ) \
2923 _ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )
2925 #define atomic_exchange_explicit( __a__, __m__, __x__ ) \
2926 _ATOMIC_MODIFY_( __a__, =, __m__, __x__ )
2928 #define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
2929 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )
2931 #define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
2932 _ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )
2934 #define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2935 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )
2937 #define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2938 _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
2941 #define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
2942 _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )
2944 #define atomic_fetch_add( __a__, __m__ ) \
2945 _ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )
2948 #define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
2949 _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )
2951 #define atomic_fetch_sub( __a__, __m__ ) \
2952 _ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )
2955 #define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
2956 _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )
2958 #define atomic_fetch_and( __a__, __m__ ) \
2959 _ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )
2962 #define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
2963 _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )
2965 #define atomic_fetch_or( __a__, __m__ ) \
2966 _ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )
2969 #define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
2970 _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )
2972 #define atomic_fetch_xor( __a__, __m__ ) \
2973 _ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
2982 inline bool atomic_bool::is_lock_free() const volatile
2985 inline void atomic_bool::store
2986 ( bool __m__, memory_order __x__ ) volatile
2987 { atomic_store_explicit( this, __m__, __x__ ); }
2989 inline bool atomic_bool::load
2990 ( memory_order __x__ ) volatile
2991 { return atomic_load_explicit( this, __x__ ); }
2993 inline bool atomic_bool::exchange
2994 ( bool __m__, memory_order __x__ ) volatile
2995 { return atomic_exchange_explicit( this, __m__, __x__ ); }
2997 inline bool atomic_bool::compare_exchange_weak
2998 ( bool& __e__, bool __m__,
2999 memory_order __x__, memory_order __y__ ) volatile
3000 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3002 inline bool atomic_bool::compare_exchange_strong
3003 ( bool& __e__, bool __m__,
3004 memory_order __x__, memory_order __y__ ) volatile
3005 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3007 inline bool atomic_bool::compare_exchange_weak
3008 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3009 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3010 __x__ == memory_order_acq_rel ? memory_order_acquire :
3011 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3013 inline bool atomic_bool::compare_exchange_strong
3014 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3015 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3016 __x__ == memory_order_acq_rel ? memory_order_acquire :
3017 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3020 inline bool atomic_address::is_lock_free() const volatile
3023 inline void atomic_address::store
3024 ( void* __m__, memory_order __x__ ) volatile
3025 { atomic_store_explicit( this, __m__, __x__ ); }
3027 inline void* atomic_address::load
3028 ( memory_order __x__ ) volatile
3029 { return atomic_load_explicit( this, __x__ ); }
3031 inline void* atomic_address::exchange
3032 ( void* __m__, memory_order __x__ ) volatile
3033 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3035 inline bool atomic_address::compare_exchange_weak
3036 ( void*& __e__, void* __m__,
3037 memory_order __x__, memory_order __y__ ) volatile
3038 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3040 inline bool atomic_address::compare_exchange_strong
3041 ( void*& __e__, void* __m__,
3042 memory_order __x__, memory_order __y__ ) volatile
3043 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3045 inline bool atomic_address::compare_exchange_weak
3046 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3047 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3048 __x__ == memory_order_acq_rel ? memory_order_acquire :
3049 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3051 inline bool atomic_address::compare_exchange_strong
3052 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3053 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3054 __x__ == memory_order_acq_rel ? memory_order_acquire :
3055 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3058 inline bool atomic_char::is_lock_free() const volatile
3061 inline void atomic_char::store
3062 ( char __m__, memory_order __x__ ) volatile
3063 { atomic_store_explicit( this, __m__, __x__ ); }
3065 inline char atomic_char::load
3066 ( memory_order __x__ ) volatile
3067 { return atomic_load_explicit( this, __x__ ); }
3069 inline char atomic_char::exchange
3070 ( char __m__, memory_order __x__ ) volatile
3071 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3073 inline bool atomic_char::compare_exchange_weak
3074 ( char& __e__, char __m__,
3075 memory_order __x__, memory_order __y__ ) volatile
3076 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3078 inline bool atomic_char::compare_exchange_strong
3079 ( char& __e__, char __m__,
3080 memory_order __x__, memory_order __y__ ) volatile
3081 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3083 inline bool atomic_char::compare_exchange_weak
3084 ( char& __e__, char __m__, memory_order __x__ ) volatile
3085 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3086 __x__ == memory_order_acq_rel ? memory_order_acquire :
3087 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3089 inline bool atomic_char::compare_exchange_strong
3090 ( char& __e__, char __m__, memory_order __x__ ) volatile
3091 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3092 __x__ == memory_order_acq_rel ? memory_order_acquire :
3093 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3096 inline bool atomic_schar::is_lock_free() const volatile
3099 inline void atomic_schar::store
3100 ( signed char __m__, memory_order __x__ ) volatile
3101 { atomic_store_explicit( this, __m__, __x__ ); }
3103 inline signed char atomic_schar::load
3104 ( memory_order __x__ ) volatile
3105 { return atomic_load_explicit( this, __x__ ); }
3107 inline signed char atomic_schar::exchange
3108 ( signed char __m__, memory_order __x__ ) volatile
3109 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3111 inline bool atomic_schar::compare_exchange_weak
3112 ( signed char& __e__, signed char __m__,
3113 memory_order __x__, memory_order __y__ ) volatile
3114 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3116 inline bool atomic_schar::compare_exchange_strong
3117 ( signed char& __e__, signed char __m__,
3118 memory_order __x__, memory_order __y__ ) volatile
3119 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3121 inline bool atomic_schar::compare_exchange_weak
3122 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3123 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3124 __x__ == memory_order_acq_rel ? memory_order_acquire :
3125 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3127 inline bool atomic_schar::compare_exchange_strong
3128 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3129 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3130 __x__ == memory_order_acq_rel ? memory_order_acquire :
3131 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3134 inline bool atomic_uchar::is_lock_free() const volatile
3137 inline void atomic_uchar::store
3138 ( unsigned char __m__, memory_order __x__ ) volatile
3139 { atomic_store_explicit( this, __m__, __x__ ); }
3141 inline unsigned char atomic_uchar::load
3142 ( memory_order __x__ ) volatile
3143 { return atomic_load_explicit( this, __x__ ); }
3145 inline unsigned char atomic_uchar::exchange
3146 ( unsigned char __m__, memory_order __x__ ) volatile
3147 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3149 inline bool atomic_uchar::compare_exchange_weak
3150 ( unsigned char& __e__, unsigned char __m__,
3151 memory_order __x__, memory_order __y__ ) volatile
3152 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3154 inline bool atomic_uchar::compare_exchange_strong
3155 ( unsigned char& __e__, unsigned char __m__,
3156 memory_order __x__, memory_order __y__ ) volatile
3157 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3159 inline bool atomic_uchar::compare_exchange_weak
3160 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3161 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3162 __x__ == memory_order_acq_rel ? memory_order_acquire :
3163 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3165 inline bool atomic_uchar::compare_exchange_strong
3166 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3167 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3168 __x__ == memory_order_acq_rel ? memory_order_acquire :
3169 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3172 inline bool atomic_short::is_lock_free() const volatile
3175 inline void atomic_short::store
3176 ( short __m__, memory_order __x__ ) volatile
3177 { atomic_store_explicit( this, __m__, __x__ ); }
3179 inline short atomic_short::load
3180 ( memory_order __x__ ) volatile
3181 { return atomic_load_explicit( this, __x__ ); }
3183 inline short atomic_short::exchange
3184 ( short __m__, memory_order __x__ ) volatile
3185 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3187 inline bool atomic_short::compare_exchange_weak
3188 ( short& __e__, short __m__,
3189 memory_order __x__, memory_order __y__ ) volatile
3190 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3192 inline bool atomic_short::compare_exchange_strong
3193 ( short& __e__, short __m__,
3194 memory_order __x__, memory_order __y__ ) volatile
3195 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3197 inline bool atomic_short::compare_exchange_weak
3198 ( short& __e__, short __m__, memory_order __x__ ) volatile
3199 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3200 __x__ == memory_order_acq_rel ? memory_order_acquire :
3201 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3203 inline bool atomic_short::compare_exchange_strong
3204 ( short& __e__, short __m__, memory_order __x__ ) volatile
3205 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3206 __x__ == memory_order_acq_rel ? memory_order_acquire :
3207 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3210 inline bool atomic_ushort::is_lock_free() const volatile
3213 inline void atomic_ushort::store
3214 ( unsigned short __m__, memory_order __x__ ) volatile
3215 { atomic_store_explicit( this, __m__, __x__ ); }
3217 inline unsigned short atomic_ushort::load
3218 ( memory_order __x__ ) volatile
3219 { return atomic_load_explicit( this, __x__ ); }
3221 inline unsigned short atomic_ushort::exchange
3222 ( unsigned short __m__, memory_order __x__ ) volatile
3223 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3225 inline bool atomic_ushort::compare_exchange_weak
3226 ( unsigned short& __e__, unsigned short __m__,
3227 memory_order __x__, memory_order __y__ ) volatile
3228 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3230 inline bool atomic_ushort::compare_exchange_strong
3231 ( unsigned short& __e__, unsigned short __m__,
3232 memory_order __x__, memory_order __y__ ) volatile
3233 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3235 inline bool atomic_ushort::compare_exchange_weak
3236 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3237 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3238 __x__ == memory_order_acq_rel ? memory_order_acquire :
3239 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3241 inline bool atomic_ushort::compare_exchange_strong
3242 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3243 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3244 __x__ == memory_order_acq_rel ? memory_order_acquire :
3245 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3248 inline bool atomic_int::is_lock_free() const volatile
3251 inline void atomic_int::store
3252 ( int __m__, memory_order __x__ ) volatile
3253 { atomic_store_explicit( this, __m__, __x__ ); }
3255 inline int atomic_int::load
3256 ( memory_order __x__ ) volatile
3257 { return atomic_load_explicit( this, __x__ ); }
3259 inline int atomic_int::exchange
3260 ( int __m__, memory_order __x__ ) volatile
3261 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3263 inline bool atomic_int::compare_exchange_weak
3264 ( int& __e__, int __m__,
3265 memory_order __x__, memory_order __y__ ) volatile
3266 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3268 inline bool atomic_int::compare_exchange_strong
3269 ( int& __e__, int __m__,
3270 memory_order __x__, memory_order __y__ ) volatile
3271 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3273 inline bool atomic_int::compare_exchange_weak
3274 ( int& __e__, int __m__, memory_order __x__ ) volatile
3275 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3276 __x__ == memory_order_acq_rel ? memory_order_acquire :
3277 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3279 inline bool atomic_int::compare_exchange_strong
3280 ( int& __e__, int __m__, memory_order __x__ ) volatile
3281 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3282 __x__ == memory_order_acq_rel ? memory_order_acquire :
3283 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3286 inline bool atomic_uint::is_lock_free() const volatile
3289 inline void atomic_uint::store
3290 ( unsigned int __m__, memory_order __x__ ) volatile
3291 { atomic_store_explicit( this, __m__, __x__ ); }
3293 inline unsigned int atomic_uint::load
3294 ( memory_order __x__ ) volatile
3295 { return atomic_load_explicit( this, __x__ ); }
3297 inline unsigned int atomic_uint::exchange
3298 ( unsigned int __m__, memory_order __x__ ) volatile
3299 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3301 inline bool atomic_uint::compare_exchange_weak
3302 ( unsigned int& __e__, unsigned int __m__,
3303 memory_order __x__, memory_order __y__ ) volatile
3304 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3306 inline bool atomic_uint::compare_exchange_strong
3307 ( unsigned int& __e__, unsigned int __m__,
3308 memory_order __x__, memory_order __y__ ) volatile
3309 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3311 inline bool atomic_uint::compare_exchange_weak
3312 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3313 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3314 __x__ == memory_order_acq_rel ? memory_order_acquire :
3315 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3317 inline bool atomic_uint::compare_exchange_strong
3318 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3319 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3320 __x__ == memory_order_acq_rel ? memory_order_acquire :
3321 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3324 inline bool atomic_long::is_lock_free() const volatile
3327 inline void atomic_long::store
3328 ( long __m__, memory_order __x__ ) volatile
3329 { atomic_store_explicit( this, __m__, __x__ ); }
3331 inline long atomic_long::load
3332 ( memory_order __x__ ) volatile
3333 { return atomic_load_explicit( this, __x__ ); }
3335 inline long atomic_long::exchange
3336 ( long __m__, memory_order __x__ ) volatile
3337 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3339 inline bool atomic_long::compare_exchange_weak
3340 ( long& __e__, long __m__,
3341 memory_order __x__, memory_order __y__ ) volatile
3342 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3344 inline bool atomic_long::compare_exchange_strong
3345 ( long& __e__, long __m__,
3346 memory_order __x__, memory_order __y__ ) volatile
3347 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3349 inline bool atomic_long::compare_exchange_weak
3350 ( long& __e__, long __m__, memory_order __x__ ) volatile
3351 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3352 __x__ == memory_order_acq_rel ? memory_order_acquire :
3353 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3355 inline bool atomic_long::compare_exchange_strong
3356 ( long& __e__, long __m__, memory_order __x__ ) volatile
3357 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3358 __x__ == memory_order_acq_rel ? memory_order_acquire :
3359 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3362 inline bool atomic_ulong::is_lock_free() const volatile
3365 inline void atomic_ulong::store
3366 ( unsigned long __m__, memory_order __x__ ) volatile
3367 { atomic_store_explicit( this, __m__, __x__ ); }
3369 inline unsigned long atomic_ulong::load
3370 ( memory_order __x__ ) volatile
3371 { return atomic_load_explicit( this, __x__ ); }
3373 inline unsigned long atomic_ulong::exchange
3374 ( unsigned long __m__, memory_order __x__ ) volatile
3375 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3377 inline bool atomic_ulong::compare_exchange_weak
3378 ( unsigned long& __e__, unsigned long __m__,
3379 memory_order __x__, memory_order __y__ ) volatile
3380 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3382 inline bool atomic_ulong::compare_exchange_strong
3383 ( unsigned long& __e__, unsigned long __m__,
3384 memory_order __x__, memory_order __y__ ) volatile
3385 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3387 inline bool atomic_ulong::compare_exchange_weak
3388 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3389 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3390 __x__ == memory_order_acq_rel ? memory_order_acquire :
3391 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3393 inline bool atomic_ulong::compare_exchange_strong
3394 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3395 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3396 __x__ == memory_order_acq_rel ? memory_order_acquire :
3397 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3400 inline bool atomic_llong::is_lock_free() const volatile
3403 inline void atomic_llong::store
3404 ( long long __m__, memory_order __x__ ) volatile
3405 { atomic_store_explicit( this, __m__, __x__ ); }
3407 inline long long atomic_llong::load
3408 ( memory_order __x__ ) volatile
3409 { return atomic_load_explicit( this, __x__ ); }
3411 inline long long atomic_llong::exchange
3412 ( long long __m__, memory_order __x__ ) volatile
3413 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3415 inline bool atomic_llong::compare_exchange_weak
3416 ( long long& __e__, long long __m__,
3417 memory_order __x__, memory_order __y__ ) volatile
3418 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3420 inline bool atomic_llong::compare_exchange_strong
3421 ( long long& __e__, long long __m__,
3422 memory_order __x__, memory_order __y__ ) volatile
3423 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3425 inline bool atomic_llong::compare_exchange_weak
3426 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3427 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3428 __x__ == memory_order_acq_rel ? memory_order_acquire :
3429 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3431 inline bool atomic_llong::compare_exchange_strong
3432 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3433 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3434 __x__ == memory_order_acq_rel ? memory_order_acquire :
3435 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3438 inline bool atomic_ullong::is_lock_free() const volatile
3441 inline void atomic_ullong::store
3442 ( unsigned long long __m__, memory_order __x__ ) volatile
3443 { atomic_store_explicit( this, __m__, __x__ ); }
3445 inline unsigned long long atomic_ullong::load
3446 ( memory_order __x__ ) volatile
3447 { return atomic_load_explicit( this, __x__ ); }
3449 inline unsigned long long atomic_ullong::exchange
3450 ( unsigned long long __m__, memory_order __x__ ) volatile
3451 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3453 inline bool atomic_ullong::compare_exchange_weak
3454 ( unsigned long long& __e__, unsigned long long __m__,
3455 memory_order __x__, memory_order __y__ ) volatile
3456 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3458 inline bool atomic_ullong::compare_exchange_strong
3459 ( unsigned long long& __e__, unsigned long long __m__,
3460 memory_order __x__, memory_order __y__ ) volatile
3461 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3463 inline bool atomic_ullong::compare_exchange_weak
3464 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3465 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3466 __x__ == memory_order_acq_rel ? memory_order_acquire :
3467 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3469 inline bool atomic_ullong::compare_exchange_strong
3470 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3471 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3472 __x__ == memory_order_acq_rel ? memory_order_acquire :
3473 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3476 inline bool atomic_wchar_t::is_lock_free() const volatile
3479 inline void atomic_wchar_t::store
3480 ( wchar_t __m__, memory_order __x__ ) volatile
3481 { atomic_store_explicit( this, __m__, __x__ ); }
3483 inline wchar_t atomic_wchar_t::load
3484 ( memory_order __x__ ) volatile
3485 { return atomic_load_explicit( this, __x__ ); }
3487 inline wchar_t atomic_wchar_t::exchange
3488 ( wchar_t __m__, memory_order __x__ ) volatile
3489 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3491 inline bool atomic_wchar_t::compare_exchange_weak
3492 ( wchar_t& __e__, wchar_t __m__,
3493 memory_order __x__, memory_order __y__ ) volatile
3494 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3496 inline bool atomic_wchar_t::compare_exchange_strong
3497 ( wchar_t& __e__, wchar_t __m__,
3498 memory_order __x__, memory_order __y__ ) volatile
3499 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3501 inline bool atomic_wchar_t::compare_exchange_weak
3502 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3503 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3504 __x__ == memory_order_acq_rel ? memory_order_acquire :
3505 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3507 inline bool atomic_wchar_t::compare_exchange_strong
3508 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3509 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3510 __x__ == memory_order_acq_rel ? memory_order_acquire :
3511 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3514 template< typename T >
3515 inline bool atomic<T>::is_lock_free() const volatile
// atomic<T> (generic template): operations are implemented directly on the
// macro layer (_ATOMIC_STORE_/_ATOMIC_LOAD_/_ATOMIC_MODIFY_/_ATOMIC_CMPSWP_*),
// which uses GNU statement expressions (see the macro notes near the top of
// this header).

template< typename T >
inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
{ _ATOMIC_STORE_( this, __v__, __x__ ); }

template< typename T >
inline T atomic<T>::load( memory_order __x__ ) volatile
{ return _ATOMIC_LOAD_( this, __x__ ); }

// Exchange is expressed as a plain assignment ('=') through _ATOMIC_MODIFY_.
template< typename T >
inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
{ return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }

// NOTE(review): the second ordering __y__ is accepted but not passed on --
// _ATOMIC_CMPSWP_WEAK_ receives only __x__. Confirm this is intentional.
template< typename T >
inline bool atomic<T>::compare_exchange_weak
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }

// NOTE(review): __y__ is likewise unused here.
template< typename T >
inline bool atomic<T>::compare_exchange_strong
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-ordering overloads derive the second ordering from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
template< typename T >
inline bool atomic<T>::compare_exchange_weak
( T& __r__, T __v__, memory_order __x__ ) volatile
{ return compare_exchange_weak( __r__, __v__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

template< typename T >
inline bool atomic<T>::compare_exchange_strong
( T& __r__, T __v__, memory_order __x__ ) volatile
{ return compare_exchange_strong( __r__, __v__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3555 inline void* atomic_address::fetch_add
3556 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3557 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3559 inline void* atomic_address::fetch_sub
3560 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3561 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3564 inline char atomic_char::fetch_add
3565 ( char __m__, memory_order __x__ ) volatile
3566 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3569 inline char atomic_char::fetch_sub
3570 ( char __m__, memory_order __x__ ) volatile
3571 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3574 inline char atomic_char::fetch_and
3575 ( char __m__, memory_order __x__ ) volatile
3576 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3579 inline char atomic_char::fetch_or
3580 ( char __m__, memory_order __x__ ) volatile
3581 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3584 inline char atomic_char::fetch_xor
3585 ( char __m__, memory_order __x__ ) volatile
3586 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3589 inline signed char atomic_schar::fetch_add
3590 ( signed char __m__, memory_order __x__ ) volatile
3591 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3594 inline signed char atomic_schar::fetch_sub
3595 ( signed char __m__, memory_order __x__ ) volatile
3596 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3599 inline signed char atomic_schar::fetch_and
3600 ( signed char __m__, memory_order __x__ ) volatile
3601 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3604 inline signed char atomic_schar::fetch_or
3605 ( signed char __m__, memory_order __x__ ) volatile
3606 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3609 inline signed char atomic_schar::fetch_xor
3610 ( signed char __m__, memory_order __x__ ) volatile
3611 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3614 inline unsigned char atomic_uchar::fetch_add
3615 ( unsigned char __m__, memory_order __x__ ) volatile
3616 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3619 inline unsigned char atomic_uchar::fetch_sub
3620 ( unsigned char __m__, memory_order __x__ ) volatile
3621 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3624 inline unsigned char atomic_uchar::fetch_and
3625 ( unsigned char __m__, memory_order __x__ ) volatile
3626 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3629 inline unsigned char atomic_uchar::fetch_or
3630 ( unsigned char __m__, memory_order __x__ ) volatile
3631 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3634 inline unsigned char atomic_uchar::fetch_xor
3635 ( unsigned char __m__, memory_order __x__ ) volatile
3636 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3639 inline short atomic_short::fetch_add
3640 ( short __m__, memory_order __x__ ) volatile
3641 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3644 inline short atomic_short::fetch_sub
3645 ( short __m__, memory_order __x__ ) volatile
3646 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3649 inline short atomic_short::fetch_and
3650 ( short __m__, memory_order __x__ ) volatile
3651 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3654 inline short atomic_short::fetch_or
3655 ( short __m__, memory_order __x__ ) volatile
3656 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3659 inline short atomic_short::fetch_xor
3660 ( short __m__, memory_order __x__ ) volatile
3661 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3664 inline unsigned short atomic_ushort::fetch_add
3665 ( unsigned short __m__, memory_order __x__ ) volatile
3666 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3669 inline unsigned short atomic_ushort::fetch_sub
3670 ( unsigned short __m__, memory_order __x__ ) volatile
3671 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3674 inline unsigned short atomic_ushort::fetch_and
3675 ( unsigned short __m__, memory_order __x__ ) volatile
3676 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3679 inline unsigned short atomic_ushort::fetch_or
3680 ( unsigned short __m__, memory_order __x__ ) volatile
3681 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3684 inline unsigned short atomic_ushort::fetch_xor
3685 ( unsigned short __m__, memory_order __x__ ) volatile
3686 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3689 inline int atomic_int::fetch_add
3690 ( int __m__, memory_order __x__ ) volatile
3691 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3694 inline int atomic_int::fetch_sub
3695 ( int __m__, memory_order __x__ ) volatile
3696 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3699 inline int atomic_int::fetch_and
3700 ( int __m__, memory_order __x__ ) volatile
3701 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3704 inline int atomic_int::fetch_or
3705 ( int __m__, memory_order __x__ ) volatile
3706 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3709 inline int atomic_int::fetch_xor
3710 ( int __m__, memory_order __x__ ) volatile
3711 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3714 inline unsigned int atomic_uint::fetch_add
3715 ( unsigned int __m__, memory_order __x__ ) volatile
3716 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3719 inline unsigned int atomic_uint::fetch_sub
3720 ( unsigned int __m__, memory_order __x__ ) volatile
3721 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3724 inline unsigned int atomic_uint::fetch_and
3725 ( unsigned int __m__, memory_order __x__ ) volatile
3726 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3729 inline unsigned int atomic_uint::fetch_or
3730 ( unsigned int __m__, memory_order __x__ ) volatile
3731 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3734 inline unsigned int atomic_uint::fetch_xor
3735 ( unsigned int __m__, memory_order __x__ ) volatile
3736 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3739 inline long atomic_long::fetch_add
3740 ( long __m__, memory_order __x__ ) volatile
3741 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3744 inline long atomic_long::fetch_sub
3745 ( long __m__, memory_order __x__ ) volatile
3746 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3749 inline long atomic_long::fetch_and
3750 ( long __m__, memory_order __x__ ) volatile
3751 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3754 inline long atomic_long::fetch_or
3755 ( long __m__, memory_order __x__ ) volatile
3756 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3759 inline long atomic_long::fetch_xor
3760 ( long __m__, memory_order __x__ ) volatile
3761 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3764 inline unsigned long atomic_ulong::fetch_add
3765 ( unsigned long __m__, memory_order __x__ ) volatile
3766 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3769 inline unsigned long atomic_ulong::fetch_sub
3770 ( unsigned long __m__, memory_order __x__ ) volatile
3771 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3774 inline unsigned long atomic_ulong::fetch_and
3775 ( unsigned long __m__, memory_order __x__ ) volatile
3776 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3779 inline unsigned long atomic_ulong::fetch_or
3780 ( unsigned long __m__, memory_order __x__ ) volatile
3781 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3784 inline unsigned long atomic_ulong::fetch_xor
3785 ( unsigned long __m__, memory_order __x__ ) volatile
3786 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3789 inline long long atomic_llong::fetch_add
3790 ( long long __m__, memory_order __x__ ) volatile
3791 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3794 inline long long atomic_llong::fetch_sub
3795 ( long long __m__, memory_order __x__ ) volatile
3796 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3799 inline long long atomic_llong::fetch_and
3800 ( long long __m__, memory_order __x__ ) volatile
3801 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3804 inline long long atomic_llong::fetch_or
3805 ( long long __m__, memory_order __x__ ) volatile
3806 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3809 inline long long atomic_llong::fetch_xor
3810 ( long long __m__, memory_order __x__ ) volatile
3811 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3814 inline unsigned long long atomic_ullong::fetch_add
3815 ( unsigned long long __m__, memory_order __x__ ) volatile
3816 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3819 inline unsigned long long atomic_ullong::fetch_sub
3820 ( unsigned long long __m__, memory_order __x__ ) volatile
3821 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3824 inline unsigned long long atomic_ullong::fetch_and
3825 ( unsigned long long __m__, memory_order __x__ ) volatile
3826 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3829 inline unsigned long long atomic_ullong::fetch_or
3830 ( unsigned long long __m__, memory_order __x__ ) volatile
3831 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3834 inline unsigned long long atomic_ullong::fetch_xor
3835 ( unsigned long long __m__, memory_order __x__ ) volatile
3836 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3839 inline wchar_t atomic_wchar_t::fetch_add
3840 ( wchar_t __m__, memory_order __x__ ) volatile
3841 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3844 inline wchar_t atomic_wchar_t::fetch_sub
3845 ( wchar_t __m__, memory_order __x__ ) volatile
3846 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3849 inline wchar_t atomic_wchar_t::fetch_and
3850 ( wchar_t __m__, memory_order __x__ ) volatile
3851 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3854 inline wchar_t atomic_wchar_t::fetch_or
3855 ( wchar_t __m__, memory_order __x__ ) volatile
3856 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3859 inline wchar_t atomic_wchar_t::fetch_xor
3860 ( wchar_t __m__, memory_order __x__ ) volatile
3861 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
// atomic<T*> specialization: delegates to the atomic_address base-class
// operations, converting between T* and void* at the boundary.

template< typename T >
T* atomic<T*>::load( memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::load( __x__ ) ); }

template< typename T >
T* atomic<T*>::exchange( T* __v__, memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::exchange( __v__, __x__ ) ); }

// The expected value __r__ (a T*) is reinterpreted in place as the void*
// slot that the base-class CAS updates on failure.
// NOTE(review): *reinterpret_cast<void**>( &__r__ ) assumes T* and void*
// share object representation -- true on common platforms but not
// guaranteed by the standard; confirm acceptable for supported targets.
template< typename T >
bool atomic<T*>::compare_exchange_weak
( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
{ return atomic_address::compare_exchange_weak( *reinterpret_cast<void**>( &__r__ ),
    static_cast<void*>( __v__ ), __x__, __y__ ); }
//{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }

// Strong counterpart of the above; same reinterpret_cast caveat applies.
template< typename T >
bool atomic<T*>::compare_exchange_strong
( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
{ return atomic_address::compare_exchange_strong( *reinterpret_cast<void**>( &__r__ ),
    static_cast<void*>( __v__ ), __x__, __y__ ); }
//{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-ordering overloads derive the second ordering from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__).
template< typename T >
bool atomic<T*>::compare_exchange_weak
( T*& __r__, T* __v__, memory_order __x__ ) volatile
{ return compare_exchange_weak( __r__, __v__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

template< typename T >
bool atomic<T*>::compare_exchange_strong
( T*& __r__, T* __v__, memory_order __x__ ) volatile
{ return compare_exchange_strong( __r__, __v__, __x__,
    __x__ == memory_order_acq_rel ? memory_order_acquire :
    __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

// Pointer arithmetic: the element offset __v__ is scaled by sizeof(T)
// before being handed to the address-based fetch operation.
template< typename T >
T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }

template< typename T >
T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }
3914 static inline void atomic_thread_fence(memory_order order)
3915 { _ATOMIC_FENCE_(order); }
3917 /** @todo Do we want to try to support a user's signal-handler? */
3918 static inline void atomic_signal_fence(memory_order order)
3929 #endif /* __IMPATOMIC_H__ */