/**
 * @brief Common header for C11/C++11 atomics
 *
 * Note that some features are unavailable, as they require support from a true
 * C11/C++11 compiler.
 */

#ifndef __IMPATOMIC_H__
#define __IMPATOMIC_H__

#include "memoryorder.h"
#include "cmodelint.h"

#define CPP0X( feature )
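/*
 * CPP0X() expands to nothing, so the C++0x-only pieces of the class
 * definitions below (defaulted/deleted members, constexpr constructors) are
 * simply compiled out. An illustrative expansion, not part of the header:
 *
 *   CPP0X( atomic_flag() = default; )
 *       // expands to: (nothing)
 *   atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
 *       // expands to: atomic_flag& operator =( const atomic_flag& );
 */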
typedef struct atomic_flag
{
	bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
	void clear( memory_order = memory_order_seq_cst ) volatile;

	CPP0X( atomic_flag() = default; )
	CPP0X( atomic_flag( const atomic_flag& ) = delete; )
	atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);

	bool __f__;
} atomic_flag;

#define ATOMIC_FLAG_INIT { false }
extern bool atomic_flag_test_and_set( volatile atomic_flag* );
extern bool atomic_flag_test_and_set_explicit( volatile atomic_flag*, memory_order );
extern void atomic_flag_clear( volatile atomic_flag* );
extern void atomic_flag_clear_explicit( volatile atomic_flag*, memory_order );
extern void __atomic_flag_wait__( volatile atomic_flag* );
extern void __atomic_flag_wait_explicit__( volatile atomic_flag*, memory_order );

inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
{ return atomic_flag_test_and_set_explicit( this, __x__ ); }

inline void atomic_flag::clear( memory_order __x__ ) volatile
{ atomic_flag_clear_explicit( this, __x__ ); }
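/*
 * Usage sketch (illustrative, not part of the header): a test-and-set spin
 * lock built from the atomic_flag operations above. The names lock_flag,
 * spin_lock and spin_unlock are hypothetical; memory_order_acquire and
 * memory_order_release are assumed to be provided by memoryorder.h.
 *
 *   static atomic_flag lock_flag = ATOMIC_FLAG_INIT;
 *
 *   static void spin_lock(void)
 *   {
 *       // keep trying until the previous value was "clear"
 *       while (atomic_flag_test_and_set_explicit(&lock_flag, memory_order_acquire))
 *           ;
 *   }
 *
 *   static void spin_unlock(void)
 *   {
 *       atomic_flag_clear_explicit(&lock_flag, memory_order_release);
 *   }
 */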
/*
The remainder of the example implementation uses the following
macros. These macros exploit GNU extensions for value-returning
blocks (AKA statement expressions) and __typeof__.

The macros rely on data fields of atomic structs being named __f__.
Other symbols used are __a__=atomic, __e__=expected, __f__=field,
__g__=flag, __m__=modified, __o__=operation, __r__=result,
__p__=pointer to field, __v__=value (for single evaluation),
__x__=memory-ordering, and __y__=memory-ordering.
*/
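/*
 * As a concrete illustration (a sketch, not the literal expansion), storing
 * through an atomic_int* __a__ with _ATOMIC_STORE_( __a__, __m__, __x__ )
 * behaves roughly like:
 *
 *   ({ volatile int* __p__ = &((__a__)->__f__);    // address of the payload field
 *      int __v__ = (__m__);                        // evaluate the new value exactly once
 *      model_write_action((void *)__p__, __x__, (uint64_t)__v__);  // report the store
 *      __v__; })                                   // statement expression yields the value
 *
 * i.e. every access is funnelled through the model_*_action() hooks declared
 * in cmodelint.h rather than performed as a plain memory access.
 */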
#define _ATOMIC_LOAD_( __a__, __x__ )                                        \
	({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__);  \
	   __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
	   __r__; })
#define _ATOMIC_STORE_( __a__, __m__, __x__ )                                 \
	({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__);  \
	   __typeof__(__m__) __v__ = (__m__);                                \
	   model_write_action((void *) __p__, __x__, (uint64_t) __v__);      \
	   __v__; })

#define _ATOMIC_INIT_( __a__, __m__ )                                         \
	({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__);  \
	   __typeof__(__m__) __v__ = (__m__);                                \
	   model_init_action((void *) __p__, (uint64_t) __v__);              \
	   __v__; })
#define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ )                         \
	({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__);  \
	   __typeof__((__a__)->__f__) __old__ = (__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
	   __typeof__(__m__) __v__ = (__m__);                                \
	   __typeof__((__a__)->__f__) __copy__ = __old__;                    \
	   __copy__ __o__ __v__;                                             \
	   model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);      \
	   __old__; })
/* No spurious failure for now */
#define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_

#define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )                         \
	({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__);  \
	   __typeof__(__e__) __q__ = (__e__);                                \
	   __typeof__(__m__) __v__ = (__m__);                                \
	   bool __r__;                                                       \
	   __typeof__((__a__)->__f__) __t__ = (__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
	   if (__t__ == * __q__ ) {                                          \
	      model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
	   else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
	   __r__; })
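/*
 * Note that _ATOMIC_CMPSWP_ follows the C11/C++11 compare-exchange contract:
 * on failure the value actually observed is written back through __e__, so a
 * caller's "expected" variable is refreshed for its next attempt.
 */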
#define _ATOMIC_FENCE_( __x__ ) \
	({ model_fence_action(__x__);})
#define ATOMIC_CHAR_LOCK_FREE 1
#define ATOMIC_CHAR16_T_LOCK_FREE 1
#define ATOMIC_CHAR32_T_LOCK_FREE 1
#define ATOMIC_WCHAR_T_LOCK_FREE 1
#define ATOMIC_SHORT_LOCK_FREE 1
#define ATOMIC_INT_LOCK_FREE 1
#define ATOMIC_LONG_LOCK_FREE 1
#define ATOMIC_LLONG_LOCK_FREE 1
#define ATOMIC_ADDRESS_LOCK_FREE 1
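/*
 * By the C11/C++11 convention these macros report 0 = never lock-free,
 * 1 = sometimes lock-free and 2 = always lock-free, so the value 1 used
 * throughout here makes no firm promise either way.
 */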
135 typedef struct atomic_bool
138 bool is_lock_free() const volatile;
139 void store( bool, memory_order = memory_order_seq_cst ) volatile;
140 bool load( memory_order = memory_order_seq_cst ) volatile;
141 bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
142 bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
143 bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
144 bool compare_exchange_weak ( bool&, bool,
145 memory_order = memory_order_seq_cst) volatile;
146 bool compare_exchange_strong ( bool&, bool,
147 memory_order = memory_order_seq_cst) volatile;
149 CPP0X( atomic_bool() = delete; )
150 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
151 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
152 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
154 bool operator =( bool __v__ ) volatile
155 { store( __v__ ); return __v__; }
	friend void atomic_store_explicit( volatile atomic_bool*, bool,
	                                   memory_order );
	friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
	friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
	                                      memory_order );
162 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
163 memory_order, memory_order );
164 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
165 memory_order, memory_order );
173 typedef struct atomic_address
176 bool is_lock_free() const volatile;
177 void store( void*, memory_order = memory_order_seq_cst ) volatile;
178 void* load( memory_order = memory_order_seq_cst ) volatile;
179 void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
180 bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
181 bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
182 bool compare_exchange_weak( void*&, void*,
183 memory_order = memory_order_seq_cst ) volatile;
184 bool compare_exchange_strong( void*&, void*,
185 memory_order = memory_order_seq_cst ) volatile;
186 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
187 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
189 CPP0X( atomic_address() = default; )
190 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
191 CPP0X( atomic_address( const atomic_address& ) = delete; )
192 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
194 void* operator =( void* __v__ ) volatile
195 { store( __v__ ); return __v__; }
197 void* operator +=( ptrdiff_t __v__ ) volatile
198 { return fetch_add( __v__ ); }
200 void* operator -=( ptrdiff_t __v__ ) volatile
201 { return fetch_sub( __v__ ); }
	friend void atomic_store_explicit( volatile atomic_address*, void*,
	                                   memory_order );
	friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
	friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
	                                       memory_order );
	friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
	                                                   void**, void*, memory_order, memory_order );
	friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
	                                                     void**, void*, memory_order, memory_order );
	friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
	                                        memory_order );
	friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
	                                        memory_order );
223 typedef struct atomic_char
	bool is_lock_free() const volatile;
	void store( char,
	            memory_order = memory_order_seq_cst ) volatile;
	char load( memory_order = memory_order_seq_cst ) volatile;
	char exchange( char,
	               memory_order = memory_order_seq_cst ) volatile;
232 bool compare_exchange_weak( char&, char,
233 memory_order, memory_order ) volatile;
234 bool compare_exchange_strong( char&, char,
235 memory_order, memory_order ) volatile;
236 bool compare_exchange_weak( char&, char,
237 memory_order = memory_order_seq_cst ) volatile;
238 bool compare_exchange_strong( char&, char,
239 memory_order = memory_order_seq_cst ) volatile;
240 char fetch_add( char,
241 memory_order = memory_order_seq_cst ) volatile;
242 char fetch_sub( char,
243 memory_order = memory_order_seq_cst ) volatile;
244 char fetch_and( char,
245 memory_order = memory_order_seq_cst ) volatile;
	char fetch_or( char,
	               memory_order = memory_order_seq_cst ) volatile;
248 char fetch_xor( char,
249 memory_order = memory_order_seq_cst ) volatile;
251 CPP0X( atomic_char() = default; )
252 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
253 CPP0X( atomic_char( const atomic_char& ) = delete; )
254 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
256 char operator =( char __v__ ) volatile
257 { store( __v__ ); return __v__; }
259 char operator ++( int ) volatile
260 { return fetch_add( 1 ); }
262 char operator --( int ) volatile
263 { return fetch_sub( 1 ); }
265 char operator ++() volatile
266 { return fetch_add( 1 ) + 1; }
268 char operator --() volatile
269 { return fetch_sub( 1 ) - 1; }
271 char operator +=( char __v__ ) volatile
272 { return fetch_add( __v__ ) + __v__; }
274 char operator -=( char __v__ ) volatile
275 { return fetch_sub( __v__ ) - __v__; }
277 char operator &=( char __v__ ) volatile
278 { return fetch_and( __v__ ) & __v__; }
280 char operator |=( char __v__ ) volatile
281 { return fetch_or( __v__ ) | __v__; }
283 char operator ^=( char __v__ ) volatile
284 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_char*, char,
	                                   memory_order );
	friend char atomic_load_explicit( volatile atomic_char*,
	                                  memory_order );
290 friend char atomic_exchange_explicit( volatile atomic_char*,
291 char, memory_order );
292 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
293 char*, char, memory_order, memory_order );
294 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
295 char*, char, memory_order, memory_order );
296 friend char atomic_fetch_add_explicit( volatile atomic_char*,
297 char, memory_order );
298 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
299 char, memory_order );
300 friend char atomic_fetch_and_explicit( volatile atomic_char*,
301 char, memory_order );
302 friend char atomic_fetch_or_explicit( volatile atomic_char*,
303 char, memory_order );
304 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
305 char, memory_order );
313 typedef struct atomic_schar
316 bool is_lock_free() const volatile;
317 void store( signed char,
318 memory_order = memory_order_seq_cst ) volatile;
319 signed char load( memory_order = memory_order_seq_cst ) volatile;
320 signed char exchange( signed char,
321 memory_order = memory_order_seq_cst ) volatile;
322 bool compare_exchange_weak( signed char&, signed char,
323 memory_order, memory_order ) volatile;
324 bool compare_exchange_strong( signed char&, signed char,
325 memory_order, memory_order ) volatile;
326 bool compare_exchange_weak( signed char&, signed char,
327 memory_order = memory_order_seq_cst ) volatile;
328 bool compare_exchange_strong( signed char&, signed char,
329 memory_order = memory_order_seq_cst ) volatile;
330 signed char fetch_add( signed char,
331 memory_order = memory_order_seq_cst ) volatile;
332 signed char fetch_sub( signed char,
333 memory_order = memory_order_seq_cst ) volatile;
334 signed char fetch_and( signed char,
335 memory_order = memory_order_seq_cst ) volatile;
336 signed char fetch_or( signed char,
337 memory_order = memory_order_seq_cst ) volatile;
338 signed char fetch_xor( signed char,
339 memory_order = memory_order_seq_cst ) volatile;
341 CPP0X( atomic_schar() = default; )
342 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
343 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
344 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
346 signed char operator =( signed char __v__ ) volatile
347 { store( __v__ ); return __v__; }
349 signed char operator ++( int ) volatile
350 { return fetch_add( 1 ); }
352 signed char operator --( int ) volatile
353 { return fetch_sub( 1 ); }
355 signed char operator ++() volatile
356 { return fetch_add( 1 ) + 1; }
358 signed char operator --() volatile
359 { return fetch_sub( 1 ) - 1; }
361 signed char operator +=( signed char __v__ ) volatile
362 { return fetch_add( __v__ ) + __v__; }
364 signed char operator -=( signed char __v__ ) volatile
365 { return fetch_sub( __v__ ) - __v__; }
367 signed char operator &=( signed char __v__ ) volatile
368 { return fetch_and( __v__ ) & __v__; }
370 signed char operator |=( signed char __v__ ) volatile
371 { return fetch_or( __v__ ) | __v__; }
373 signed char operator ^=( signed char __v__ ) volatile
374 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_schar*, signed char,
	                                   memory_order );
	friend signed char atomic_load_explicit( volatile atomic_schar*,
	                                         memory_order );
380 friend signed char atomic_exchange_explicit( volatile atomic_schar*,
381 signed char, memory_order );
382 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
383 signed char*, signed char, memory_order, memory_order );
384 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
385 signed char*, signed char, memory_order, memory_order );
386 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
387 signed char, memory_order );
388 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
389 signed char, memory_order );
390 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
391 signed char, memory_order );
392 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
393 signed char, memory_order );
394 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
395 signed char, memory_order );
403 typedef struct atomic_uchar
406 bool is_lock_free() const volatile;
407 void store( unsigned char,
408 memory_order = memory_order_seq_cst ) volatile;
409 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
410 unsigned char exchange( unsigned char,
411 memory_order = memory_order_seq_cst ) volatile;
412 bool compare_exchange_weak( unsigned char&, unsigned char,
413 memory_order, memory_order ) volatile;
414 bool compare_exchange_strong( unsigned char&, unsigned char,
415 memory_order, memory_order ) volatile;
416 bool compare_exchange_weak( unsigned char&, unsigned char,
417 memory_order = memory_order_seq_cst ) volatile;
418 bool compare_exchange_strong( unsigned char&, unsigned char,
419 memory_order = memory_order_seq_cst ) volatile;
420 unsigned char fetch_add( unsigned char,
421 memory_order = memory_order_seq_cst ) volatile;
422 unsigned char fetch_sub( unsigned char,
423 memory_order = memory_order_seq_cst ) volatile;
424 unsigned char fetch_and( unsigned char,
425 memory_order = memory_order_seq_cst ) volatile;
426 unsigned char fetch_or( unsigned char,
427 memory_order = memory_order_seq_cst ) volatile;
428 unsigned char fetch_xor( unsigned char,
429 memory_order = memory_order_seq_cst ) volatile;
431 CPP0X( atomic_uchar() = default; )
432 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
433 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
434 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
436 unsigned char operator =( unsigned char __v__ ) volatile
437 { store( __v__ ); return __v__; }
439 unsigned char operator ++( int ) volatile
440 { return fetch_add( 1 ); }
442 unsigned char operator --( int ) volatile
443 { return fetch_sub( 1 ); }
445 unsigned char operator ++() volatile
446 { return fetch_add( 1 ) + 1; }
448 unsigned char operator --() volatile
449 { return fetch_sub( 1 ) - 1; }
451 unsigned char operator +=( unsigned char __v__ ) volatile
452 { return fetch_add( __v__ ) + __v__; }
454 unsigned char operator -=( unsigned char __v__ ) volatile
455 { return fetch_sub( __v__ ) - __v__; }
457 unsigned char operator &=( unsigned char __v__ ) volatile
458 { return fetch_and( __v__ ) & __v__; }
460 unsigned char operator |=( unsigned char __v__ ) volatile
461 { return fetch_or( __v__ ) | __v__; }
463 unsigned char operator ^=( unsigned char __v__ ) volatile
464 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
	                                   memory_order );
	friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
	                                           memory_order );
470 friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
471 unsigned char, memory_order );
472 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
473 unsigned char*, unsigned char, memory_order, memory_order );
474 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
475 unsigned char*, unsigned char, memory_order, memory_order );
476 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
477 unsigned char, memory_order );
478 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
479 unsigned char, memory_order );
480 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
481 unsigned char, memory_order );
482 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
483 unsigned char, memory_order );
484 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
485 unsigned char, memory_order );
493 typedef struct atomic_short
	bool is_lock_free() const volatile;
	void store( short,
	            memory_order = memory_order_seq_cst ) volatile;
499 short load( memory_order = memory_order_seq_cst ) volatile;
500 short exchange( short,
501 memory_order = memory_order_seq_cst ) volatile;
502 bool compare_exchange_weak( short&, short,
503 memory_order, memory_order ) volatile;
504 bool compare_exchange_strong( short&, short,
505 memory_order, memory_order ) volatile;
506 bool compare_exchange_weak( short&, short,
507 memory_order = memory_order_seq_cst ) volatile;
508 bool compare_exchange_strong( short&, short,
509 memory_order = memory_order_seq_cst ) volatile;
510 short fetch_add( short,
511 memory_order = memory_order_seq_cst ) volatile;
512 short fetch_sub( short,
513 memory_order = memory_order_seq_cst ) volatile;
514 short fetch_and( short,
515 memory_order = memory_order_seq_cst ) volatile;
516 short fetch_or( short,
517 memory_order = memory_order_seq_cst ) volatile;
518 short fetch_xor( short,
519 memory_order = memory_order_seq_cst ) volatile;
521 CPP0X( atomic_short() = default; )
522 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
523 CPP0X( atomic_short( const atomic_short& ) = delete; )
524 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
526 short operator =( short __v__ ) volatile
527 { store( __v__ ); return __v__; }
529 short operator ++( int ) volatile
530 { return fetch_add( 1 ); }
532 short operator --( int ) volatile
533 { return fetch_sub( 1 ); }
535 short operator ++() volatile
536 { return fetch_add( 1 ) + 1; }
538 short operator --() volatile
539 { return fetch_sub( 1 ) - 1; }
541 short operator +=( short __v__ ) volatile
542 { return fetch_add( __v__ ) + __v__; }
544 short operator -=( short __v__ ) volatile
545 { return fetch_sub( __v__ ) - __v__; }
547 short operator &=( short __v__ ) volatile
548 { return fetch_and( __v__ ) & __v__; }
550 short operator |=( short __v__ ) volatile
551 { return fetch_or( __v__ ) | __v__; }
553 short operator ^=( short __v__ ) volatile
554 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_short*, short,
	                                   memory_order );
	friend short atomic_load_explicit( volatile atomic_short*,
	                                   memory_order );
560 friend short atomic_exchange_explicit( volatile atomic_short*,
561 short, memory_order );
562 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
563 short*, short, memory_order, memory_order );
564 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
565 short*, short, memory_order, memory_order );
566 friend short atomic_fetch_add_explicit( volatile atomic_short*,
567 short, memory_order );
568 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
569 short, memory_order );
570 friend short atomic_fetch_and_explicit( volatile atomic_short*,
571 short, memory_order );
572 friend short atomic_fetch_or_explicit( volatile atomic_short*,
573 short, memory_order );
574 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
575 short, memory_order );
583 typedef struct atomic_ushort
586 bool is_lock_free() const volatile;
587 void store( unsigned short,
588 memory_order = memory_order_seq_cst ) volatile;
589 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
590 unsigned short exchange( unsigned short,
591 memory_order = memory_order_seq_cst ) volatile;
592 bool compare_exchange_weak( unsigned short&, unsigned short,
593 memory_order, memory_order ) volatile;
594 bool compare_exchange_strong( unsigned short&, unsigned short,
595 memory_order, memory_order ) volatile;
596 bool compare_exchange_weak( unsigned short&, unsigned short,
597 memory_order = memory_order_seq_cst ) volatile;
598 bool compare_exchange_strong( unsigned short&, unsigned short,
599 memory_order = memory_order_seq_cst ) volatile;
600 unsigned short fetch_add( unsigned short,
601 memory_order = memory_order_seq_cst ) volatile;
602 unsigned short fetch_sub( unsigned short,
603 memory_order = memory_order_seq_cst ) volatile;
604 unsigned short fetch_and( unsigned short,
605 memory_order = memory_order_seq_cst ) volatile;
606 unsigned short fetch_or( unsigned short,
607 memory_order = memory_order_seq_cst ) volatile;
608 unsigned short fetch_xor( unsigned short,
609 memory_order = memory_order_seq_cst ) volatile;
611 CPP0X( atomic_ushort() = default; )
612 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
613 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
614 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
616 unsigned short operator =( unsigned short __v__ ) volatile
617 { store( __v__ ); return __v__; }
619 unsigned short operator ++( int ) volatile
620 { return fetch_add( 1 ); }
622 unsigned short operator --( int ) volatile
623 { return fetch_sub( 1 ); }
625 unsigned short operator ++() volatile
626 { return fetch_add( 1 ) + 1; }
628 unsigned short operator --() volatile
629 { return fetch_sub( 1 ) - 1; }
631 unsigned short operator +=( unsigned short __v__ ) volatile
632 { return fetch_add( __v__ ) + __v__; }
634 unsigned short operator -=( unsigned short __v__ ) volatile
635 { return fetch_sub( __v__ ) - __v__; }
637 unsigned short operator &=( unsigned short __v__ ) volatile
638 { return fetch_and( __v__ ) & __v__; }
640 unsigned short operator |=( unsigned short __v__ ) volatile
641 { return fetch_or( __v__ ) | __v__; }
643 unsigned short operator ^=( unsigned short __v__ ) volatile
644 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
	                                   memory_order );
	friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
	                                            memory_order );
650 friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
651 unsigned short, memory_order );
652 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
653 unsigned short*, unsigned short, memory_order, memory_order );
654 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
655 unsigned short*, unsigned short, memory_order, memory_order );
656 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
657 unsigned short, memory_order );
658 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
659 unsigned short, memory_order );
660 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
661 unsigned short, memory_order );
662 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
663 unsigned short, memory_order );
664 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
665 unsigned short, memory_order );
669 unsigned short __f__;
673 typedef struct atomic_int
	bool is_lock_free() const volatile;
	void store( int,
	            memory_order = memory_order_seq_cst ) volatile;
	int load( memory_order = memory_order_seq_cst ) volatile;
	int exchange( int,
	              memory_order = memory_order_seq_cst ) volatile;
682 bool compare_exchange_weak( int&, int,
683 memory_order, memory_order ) volatile;
684 bool compare_exchange_strong( int&, int,
685 memory_order, memory_order ) volatile;
686 bool compare_exchange_weak( int&, int,
687 memory_order = memory_order_seq_cst ) volatile;
688 bool compare_exchange_strong( int&, int,
689 memory_order = memory_order_seq_cst ) volatile;
	int fetch_add( int,
	               memory_order = memory_order_seq_cst ) volatile;
	int fetch_sub( int,
	               memory_order = memory_order_seq_cst ) volatile;
	int fetch_and( int,
	               memory_order = memory_order_seq_cst ) volatile;
	int fetch_or( int,
	              memory_order = memory_order_seq_cst ) volatile;
	int fetch_xor( int,
	               memory_order = memory_order_seq_cst ) volatile;
701 CPP0X( atomic_int() = default; )
702 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
703 CPP0X( atomic_int( const atomic_int& ) = delete; )
704 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
706 int operator =( int __v__ ) volatile
707 { store( __v__ ); return __v__; }
709 int operator ++( int ) volatile
710 { return fetch_add( 1 ); }
712 int operator --( int ) volatile
713 { return fetch_sub( 1 ); }
715 int operator ++() volatile
716 { return fetch_add( 1 ) + 1; }
718 int operator --() volatile
719 { return fetch_sub( 1 ) - 1; }
721 int operator +=( int __v__ ) volatile
722 { return fetch_add( __v__ ) + __v__; }
724 int operator -=( int __v__ ) volatile
725 { return fetch_sub( __v__ ) - __v__; }
727 int operator &=( int __v__ ) volatile
728 { return fetch_and( __v__ ) & __v__; }
730 int operator |=( int __v__ ) volatile
731 { return fetch_or( __v__ ) | __v__; }
733 int operator ^=( int __v__ ) volatile
734 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_int*, int,
	                                   memory_order );
	friend int atomic_load_explicit( volatile atomic_int*,
	                                 memory_order );
	friend int atomic_exchange_explicit( volatile atomic_int*,
	                                     int, memory_order );
	friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
	                                                   int*, int, memory_order, memory_order );
	friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
	                                                     int*, int, memory_order, memory_order );
	friend int atomic_fetch_add_explicit( volatile atomic_int*,
	                                      int, memory_order );
	friend int atomic_fetch_sub_explicit( volatile atomic_int*,
	                                      int, memory_order );
	friend int atomic_fetch_and_explicit( volatile atomic_int*,
	                                      int, memory_order );
	friend int atomic_fetch_or_explicit( volatile atomic_int*,
	                                     int, memory_order );
	friend int atomic_fetch_xor_explicit( volatile atomic_int*,
	                                      int, memory_order );
763 typedef struct atomic_uint
766 bool is_lock_free() const volatile;
767 void store( unsigned int,
768 memory_order = memory_order_seq_cst ) volatile;
769 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
770 unsigned int exchange( unsigned int,
771 memory_order = memory_order_seq_cst ) volatile;
772 bool compare_exchange_weak( unsigned int&, unsigned int,
773 memory_order, memory_order ) volatile;
774 bool compare_exchange_strong( unsigned int&, unsigned int,
775 memory_order, memory_order ) volatile;
776 bool compare_exchange_weak( unsigned int&, unsigned int,
777 memory_order = memory_order_seq_cst ) volatile;
778 bool compare_exchange_strong( unsigned int&, unsigned int,
779 memory_order = memory_order_seq_cst ) volatile;
780 unsigned int fetch_add( unsigned int,
781 memory_order = memory_order_seq_cst ) volatile;
782 unsigned int fetch_sub( unsigned int,
783 memory_order = memory_order_seq_cst ) volatile;
784 unsigned int fetch_and( unsigned int,
785 memory_order = memory_order_seq_cst ) volatile;
786 unsigned int fetch_or( unsigned int,
787 memory_order = memory_order_seq_cst ) volatile;
788 unsigned int fetch_xor( unsigned int,
789 memory_order = memory_order_seq_cst ) volatile;
791 CPP0X( atomic_uint() = default; )
792 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
793 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
794 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
796 unsigned int operator =( unsigned int __v__ ) volatile
797 { store( __v__ ); return __v__; }
799 unsigned int operator ++( int ) volatile
800 { return fetch_add( 1 ); }
802 unsigned int operator --( int ) volatile
803 { return fetch_sub( 1 ); }
805 unsigned int operator ++() volatile
806 { return fetch_add( 1 ) + 1; }
808 unsigned int operator --() volatile
809 { return fetch_sub( 1 ) - 1; }
811 unsigned int operator +=( unsigned int __v__ ) volatile
812 { return fetch_add( __v__ ) + __v__; }
814 unsigned int operator -=( unsigned int __v__ ) volatile
815 { return fetch_sub( __v__ ) - __v__; }
817 unsigned int operator &=( unsigned int __v__ ) volatile
818 { return fetch_and( __v__ ) & __v__; }
820 unsigned int operator |=( unsigned int __v__ ) volatile
821 { return fetch_or( __v__ ) | __v__; }
823 unsigned int operator ^=( unsigned int __v__ ) volatile
824 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
	                                   memory_order );
	friend unsigned int atomic_load_explicit( volatile atomic_uint*,
	                                          memory_order );
830 friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
831 unsigned int, memory_order );
832 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
833 unsigned int*, unsigned int, memory_order, memory_order );
834 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
835 unsigned int*, unsigned int, memory_order, memory_order );
836 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
837 unsigned int, memory_order );
838 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
839 unsigned int, memory_order );
840 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
841 unsigned int, memory_order );
842 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
843 unsigned int, memory_order );
844 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
845 unsigned int, memory_order );
853 typedef struct atomic_long
	bool is_lock_free() const volatile;
	void store( long,
	            memory_order = memory_order_seq_cst ) volatile;
	long load( memory_order = memory_order_seq_cst ) volatile;
	long exchange( long,
	               memory_order = memory_order_seq_cst ) volatile;
862 bool compare_exchange_weak( long&, long,
863 memory_order, memory_order ) volatile;
864 bool compare_exchange_strong( long&, long,
865 memory_order, memory_order ) volatile;
866 bool compare_exchange_weak( long&, long,
867 memory_order = memory_order_seq_cst ) volatile;
868 bool compare_exchange_strong( long&, long,
869 memory_order = memory_order_seq_cst ) volatile;
870 long fetch_add( long,
871 memory_order = memory_order_seq_cst ) volatile;
872 long fetch_sub( long,
873 memory_order = memory_order_seq_cst ) volatile;
874 long fetch_and( long,
875 memory_order = memory_order_seq_cst ) volatile;
	long fetch_or( long,
	               memory_order = memory_order_seq_cst ) volatile;
878 long fetch_xor( long,
879 memory_order = memory_order_seq_cst ) volatile;
881 CPP0X( atomic_long() = default; )
882 CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
883 CPP0X( atomic_long( const atomic_long& ) = delete; )
884 atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
886 long operator =( long __v__ ) volatile
887 { store( __v__ ); return __v__; }
889 long operator ++( int ) volatile
890 { return fetch_add( 1 ); }
892 long operator --( int ) volatile
893 { return fetch_sub( 1 ); }
895 long operator ++() volatile
896 { return fetch_add( 1 ) + 1; }
898 long operator --() volatile
899 { return fetch_sub( 1 ) - 1; }
901 long operator +=( long __v__ ) volatile
902 { return fetch_add( __v__ ) + __v__; }
904 long operator -=( long __v__ ) volatile
905 { return fetch_sub( __v__ ) - __v__; }
907 long operator &=( long __v__ ) volatile
908 { return fetch_and( __v__ ) & __v__; }
910 long operator |=( long __v__ ) volatile
911 { return fetch_or( __v__ ) | __v__; }
913 long operator ^=( long __v__ ) volatile
914 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_long*, long,
	                                   memory_order );
	friend long atomic_load_explicit( volatile atomic_long*,
	                                  memory_order );
920 friend long atomic_exchange_explicit( volatile atomic_long*,
921 long, memory_order );
922 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
923 long*, long, memory_order, memory_order );
924 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
925 long*, long, memory_order, memory_order );
926 friend long atomic_fetch_add_explicit( volatile atomic_long*,
927 long, memory_order );
928 friend long atomic_fetch_sub_explicit( volatile atomic_long*,
929 long, memory_order );
930 friend long atomic_fetch_and_explicit( volatile atomic_long*,
931 long, memory_order );
932 friend long atomic_fetch_or_explicit( volatile atomic_long*,
933 long, memory_order );
934 friend long atomic_fetch_xor_explicit( volatile atomic_long*,
935 long, memory_order );
943 typedef struct atomic_ulong
946 bool is_lock_free() const volatile;
947 void store( unsigned long,
948 memory_order = memory_order_seq_cst ) volatile;
949 unsigned long load( memory_order = memory_order_seq_cst ) volatile;
950 unsigned long exchange( unsigned long,
951 memory_order = memory_order_seq_cst ) volatile;
952 bool compare_exchange_weak( unsigned long&, unsigned long,
953 memory_order, memory_order ) volatile;
954 bool compare_exchange_strong( unsigned long&, unsigned long,
955 memory_order, memory_order ) volatile;
956 bool compare_exchange_weak( unsigned long&, unsigned long,
957 memory_order = memory_order_seq_cst ) volatile;
958 bool compare_exchange_strong( unsigned long&, unsigned long,
959 memory_order = memory_order_seq_cst ) volatile;
960 unsigned long fetch_add( unsigned long,
961 memory_order = memory_order_seq_cst ) volatile;
962 unsigned long fetch_sub( unsigned long,
963 memory_order = memory_order_seq_cst ) volatile;
964 unsigned long fetch_and( unsigned long,
965 memory_order = memory_order_seq_cst ) volatile;
966 unsigned long fetch_or( unsigned long,
967 memory_order = memory_order_seq_cst ) volatile;
968 unsigned long fetch_xor( unsigned long,
969 memory_order = memory_order_seq_cst ) volatile;
971 CPP0X( atomic_ulong() = default; )
972 CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
973 CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
974 atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
976 unsigned long operator =( unsigned long __v__ ) volatile
977 { store( __v__ ); return __v__; }
979 unsigned long operator ++( int ) volatile
980 { return fetch_add( 1 ); }
982 unsigned long operator --( int ) volatile
983 { return fetch_sub( 1 ); }
985 unsigned long operator ++() volatile
986 { return fetch_add( 1 ) + 1; }
988 unsigned long operator --() volatile
989 { return fetch_sub( 1 ) - 1; }
991 unsigned long operator +=( unsigned long __v__ ) volatile
992 { return fetch_add( __v__ ) + __v__; }
994 unsigned long operator -=( unsigned long __v__ ) volatile
995 { return fetch_sub( __v__ ) - __v__; }
997 unsigned long operator &=( unsigned long __v__ ) volatile
998 { return fetch_and( __v__ ) & __v__; }
1000 unsigned long operator |=( unsigned long __v__ ) volatile
1001 { return fetch_or( __v__ ) | __v__; }
1003 unsigned long operator ^=( unsigned long __v__ ) volatile
1004 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
	                                   memory_order );
	friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
	                                           memory_order );
1010 friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1011 unsigned long, memory_order );
1012 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1013 unsigned long*, unsigned long, memory_order, memory_order );
1014 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1015 unsigned long*, unsigned long, memory_order, memory_order );
1016 friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1017 unsigned long, memory_order );
1018 friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1019 unsigned long, memory_order );
1020 friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1021 unsigned long, memory_order );
1022 friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1023 unsigned long, memory_order );
1024 friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1025 unsigned long, memory_order );
1029 unsigned long __f__;
1033 typedef struct atomic_llong
1036 bool is_lock_free() const volatile;
1037 void store( long long,
1038 memory_order = memory_order_seq_cst ) volatile;
1039 long long load( memory_order = memory_order_seq_cst ) volatile;
1040 long long exchange( long long,
1041 memory_order = memory_order_seq_cst ) volatile;
1042 bool compare_exchange_weak( long long&, long long,
1043 memory_order, memory_order ) volatile;
1044 bool compare_exchange_strong( long long&, long long,
1045 memory_order, memory_order ) volatile;
1046 bool compare_exchange_weak( long long&, long long,
1047 memory_order = memory_order_seq_cst ) volatile;
1048 bool compare_exchange_strong( long long&, long long,
1049 memory_order = memory_order_seq_cst ) volatile;
1050 long long fetch_add( long long,
1051 memory_order = memory_order_seq_cst ) volatile;
1052 long long fetch_sub( long long,
1053 memory_order = memory_order_seq_cst ) volatile;
1054 long long fetch_and( long long,
1055 memory_order = memory_order_seq_cst ) volatile;
1056 long long fetch_or( long long,
1057 memory_order = memory_order_seq_cst ) volatile;
1058 long long fetch_xor( long long,
1059 memory_order = memory_order_seq_cst ) volatile;
1061 CPP0X( atomic_llong() = default; )
1062 CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1063 CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1064 atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1066 long long operator =( long long __v__ ) volatile
1067 { store( __v__ ); return __v__; }
1069 long long operator ++( int ) volatile
1070 { return fetch_add( 1 ); }
1072 long long operator --( int ) volatile
1073 { return fetch_sub( 1 ); }
1075 long long operator ++() volatile
1076 { return fetch_add( 1 ) + 1; }
1078 long long operator --() volatile
1079 { return fetch_sub( 1 ) - 1; }
1081 long long operator +=( long long __v__ ) volatile
1082 { return fetch_add( __v__ ) + __v__; }
1084 long long operator -=( long long __v__ ) volatile
1085 { return fetch_sub( __v__ ) - __v__; }
1087 long long operator &=( long long __v__ ) volatile
1088 { return fetch_and( __v__ ) & __v__; }
1090 long long operator |=( long long __v__ ) volatile
1091 { return fetch_or( __v__ ) | __v__; }
1093 long long operator ^=( long long __v__ ) volatile
1094 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_llong*, long long,
	                                   memory_order );
	friend long long atomic_load_explicit( volatile atomic_llong*,
	                                       memory_order );
1100 friend long long atomic_exchange_explicit( volatile atomic_llong*,
1101 long long, memory_order );
1102 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1103 long long*, long long, memory_order, memory_order );
1104 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1105 long long*, long long, memory_order, memory_order );
1106 friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1107 long long, memory_order );
1108 friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1109 long long, memory_order );
1110 friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1111 long long, memory_order );
1112 friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1113 long long, memory_order );
1114 friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1115 long long, memory_order );
1123 typedef struct atomic_ullong
1126 bool is_lock_free() const volatile;
1127 void store( unsigned long long,
1128 memory_order = memory_order_seq_cst ) volatile;
1129 unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1130 unsigned long long exchange( unsigned long long,
1131 memory_order = memory_order_seq_cst ) volatile;
1132 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1133 memory_order, memory_order ) volatile;
1134 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1135 memory_order, memory_order ) volatile;
1136 bool compare_exchange_weak( unsigned long long&, unsigned long long,
1137 memory_order = memory_order_seq_cst ) volatile;
1138 bool compare_exchange_strong( unsigned long long&, unsigned long long,
1139 memory_order = memory_order_seq_cst ) volatile;
1140 unsigned long long fetch_add( unsigned long long,
1141 memory_order = memory_order_seq_cst ) volatile;
1142 unsigned long long fetch_sub( unsigned long long,
1143 memory_order = memory_order_seq_cst ) volatile;
1144 unsigned long long fetch_and( unsigned long long,
1145 memory_order = memory_order_seq_cst ) volatile;
1146 unsigned long long fetch_or( unsigned long long,
1147 memory_order = memory_order_seq_cst ) volatile;
1148 unsigned long long fetch_xor( unsigned long long,
1149 memory_order = memory_order_seq_cst ) volatile;
1151 CPP0X( atomic_ullong() = default; )
1152 CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1153 CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1154 atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1156 unsigned long long operator =( unsigned long long __v__ ) volatile
1157 { store( __v__ ); return __v__; }
1159 unsigned long long operator ++( int ) volatile
1160 { return fetch_add( 1 ); }
1162 unsigned long long operator --( int ) volatile
1163 { return fetch_sub( 1 ); }
1165 unsigned long long operator ++() volatile
1166 { return fetch_add( 1 ) + 1; }
1168 unsigned long long operator --() volatile
1169 { return fetch_sub( 1 ) - 1; }
1171 unsigned long long operator +=( unsigned long long __v__ ) volatile
1172 { return fetch_add( __v__ ) + __v__; }
1174 unsigned long long operator -=( unsigned long long __v__ ) volatile
1175 { return fetch_sub( __v__ ) - __v__; }
1177 unsigned long long operator &=( unsigned long long __v__ ) volatile
1178 { return fetch_and( __v__ ) & __v__; }
1180 unsigned long long operator |=( unsigned long long __v__ ) volatile
1181 { return fetch_or( __v__ ) | __v__; }
1183 unsigned long long operator ^=( unsigned long long __v__ ) volatile
1184 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
	                                   memory_order );
	friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
	                                                memory_order );
1190 friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1191 unsigned long long, memory_order );
1192 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1193 unsigned long long*, unsigned long long, memory_order, memory_order );
1194 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1195 unsigned long long*, unsigned long long, memory_order, memory_order );
1196 friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1197 unsigned long long, memory_order );
1198 friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1199 unsigned long long, memory_order );
1200 friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1201 unsigned long long, memory_order );
1202 friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1203 unsigned long long, memory_order );
1204 friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1205 unsigned long long, memory_order );
1209 unsigned long long __f__;
1213 typedef atomic_schar atomic_int_least8_t;
1214 typedef atomic_uchar atomic_uint_least8_t;
1215 typedef atomic_short atomic_int_least16_t;
1216 typedef atomic_ushort atomic_uint_least16_t;
1217 typedef atomic_int atomic_int_least32_t;
1218 typedef atomic_uint atomic_uint_least32_t;
1219 typedef atomic_llong atomic_int_least64_t;
1220 typedef atomic_ullong atomic_uint_least64_t;
1222 typedef atomic_schar atomic_int_fast8_t;
1223 typedef atomic_uchar atomic_uint_fast8_t;
1224 typedef atomic_short atomic_int_fast16_t;
1225 typedef atomic_ushort atomic_uint_fast16_t;
1226 typedef atomic_int atomic_int_fast32_t;
1227 typedef atomic_uint atomic_uint_fast32_t;
1228 typedef atomic_llong atomic_int_fast64_t;
1229 typedef atomic_ullong atomic_uint_fast64_t;
1231 typedef atomic_long atomic_intptr_t;
1232 typedef atomic_ulong atomic_uintptr_t;
1234 typedef atomic_long atomic_ssize_t;
1235 typedef atomic_ulong atomic_size_t;
1237 typedef atomic_long atomic_ptrdiff_t;
1239 typedef atomic_llong atomic_intmax_t;
1240 typedef atomic_ullong atomic_uintmax_t;
1246 typedef struct atomic_wchar_t
1249 bool is_lock_free() const volatile;
1250 void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1251 wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1252 wchar_t exchange( wchar_t,
1253 memory_order = memory_order_seq_cst ) volatile;
1254 bool compare_exchange_weak( wchar_t&, wchar_t,
1255 memory_order, memory_order ) volatile;
1256 bool compare_exchange_strong( wchar_t&, wchar_t,
1257 memory_order, memory_order ) volatile;
1258 bool compare_exchange_weak( wchar_t&, wchar_t,
1259 memory_order = memory_order_seq_cst ) volatile;
1260 bool compare_exchange_strong( wchar_t&, wchar_t,
1261 memory_order = memory_order_seq_cst ) volatile;
1262 wchar_t fetch_add( wchar_t,
1263 memory_order = memory_order_seq_cst ) volatile;
1264 wchar_t fetch_sub( wchar_t,
1265 memory_order = memory_order_seq_cst ) volatile;
1266 wchar_t fetch_and( wchar_t,
1267 memory_order = memory_order_seq_cst ) volatile;
1268 wchar_t fetch_or( wchar_t,
1269 memory_order = memory_order_seq_cst ) volatile;
1270 wchar_t fetch_xor( wchar_t,
1271 memory_order = memory_order_seq_cst ) volatile;
1273 CPP0X( atomic_wchar_t() = default; )
1274 CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1275 CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1276 atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1278 wchar_t operator =( wchar_t __v__ ) volatile
1279 { store( __v__ ); return __v__; }
1281 wchar_t operator ++( int ) volatile
1282 { return fetch_add( 1 ); }
1284 wchar_t operator --( int ) volatile
1285 { return fetch_sub( 1 ); }
1287 wchar_t operator ++() volatile
1288 { return fetch_add( 1 ) + 1; }
1290 wchar_t operator --() volatile
1291 { return fetch_sub( 1 ) - 1; }
1293 wchar_t operator +=( wchar_t __v__ ) volatile
1294 { return fetch_add( __v__ ) + __v__; }
1296 wchar_t operator -=( wchar_t __v__ ) volatile
1297 { return fetch_sub( __v__ ) - __v__; }
1299 wchar_t operator &=( wchar_t __v__ ) volatile
1300 { return fetch_and( __v__ ) & __v__; }
1302 wchar_t operator |=( wchar_t __v__ ) volatile
1303 { return fetch_or( __v__ ) | __v__; }
1305 wchar_t operator ^=( wchar_t __v__ ) volatile
1306 { return fetch_xor( __v__ ) ^ __v__; }
	friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
	                                   memory_order );
	friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
	                                     memory_order );
1312 friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1313 wchar_t, memory_order );
1314 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1315 wchar_t*, wchar_t, memory_order, memory_order );
1316 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1317 wchar_t*, wchar_t, memory_order, memory_order );
1318 friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1319 wchar_t, memory_order );
1320 friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1321 wchar_t, memory_order );
1322 friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1323 wchar_t, memory_order );
1324 friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1325 wchar_t, memory_order );
1326 friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1327 wchar_t, memory_order );
1337 typedef atomic_int_least16_t atomic_char16_t;
1338 typedef atomic_int_least32_t atomic_char32_t;
1339 typedef atomic_int_least32_t atomic_wchar_t;
template< typename T >
struct atomic
{
1351 bool is_lock_free() const volatile;
1352 void store( T, memory_order = memory_order_seq_cst ) volatile;
1353 T load( memory_order = memory_order_seq_cst ) volatile;
1354 T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1355 bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1356 bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1357 bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1358 bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1360 CPP0X( atomic() = default; )
1361 CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1362 CPP0X( atomic( const atomic& ) = delete; )
1363 atomic& operator =( const atomic& ) CPP0X(=delete);
1365 T operator =( T __v__ ) volatile
1366 { store( __v__ ); return __v__; }
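/*
 * The primary template keeps its payload in the usual __f__ field and, like
 * the builtin specializations, routes every access through the uint64_t-based
 * macros above, so it is realistically only suited to small, trivially
 * copyable types. Usage sketch (illustrative; the enum and variable names are
 * hypothetical, and the default memory_order_seq_cst ordering is used):
 *
 *   enum color { RED, GREEN, BLUE };
 *
 *   atomic< color > current_color;
 *   ...
 *   current_color.store( GREEN );
 *   enum color c = current_color.load();
 */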
1377 template<typename T> struct atomic< T* > : atomic_address
1379 T* load( memory_order = memory_order_seq_cst ) volatile;
1380 T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1381 bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1382 bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1383 bool compare_exchange_weak( T*&, T*,
1384 memory_order = memory_order_seq_cst ) volatile;
1385 bool compare_exchange_strong( T*&, T*,
1386 memory_order = memory_order_seq_cst ) volatile;
1387 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1388 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1390 CPP0X( atomic() = default; )
1391 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1392 CPP0X( atomic( const atomic& ) = delete; )
1393 atomic& operator =( const atomic& ) CPP0X(=delete);
1395 T* operator =( T* __v__ ) volatile
1396 { store( __v__ ); return __v__; }
1398 T* operator ++( int ) volatile
1399 { return fetch_add( 1 ); }
1401 T* operator --( int ) volatile
1402 { return fetch_sub( 1 ); }
1404 T* operator ++() volatile
1405 { return fetch_add( 1 ) + 1; }
1407 T* operator --() volatile
1408 { return fetch_sub( 1 ) - 1; }
1410 T* operator +=( T* __v__ ) volatile
1411 { return fetch_add( __v__ ) + __v__; }
1413 T* operator -=( T* __v__ ) volatile
1414 { return fetch_sub( __v__ ) - __v__; }
1422 template<> struct atomic< bool > : atomic_bool
1424 CPP0X( atomic() = default; )
1425 CPP0X( constexpr explicit atomic( bool __v__ )
1426 : atomic_bool( __v__ ) { } )
1427 CPP0X( atomic( const atomic& ) = delete; )
1428 atomic& operator =( const atomic& ) CPP0X(=delete);
1430 bool operator =( bool __v__ ) volatile
1431 { store( __v__ ); return __v__; }
1435 template<> struct atomic< void* > : atomic_address
1437 CPP0X( atomic() = default; )
1438 CPP0X( constexpr explicit atomic( void* __v__ )
1439 : atomic_address( __v__ ) { } )
1440 CPP0X( atomic( const atomic& ) = delete; )
1441 atomic& operator =( const atomic& ) CPP0X(=delete);
1443 void* operator =( void* __v__ ) volatile
1444 { store( __v__ ); return __v__; }
1448 template<> struct atomic< char > : atomic_char
1450 CPP0X( atomic() = default; )
1451 CPP0X( constexpr explicit atomic( char __v__ )
1452 : atomic_char( __v__ ) { } )
1453 CPP0X( atomic( const atomic& ) = delete; )
1454 atomic& operator =( const atomic& ) CPP0X(=delete);
1456 char operator =( char __v__ ) volatile
1457 { store( __v__ ); return __v__; }
1461 template<> struct atomic< signed char > : atomic_schar
1463 CPP0X( atomic() = default; )
1464 CPP0X( constexpr explicit atomic( signed char __v__ )
1465 : atomic_schar( __v__ ) { } )
1466 CPP0X( atomic( const atomic& ) = delete; )
1467 atomic& operator =( const atomic& ) CPP0X(=delete);
1469 signed char operator =( signed char __v__ ) volatile
1470 { store( __v__ ); return __v__; }
1474 template<> struct atomic< unsigned char > : atomic_uchar
1476 CPP0X( atomic() = default; )
1477 CPP0X( constexpr explicit atomic( unsigned char __v__ )
1478 : atomic_uchar( __v__ ) { } )
1479 CPP0X( atomic( const atomic& ) = delete; )
1480 atomic& operator =( const atomic& ) CPP0X(=delete);
1482 unsigned char operator =( unsigned char __v__ ) volatile
1483 { store( __v__ ); return __v__; }
1487 template<> struct atomic< short > : atomic_short
1489 CPP0X( atomic() = default; )
1490 CPP0X( constexpr explicit atomic( short __v__ )
1491 : atomic_short( __v__ ) { } )
1492 CPP0X( atomic( const atomic& ) = delete; )
1493 atomic& operator =( const atomic& ) CPP0X(=delete);
1495 short operator =( short __v__ ) volatile
1496 { store( __v__ ); return __v__; }
1500 template<> struct atomic< unsigned short > : atomic_ushort
1502 CPP0X( atomic() = default; )
1503 CPP0X( constexpr explicit atomic( unsigned short __v__ )
1504 : atomic_ushort( __v__ ) { } )
1505 CPP0X( atomic( const atomic& ) = delete; )
1506 atomic& operator =( const atomic& ) CPP0X(=delete);
1508 unsigned short operator =( unsigned short __v__ ) volatile
1509 { store( __v__ ); return __v__; }
1513 template<> struct atomic< int > : atomic_int
1515 CPP0X( atomic() = default; )
1516 CPP0X( constexpr explicit atomic( int __v__ )
1517 : atomic_int( __v__ ) { } )
1518 CPP0X( atomic( const atomic& ) = delete; )
1519 atomic& operator =( const atomic& ) CPP0X(=delete);
1521 int operator =( int __v__ ) volatile
1522 { store( __v__ ); return __v__; }
1526 template<> struct atomic< unsigned int > : atomic_uint
1528 CPP0X( atomic() = default; )
1529 CPP0X( constexpr explicit atomic( unsigned int __v__ )
1530 : atomic_uint( __v__ ) { } )
1531 CPP0X( atomic( const atomic& ) = delete; )
1532 atomic& operator =( const atomic& ) CPP0X(=delete);
1534 unsigned int operator =( unsigned int __v__ ) volatile
1535 { store( __v__ ); return __v__; }
1539 template<> struct atomic< long > : atomic_long
1541 CPP0X( atomic() = default; )
1542 CPP0X( constexpr explicit atomic( long __v__ )
1543 : atomic_long( __v__ ) { } )
1544 CPP0X( atomic( const atomic& ) = delete; )
1545 atomic& operator =( const atomic& ) CPP0X(=delete);
1547 long operator =( long __v__ ) volatile
1548 { store( __v__ ); return __v__; }
1552 template<> struct atomic< unsigned long > : atomic_ulong
1554 CPP0X( atomic() = default; )
1555 CPP0X( constexpr explicit atomic( unsigned long __v__ )
1556 : atomic_ulong( __v__ ) { } )
1557 CPP0X( atomic( const atomic& ) = delete; )
1558 atomic& operator =( const atomic& ) CPP0X(=delete);
1560 unsigned long operator =( unsigned long __v__ ) volatile
1561 { store( __v__ ); return __v__; }
1565 template<> struct atomic< long long > : atomic_llong
1567 CPP0X( atomic() = default; )
1568 CPP0X( constexpr explicit atomic( long long __v__ )
1569 : atomic_llong( __v__ ) { } )
1570 CPP0X( atomic( const atomic& ) = delete; )
1571 atomic& operator =( const atomic& ) CPP0X(=delete);
1573 long long operator =( long long __v__ ) volatile
1574 { store( __v__ ); return __v__; }
1578 template<> struct atomic< unsigned long long > : atomic_ullong
1580 CPP0X( atomic() = default; )
1581 CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1582 : atomic_ullong( __v__ ) { } )
1583 CPP0X( atomic( const atomic& ) = delete; )
1584 atomic& operator =( const atomic& ) CPP0X(=delete);
1586 unsigned long long operator =( unsigned long long __v__ ) volatile
1587 { store( __v__ ); return __v__; }
1591 template<> struct atomic< wchar_t > : atomic_wchar_t
1593 CPP0X( atomic() = default; )
1594 CPP0X( constexpr explicit atomic( wchar_t __v__ )
1595 : atomic_wchar_t( __v__ ) { } )
1596 CPP0X( atomic( const atomic& ) = delete; )
1597 atomic& operator =( const atomic& ) CPP0X(=delete);
1599 wchar_t operator =( wchar_t __v__ ) volatile
1600 { store( __v__ ); return __v__; }
1610 inline bool atomic_is_lock_free
1611 ( const volatile atomic_bool* __a__ )
1614 inline bool atomic_load_explicit
1615 ( volatile atomic_bool* __a__, memory_order __x__ )
1616 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1618 inline bool atomic_load
1619 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1621 inline void atomic_store_explicit
1622 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1623 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1625 inline void atomic_store
1626 ( volatile atomic_bool* __a__, bool __m__ )
1627 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1629 inline bool atomic_exchange_explicit
1630 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1631 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1633 inline bool atomic_exchange
1634 ( volatile atomic_bool* __a__, bool __m__ )
1635 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1637 inline bool atomic_compare_exchange_weak_explicit
1638 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1639 memory_order __x__, memory_order __y__ )
1640 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1642 inline bool atomic_compare_exchange_strong_explicit
1643 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1644 memory_order __x__, memory_order __y__ )
1645 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1647 inline bool atomic_compare_exchange_weak
1648 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1649 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1650 memory_order_seq_cst, memory_order_seq_cst ); }
1652 inline bool atomic_compare_exchange_strong
1653 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1654 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1655 memory_order_seq_cst, memory_order_seq_cst ); }
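/* A minimal usage sketch of the interface above (illustrative only; the
 * flag `ready' is a placeholder, not something declared by this header):
 *
 *   atomic_bool ready;   // shared flag, assumed initialized to false elsewhere
 *
 *   // publisher
 *   atomic_store_explicit( &ready, true, memory_order_release );
 *
 *   // consumer: spin until the store above is observed
 *   while ( !atomic_load_explicit( &ready, memory_order_acquire ) )
 *       ;
 *
 * The plain atomic_store/atomic_load forms do the same thing with
 * memory_order_seq_cst. */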
1658 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1661 inline void* atomic_load_explicit
1662 ( volatile atomic_address* __a__, memory_order __x__ )
1663 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1665 inline void* atomic_load( volatile atomic_address* __a__ )
1666 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1668 inline void atomic_store_explicit
1669 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1670 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1672 inline void atomic_store
1673 ( volatile atomic_address* __a__, void* __m__ )
1674 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1676 inline void* atomic_exchange_explicit
1677 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1678 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1680 inline void* atomic_exchange
1681 ( volatile atomic_address* __a__, void* __m__ )
1682 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1684 inline bool atomic_compare_exchange_weak_explicit
1685 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1686 memory_order __x__, memory_order __y__ )
1687 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1689 inline bool atomic_compare_exchange_strong_explicit
1690 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1691 memory_order __x__, memory_order __y__ )
1692 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1694 inline bool atomic_compare_exchange_weak
1695 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1696 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1697 memory_order_seq_cst, memory_order_seq_cst ); }
1699 inline bool atomic_compare_exchange_strong
1700 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1701 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1702 memory_order_seq_cst, memory_order_seq_cst ); }
1705 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1708 inline char atomic_load_explicit
1709 ( volatile atomic_char* __a__, memory_order __x__ )
1710 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1712 inline char atomic_load( volatile atomic_char* __a__ )
1713 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1715 inline void atomic_store_explicit
1716 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1717 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1719 inline void atomic_store
1720 ( volatile atomic_char* __a__, char __m__ )
1721 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1723 inline char atomic_exchange_explicit
1724 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1725 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1727 inline char atomic_exchange
1728 ( volatile atomic_char* __a__, char __m__ )
1729 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1731 inline bool atomic_compare_exchange_weak_explicit
1732 ( volatile atomic_char* __a__, char* __e__, char __m__,
1733 memory_order __x__, memory_order __y__ )
1734 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1736 inline bool atomic_compare_exchange_strong_explicit
1737 ( volatile atomic_char* __a__, char* __e__, char __m__,
1738 memory_order __x__, memory_order __y__ )
1739 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1741 inline bool atomic_compare_exchange_weak
1742 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1743 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1744 memory_order_seq_cst, memory_order_seq_cst ); }
1746 inline bool atomic_compare_exchange_strong
1747 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1748 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1749 memory_order_seq_cst, memory_order_seq_cst ); }
1752 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1755 inline signed char atomic_load_explicit
1756 ( volatile atomic_schar* __a__, memory_order __x__ )
1757 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1759 inline signed char atomic_load( volatile atomic_schar* __a__ )
1760 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1762 inline void atomic_store_explicit
1763 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1764 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1766 inline void atomic_store
1767 ( volatile atomic_schar* __a__, signed char __m__ )
1768 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1770 inline signed char atomic_exchange_explicit
1771 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1772 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1774 inline signed char atomic_exchange
1775 ( volatile atomic_schar* __a__, signed char __m__ )
1776 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1778 inline bool atomic_compare_exchange_weak_explicit
1779 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1780 memory_order __x__, memory_order __y__ )
1781 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1783 inline bool atomic_compare_exchange_strong_explicit
1784 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1785 memory_order __x__, memory_order __y__ )
1786 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1788 inline bool atomic_compare_exchange_weak
1789 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1790 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1791 memory_order_seq_cst, memory_order_seq_cst ); }
1793 inline bool atomic_compare_exchange_strong
1794 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1795 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1796 memory_order_seq_cst, memory_order_seq_cst ); }
1799 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1802 inline unsigned char atomic_load_explicit
1803 ( volatile atomic_uchar* __a__, memory_order __x__ )
1804 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1806 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1807 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1809 inline void atomic_store_explicit
1810 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1811 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1813 inline void atomic_store
1814 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1815 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1817 inline unsigned char atomic_exchange_explicit
1818 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1819 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1821 inline unsigned char atomic_exchange
1822 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1823 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1825 inline bool atomic_compare_exchange_weak_explicit
1826 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1827 memory_order __x__, memory_order __y__ )
1828 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1830 inline bool atomic_compare_exchange_strong_explicit
1831 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1832 memory_order __x__, memory_order __y__ )
1833 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1835 inline bool atomic_compare_exchange_weak
1836 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1837 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1838 memory_order_seq_cst, memory_order_seq_cst ); }
1840 inline bool atomic_compare_exchange_strong
1841 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1842 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1843 memory_order_seq_cst, memory_order_seq_cst ); }
1846 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1849 inline short atomic_load_explicit
1850 ( volatile atomic_short* __a__, memory_order __x__ )
1851 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1853 inline short atomic_load( volatile atomic_short* __a__ )
1854 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1856 inline void atomic_store_explicit
1857 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1858 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1860 inline void atomic_store
1861 ( volatile atomic_short* __a__, short __m__ )
1862 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1864 inline short atomic_exchange_explicit
1865 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1866 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1868 inline short atomic_exchange
1869 ( volatile atomic_short* __a__, short __m__ )
1870 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1872 inline bool atomic_compare_exchange_weak_explicit
1873 ( volatile atomic_short* __a__, short* __e__, short __m__,
1874 memory_order __x__, memory_order __y__ )
1875 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1877 inline bool atomic_compare_exchange_strong_explicit
1878 ( volatile atomic_short* __a__, short* __e__, short __m__,
1879 memory_order __x__, memory_order __y__ )
1880 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1882 inline bool atomic_compare_exchange_weak
1883 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1884 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1885 memory_order_seq_cst, memory_order_seq_cst ); }
1887 inline bool atomic_compare_exchange_strong
1888 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1889 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1890 memory_order_seq_cst, memory_order_seq_cst ); }
1893 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1896 inline unsigned short atomic_load_explicit
1897 ( volatile atomic_ushort* __a__, memory_order __x__ )
1898 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1900 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1901 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1903 inline void atomic_store_explicit
1904 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1905 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1907 inline void atomic_store
1908 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1909 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1911 inline unsigned short atomic_exchange_explicit
1912 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1913 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1915 inline unsigned short atomic_exchange
1916 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1917 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1919 inline bool atomic_compare_exchange_weak_explicit
1920 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1921 memory_order __x__, memory_order __y__ )
1922 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1924 inline bool atomic_compare_exchange_strong_explicit
1925 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1926 memory_order __x__, memory_order __y__ )
1927 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1929 inline bool atomic_compare_exchange_weak
1930 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1931 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1932 memory_order_seq_cst, memory_order_seq_cst ); }
1934 inline bool atomic_compare_exchange_strong
1935 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1936 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1937 memory_order_seq_cst, memory_order_seq_cst ); }
1940 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
1943 inline int atomic_load_explicit
1944 ( volatile atomic_int* __a__, memory_order __x__ )
1945 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1947 inline int atomic_load( volatile atomic_int* __a__ )
1948 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1950 inline void atomic_store_explicit
1951 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1952 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1954 inline void atomic_store
1955 ( volatile atomic_int* __a__, int __m__ )
1956 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1958 inline int atomic_exchange_explicit
1959 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
1960 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1962 inline int atomic_exchange
1963 ( volatile atomic_int* __a__, int __m__ )
1964 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1966 inline bool atomic_compare_exchange_weak_explicit
1967 ( volatile atomic_int* __a__, int* __e__, int __m__,
1968 memory_order __x__, memory_order __y__ )
1969 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1971 inline bool atomic_compare_exchange_strong_explicit
1972 ( volatile atomic_int* __a__, int* __e__, int __m__,
1973 memory_order __x__, memory_order __y__ )
1974 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1976 inline bool atomic_compare_exchange_weak
1977 ( volatile atomic_int* __a__, int* __e__, int __m__ )
1978 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1979 memory_order_seq_cst, memory_order_seq_cst ); }
1981 inline bool atomic_compare_exchange_strong
1982 ( volatile atomic_int* __a__, int* __e__, int __m__ )
1983 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1984 memory_order_seq_cst, memory_order_seq_cst ); }
1987 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
1990 inline unsigned int atomic_load_explicit
1991 ( volatile atomic_uint* __a__, memory_order __x__ )
1992 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1994 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
1995 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1997 inline void atomic_store_explicit
1998 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
1999 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2001 inline void atomic_store
2002 ( volatile atomic_uint* __a__, unsigned int __m__ )
2003 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2005 inline unsigned int atomic_exchange_explicit
2006 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2007 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2009 inline unsigned int atomic_exchange
2010 ( volatile atomic_uint* __a__, unsigned int __m__ )
2011 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2013 inline bool atomic_compare_exchange_weak_explicit
2014 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2015 memory_order __x__, memory_order __y__ )
2016 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2018 inline bool atomic_compare_exchange_strong_explicit
2019 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2020 memory_order __x__, memory_order __y__ )
2021 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2023 inline bool atomic_compare_exchange_weak
2024 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2025 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2026 memory_order_seq_cst, memory_order_seq_cst ); }
2028 inline bool atomic_compare_exchange_strong
2029 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2030 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2031 memory_order_seq_cst, memory_order_seq_cst ); }
2034 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
2037 inline long atomic_load_explicit
2038 ( volatile atomic_long* __a__, memory_order __x__ )
2039 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2041 inline long atomic_load( volatile atomic_long* __a__ )
2042 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2044 inline void atomic_store_explicit
2045 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2046 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2048 inline void atomic_store
2049 ( volatile atomic_long* __a__, long __m__ )
2050 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2052 inline long atomic_exchange_explicit
2053 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2054 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2056 inline long atomic_exchange
2057 ( volatile atomic_long* __a__, long __m__ )
2058 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2060 inline bool atomic_compare_exchange_weak_explicit
2061 ( volatile atomic_long* __a__, long* __e__, long __m__,
2062 memory_order __x__, memory_order __y__ )
2063 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2065 inline bool atomic_compare_exchange_strong_explicit
2066 ( volatile atomic_long* __a__, long* __e__, long __m__,
2067 memory_order __x__, memory_order __y__ )
2068 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2070 inline bool atomic_compare_exchange_weak
2071 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2072 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2073 memory_order_seq_cst, memory_order_seq_cst ); }
2075 inline bool atomic_compare_exchange_strong
2076 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2077 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2078 memory_order_seq_cst, memory_order_seq_cst ); }
2081 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
2084 inline unsigned long atomic_load_explicit
2085 ( volatile atomic_ulong* __a__, memory_order __x__ )
2086 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2088 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
2089 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2091 inline void atomic_store_explicit
2092 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2093 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2095 inline void atomic_store
2096 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2097 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2099 inline unsigned long atomic_exchange_explicit
2100 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2101 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2103 inline unsigned long atomic_exchange
2104 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2105 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2107 inline bool atomic_compare_exchange_weak_explicit
2108 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2109 memory_order __x__, memory_order __y__ )
2110 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2112 inline bool atomic_compare_exchange_strong_explicit
2113 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2114 memory_order __x__, memory_order __y__ )
2115 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2117 inline bool atomic_compare_exchange_weak
2118 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2119 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2120 memory_order_seq_cst, memory_order_seq_cst ); }
2122 inline bool atomic_compare_exchange_strong
2123 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2124 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2125 memory_order_seq_cst, memory_order_seq_cst ); }
2128 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2131 inline long long atomic_load_explicit
2132 ( volatile atomic_llong* __a__, memory_order __x__ )
2133 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2135 inline long long atomic_load( volatile atomic_llong* __a__ )
2136 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2138 inline void atomic_store_explicit
2139 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2140 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2142 inline void atomic_store
2143 ( volatile atomic_llong* __a__, long long __m__ )
2144 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2146 inline long long atomic_exchange_explicit
2147 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2148 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2150 inline long long atomic_exchange
2151 ( volatile atomic_llong* __a__, long long __m__ )
2152 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2154 inline bool atomic_compare_exchange_weak_explicit
2155 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2156 memory_order __x__, memory_order __y__ )
2157 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2159 inline bool atomic_compare_exchange_strong_explicit
2160 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2161 memory_order __x__, memory_order __y__ )
2162 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2164 inline bool atomic_compare_exchange_weak
2165 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2166 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2167 memory_order_seq_cst, memory_order_seq_cst ); }
2169 inline bool atomic_compare_exchange_strong
2170 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2171 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2172 memory_order_seq_cst, memory_order_seq_cst ); }
2175 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2178 inline unsigned long long atomic_load_explicit
2179 ( volatile atomic_ullong* __a__, memory_order __x__ )
2180 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2182 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2183 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2185 inline void atomic_store_explicit
2186 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2187 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2189 inline void atomic_store
2190 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2191 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2193 inline unsigned long long atomic_exchange_explicit
2194 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2195 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2197 inline unsigned long long atomic_exchange
2198 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2199 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2201 inline bool atomic_compare_exchange_weak_explicit
2202 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2203 memory_order __x__, memory_order __y__ )
2204 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2206 inline bool atomic_compare_exchange_strong_explicit
2207 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2208 memory_order __x__, memory_order __y__ )
2209 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2211 inline bool atomic_compare_exchange_weak
2212 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2213 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2214 memory_order_seq_cst, memory_order_seq_cst ); }
2216 inline bool atomic_compare_exchange_strong
2217 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2218 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2219 memory_order_seq_cst, memory_order_seq_cst ); }
2222 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2225 inline wchar_t atomic_load_explicit
2226 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2227 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2229 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2230 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2232 inline void atomic_store_explicit
2233 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2234 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2236 inline void atomic_store
2237 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2238 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2240 inline wchar_t atomic_exchange_explicit
2241 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2242 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2244 inline wchar_t atomic_exchange
2245 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2246 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2248 inline bool atomic_compare_exchange_weak_explicit
2249 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2250 memory_order __x__, memory_order __y__ )
2251 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2253 inline bool atomic_compare_exchange_strong_explicit
2254 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2255 memory_order __x__, memory_order __y__ )
2256 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2258 inline bool atomic_compare_exchange_weak
2259 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2260 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2261 memory_order_seq_cst, memory_order_seq_cst ); }
2263 inline bool atomic_compare_exchange_strong
2264 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2265 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2266 memory_order_seq_cst, memory_order_seq_cst ); }
2269 inline void* atomic_fetch_add_explicit
2270 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2271 {
2272 void* volatile* __p__ = &((__a__)->__f__);
2273 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2274 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2275 return __r__; }
2277 inline void* atomic_fetch_add
2278 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2279 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2282 inline void* atomic_fetch_sub_explicit
2283 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2284 {
2285 void* volatile* __p__ = &((__a__)->__f__);
2286 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2287 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2288 return __r__; }
2290 inline void* atomic_fetch_sub
2291 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2292 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
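/* Unlike the integral types below, the two atomic_address fetch operations
 * above are written out directly instead of going through _ATOMIC_MODIFY_,
 * because the operand is a ptrdiff_t added to (or subtracted from) a void*
 * value; the arithmetic is performed on a char*, so the displacement is in
 * bytes. For example (illustrative only, `base' is a placeholder):
 *
 *   atomic_address base;                          // holds a void*
 *   void *old = atomic_fetch_add( &base, 16 );    // returns the prior pointer,
 *                                                 // then advances the stored
 *                                                 // value by 16 bytes
 */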
2294 inline char atomic_fetch_add_explicit
2295 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2296 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2298 inline char atomic_fetch_add
2299 ( volatile atomic_char* __a__, char __m__ )
2300 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2303 inline char atomic_fetch_sub_explicit
2304 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2305 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2307 inline char atomic_fetch_sub
2308 ( volatile atomic_char* __a__, char __m__ )
2309 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2312 inline char atomic_fetch_and_explicit
2313 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2314 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2316 inline char atomic_fetch_and
2317 ( volatile atomic_char* __a__, char __m__ )
2318 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2321 inline char atomic_fetch_or_explicit
2322 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2323 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2325 inline char atomic_fetch_or
2326 ( volatile atomic_char* __a__, char __m__ )
2327 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2330 inline char atomic_fetch_xor_explicit
2331 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2332 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2334 inline char atomic_fetch_xor
2335 ( volatile atomic_char* __a__, char __m__ )
2336 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
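/* The fetch operations for atomic_char above set the pattern used for every
 * remaining integral type: each atomic_fetch_<op>_explicit expands
 * _ATOMIC_MODIFY_ with the matching compound assignment (+=, -=, &=, |=, ^=),
 * which reads the previous value through model_rmwr_action, applies the
 * operator to a copy, writes the result back with model_rmw_action, and
 * yields the previous value; the non-_explicit forms again default to
 * memory_order_seq_cst. */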
2339 inline signed char atomic_fetch_add_explicit
2340 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2341 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2343 inline signed char atomic_fetch_add
2344 ( volatile atomic_schar* __a__, signed char __m__ )
2345 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2348 inline signed char atomic_fetch_sub_explicit
2349 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2350 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2352 inline signed char atomic_fetch_sub
2353 ( volatile atomic_schar* __a__, signed char __m__ )
2354 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2357 inline signed char atomic_fetch_and_explicit
2358 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2359 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2361 inline signed char atomic_fetch_and
2362 ( volatile atomic_schar* __a__, signed char __m__ )
2363 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2366 inline signed char atomic_fetch_or_explicit
2367 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2368 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2370 inline signed char atomic_fetch_or
2371 ( volatile atomic_schar* __a__, signed char __m__ )
2372 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2375 inline signed char atomic_fetch_xor_explicit
2376 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2377 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2379 inline signed char atomic_fetch_xor
2380 ( volatile atomic_schar* __a__, signed char __m__ )
2381 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2384 inline unsigned char atomic_fetch_add_explicit
2385 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2386 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2388 inline unsigned char atomic_fetch_add
2389 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2390 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2393 inline unsigned char atomic_fetch_sub_explicit
2394 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2395 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2397 inline unsigned char atomic_fetch_sub
2398 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2399 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2402 inline unsigned char atomic_fetch_and_explicit
2403 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2404 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2406 inline unsigned char atomic_fetch_and
2407 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2408 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2411 inline unsigned char atomic_fetch_or_explicit
2412 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2413 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2415 inline unsigned char atomic_fetch_or
2416 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2417 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2420 inline unsigned char atomic_fetch_xor_explicit
2421 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2422 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2424 inline unsigned char atomic_fetch_xor
2425 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2426 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2429 inline short atomic_fetch_add_explicit
2430 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2431 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2433 inline short atomic_fetch_add
2434 ( volatile atomic_short* __a__, short __m__ )
2435 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2438 inline short atomic_fetch_sub_explicit
2439 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2440 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2442 inline short atomic_fetch_sub
2443 ( volatile atomic_short* __a__, short __m__ )
2444 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2447 inline short atomic_fetch_and_explicit
2448 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2449 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2451 inline short atomic_fetch_and
2452 ( volatile atomic_short* __a__, short __m__ )
2453 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2456 inline short atomic_fetch_or_explicit
2457 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2458 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2460 inline short atomic_fetch_or
2461 ( volatile atomic_short* __a__, short __m__ )
2462 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2465 inline short atomic_fetch_xor_explicit
2466 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2467 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2469 inline short atomic_fetch_xor
2470 ( volatile atomic_short* __a__, short __m__ )
2471 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2474 inline unsigned short atomic_fetch_add_explicit
2475 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2476 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2478 inline unsigned short atomic_fetch_add
2479 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2480 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2483 inline unsigned short atomic_fetch_sub_explicit
2484 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2485 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2487 inline unsigned short atomic_fetch_sub
2488 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2489 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2492 inline unsigned short atomic_fetch_and_explicit
2493 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2494 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2496 inline unsigned short atomic_fetch_and
2497 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2498 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2501 inline unsigned short atomic_fetch_or_explicit
2502 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2503 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2505 inline unsigned short atomic_fetch_or
2506 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2507 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2510 inline unsigned short atomic_fetch_xor_explicit
2511 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2512 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2514 inline unsigned short atomic_fetch_xor
2515 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2516 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2519 inline int atomic_fetch_add_explicit
2520 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2521 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2523 inline int atomic_fetch_add
2524 ( volatile atomic_int* __a__, int __m__ )
2525 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2528 inline int atomic_fetch_sub_explicit
2529 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2530 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2532 inline int atomic_fetch_sub
2533 ( volatile atomic_int* __a__, int __m__ )
2534 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2537 inline int atomic_fetch_and_explicit
2538 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2539 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2541 inline int atomic_fetch_and
2542 ( volatile atomic_int* __a__, int __m__ )
2543 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2546 inline int atomic_fetch_or_explicit
2547 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2548 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2550 inline int atomic_fetch_or
2551 ( volatile atomic_int* __a__, int __m__ )
2552 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2555 inline int atomic_fetch_xor_explicit
2556 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2557 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2559 inline int atomic_fetch_xor
2560 ( volatile atomic_int* __a__, int __m__ )
2561 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2564 inline unsigned int atomic_fetch_add_explicit
2565 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2566 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2568 inline unsigned int atomic_fetch_add
2569 ( volatile atomic_uint* __a__, unsigned int __m__ )
2570 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2573 inline unsigned int atomic_fetch_sub_explicit
2574 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2575 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2577 inline unsigned int atomic_fetch_sub
2578 ( volatile atomic_uint* __a__, unsigned int __m__ )
2579 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2582 inline unsigned int atomic_fetch_and_explicit
2583 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2584 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2586 inline unsigned int atomic_fetch_and
2587 ( volatile atomic_uint* __a__, unsigned int __m__ )
2588 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2591 inline unsigned int atomic_fetch_or_explicit
2592 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2593 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2595 inline unsigned int atomic_fetch_or
2596 ( volatile atomic_uint* __a__, unsigned int __m__ )
2597 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2600 inline unsigned int atomic_fetch_xor_explicit
2601 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2602 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2604 inline unsigned int atomic_fetch_xor
2605 ( volatile atomic_uint* __a__, unsigned int __m__ )
2606 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2609 inline long atomic_fetch_add_explicit
2610 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2611 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2613 inline long atomic_fetch_add
2614 ( volatile atomic_long* __a__, long __m__ )
2615 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2618 inline long atomic_fetch_sub_explicit
2619 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2620 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2622 inline long atomic_fetch_sub
2623 ( volatile atomic_long* __a__, long __m__ )
2624 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2627 inline long atomic_fetch_and_explicit
2628 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2629 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2631 inline long atomic_fetch_and
2632 ( volatile atomic_long* __a__, long __m__ )
2633 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2636 inline long atomic_fetch_or_explicit
2637 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2638 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2640 inline long atomic_fetch_or
2641 ( volatile atomic_long* __a__, long __m__ )
2642 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2645 inline long atomic_fetch_xor_explicit
2646 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2647 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2649 inline long atomic_fetch_xor
2650 ( volatile atomic_long* __a__, long __m__ )
2651 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2654 inline unsigned long atomic_fetch_add_explicit
2655 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2656 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2658 inline unsigned long atomic_fetch_add
2659 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2660 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2663 inline unsigned long atomic_fetch_sub_explicit
2664 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2665 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2667 inline unsigned long atomic_fetch_sub
2668 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2669 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2672 inline unsigned long atomic_fetch_and_explicit
2673 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2674 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2676 inline unsigned long atomic_fetch_and
2677 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2678 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2681 inline unsigned long atomic_fetch_or_explicit
2682 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2683 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2685 inline unsigned long atomic_fetch_or
2686 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2687 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2690 inline unsigned long atomic_fetch_xor_explicit
2691 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2692 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2694 inline unsigned long atomic_fetch_xor
2695 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2696 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2699 inline long long atomic_fetch_add_explicit
2700 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2701 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2703 inline long long atomic_fetch_add
2704 ( volatile atomic_llong* __a__, long long __m__ )
2705 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2708 inline long long atomic_fetch_sub_explicit
2709 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2710 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2712 inline long long atomic_fetch_sub
2713 ( volatile atomic_llong* __a__, long long __m__ )
2714 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2717 inline long long atomic_fetch_and_explicit
2718 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2719 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2721 inline long long atomic_fetch_and
2722 ( volatile atomic_llong* __a__, long long __m__ )
2723 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2726 inline long long atomic_fetch_or_explicit
2727 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2728 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2730 inline long long atomic_fetch_or
2731 ( volatile atomic_llong* __a__, long long __m__ )
2732 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2735 inline long long atomic_fetch_xor_explicit
2736 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2737 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2739 inline long long atomic_fetch_xor
2740 ( volatile atomic_llong* __a__, long long __m__ )
2741 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2744 inline unsigned long long atomic_fetch_add_explicit
2745 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2746 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2748 inline unsigned long long atomic_fetch_add
2749 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2750 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2753 inline unsigned long long atomic_fetch_sub_explicit
2754 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2755 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2757 inline unsigned long long atomic_fetch_sub
2758 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2759 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2762 inline unsigned long long atomic_fetch_and_explicit
2763 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2764 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2766 inline unsigned long long atomic_fetch_and
2767 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2768 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2771 inline unsigned long long atomic_fetch_or_explicit
2772 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2773 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2775 inline unsigned long long atomic_fetch_or
2776 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2777 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2780 inline unsigned long long atomic_fetch_xor_explicit
2781 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2782 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2784 inline unsigned long long atomic_fetch_xor
2785 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2786 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2789 inline wchar_t atomic_fetch_add_explicit
2790 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2791 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2793 inline wchar_t atomic_fetch_add
2794 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2795 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2798 inline wchar_t atomic_fetch_sub_explicit
2799 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2800 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2802 inline wchar_t atomic_fetch_sub
2803 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2804 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2807 inline wchar_t atomic_fetch_and_explicit
2808 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2809 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2811 inline wchar_t atomic_fetch_and
2812 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2813 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2816 inline wchar_t atomic_fetch_or_explicit
2817 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2818 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2820 inline wchar_t atomic_fetch_or
2821 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2822 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2825 inline wchar_t atomic_fetch_xor_explicit
2826 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2827 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2829 inline wchar_t atomic_fetch_xor
2830 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2831 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2837 #define atomic_is_lock_free( __a__ ) \
2840 #define atomic_load( __a__ ) \
2841 _ATOMIC_LOAD_( __a__, memory_order_seq_cst )
2843 #define atomic_load_explicit( __a__, __x__ ) \
2844 _ATOMIC_LOAD_( __a__, __x__ )
2846 #define atomic_init( __a__, __m__ ) \
2847 _ATOMIC_INIT_( __a__, __m__ )
2849 #define atomic_store( __a__, __m__ ) \
2850 _ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )
2852 #define atomic_store_explicit( __a__, __m__, __x__ ) \
2853 _ATOMIC_STORE_( __a__, __m__, __x__ )
2855 #define atomic_exchange( __a__, __m__ ) \
2856 _ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )
2858 #define atomic_exchange_explicit( __a__, __m__, __x__ ) \
2859 _ATOMIC_MODIFY_( __a__, =, __m__, __x__ )
2861 #define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
2862 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )
2864 #define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
2865 _ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )
2867 #define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2868 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )
2870 #define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2871 _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
2874 #define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
2875 _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )
2877 #define atomic_fetch_add( __a__, __m__ ) \
2878 _ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )
2881 #define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
2882 _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )
2884 #define atomic_fetch_sub( __a__, __m__ ) \
2885 _ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )
2888 #define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
2889 _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )
2891 #define atomic_fetch_and( __a__, __m__ ) \
2892 _ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )
2895 #define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
2896 _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )
2898 #define atomic_fetch_or( __a__, __m__ ) \
2899 _ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )
2902 #define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
2903 _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )
2905 #define atomic_fetch_xor( __a__, __m__ ) \
2906 _ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
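/* A minimal sketch of the generic macro forms above (illustrative only;
 * `count' is a placeholder, not declared by this header). Because these are
 * plain macros expanding to the _ATOMIC_* statement expressions, they apply
 * to any of the atomic_* struct types, presumably for use where the
 * overloaded inline functions are unavailable (e.g. from C):
 *
 *   atomic_int count;
 *   atomic_init( &count, 0 );                  // initializing store
 *   atomic_fetch_add( &count, 1 );             // seq_cst increment
 *   int snapshot = atomic_load_explicit( &count, memory_order_acquire );
 */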
2915 inline bool atomic_bool::is_lock_free() const volatile
2918 inline void atomic_bool::store
2919 ( bool __m__, memory_order __x__ ) volatile
2920 { atomic_store_explicit( this, __m__, __x__ ); }
2922 inline bool atomic_bool::load
2923 ( memory_order __x__ ) volatile
2924 { return atomic_load_explicit( this, __x__ ); }
2926 inline bool atomic_bool::exchange
2927 ( bool __m__, memory_order __x__ ) volatile
2928 { return atomic_exchange_explicit( this, __m__, __x__ ); }
2930 inline bool atomic_bool::compare_exchange_weak
2931 ( bool& __e__, bool __m__,
2932 memory_order __x__, memory_order __y__ ) volatile
2933 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
2935 inline bool atomic_bool::compare_exchange_strong
2936 ( bool& __e__, bool __m__,
2937 memory_order __x__, memory_order __y__ ) volatile
2938 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
2940 inline bool atomic_bool::compare_exchange_weak
2941 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
2942 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
2943 __x__ == memory_order_acq_rel ? memory_order_acquire :
2944 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2946 inline bool atomic_bool::compare_exchange_strong
2947 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
2948 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
2949 __x__ == memory_order_acq_rel ? memory_order_acquire :
2950 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
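/* The single-order compare_exchange overloads above derive the failure
 * ordering from the success ordering the same way C++11 specifies: a failed
 * compare-exchange performs no store, so memory_order_acq_rel degrades to
 * memory_order_acquire and memory_order_release degrades to
 * memory_order_relaxed, while every other ordering is used unchanged. The
 * same mapping is repeated verbatim for each of the remaining atomic types. */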
2953 inline bool atomic_address::is_lock_free() const volatile
2956 inline void atomic_address::store
2957 ( void* __m__, memory_order __x__ ) volatile
2958 { atomic_store_explicit( this, __m__, __x__ ); }
2960 inline void* atomic_address::load
2961 ( memory_order __x__ ) volatile
2962 { return atomic_load_explicit( this, __x__ ); }
2964 inline void* atomic_address::exchange
2965 ( void* __m__, memory_order __x__ ) volatile
2966 { return atomic_exchange_explicit( this, __m__, __x__ ); }
2968 inline bool atomic_address::compare_exchange_weak
2969 ( void*& __e__, void* __m__,
2970 memory_order __x__, memory_order __y__ ) volatile
2971 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
2973 inline bool atomic_address::compare_exchange_strong
2974 ( void*& __e__, void* __m__,
2975 memory_order __x__, memory_order __y__ ) volatile
2976 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
2978 inline bool atomic_address::compare_exchange_weak
2979 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
2980 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
2981 __x__ == memory_order_acq_rel ? memory_order_acquire :
2982 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2984 inline bool atomic_address::compare_exchange_strong
2985 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
2986 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
2987 __x__ == memory_order_acq_rel ? memory_order_acquire :
2988 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
2991 inline bool atomic_char::is_lock_free() const volatile
2994 inline void atomic_char::store
2995 ( char __m__, memory_order __x__ ) volatile
2996 { atomic_store_explicit( this, __m__, __x__ ); }
2998 inline char atomic_char::load
2999 ( memory_order __x__ ) volatile
3000 { return atomic_load_explicit( this, __x__ ); }
3002 inline char atomic_char::exchange
3003 ( char __m__, memory_order __x__ ) volatile
3004 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3006 inline bool atomic_char::compare_exchange_weak
3007 ( char& __e__, char __m__,
3008 memory_order __x__, memory_order __y__ ) volatile
3009 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3011 inline bool atomic_char::compare_exchange_strong
3012 ( char& __e__, char __m__,
3013 memory_order __x__, memory_order __y__ ) volatile
3014 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3016 inline bool atomic_char::compare_exchange_weak
3017 ( char& __e__, char __m__, memory_order __x__ ) volatile
3018 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3019 __x__ == memory_order_acq_rel ? memory_order_acquire :
3020 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3022 inline bool atomic_char::compare_exchange_strong
3023 ( char& __e__, char __m__, memory_order __x__ ) volatile
3024 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3025 __x__ == memory_order_acq_rel ? memory_order_acquire :
3026 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3029 inline bool atomic_schar::is_lock_free() const volatile
3032 inline void atomic_schar::store
3033 ( signed char __m__, memory_order __x__ ) volatile
3034 { atomic_store_explicit( this, __m__, __x__ ); }
3036 inline signed char atomic_schar::load
3037 ( memory_order __x__ ) volatile
3038 { return atomic_load_explicit( this, __x__ ); }
3040 inline signed char atomic_schar::exchange
3041 ( signed char __m__, memory_order __x__ ) volatile
3042 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3044 inline bool atomic_schar::compare_exchange_weak
3045 ( signed char& __e__, signed char __m__,
3046 memory_order __x__, memory_order __y__ ) volatile
3047 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3049 inline bool atomic_schar::compare_exchange_strong
3050 ( signed char& __e__, signed char __m__,
3051 memory_order __x__, memory_order __y__ ) volatile
3052 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3054 inline bool atomic_schar::compare_exchange_weak
3055 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3056 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3057 __x__ == memory_order_acq_rel ? memory_order_acquire :
3058 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3060 inline bool atomic_schar::compare_exchange_strong
3061 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3062 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3063 __x__ == memory_order_acq_rel ? memory_order_acquire :
3064 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3067 inline bool atomic_uchar::is_lock_free() const volatile
3068 { return false; }
3070 inline void atomic_uchar::store
3071 ( unsigned char __m__, memory_order __x__ ) volatile
3072 { atomic_store_explicit( this, __m__, __x__ ); }
3074 inline unsigned char atomic_uchar::load
3075 ( memory_order __x__ ) volatile
3076 { return atomic_load_explicit( this, __x__ ); }
3078 inline unsigned char atomic_uchar::exchange
3079 ( unsigned char __m__, memory_order __x__ ) volatile
3080 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3082 inline bool atomic_uchar::compare_exchange_weak
3083 ( unsigned char& __e__, unsigned char __m__,
3084 memory_order __x__, memory_order __y__ ) volatile
3085 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3087 inline bool atomic_uchar::compare_exchange_strong
3088 ( unsigned char& __e__, unsigned char __m__,
3089 memory_order __x__, memory_order __y__ ) volatile
3090 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3092 inline bool atomic_uchar::compare_exchange_weak
3093 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3094 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3095 __x__ == memory_order_acq_rel ? memory_order_acquire :
3096 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3098 inline bool atomic_uchar::compare_exchange_strong
3099 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3100 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3101 __x__ == memory_order_acq_rel ? memory_order_acquire :
3102 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3105 inline bool atomic_short::is_lock_free() const volatile
3106 { return false; }
3108 inline void atomic_short::store
3109 ( short __m__, memory_order __x__ ) volatile
3110 { atomic_store_explicit( this, __m__, __x__ ); }
3112 inline short atomic_short::load
3113 ( memory_order __x__ ) volatile
3114 { return atomic_load_explicit( this, __x__ ); }
3116 inline short atomic_short::exchange
3117 ( short __m__, memory_order __x__ ) volatile
3118 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3120 inline bool atomic_short::compare_exchange_weak
3121 ( short& __e__, short __m__,
3122 memory_order __x__, memory_order __y__ ) volatile
3123 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3125 inline bool atomic_short::compare_exchange_strong
3126 ( short& __e__, short __m__,
3127 memory_order __x__, memory_order __y__ ) volatile
3128 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3130 inline bool atomic_short::compare_exchange_weak
3131 ( short& __e__, short __m__, memory_order __x__ ) volatile
3132 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3133 __x__ == memory_order_acq_rel ? memory_order_acquire :
3134 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3136 inline bool atomic_short::compare_exchange_strong
3137 ( short& __e__, short __m__, memory_order __x__ ) volatile
3138 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3139 __x__ == memory_order_acq_rel ? memory_order_acquire :
3140 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3143 inline bool atomic_ushort::is_lock_free() const volatile
3144 { return false; }
3146 inline void atomic_ushort::store
3147 ( unsigned short __m__, memory_order __x__ ) volatile
3148 { atomic_store_explicit( this, __m__, __x__ ); }
3150 inline unsigned short atomic_ushort::load
3151 ( memory_order __x__ ) volatile
3152 { return atomic_load_explicit( this, __x__ ); }
3154 inline unsigned short atomic_ushort::exchange
3155 ( unsigned short __m__, memory_order __x__ ) volatile
3156 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3158 inline bool atomic_ushort::compare_exchange_weak
3159 ( unsigned short& __e__, unsigned short __m__,
3160 memory_order __x__, memory_order __y__ ) volatile
3161 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3163 inline bool atomic_ushort::compare_exchange_strong
3164 ( unsigned short& __e__, unsigned short __m__,
3165 memory_order __x__, memory_order __y__ ) volatile
3166 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3168 inline bool atomic_ushort::compare_exchange_weak
3169 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3170 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3171 __x__ == memory_order_acq_rel ? memory_order_acquire :
3172 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3174 inline bool atomic_ushort::compare_exchange_strong
3175 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3176 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3177 __x__ == memory_order_acq_rel ? memory_order_acquire :
3178 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3181 inline bool atomic_int::is_lock_free() const volatile
3182 { return false; }
3184 inline void atomic_int::store
3185 ( int __m__, memory_order __x__ ) volatile
3186 { atomic_store_explicit( this, __m__, __x__ ); }
3188 inline int atomic_int::load
3189 ( memory_order __x__ ) volatile
3190 { return atomic_load_explicit( this, __x__ ); }
3192 inline int atomic_int::exchange
3193 ( int __m__, memory_order __x__ ) volatile
3194 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3196 inline bool atomic_int::compare_exchange_weak
3197 ( int& __e__, int __m__,
3198 memory_order __x__, memory_order __y__ ) volatile
3199 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3201 inline bool atomic_int::compare_exchange_strong
3202 ( int& __e__, int __m__,
3203 memory_order __x__, memory_order __y__ ) volatile
3204 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3206 inline bool atomic_int::compare_exchange_weak
3207 ( int& __e__, int __m__, memory_order __x__ ) volatile
3208 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3209 __x__ == memory_order_acq_rel ? memory_order_acquire :
3210 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3212 inline bool atomic_int::compare_exchange_strong
3213 ( int& __e__, int __m__, memory_order __x__ ) volatile
3214 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3215 __x__ == memory_order_acq_rel ? memory_order_acquire :
3216 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
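/*
 * Illustrative sketch, not part of the original header: a minimal
 * release/acquire hand-off written against the atomic_int members defined
 * above. `payload`, `flag`, producer() and consumer() are placeholder
 * names; the two functions are meant to run in different threads (thread
 * creation is omitted), `flag` is assumed to start at zero via static
 * zero-initialization, and <assert.h> is assumed to be included.
 *
 *   static int payload;
 *   static atomic_int flag;
 *
 *   static void producer()
 *   {
 *       payload = 42;                            // ordinary write
 *       flag.store( 1, memory_order_release );   // publish it
 *   }
 *
 *   static void consumer()
 *   {
 *       if ( flag.load( memory_order_acquire ) == 1 )
 *           assert( payload == 42 );             // guaranteed to hold
 *   }
 */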
3219 inline bool atomic_uint::is_lock_free() const volatile
3220 { return false; }
3222 inline void atomic_uint::store
3223 ( unsigned int __m__, memory_order __x__ ) volatile
3224 { atomic_store_explicit( this, __m__, __x__ ); }
3226 inline unsigned int atomic_uint::load
3227 ( memory_order __x__ ) volatile
3228 { return atomic_load_explicit( this, __x__ ); }
3230 inline unsigned int atomic_uint::exchange
3231 ( unsigned int __m__, memory_order __x__ ) volatile
3232 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3234 inline bool atomic_uint::compare_exchange_weak
3235 ( unsigned int& __e__, unsigned int __m__,
3236 memory_order __x__, memory_order __y__ ) volatile
3237 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3239 inline bool atomic_uint::compare_exchange_strong
3240 ( unsigned int& __e__, unsigned int __m__,
3241 memory_order __x__, memory_order __y__ ) volatile
3242 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3244 inline bool atomic_uint::compare_exchange_weak
3245 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3246 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3247 __x__ == memory_order_acq_rel ? memory_order_acquire :
3248 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3250 inline bool atomic_uint::compare_exchange_strong
3251 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3252 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3253 __x__ == memory_order_acq_rel ? memory_order_acquire :
3254 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3257 inline bool atomic_long::is_lock_free() const volatile
3258 { return false; }
3260 inline void atomic_long::store
3261 ( long __m__, memory_order __x__ ) volatile
3262 { atomic_store_explicit( this, __m__, __x__ ); }
3264 inline long atomic_long::load
3265 ( memory_order __x__ ) volatile
3266 { return atomic_load_explicit( this, __x__ ); }
3268 inline long atomic_long::exchange
3269 ( long __m__, memory_order __x__ ) volatile
3270 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3272 inline bool atomic_long::compare_exchange_weak
3273 ( long& __e__, long __m__,
3274 memory_order __x__, memory_order __y__ ) volatile
3275 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3277 inline bool atomic_long::compare_exchange_strong
3278 ( long& __e__, long __m__,
3279 memory_order __x__, memory_order __y__ ) volatile
3280 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3282 inline bool atomic_long::compare_exchange_weak
3283 ( long& __e__, long __m__, memory_order __x__ ) volatile
3284 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3285 __x__ == memory_order_acq_rel ? memory_order_acquire :
3286 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3288 inline bool atomic_long::compare_exchange_strong
3289 ( long& __e__, long __m__, memory_order __x__ ) volatile
3290 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3291 __x__ == memory_order_acq_rel ? memory_order_acquire :
3292 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3295 inline bool atomic_ulong::is_lock_free() const volatile
3296 { return false; }
3298 inline void atomic_ulong::store
3299 ( unsigned long __m__, memory_order __x__ ) volatile
3300 { atomic_store_explicit( this, __m__, __x__ ); }
3302 inline unsigned long atomic_ulong::load
3303 ( memory_order __x__ ) volatile
3304 { return atomic_load_explicit( this, __x__ ); }
3306 inline unsigned long atomic_ulong::exchange
3307 ( unsigned long __m__, memory_order __x__ ) volatile
3308 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3310 inline bool atomic_ulong::compare_exchange_weak
3311 ( unsigned long& __e__, unsigned long __m__,
3312 memory_order __x__, memory_order __y__ ) volatile
3313 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3315 inline bool atomic_ulong::compare_exchange_strong
3316 ( unsigned long& __e__, unsigned long __m__,
3317 memory_order __x__, memory_order __y__ ) volatile
3318 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3320 inline bool atomic_ulong::compare_exchange_weak
3321 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3322 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3323 __x__ == memory_order_acq_rel ? memory_order_acquire :
3324 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3326 inline bool atomic_ulong::compare_exchange_strong
3327 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3328 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3329 __x__ == memory_order_acq_rel ? memory_order_acquire :
3330 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3333 inline bool atomic_llong::is_lock_free() const volatile
3334 { return false; }
3336 inline void atomic_llong::store
3337 ( long long __m__, memory_order __x__ ) volatile
3338 { atomic_store_explicit( this, __m__, __x__ ); }
3340 inline long long atomic_llong::load
3341 ( memory_order __x__ ) volatile
3342 { return atomic_load_explicit( this, __x__ ); }
3344 inline long long atomic_llong::exchange
3345 ( long long __m__, memory_order __x__ ) volatile
3346 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3348 inline bool atomic_llong::compare_exchange_weak
3349 ( long long& __e__, long long __m__,
3350 memory_order __x__, memory_order __y__ ) volatile
3351 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3353 inline bool atomic_llong::compare_exchange_strong
3354 ( long long& __e__, long long __m__,
3355 memory_order __x__, memory_order __y__ ) volatile
3356 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3358 inline bool atomic_llong::compare_exchange_weak
3359 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3360 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3361 __x__ == memory_order_acq_rel ? memory_order_acquire :
3362 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3364 inline bool atomic_llong::compare_exchange_strong
3365 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3366 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3367 __x__ == memory_order_acq_rel ? memory_order_acquire :
3368 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3371 inline bool atomic_ullong::is_lock_free() const volatile
3372 { return false; }
3374 inline void atomic_ullong::store
3375 ( unsigned long long __m__, memory_order __x__ ) volatile
3376 { atomic_store_explicit( this, __m__, __x__ ); }
3378 inline unsigned long long atomic_ullong::load
3379 ( memory_order __x__ ) volatile
3380 { return atomic_load_explicit( this, __x__ ); }
3382 inline unsigned long long atomic_ullong::exchange
3383 ( unsigned long long __m__, memory_order __x__ ) volatile
3384 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3386 inline bool atomic_ullong::compare_exchange_weak
3387 ( unsigned long long& __e__, unsigned long long __m__,
3388 memory_order __x__, memory_order __y__ ) volatile
3389 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3391 inline bool atomic_ullong::compare_exchange_strong
3392 ( unsigned long long& __e__, unsigned long long __m__,
3393 memory_order __x__, memory_order __y__ ) volatile
3394 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3396 inline bool atomic_ullong::compare_exchange_weak
3397 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3398 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3399 __x__ == memory_order_acq_rel ? memory_order_acquire :
3400 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3402 inline bool atomic_ullong::compare_exchange_strong
3403 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3404 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3405 __x__ == memory_order_acq_rel ? memory_order_acquire :
3406 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3409 inline bool atomic_wchar_t::is_lock_free() const volatile
3410 { return false; }
3412 inline void atomic_wchar_t::store
3413 ( wchar_t __m__, memory_order __x__ ) volatile
3414 { atomic_store_explicit( this, __m__, __x__ ); }
3416 inline wchar_t atomic_wchar_t::load
3417 ( memory_order __x__ ) volatile
3418 { return atomic_load_explicit( this, __x__ ); }
3420 inline wchar_t atomic_wchar_t::exchange
3421 ( wchar_t __m__, memory_order __x__ ) volatile
3422 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3424 inline bool atomic_wchar_t::compare_exchange_weak
3425 ( wchar_t& __e__, wchar_t __m__,
3426 memory_order __x__, memory_order __y__ ) volatile
3427 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3429 inline bool atomic_wchar_t::compare_exchange_strong
3430 ( wchar_t& __e__, wchar_t __m__,
3431 memory_order __x__, memory_order __y__ ) volatile
3432 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3434 inline bool atomic_wchar_t::compare_exchange_weak
3435 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3436 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3437 __x__ == memory_order_acq_rel ? memory_order_acquire :
3438 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3440 inline bool atomic_wchar_t::compare_exchange_strong
3441 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3442 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3443 __x__ == memory_order_acq_rel ? memory_order_acquire :
3444 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3447 template< typename T >
3448 inline bool atomic<T>::is_lock_free() const volatile
3449 { return false; }
3451 template< typename T >
3452 inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
3453 { _ATOMIC_STORE_( this, __v__, __x__ ); }
3455 template< typename T >
3456 inline T atomic<T>::load( memory_order __x__ ) volatile
3457 { return _ATOMIC_LOAD_( this, __x__ ); }
3459 template< typename T >
3460 inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
3461 { return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }
3463 template< typename T >
3464 inline bool atomic<T>::compare_exchange_weak
3465 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3466 { return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3468 template< typename T >
3469 inline bool atomic<T>::compare_exchange_strong
3470 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3471 { return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3473 template< typename T >
3474 inline bool atomic<T>::compare_exchange_weak
3475 ( T& __r__, T __v__, memory_order __x__ ) volatile
3476 { return compare_exchange_weak( __r__, __v__, __x__,
3477 __x__ == memory_order_acq_rel ? memory_order_acquire :
3478 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3480 template< typename T >
3481 inline bool atomic<T>::compare_exchange_strong
3482 ( T& __r__, T __v__, memory_order __x__ ) volatile
3483 { return compare_exchange_strong( __r__, __v__, __x__,
3484 __x__ == memory_order_acq_rel ? memory_order_acquire :
3485 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
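/*
 * Illustrative sketch, not part of the original header: a typical retry
 * loop written against the compare_exchange_weak interface shown above
 * (spelled here with atomic<int>; the same shape applies to the integral
 * wrappers such as atomic_int). Even though this emulation's
 * _ATOMIC_CMPSWP_WEAK_ never fails spuriously, portable code should still
 * loop, since on failure `expected` is refreshed with the current value.
 * add_one() is a placeholder name for this example only.
 *
 *   static void add_one( atomic<int>* a )
 *   {
 *       int expected = a->load( memory_order_relaxed );
 *       while ( !a->compare_exchange_weak( expected, expected + 1,
 *                                          memory_order_acq_rel,
 *                                          memory_order_relaxed ) )
 *       { }  // `expected` now holds the freshly observed value; retry
 *   }
 */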
3488 inline void* atomic_address::fetch_add
3489 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3490 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3492 inline void* atomic_address::fetch_sub
3493 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3494 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
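/*
 * Illustrative sketch, not part of the original header: atomic_address
 * arithmetic operates on a stored void*, so the ptrdiff_t argument of
 * fetch_add/fetch_sub is (in this example implementation) a raw byte
 * offset, and the return value is the pointer held before the update.
 * `cursor`, `buffer` and take_four_bytes() are placeholder names; `cursor`
 * is assumed to have been set to &buffer[0] before use.
 *
 *   static char buffer[ 64 ];
 *   static atomic_address cursor;
 *
 *   static void* take_four_bytes()
 *   {
 *       // atomically returns the old cursor and advances it by 4 bytes
 *       return cursor.fetch_add( 4, memory_order_acq_rel );
 *   }
 */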
3497 inline char atomic_char::fetch_add
3498 ( char __m__, memory_order __x__ ) volatile
3499 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3502 inline char atomic_char::fetch_sub
3503 ( char __m__, memory_order __x__ ) volatile
3504 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3507 inline char atomic_char::fetch_and
3508 ( char __m__, memory_order __x__ ) volatile
3509 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3512 inline char atomic_char::fetch_or
3513 ( char __m__, memory_order __x__ ) volatile
3514 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3517 inline char atomic_char::fetch_xor
3518 ( char __m__, memory_order __x__ ) volatile
3519 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
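/*
 * Illustrative sketch, not part of the original header: the fetch_*
 * members are read-modify-write operations that return the value held
 * before the modification, so "set a bit and learn whether it was already
 * set" is a single atomic step. `flags`, BIT_BUSY and try_claim() are
 * placeholder names; `flags` is assumed to start at zero.
 *
 *   static atomic_char flags;
 *   enum { BIT_BUSY = 0x01 };
 *
 *   static bool try_claim()
 *   {
 *       char old = flags.fetch_or( BIT_BUSY, memory_order_acq_rel );
 *       return ( old & BIT_BUSY ) == 0;   // true iff this call set the bit
 *   }
 */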
3522 inline signed char atomic_schar::fetch_add
3523 ( signed char __m__, memory_order __x__ ) volatile
3524 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3527 inline signed char atomic_schar::fetch_sub
3528 ( signed char __m__, memory_order __x__ ) volatile
3529 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3532 inline signed char atomic_schar::fetch_and
3533 ( signed char __m__, memory_order __x__ ) volatile
3534 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3537 inline signed char atomic_schar::fetch_or
3538 ( signed char __m__, memory_order __x__ ) volatile
3539 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3542 inline signed char atomic_schar::fetch_xor
3543 ( signed char __m__, memory_order __x__ ) volatile
3544 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3547 inline unsigned char atomic_uchar::fetch_add
3548 ( unsigned char __m__, memory_order __x__ ) volatile
3549 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3552 inline unsigned char atomic_uchar::fetch_sub
3553 ( unsigned char __m__, memory_order __x__ ) volatile
3554 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3557 inline unsigned char atomic_uchar::fetch_and
3558 ( unsigned char __m__, memory_order __x__ ) volatile
3559 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3562 inline unsigned char atomic_uchar::fetch_or
3563 ( unsigned char __m__, memory_order __x__ ) volatile
3564 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3567 inline unsigned char atomic_uchar::fetch_xor
3568 ( unsigned char __m__, memory_order __x__ ) volatile
3569 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3572 inline short atomic_short::fetch_add
3573 ( short __m__, memory_order __x__ ) volatile
3574 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3577 inline short atomic_short::fetch_sub
3578 ( short __m__, memory_order __x__ ) volatile
3579 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3582 inline short atomic_short::fetch_and
3583 ( short __m__, memory_order __x__ ) volatile
3584 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3587 inline short atomic_short::fetch_or
3588 ( short __m__, memory_order __x__ ) volatile
3589 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3592 inline short atomic_short::fetch_xor
3593 ( short __m__, memory_order __x__ ) volatile
3594 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3597 inline unsigned short atomic_ushort::fetch_add
3598 ( unsigned short __m__, memory_order __x__ ) volatile
3599 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3602 inline unsigned short atomic_ushort::fetch_sub
3603 ( unsigned short __m__, memory_order __x__ ) volatile
3604 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3607 inline unsigned short atomic_ushort::fetch_and
3608 ( unsigned short __m__, memory_order __x__ ) volatile
3609 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3612 inline unsigned short atomic_ushort::fetch_or
3613 ( unsigned short __m__, memory_order __x__ ) volatile