3 * @brief Common header for C11/C++11 atomics
5 * Note that some features are unavailable, as they require support from a true
9 #ifndef __IMPATOMIC_H__
10 #define __IMPATOMIC_H__
12 #include "memoryorder.h"
13 #include "cmodelint.h"
/* Compatibility shim: expands to nothing, discarding C++0x-only
 * constructs (visible uses below wrap "= default", "= delete" and
 * constexpr constructors) so the declarations also compile on
 * translators without C++0x support. */
#define CPP0X( feature )
21 typedef struct atomic_flag
24 bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
25 void clear( memory_order = memory_order_seq_cst ) volatile;
26 void fence( memory_order ) const volatile;
28 CPP0X( atomic_flag() = default; )
29 CPP0X( atomic_flag( const atomic_flag& ) = delete; )
30 atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
/* Static initializer for atomic_flag: brace-initializes the flag's
 * single data field to false (the cleared state). */
#define ATOMIC_FLAG_INIT { false }
/*
 * C-style free-function interface for atomic_flag.  All functions are
 * implemented out of line (extern); the *_explicit forms take a
 * caller-supplied memory_order, the plain forms take none.
 */
extern bool atomic_flag_test_and_set( volatile atomic_flag* );
extern bool atomic_flag_test_and_set_explicit
( volatile atomic_flag*, memory_order );
extern void atomic_flag_clear( volatile atomic_flag* );
extern void atomic_flag_clear_explicit
( volatile atomic_flag*, memory_order );
extern void atomic_flag_fence
( const volatile atomic_flag*, memory_order );
/* NOTE(review): the reserved double-underscore names below suggest
 * internal helpers that wait until the flag is cleared — confirm
 * against the out-of-line implementation. */
extern void __atomic_flag_wait__
( volatile atomic_flag* );
extern void __atomic_flag_wait_explicit__
( volatile atomic_flag*, memory_order );
62 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
63 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
65 inline void atomic_flag::clear( memory_order __x__ ) volatile
66 { atomic_flag_clear_explicit( this, __x__ ); }
68 inline void atomic_flag::fence( memory_order __x__ ) const volatile
69 { atomic_flag_fence( this, __x__ ); }
75 The remainder of the example implementation uses the following
76 macros. These macros exploit GNU extensions for value-returning
77 blocks (AKA statement expressions) and __typeof__.
79 The macros rely on data fields of atomic structs being named __f__.
80 Other symbols used are __a__=atomic, __e__=expected, __f__=field,
81 __g__=flag, __m__=modified, __o__=operation, __r__=result,
82 __p__=pointer to field, __v__=value (for single evaluation),
83 __x__=memory-ordering, and __y__=memory-ordering.
86 #define _ATOMIC_LOAD_( __a__, __x__ ) \
87 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
88 __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__); \
91 #define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
92 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
93 __typeof__(__m__) __v__ = (__m__); \
94 model_write_action((void *) __p__, __x__, (uint64_t) __v__); \
98 #define _ATOMIC_INIT_( __a__, __m__ ) \
99 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
100 __typeof__(__m__) __v__ = (__m__); \
101 model_init_action((void *) __p__, (uint64_t) __v__); \
104 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
105 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
106 __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
107 __typeof__(__m__) __v__ = (__m__); \
108 __typeof__((__a__)->__f__) __copy__= __old__; \
109 __copy__ __o__ __v__; \
110 model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__); \
/* No spurious failure for now: the weak compare-exchange is aliased to
 * the strong one, so under this model a weak CAS fails only on a
 * genuine value mismatch, never spuriously. */
#define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_
116 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
117 ({ volatile __typeof__((__a__)->__f__)* __p__ = & ((__a__)->__f__); \
118 __typeof__(__e__) __q__ = (__e__); \
119 __typeof__(__m__) __v__ = (__m__); \
121 __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__)) model_rmwr_action((void *)__p__, __x__); \
122 if (__t__ == * __q__ ) { \
123 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
124 else { model_rmwc_action((void *)__p__, __x__); *__q__ = __t__; __r__ = false;} \
/* Report a fence with ordering __x__ to the model checker.  The atomic
 * object __a__ is unused in the expansion: the fence is recorded
 * without being tied to a particular location. */
#define _ATOMIC_FENCE_( __a__, __x__ ) \
	({ model_fence_action(__x__);})
/* Lock-free query macros: every atomic type in this implementation
 * reports the same value (1). */
#define ATOMIC_CHAR_LOCK_FREE 1
#define ATOMIC_CHAR16_T_LOCK_FREE 1
#define ATOMIC_CHAR32_T_LOCK_FREE 1
#define ATOMIC_WCHAR_T_LOCK_FREE 1
#define ATOMIC_SHORT_LOCK_FREE 1
#define ATOMIC_INT_LOCK_FREE 1
#define ATOMIC_LONG_LOCK_FREE 1
#define ATOMIC_LLONG_LOCK_FREE 1
#define ATOMIC_ADDRESS_LOCK_FREE 1
141 typedef struct atomic_bool
144 bool is_lock_free() const volatile;
145 void store( bool, memory_order = memory_order_seq_cst ) volatile;
146 bool load( memory_order = memory_order_seq_cst ) volatile;
147 bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
148 bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
149 bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
150 bool compare_exchange_weak ( bool&, bool,
151 memory_order = memory_order_seq_cst) volatile;
152 bool compare_exchange_strong ( bool&, bool,
153 memory_order = memory_order_seq_cst) volatile;
154 void fence( memory_order ) const volatile;
156 CPP0X( atomic_bool() = delete; )
157 CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
158 CPP0X( atomic_bool( const atomic_bool& ) = delete; )
159 atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
161 bool operator =( bool __v__ ) volatile
162 { store( __v__ ); return __v__; }
164 friend void atomic_store_explicit( volatile atomic_bool*, bool,
166 friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
167 friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
169 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
170 memory_order, memory_order );
171 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
172 memory_order, memory_order );
173 friend void atomic_fence( const volatile atomic_bool*, memory_order );
181 typedef struct atomic_address
184 bool is_lock_free() const volatile;
185 void store( void*, memory_order = memory_order_seq_cst ) volatile;
186 void* load( memory_order = memory_order_seq_cst ) volatile;
187 void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
188 bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
189 bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
190 bool compare_exchange_weak( void*&, void*,
191 memory_order = memory_order_seq_cst ) volatile;
192 bool compare_exchange_strong( void*&, void*,
193 memory_order = memory_order_seq_cst ) volatile;
194 void fence( memory_order ) const volatile;
195 void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
196 void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
198 CPP0X( atomic_address() = default; )
199 CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
200 CPP0X( atomic_address( const atomic_address& ) = delete; )
201 atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
203 void* operator =( void* __v__ ) volatile
204 { store( __v__ ); return __v__; }
206 void* operator +=( ptrdiff_t __v__ ) volatile
207 { return fetch_add( __v__ ); }
209 void* operator -=( ptrdiff_t __v__ ) volatile
210 { return fetch_sub( __v__ ); }
212 friend void atomic_store_explicit( volatile atomic_address*, void*,
214 friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
215 friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
217 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
218 void**, void*, memory_order, memory_order );
219 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
220 void**, void*, memory_order, memory_order );
221 friend void atomic_fence( const volatile atomic_address*, memory_order );
222 friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
224 friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
233 typedef struct atomic_char
236 bool is_lock_free() const volatile;
238 memory_order = memory_order_seq_cst ) volatile;
239 char load( memory_order = memory_order_seq_cst ) volatile;
241 memory_order = memory_order_seq_cst ) volatile;
242 bool compare_exchange_weak( char&, char,
243 memory_order, memory_order ) volatile;
244 bool compare_exchange_strong( char&, char,
245 memory_order, memory_order ) volatile;
246 bool compare_exchange_weak( char&, char,
247 memory_order = memory_order_seq_cst ) volatile;
248 bool compare_exchange_strong( char&, char,
249 memory_order = memory_order_seq_cst ) volatile;
250 void fence( memory_order ) const volatile;
251 char fetch_add( char,
252 memory_order = memory_order_seq_cst ) volatile;
253 char fetch_sub( char,
254 memory_order = memory_order_seq_cst ) volatile;
255 char fetch_and( char,
256 memory_order = memory_order_seq_cst ) volatile;
258 memory_order = memory_order_seq_cst ) volatile;
259 char fetch_xor( char,
260 memory_order = memory_order_seq_cst ) volatile;
262 CPP0X( atomic_char() = default; )
263 CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
264 CPP0X( atomic_char( const atomic_char& ) = delete; )
265 atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
267 char operator =( char __v__ ) volatile
268 { store( __v__ ); return __v__; }
270 char operator ++( int ) volatile
271 { return fetch_add( 1 ); }
273 char operator --( int ) volatile
274 { return fetch_sub( 1 ); }
276 char operator ++() volatile
277 { return fetch_add( 1 ) + 1; }
279 char operator --() volatile
280 { return fetch_sub( 1 ) - 1; }
282 char operator +=( char __v__ ) volatile
283 { return fetch_add( __v__ ) + __v__; }
285 char operator -=( char __v__ ) volatile
286 { return fetch_sub( __v__ ) - __v__; }
288 char operator &=( char __v__ ) volatile
289 { return fetch_and( __v__ ) & __v__; }
291 char operator |=( char __v__ ) volatile
292 { return fetch_or( __v__ ) | __v__; }
294 char operator ^=( char __v__ ) volatile
295 { return fetch_xor( __v__ ) ^ __v__; }
297 friend void atomic_store_explicit( volatile atomic_char*, char,
299 friend char atomic_load_explicit( volatile atomic_char*,
301 friend char atomic_exchange_explicit( volatile atomic_char*,
302 char, memory_order );
303 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
304 char*, char, memory_order, memory_order );
305 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
306 char*, char, memory_order, memory_order );
307 friend void atomic_fence( const volatile atomic_char*, memory_order );
308 friend char atomic_fetch_add_explicit( volatile atomic_char*,
309 char, memory_order );
310 friend char atomic_fetch_sub_explicit( volatile atomic_char*,
311 char, memory_order );
312 friend char atomic_fetch_and_explicit( volatile atomic_char*,
313 char, memory_order );
314 friend char atomic_fetch_or_explicit( volatile atomic_char*,
315 char, memory_order );
316 friend char atomic_fetch_xor_explicit( volatile atomic_char*,
317 char, memory_order );
325 typedef struct atomic_schar
328 bool is_lock_free() const volatile;
329 void store( signed char,
330 memory_order = memory_order_seq_cst ) volatile;
331 signed char load( memory_order = memory_order_seq_cst ) volatile;
332 signed char exchange( signed char,
333 memory_order = memory_order_seq_cst ) volatile;
334 bool compare_exchange_weak( signed char&, signed char,
335 memory_order, memory_order ) volatile;
336 bool compare_exchange_strong( signed char&, signed char,
337 memory_order, memory_order ) volatile;
338 bool compare_exchange_weak( signed char&, signed char,
339 memory_order = memory_order_seq_cst ) volatile;
340 bool compare_exchange_strong( signed char&, signed char,
341 memory_order = memory_order_seq_cst ) volatile;
342 void fence( memory_order ) const volatile;
343 signed char fetch_add( signed char,
344 memory_order = memory_order_seq_cst ) volatile;
345 signed char fetch_sub( signed char,
346 memory_order = memory_order_seq_cst ) volatile;
347 signed char fetch_and( signed char,
348 memory_order = memory_order_seq_cst ) volatile;
349 signed char fetch_or( signed char,
350 memory_order = memory_order_seq_cst ) volatile;
351 signed char fetch_xor( signed char,
352 memory_order = memory_order_seq_cst ) volatile;
354 CPP0X( atomic_schar() = default; )
355 CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
356 CPP0X( atomic_schar( const atomic_schar& ) = delete; )
357 atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
359 signed char operator =( signed char __v__ ) volatile
360 { store( __v__ ); return __v__; }
362 signed char operator ++( int ) volatile
363 { return fetch_add( 1 ); }
365 signed char operator --( int ) volatile
366 { return fetch_sub( 1 ); }
368 signed char operator ++() volatile
369 { return fetch_add( 1 ) + 1; }
371 signed char operator --() volatile
372 { return fetch_sub( 1 ) - 1; }
374 signed char operator +=( signed char __v__ ) volatile
375 { return fetch_add( __v__ ) + __v__; }
377 signed char operator -=( signed char __v__ ) volatile
378 { return fetch_sub( __v__ ) - __v__; }
380 signed char operator &=( signed char __v__ ) volatile
381 { return fetch_and( __v__ ) & __v__; }
383 signed char operator |=( signed char __v__ ) volatile
384 { return fetch_or( __v__ ) | __v__; }
386 signed char operator ^=( signed char __v__ ) volatile
387 { return fetch_xor( __v__ ) ^ __v__; }
389 friend void atomic_store_explicit( volatile atomic_schar*, signed char,
391 friend signed char atomic_load_explicit( volatile atomic_schar*,
393 friend signed char atomic_exchange_explicit( volatile atomic_schar*,
394 signed char, memory_order );
395 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
396 signed char*, signed char, memory_order, memory_order );
397 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
398 signed char*, signed char, memory_order, memory_order );
399 friend void atomic_fence( const volatile atomic_schar*, memory_order );
400 friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
401 signed char, memory_order );
402 friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
403 signed char, memory_order );
404 friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
405 signed char, memory_order );
406 friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
407 signed char, memory_order );
408 friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
409 signed char, memory_order );
417 typedef struct atomic_uchar
420 bool is_lock_free() const volatile;
421 void store( unsigned char,
422 memory_order = memory_order_seq_cst ) volatile;
423 unsigned char load( memory_order = memory_order_seq_cst ) volatile;
424 unsigned char exchange( unsigned char,
425 memory_order = memory_order_seq_cst ) volatile;
426 bool compare_exchange_weak( unsigned char&, unsigned char,
427 memory_order, memory_order ) volatile;
428 bool compare_exchange_strong( unsigned char&, unsigned char,
429 memory_order, memory_order ) volatile;
430 bool compare_exchange_weak( unsigned char&, unsigned char,
431 memory_order = memory_order_seq_cst ) volatile;
432 bool compare_exchange_strong( unsigned char&, unsigned char,
433 memory_order = memory_order_seq_cst ) volatile;
434 void fence( memory_order ) const volatile;
435 unsigned char fetch_add( unsigned char,
436 memory_order = memory_order_seq_cst ) volatile;
437 unsigned char fetch_sub( unsigned char,
438 memory_order = memory_order_seq_cst ) volatile;
439 unsigned char fetch_and( unsigned char,
440 memory_order = memory_order_seq_cst ) volatile;
441 unsigned char fetch_or( unsigned char,
442 memory_order = memory_order_seq_cst ) volatile;
443 unsigned char fetch_xor( unsigned char,
444 memory_order = memory_order_seq_cst ) volatile;
446 CPP0X( atomic_uchar() = default; )
447 CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
448 CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
449 atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
451 unsigned char operator =( unsigned char __v__ ) volatile
452 { store( __v__ ); return __v__; }
454 unsigned char operator ++( int ) volatile
455 { return fetch_add( 1 ); }
457 unsigned char operator --( int ) volatile
458 { return fetch_sub( 1 ); }
460 unsigned char operator ++() volatile
461 { return fetch_add( 1 ) + 1; }
463 unsigned char operator --() volatile
464 { return fetch_sub( 1 ) - 1; }
466 unsigned char operator +=( unsigned char __v__ ) volatile
467 { return fetch_add( __v__ ) + __v__; }
469 unsigned char operator -=( unsigned char __v__ ) volatile
470 { return fetch_sub( __v__ ) - __v__; }
472 unsigned char operator &=( unsigned char __v__ ) volatile
473 { return fetch_and( __v__ ) & __v__; }
475 unsigned char operator |=( unsigned char __v__ ) volatile
476 { return fetch_or( __v__ ) | __v__; }
478 unsigned char operator ^=( unsigned char __v__ ) volatile
479 { return fetch_xor( __v__ ) ^ __v__; }
481 friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
483 friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
485 friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
486 unsigned char, memory_order );
487 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
488 unsigned char*, unsigned char, memory_order, memory_order );
489 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
490 unsigned char*, unsigned char, memory_order, memory_order );
491 friend void atomic_fence( const volatile atomic_uchar*, memory_order );
492 friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
493 unsigned char, memory_order );
494 friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
495 unsigned char, memory_order );
496 friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
497 unsigned char, memory_order );
498 friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
499 unsigned char, memory_order );
500 friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
501 unsigned char, memory_order );
509 typedef struct atomic_short
512 bool is_lock_free() const volatile;
514 memory_order = memory_order_seq_cst ) volatile;
515 short load( memory_order = memory_order_seq_cst ) volatile;
516 short exchange( short,
517 memory_order = memory_order_seq_cst ) volatile;
518 bool compare_exchange_weak( short&, short,
519 memory_order, memory_order ) volatile;
520 bool compare_exchange_strong( short&, short,
521 memory_order, memory_order ) volatile;
522 bool compare_exchange_weak( short&, short,
523 memory_order = memory_order_seq_cst ) volatile;
524 bool compare_exchange_strong( short&, short,
525 memory_order = memory_order_seq_cst ) volatile;
526 void fence( memory_order ) const volatile;
527 short fetch_add( short,
528 memory_order = memory_order_seq_cst ) volatile;
529 short fetch_sub( short,
530 memory_order = memory_order_seq_cst ) volatile;
531 short fetch_and( short,
532 memory_order = memory_order_seq_cst ) volatile;
533 short fetch_or( short,
534 memory_order = memory_order_seq_cst ) volatile;
535 short fetch_xor( short,
536 memory_order = memory_order_seq_cst ) volatile;
538 CPP0X( atomic_short() = default; )
539 CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
540 CPP0X( atomic_short( const atomic_short& ) = delete; )
541 atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
543 short operator =( short __v__ ) volatile
544 { store( __v__ ); return __v__; }
546 short operator ++( int ) volatile
547 { return fetch_add( 1 ); }
549 short operator --( int ) volatile
550 { return fetch_sub( 1 ); }
552 short operator ++() volatile
553 { return fetch_add( 1 ) + 1; }
555 short operator --() volatile
556 { return fetch_sub( 1 ) - 1; }
558 short operator +=( short __v__ ) volatile
559 { return fetch_add( __v__ ) + __v__; }
561 short operator -=( short __v__ ) volatile
562 { return fetch_sub( __v__ ) - __v__; }
564 short operator &=( short __v__ ) volatile
565 { return fetch_and( __v__ ) & __v__; }
567 short operator |=( short __v__ ) volatile
568 { return fetch_or( __v__ ) | __v__; }
570 short operator ^=( short __v__ ) volatile
571 { return fetch_xor( __v__ ) ^ __v__; }
573 friend void atomic_store_explicit( volatile atomic_short*, short,
575 friend short atomic_load_explicit( volatile atomic_short*,
577 friend short atomic_exchange_explicit( volatile atomic_short*,
578 short, memory_order );
579 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
580 short*, short, memory_order, memory_order );
581 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
582 short*, short, memory_order, memory_order );
583 friend void atomic_fence( const volatile atomic_short*, memory_order );
584 friend short atomic_fetch_add_explicit( volatile atomic_short*,
585 short, memory_order );
586 friend short atomic_fetch_sub_explicit( volatile atomic_short*,
587 short, memory_order );
588 friend short atomic_fetch_and_explicit( volatile atomic_short*,
589 short, memory_order );
590 friend short atomic_fetch_or_explicit( volatile atomic_short*,
591 short, memory_order );
592 friend short atomic_fetch_xor_explicit( volatile atomic_short*,
593 short, memory_order );
601 typedef struct atomic_ushort
604 bool is_lock_free() const volatile;
605 void store( unsigned short,
606 memory_order = memory_order_seq_cst ) volatile;
607 unsigned short load( memory_order = memory_order_seq_cst ) volatile;
608 unsigned short exchange( unsigned short,
609 memory_order = memory_order_seq_cst ) volatile;
610 bool compare_exchange_weak( unsigned short&, unsigned short,
611 memory_order, memory_order ) volatile;
612 bool compare_exchange_strong( unsigned short&, unsigned short,
613 memory_order, memory_order ) volatile;
614 bool compare_exchange_weak( unsigned short&, unsigned short,
615 memory_order = memory_order_seq_cst ) volatile;
616 bool compare_exchange_strong( unsigned short&, unsigned short,
617 memory_order = memory_order_seq_cst ) volatile;
618 void fence( memory_order ) const volatile;
619 unsigned short fetch_add( unsigned short,
620 memory_order = memory_order_seq_cst ) volatile;
621 unsigned short fetch_sub( unsigned short,
622 memory_order = memory_order_seq_cst ) volatile;
623 unsigned short fetch_and( unsigned short,
624 memory_order = memory_order_seq_cst ) volatile;
625 unsigned short fetch_or( unsigned short,
626 memory_order = memory_order_seq_cst ) volatile;
627 unsigned short fetch_xor( unsigned short,
628 memory_order = memory_order_seq_cst ) volatile;
630 CPP0X( atomic_ushort() = default; )
631 CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
632 CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
633 atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
635 unsigned short operator =( unsigned short __v__ ) volatile
636 { store( __v__ ); return __v__; }
638 unsigned short operator ++( int ) volatile
639 { return fetch_add( 1 ); }
641 unsigned short operator --( int ) volatile
642 { return fetch_sub( 1 ); }
644 unsigned short operator ++() volatile
645 { return fetch_add( 1 ) + 1; }
647 unsigned short operator --() volatile
648 { return fetch_sub( 1 ) - 1; }
650 unsigned short operator +=( unsigned short __v__ ) volatile
651 { return fetch_add( __v__ ) + __v__; }
653 unsigned short operator -=( unsigned short __v__ ) volatile
654 { return fetch_sub( __v__ ) - __v__; }
656 unsigned short operator &=( unsigned short __v__ ) volatile
657 { return fetch_and( __v__ ) & __v__; }
659 unsigned short operator |=( unsigned short __v__ ) volatile
660 { return fetch_or( __v__ ) | __v__; }
662 unsigned short operator ^=( unsigned short __v__ ) volatile
663 { return fetch_xor( __v__ ) ^ __v__; }
665 friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
667 friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
669 friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
670 unsigned short, memory_order );
671 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
672 unsigned short*, unsigned short, memory_order, memory_order );
673 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
674 unsigned short*, unsigned short, memory_order, memory_order );
675 friend void atomic_fence( const volatile atomic_ushort*, memory_order );
676 friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
677 unsigned short, memory_order );
678 friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
679 unsigned short, memory_order );
680 friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
681 unsigned short, memory_order );
682 friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
683 unsigned short, memory_order );
684 friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
685 unsigned short, memory_order );
689 unsigned short __f__;
693 typedef struct atomic_int
696 bool is_lock_free() const volatile;
698 memory_order = memory_order_seq_cst ) volatile;
699 int load( memory_order = memory_order_seq_cst ) volatile;
701 memory_order = memory_order_seq_cst ) volatile;
702 bool compare_exchange_weak( int&, int,
703 memory_order, memory_order ) volatile;
704 bool compare_exchange_strong( int&, int,
705 memory_order, memory_order ) volatile;
706 bool compare_exchange_weak( int&, int,
707 memory_order = memory_order_seq_cst ) volatile;
708 bool compare_exchange_strong( int&, int,
709 memory_order = memory_order_seq_cst ) volatile;
710 void fence( memory_order ) const volatile;
712 memory_order = memory_order_seq_cst ) volatile;
714 memory_order = memory_order_seq_cst ) volatile;
716 memory_order = memory_order_seq_cst ) volatile;
718 memory_order = memory_order_seq_cst ) volatile;
720 memory_order = memory_order_seq_cst ) volatile;
722 CPP0X( atomic_int() = default; )
723 CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
724 CPP0X( atomic_int( const atomic_int& ) = delete; )
725 atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
727 int operator =( int __v__ ) volatile
728 { store( __v__ ); return __v__; }
730 int operator ++( int ) volatile
731 { return fetch_add( 1 ); }
733 int operator --( int ) volatile
734 { return fetch_sub( 1 ); }
736 int operator ++() volatile
737 { return fetch_add( 1 ) + 1; }
739 int operator --() volatile
740 { return fetch_sub( 1 ) - 1; }
742 int operator +=( int __v__ ) volatile
743 { return fetch_add( __v__ ) + __v__; }
745 int operator -=( int __v__ ) volatile
746 { return fetch_sub( __v__ ) - __v__; }
748 int operator &=( int __v__ ) volatile
749 { return fetch_and( __v__ ) & __v__; }
751 int operator |=( int __v__ ) volatile
752 { return fetch_or( __v__ ) | __v__; }
754 int operator ^=( int __v__ ) volatile
755 { return fetch_xor( __v__ ) ^ __v__; }
757 friend void atomic_store_explicit( volatile atomic_int*, int,
759 friend int atomic_load_explicit( volatile atomic_int*,
761 friend int atomic_exchange_explicit( volatile atomic_int*,
763 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
764 int*, int, memory_order, memory_order );
765 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
766 int*, int, memory_order, memory_order );
767 friend void atomic_fence( const volatile atomic_int*, memory_order );
768 friend int atomic_fetch_add_explicit( volatile atomic_int*,
770 friend int atomic_fetch_sub_explicit( volatile atomic_int*,
772 friend int atomic_fetch_and_explicit( volatile atomic_int*,
774 friend int atomic_fetch_or_explicit( volatile atomic_int*,
776 friend int atomic_fetch_xor_explicit( volatile atomic_int*,
785 typedef struct atomic_uint
788 bool is_lock_free() const volatile;
789 void store( unsigned int,
790 memory_order = memory_order_seq_cst ) volatile;
791 unsigned int load( memory_order = memory_order_seq_cst ) volatile;
792 unsigned int exchange( unsigned int,
793 memory_order = memory_order_seq_cst ) volatile;
794 bool compare_exchange_weak( unsigned int&, unsigned int,
795 memory_order, memory_order ) volatile;
796 bool compare_exchange_strong( unsigned int&, unsigned int,
797 memory_order, memory_order ) volatile;
798 bool compare_exchange_weak( unsigned int&, unsigned int,
799 memory_order = memory_order_seq_cst ) volatile;
800 bool compare_exchange_strong( unsigned int&, unsigned int,
801 memory_order = memory_order_seq_cst ) volatile;
802 void fence( memory_order ) const volatile;
803 unsigned int fetch_add( unsigned int,
804 memory_order = memory_order_seq_cst ) volatile;
805 unsigned int fetch_sub( unsigned int,
806 memory_order = memory_order_seq_cst ) volatile;
807 unsigned int fetch_and( unsigned int,
808 memory_order = memory_order_seq_cst ) volatile;
809 unsigned int fetch_or( unsigned int,
810 memory_order = memory_order_seq_cst ) volatile;
811 unsigned int fetch_xor( unsigned int,
812 memory_order = memory_order_seq_cst ) volatile;
814 CPP0X( atomic_uint() = default; )
815 CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
816 CPP0X( atomic_uint( const atomic_uint& ) = delete; )
817 atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
819 unsigned int operator =( unsigned int __v__ ) volatile
820 { store( __v__ ); return __v__; }
822 unsigned int operator ++( int ) volatile
823 { return fetch_add( 1 ); }
825 unsigned int operator --( int ) volatile
826 { return fetch_sub( 1 ); }
828 unsigned int operator ++() volatile
829 { return fetch_add( 1 ) + 1; }
831 unsigned int operator --() volatile
832 { return fetch_sub( 1 ) - 1; }
834 unsigned int operator +=( unsigned int __v__ ) volatile
835 { return fetch_add( __v__ ) + __v__; }
837 unsigned int operator -=( unsigned int __v__ ) volatile
838 { return fetch_sub( __v__ ) - __v__; }
840 unsigned int operator &=( unsigned int __v__ ) volatile
841 { return fetch_and( __v__ ) & __v__; }
843 unsigned int operator |=( unsigned int __v__ ) volatile
844 { return fetch_or( __v__ ) | __v__; }
846 unsigned int operator ^=( unsigned int __v__ ) volatile
847 { return fetch_xor( __v__ ) ^ __v__; }
849 friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
851 friend unsigned int atomic_load_explicit( volatile atomic_uint*,
853 friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
854 unsigned int, memory_order );
855 friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
856 unsigned int*, unsigned int, memory_order, memory_order );
857 friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
858 unsigned int*, unsigned int, memory_order, memory_order );
859 friend void atomic_fence( const volatile atomic_uint*, memory_order );
860 friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
861 unsigned int, memory_order );
862 friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
863 unsigned int, memory_order );
864 friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
865 unsigned int, memory_order );
866 friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
867 unsigned int, memory_order );
868 friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
869 unsigned int, memory_order );
/*
 * atomic_long: struct-based atomic wrapper for `long`.
 * C++ member operations (store/load/exchange/CAS/fetch_*) are declared here;
 * compound-assignment operators are built on the fetch_* primitives
 * (e.g. `x += v` yields fetch_add(v) + v, the post-operation value).
 * The free C-style *_explicit functions are granted friendship so they can
 * touch the private data field (named __f__ per the file's conventions).
 *
 * NOTE(review): several lines of this region appear truncated by extraction
 * (the opening brace, the first lines of store()/exchange()/fetch_or(), the
 * `long __f__;` member and the closing `} atomic_long;`) -- verify against
 * the upstream header.
 */
877 typedef struct atomic_long
880     bool is_lock_free() const volatile;
882                 memory_order = memory_order_seq_cst ) volatile;
883     long load( memory_order = memory_order_seq_cst ) volatile;
885                    memory_order = memory_order_seq_cst ) volatile;
886     bool compare_exchange_weak( long&, long,
887                        memory_order, memory_order ) volatile;
888     bool compare_exchange_strong( long&, long,
889                        memory_order, memory_order ) volatile;
890     bool compare_exchange_weak( long&, long,
891                        memory_order = memory_order_seq_cst ) volatile;
892     bool compare_exchange_strong( long&, long,
893                        memory_order = memory_order_seq_cst ) volatile;
894     void fence( memory_order ) const volatile;
895     long fetch_add( long,
896                            memory_order = memory_order_seq_cst ) volatile;
897     long fetch_sub( long,
898                            memory_order = memory_order_seq_cst ) volatile;
899     long fetch_and( long,
900                            memory_order = memory_order_seq_cst ) volatile;
902                            memory_order = memory_order_seq_cst ) volatile;
903     long fetch_xor( long,
904                            memory_order = memory_order_seq_cst ) volatile;
/* C++0x-only special members, compiled away in C via the CPP0X macro. */
906     CPP0X( atomic_long() = default; )
907     CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
908     CPP0X( atomic_long( const atomic_long& ) = delete; )
909     atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
/* Assignment from the value type performs a seq_cst store. */
911     long operator =( long __v__ ) volatile
912     { store( __v__ ); return __v__; }
914     long operator ++( int ) volatile
915     { return fetch_add( 1 ); }
917     long operator --( int ) volatile
918     { return fetch_sub( 1 ); }
920     long operator ++() volatile
921     { return fetch_add( 1 ) + 1; }
923     long operator --() volatile
924     { return fetch_sub( 1 ) - 1; }
926     long operator +=( long __v__ ) volatile
927     { return fetch_add( __v__ ) + __v__; }
929     long operator -=( long __v__ ) volatile
930     { return fetch_sub( __v__ ) - __v__; }
932     long operator &=( long __v__ ) volatile
933     { return fetch_and( __v__ ) & __v__; }
935     long operator |=( long __v__ ) volatile
936     { return fetch_or( __v__ ) | __v__; }
938     long operator ^=( long __v__ ) volatile
939     { return fetch_xor( __v__ ) ^ __v__; }
/* Free-function API needs access to the private __f__ field. */
941     friend void atomic_store_explicit( volatile atomic_long*, long,
943     friend long atomic_load_explicit( volatile atomic_long*,
945     friend long atomic_exchange_explicit( volatile atomic_long*,
946                                           long, memory_order );
947     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
948                       long*, long, memory_order, memory_order );
949     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
950                       long*, long, memory_order, memory_order );
951     friend void atomic_fence( const volatile atomic_long*, memory_order );
952     friend long atomic_fetch_add_explicit( volatile atomic_long*,
953                                            long, memory_order );
954     friend long atomic_fetch_sub_explicit( volatile atomic_long*,
955                                            long, memory_order );
956     friend long atomic_fetch_and_explicit( volatile atomic_long*,
957                                            long, memory_order );
958     friend long atomic_fetch_or_explicit( volatile atomic_long*,
959                                           long, memory_order );
960     friend long atomic_fetch_xor_explicit( volatile atomic_long*,
961                                            long, memory_order );
/*
 * atomic_ulong: struct-based atomic wrapper for `unsigned long`.
 * Same template as the sibling atomic_* integer structs in this file:
 * member ops, CPP0X-guarded C++0x special members, value-assignment that
 * stores seq_cst, compound-assignments built on fetch_*, and friendship for
 * the C-style *_explicit free functions. The stored value lives in __f__.
 *
 * NOTE(review): the opening and closing braces of this struct appear to
 * have been lost in extraction -- verify against the upstream header.
 */
969 typedef struct atomic_ulong
972     bool is_lock_free() const volatile;
973     void store( unsigned long,
974                 memory_order = memory_order_seq_cst ) volatile;
975     unsigned long load( memory_order = memory_order_seq_cst ) volatile;
976     unsigned long exchange( unsigned long,
977                    memory_order = memory_order_seq_cst ) volatile;
978     bool compare_exchange_weak( unsigned long&, unsigned long,
979                        memory_order, memory_order ) volatile;
980     bool compare_exchange_strong( unsigned long&, unsigned long,
981                        memory_order, memory_order ) volatile;
982     bool compare_exchange_weak( unsigned long&, unsigned long,
983                        memory_order = memory_order_seq_cst ) volatile;
984     bool compare_exchange_strong( unsigned long&, unsigned long,
985                        memory_order = memory_order_seq_cst ) volatile;
986     void fence( memory_order ) const volatile;
987     unsigned long fetch_add( unsigned long,
988                            memory_order = memory_order_seq_cst ) volatile;
989     unsigned long fetch_sub( unsigned long,
990                            memory_order = memory_order_seq_cst ) volatile;
991     unsigned long fetch_and( unsigned long,
992                            memory_order = memory_order_seq_cst ) volatile;
993     unsigned long fetch_or( unsigned long,
994                            memory_order = memory_order_seq_cst ) volatile;
995     unsigned long fetch_xor( unsigned long,
996                            memory_order = memory_order_seq_cst ) volatile;
/* C++0x-only special members, compiled away in C via the CPP0X macro. */
998     CPP0X( atomic_ulong() = default; )
999     CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
1000     CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
1001     atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
/* Assignment from the value type performs a seq_cst store. */
1003     unsigned long operator =( unsigned long __v__ ) volatile
1004     { store( __v__ ); return __v__; }
1006     unsigned long operator ++( int ) volatile
1007     { return fetch_add( 1 ); }
1009     unsigned long operator --( int ) volatile
1010     { return fetch_sub( 1 ); }
1012     unsigned long operator ++() volatile
1013     { return fetch_add( 1 ) + 1; }
1015     unsigned long operator --() volatile
1016     { return fetch_sub( 1 ) - 1; }
1018     unsigned long operator +=( unsigned long __v__ ) volatile
1019     { return fetch_add( __v__ ) + __v__; }
1021     unsigned long operator -=( unsigned long __v__ ) volatile
1022     { return fetch_sub( __v__ ) - __v__; }
1024     unsigned long operator &=( unsigned long __v__ ) volatile
1025     { return fetch_and( __v__ ) & __v__; }
1027     unsigned long operator |=( unsigned long __v__ ) volatile
1028     { return fetch_or( __v__ ) | __v__; }
1030     unsigned long operator ^=( unsigned long __v__ ) volatile
1031     { return fetch_xor( __v__ ) ^ __v__; }
/* Free-function API needs access to the private __f__ field. */
1033     friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
1035     friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
1037     friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1038                                           unsigned long, memory_order );
1039     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1040                       unsigned long*, unsigned long, memory_order, memory_order );
1041     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1042                       unsigned long*, unsigned long, memory_order, memory_order );
1043     friend void atomic_fence( const volatile atomic_ulong*, memory_order );
1044     friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1045                                            unsigned long, memory_order );
1046     friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1047                                            unsigned long, memory_order );
1048     friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1049                                            unsigned long, memory_order );
1050     friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
1051                                            unsigned long, memory_order );
1052     friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1053                                            unsigned long, memory_order );
/* Stored value; per the file's conventions atomic data fields are __f__. */
1057     unsigned long __f__;
/*
 * atomic_llong: struct-based atomic wrapper for `long long`.
 * Same template as the sibling atomic_* integer structs in this file.
 *
 * NOTE(review): the braces, the `long long __f__;` member and the closing
 * `} atomic_llong;` appear to have been lost in extraction -- verify
 * against the upstream header.
 */
1061 typedef struct atomic_llong
1064     bool is_lock_free() const volatile;
1065     void store( long long,
1066                 memory_order = memory_order_seq_cst ) volatile;
1067     long long load( memory_order = memory_order_seq_cst ) volatile;
1068     long long exchange( long long,
1069                    memory_order = memory_order_seq_cst ) volatile;
1070     bool compare_exchange_weak( long long&, long long,
1071                        memory_order, memory_order ) volatile;
1072     bool compare_exchange_strong( long long&, long long,
1073                        memory_order, memory_order ) volatile;
1074     bool compare_exchange_weak( long long&, long long,
1075                        memory_order = memory_order_seq_cst ) volatile;
1076     bool compare_exchange_strong( long long&, long long,
1077                        memory_order = memory_order_seq_cst ) volatile;
1078     void fence( memory_order ) const volatile;
1079     long long fetch_add( long long,
1080                            memory_order = memory_order_seq_cst ) volatile;
1081     long long fetch_sub( long long,
1082                            memory_order = memory_order_seq_cst ) volatile;
1083     long long fetch_and( long long,
1084                            memory_order = memory_order_seq_cst ) volatile;
1085     long long fetch_or( long long,
1086                            memory_order = memory_order_seq_cst ) volatile;
1087     long long fetch_xor( long long,
1088                            memory_order = memory_order_seq_cst ) volatile;
/* C++0x-only special members, compiled away in C via the CPP0X macro. */
1090     CPP0X( atomic_llong() = default; )
1091     CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
1092     CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1093     atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
/* Assignment from the value type performs a seq_cst store. */
1095     long long operator =( long long __v__ ) volatile
1096     { store( __v__ ); return __v__; }
1098     long long operator ++( int ) volatile
1099     { return fetch_add( 1 ); }
1101     long long operator --( int ) volatile
1102     { return fetch_sub( 1 ); }
1104     long long operator ++() volatile
1105     { return fetch_add( 1 ) + 1; }
1107     long long operator --() volatile
1108     { return fetch_sub( 1 ) - 1; }
1110     long long operator +=( long long __v__ ) volatile
1111     { return fetch_add( __v__ ) + __v__; }
1113     long long operator -=( long long __v__ ) volatile
1114     { return fetch_sub( __v__ ) - __v__; }
1116     long long operator &=( long long __v__ ) volatile
1117     { return fetch_and( __v__ ) & __v__; }
1119     long long operator |=( long long __v__ ) volatile
1120     { return fetch_or( __v__ ) | __v__; }
1122     long long operator ^=( long long __v__ ) volatile
1123     { return fetch_xor( __v__ ) ^ __v__; }
/* Free-function API needs access to the private __f__ field. */
1125     friend void atomic_store_explicit( volatile atomic_llong*, long long,
1127     friend long long atomic_load_explicit( volatile atomic_llong*,
1129     friend long long atomic_exchange_explicit( volatile atomic_llong*,
1130                                           long long, memory_order );
1131     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1132                       long long*, long long, memory_order, memory_order );
1133     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1134                       long long*, long long, memory_order, memory_order );
1135     friend void atomic_fence( const volatile atomic_llong*, memory_order );
1136     friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1137                                            long long, memory_order );
1138     friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1139                                            long long, memory_order );
1140     friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1141                                            long long, memory_order );
1142     friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
1143                                            long long, memory_order );
1144     friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1145                                            long long, memory_order );
/*
 * atomic_ullong: struct-based atomic wrapper for `unsigned long long`.
 * Same template as the sibling atomic_* integer structs in this file;
 * the stored value lives in __f__ (last line of this region).
 *
 * NOTE(review): the opening/closing braces of this struct appear to have
 * been lost in extraction -- verify against the upstream header.
 */
1153 typedef struct atomic_ullong
1156     bool is_lock_free() const volatile;
1157     void store( unsigned long long,
1158                 memory_order = memory_order_seq_cst ) volatile;
1159     unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1160     unsigned long long exchange( unsigned long long,
1161                    memory_order = memory_order_seq_cst ) volatile;
1162     bool compare_exchange_weak( unsigned long long&, unsigned long long,
1163                        memory_order, memory_order ) volatile;
1164     bool compare_exchange_strong( unsigned long long&, unsigned long long,
1165                        memory_order, memory_order ) volatile;
1166     bool compare_exchange_weak( unsigned long long&, unsigned long long,
1167                        memory_order = memory_order_seq_cst ) volatile;
1168     bool compare_exchange_strong( unsigned long long&, unsigned long long,
1169                        memory_order = memory_order_seq_cst ) volatile;
1170     void fence( memory_order ) const volatile;
1171     unsigned long long fetch_add( unsigned long long,
1172                            memory_order = memory_order_seq_cst ) volatile;
1173     unsigned long long fetch_sub( unsigned long long,
1174                            memory_order = memory_order_seq_cst ) volatile;
1175     unsigned long long fetch_and( unsigned long long,
1176                            memory_order = memory_order_seq_cst ) volatile;
1177     unsigned long long fetch_or( unsigned long long,
1178                            memory_order = memory_order_seq_cst ) volatile;
1179     unsigned long long fetch_xor( unsigned long long,
1180                            memory_order = memory_order_seq_cst ) volatile;
/* C++0x-only special members, compiled away in C via the CPP0X macro. */
1182     CPP0X( atomic_ullong() = default; )
1183     CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
1184     CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1185     atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
/* Assignment from the value type performs a seq_cst store. */
1187     unsigned long long operator =( unsigned long long __v__ ) volatile
1188     { store( __v__ ); return __v__; }
1190     unsigned long long operator ++( int ) volatile
1191     { return fetch_add( 1 ); }
1193     unsigned long long operator --( int ) volatile
1194     { return fetch_sub( 1 ); }
1196     unsigned long long operator ++() volatile
1197     { return fetch_add( 1 ) + 1; }
1199     unsigned long long operator --() volatile
1200     { return fetch_sub( 1 ) - 1; }
1202     unsigned long long operator +=( unsigned long long __v__ ) volatile
1203     { return fetch_add( __v__ ) + __v__; }
1205     unsigned long long operator -=( unsigned long long __v__ ) volatile
1206     { return fetch_sub( __v__ ) - __v__; }
1208     unsigned long long operator &=( unsigned long long __v__ ) volatile
1209     { return fetch_and( __v__ ) & __v__; }
1211     unsigned long long operator |=( unsigned long long __v__ ) volatile
1212     { return fetch_or( __v__ ) | __v__; }
1214     unsigned long long operator ^=( unsigned long long __v__ ) volatile
1215     { return fetch_xor( __v__ ) ^ __v__; }
/* Free-function API needs access to the private __f__ field. */
1217     friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1219     friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1221     friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1222                                           unsigned long long, memory_order );
1223     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1224                       unsigned long long*, unsigned long long, memory_order, memory_order );
1225     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1226                       unsigned long long*, unsigned long long, memory_order, memory_order );
1227     friend void atomic_fence( const volatile atomic_ullong*, memory_order );
1228     friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1229                                            unsigned long long, memory_order );
1230     friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1231                                            unsigned long long, memory_order );
1232     friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1233                                            unsigned long long, memory_order );
1234     friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
1235                                            unsigned long long, memory_order );
1236     friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1237                                            unsigned long long, memory_order );
/* Stored value; per the file's conventions atomic data fields are __f__. */
1241     unsigned long long __f__;
/* <stdint.h>-style atomic aliases, mapped onto the fixed atomic_* structs. */

/* Least-width integer aliases. */
1245 typedef atomic_schar atomic_int_least8_t;
1246 typedef atomic_uchar atomic_uint_least8_t;
1247 typedef atomic_short atomic_int_least16_t;
1248 typedef atomic_ushort atomic_uint_least16_t;
1249 typedef atomic_int atomic_int_least32_t;
1250 typedef atomic_uint atomic_uint_least32_t;
1251 typedef atomic_llong atomic_int_least64_t;
1252 typedef atomic_ullong atomic_uint_least64_t;
/* Fast-width integer aliases (same mapping as the least-width ones here). */
1254 typedef atomic_schar atomic_int_fast8_t;
1255 typedef atomic_uchar atomic_uint_fast8_t;
1256 typedef atomic_short atomic_int_fast16_t;
1257 typedef atomic_ushort atomic_uint_fast16_t;
1258 typedef atomic_int atomic_int_fast32_t;
1259 typedef atomic_uint atomic_uint_fast32_t;
1260 typedef atomic_llong atomic_int_fast64_t;
1261 typedef atomic_ullong atomic_uint_fast64_t;
/* Pointer-sized and size-type aliases -- assumes long is pointer-sized on
   the supported targets (TODO confirm for LLP64 platforms). */
1263 typedef atomic_long atomic_intptr_t;
1264 typedef atomic_ulong atomic_uintptr_t;
1266 typedef atomic_long atomic_ssize_t;
1267 typedef atomic_ulong atomic_size_t;
1269 typedef atomic_long atomic_ptrdiff_t;
/* Maximum-width integer aliases. */
1271 typedef atomic_llong atomic_intmax_t;
1272 typedef atomic_ullong atomic_uintmax_t;
/*
 * atomic_wchar_t: struct-based atomic wrapper for `wchar_t`.
 * Same template as the sibling atomic_* integer structs in this file.
 *
 * NOTE(review): braces, the `wchar_t __f__;` member and the closing
 * `} atomic_wchar_t;` appear lost in extraction -- verify against the
 * upstream header. (A typedef alternative for atomic_wchar_t also appears
 * later in this file, presumably under a different configuration guard.)
 */
1278 typedef struct atomic_wchar_t
1281     bool is_lock_free() const volatile;
1282     void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1283     wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1284     wchar_t exchange( wchar_t,
1285                    memory_order = memory_order_seq_cst ) volatile;
1286     bool compare_exchange_weak( wchar_t&, wchar_t,
1287                        memory_order, memory_order ) volatile;
1288     bool compare_exchange_strong( wchar_t&, wchar_t,
1289                        memory_order, memory_order ) volatile;
1290     bool compare_exchange_weak( wchar_t&, wchar_t,
1291                        memory_order = memory_order_seq_cst ) volatile;
1292     bool compare_exchange_strong( wchar_t&, wchar_t,
1293                        memory_order = memory_order_seq_cst ) volatile;
1294     void fence( memory_order ) const volatile;
1295     wchar_t fetch_add( wchar_t,
1296                            memory_order = memory_order_seq_cst ) volatile;
1297     wchar_t fetch_sub( wchar_t,
1298                            memory_order = memory_order_seq_cst ) volatile;
1299     wchar_t fetch_and( wchar_t,
1300                            memory_order = memory_order_seq_cst ) volatile;
1301     wchar_t fetch_or( wchar_t,
1302                            memory_order = memory_order_seq_cst ) volatile;
1303     wchar_t fetch_xor( wchar_t,
1304                            memory_order = memory_order_seq_cst ) volatile;
/* C++0x-only special members, compiled away in C via the CPP0X macro. */
1306     CPP0X( atomic_wchar_t() = default; )
1307     CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
1308     CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1309     atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
/* Assignment from the value type performs a seq_cst store. */
1311     wchar_t operator =( wchar_t __v__ ) volatile
1312     { store( __v__ ); return __v__; }
1314     wchar_t operator ++( int ) volatile
1315     { return fetch_add( 1 ); }
1317     wchar_t operator --( int ) volatile
1318     { return fetch_sub( 1 ); }
1320     wchar_t operator ++() volatile
1321     { return fetch_add( 1 ) + 1; }
1323     wchar_t operator --() volatile
1324     { return fetch_sub( 1 ) - 1; }
1326     wchar_t operator +=( wchar_t __v__ ) volatile
1327     { return fetch_add( __v__ ) + __v__; }
1329     wchar_t operator -=( wchar_t __v__ ) volatile
1330     { return fetch_sub( __v__ ) - __v__; }
1332     wchar_t operator &=( wchar_t __v__ ) volatile
1333     { return fetch_and( __v__ ) & __v__; }
1335     wchar_t operator |=( wchar_t __v__ ) volatile
1336     { return fetch_or( __v__ ) | __v__; }
1338     wchar_t operator ^=( wchar_t __v__ ) volatile
1339     { return fetch_xor( __v__ ) ^ __v__; }
/* Free-function API needs access to the private __f__ field. */
1341     friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1343     friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1345     friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1346                                           wchar_t, memory_order );
1347     friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1348                       wchar_t*, wchar_t, memory_order, memory_order );
1349     friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1350                       wchar_t*, wchar_t, memory_order, memory_order );
1351     friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
1352     friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1353                                            wchar_t, memory_order );
1354     friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1355                                            wchar_t, memory_order );
1356     friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1357                                            wchar_t, memory_order );
1358     friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1359                                            wchar_t, memory_order );
1360     friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1361                                            wchar_t, memory_order );
/* Character-type atomic aliases mapped onto the least-width integer atomics.
   NOTE(review): this atomic_wchar_t typedef conflicts with the struct of the
   same name above unless the two are under mutually exclusive preprocessor
   guards (guard lines are not visible here) -- confirm against upstream. */
1371 typedef atomic_int_least16_t atomic_char16_t;
1372 typedef atomic_int_least32_t atomic_char32_t;
1373 typedef atomic_int_least32_t atomic_wchar_t;
/*
 * Primary template atomic<T>: generic atomic wrapper exposing the same
 * member contract as the fixed atomic_* structs (store/load/exchange/CAS/
 * fence), plus assignment from T that performs a seq_cst store.
 *
 * NOTE(review): the `struct atomic {` line and the `T __f__;` member /
 * closing brace are not visible in this region (apparently lost in
 * extraction) -- verify against the upstream header.
 */
1380 template< typename T >
1385     bool is_lock_free() const volatile;
1386     void store( T, memory_order = memory_order_seq_cst ) volatile;
1387     T load( memory_order = memory_order_seq_cst ) volatile;
1388     T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1389     bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1390     bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1391     bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1392     bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1393     void fence( memory_order ) const volatile;
/* C++0x-only special members, compiled away otherwise via the CPP0X macro. */
1395     CPP0X( atomic() = default; )
1396     CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
1397     CPP0X( atomic( const atomic& ) = delete; )
1398     atomic& operator =( const atomic& ) CPP0X(=delete);
/* Assignment from the value type performs a seq_cst store. */
1400     T operator =( T __v__ ) volatile
1401     { store( __v__ ); return __v__; }
/*
 * Partial specialization atomic<T*>: layers typed pointer operations over
 * the untyped atomic_address base. fetch_add/fetch_sub take a ptrdiff_t
 * displacement and return the previous pointer value.
 *
 * NOTE(review): operator+= / operator-= below are declared to take `T*`
 * but forward to fetch_add/fetch_sub, which expect ptrdiff_t; `T* + T*`
 * would also be ill-formed if these were ever instantiated. Likely the
 * parameter should be ptrdiff_t -- flagged for confirmation, left as-is.
 * NOTE(review): the constexpr constructor parameter is declared `T`,
 * not `T*` -- also verify against upstream.
 */
1412 template<typename T> struct atomic< T* > : atomic_address
1414     T* load( memory_order = memory_order_seq_cst ) volatile;
1415     T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1416     bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1417     bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1418     bool compare_exchange_weak( T*&, T*,
1419                        memory_order = memory_order_seq_cst ) volatile;
1420     bool compare_exchange_strong( T*&, T*,
1421                        memory_order = memory_order_seq_cst ) volatile;
1422     T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1423     T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1425     CPP0X( atomic() = default; )
1426     CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
1427     CPP0X( atomic( const atomic& ) = delete; )
1428     atomic& operator =( const atomic& ) CPP0X(=delete);
/* Assignment from the pointer type performs a seq_cst store. */
1430     T* operator =( T* __v__ ) volatile
1431     { store( __v__ ); return __v__; }
1433     T* operator ++( int ) volatile
1434     { return fetch_add( 1 ); }
1436     T* operator --( int ) volatile
1437     { return fetch_sub( 1 ); }
1439     T* operator ++() volatile
1440     { return fetch_add( 1 ) + 1; }
1442     T* operator --() volatile
1443     { return fetch_sub( 1 ) - 1; }
1445     T* operator +=( T* __v__ ) volatile
1446     { return fetch_add( __v__ ) + __v__; }
1448     T* operator -=( T* __v__ ) volatile
1449     { return fetch_sub( __v__ ) - __v__; }
/*
 * Full specializations of atomic<> for bool and void*: thin adapters that
 * inherit the corresponding fixed struct (atomic_bool / atomic_address)
 * and re-expose value assignment, which performs a seq_cst store.
 * Closing braces of these structs are not visible in this region.
 */
1457 template<> struct atomic< bool > : atomic_bool
1459     CPP0X( atomic() = default; )
1460     CPP0X( constexpr explicit atomic( bool __v__ )
1461     : atomic_bool( __v__ ) { } )
1462     CPP0X( atomic( const atomic& ) = delete; )
1463     atomic& operator =( const atomic& ) CPP0X(=delete);
1465     bool operator =( bool __v__ ) volatile
1466     { store( __v__ ); return __v__; }
1470 template<> struct atomic< void* > : atomic_address
1472     CPP0X( atomic() = default; )
1473     CPP0X( constexpr explicit atomic( void* __v__ )
1474     : atomic_address( __v__ ) { } )
1475     CPP0X( atomic( const atomic& ) = delete; )
1476     atomic& operator =( const atomic& ) CPP0X(=delete);
1478     void* operator =( void* __v__ ) volatile
1479     { store( __v__ ); return __v__; }
/*
 * Full specializations of atomic<> for the character types: each inherits
 * the matching fixed struct and re-exposes value assignment (seq_cst store).
 * Closing braces of these structs are not visible in this region.
 */
1483 template<> struct atomic< char > : atomic_char
1485     CPP0X( atomic() = default; )
1486     CPP0X( constexpr explicit atomic( char __v__ )
1487     : atomic_char( __v__ ) { } )
1488     CPP0X( atomic( const atomic& ) = delete; )
1489     atomic& operator =( const atomic& ) CPP0X(=delete);
1491     char operator =( char __v__ ) volatile
1492     { store( __v__ ); return __v__; }
1496 template<> struct atomic< signed char > : atomic_schar
1498     CPP0X( atomic() = default; )
1499     CPP0X( constexpr explicit atomic( signed char __v__ )
1500     : atomic_schar( __v__ ) { } )
1501     CPP0X( atomic( const atomic& ) = delete; )
1502     atomic& operator =( const atomic& ) CPP0X(=delete);
1504     signed char operator =( signed char __v__ ) volatile
1505     { store( __v__ ); return __v__; }
1509 template<> struct atomic< unsigned char > : atomic_uchar
1511     CPP0X( atomic() = default; )
1512     CPP0X( constexpr explicit atomic( unsigned char __v__ )
1513     : atomic_uchar( __v__ ) { } )
1514     CPP0X( atomic( const atomic& ) = delete; )
1515     atomic& operator =( const atomic& ) CPP0X(=delete);
1517     unsigned char operator =( unsigned char __v__ ) volatile
1518     { store( __v__ ); return __v__; }
/*
 * Full specializations of atomic<> for short/ushort/int/uint: each inherits
 * the matching fixed struct and re-exposes value assignment (seq_cst store).
 * Closing braces of these structs are not visible in this region.
 */
1522 template<> struct atomic< short > : atomic_short
1524     CPP0X( atomic() = default; )
1525     CPP0X( constexpr explicit atomic( short __v__ )
1526     : atomic_short( __v__ ) { } )
1527     CPP0X( atomic( const atomic& ) = delete; )
1528     atomic& operator =( const atomic& ) CPP0X(=delete);
1530     short operator =( short __v__ ) volatile
1531     { store( __v__ ); return __v__; }
1535 template<> struct atomic< unsigned short > : atomic_ushort
1537     CPP0X( atomic() = default; )
1538     CPP0X( constexpr explicit atomic( unsigned short __v__ )
1539     : atomic_ushort( __v__ ) { } )
1540     CPP0X( atomic( const atomic& ) = delete; )
1541     atomic& operator =( const atomic& ) CPP0X(=delete);
1543     unsigned short operator =( unsigned short __v__ ) volatile
1544     { store( __v__ ); return __v__; }
1548 template<> struct atomic< int > : atomic_int
1550     CPP0X( atomic() = default; )
1551     CPP0X( constexpr explicit atomic( int __v__ )
1552     : atomic_int( __v__ ) { } )
1553     CPP0X( atomic( const atomic& ) = delete; )
1554     atomic& operator =( const atomic& ) CPP0X(=delete);
1556     int operator =( int __v__ ) volatile
1557     { store( __v__ ); return __v__; }
1561 template<> struct atomic< unsigned int > : atomic_uint
1563     CPP0X( atomic() = default; )
1564     CPP0X( constexpr explicit atomic( unsigned int __v__ )
1565     : atomic_uint( __v__ ) { } )
1566     CPP0X( atomic( const atomic& ) = delete; )
1567     atomic& operator =( const atomic& ) CPP0X(=delete);
1569     unsigned int operator =( unsigned int __v__ ) volatile
1570     { store( __v__ ); return __v__; }
/*
 * Full specializations of atomic<> for long/ulong/llong/ullong/wchar_t:
 * each inherits the matching fixed struct and re-exposes value assignment
 * (seq_cst store). Closing braces are not visible in this region.
 */
1574 template<> struct atomic< long > : atomic_long
1576     CPP0X( atomic() = default; )
1577     CPP0X( constexpr explicit atomic( long __v__ )
1578     : atomic_long( __v__ ) { } )
1579     CPP0X( atomic( const atomic& ) = delete; )
1580     atomic& operator =( const atomic& ) CPP0X(=delete);
1582     long operator =( long __v__ ) volatile
1583     { store( __v__ ); return __v__; }
1587 template<> struct atomic< unsigned long > : atomic_ulong
1589     CPP0X( atomic() = default; )
1590     CPP0X( constexpr explicit atomic( unsigned long __v__ )
1591     : atomic_ulong( __v__ ) { } )
1592     CPP0X( atomic( const atomic& ) = delete; )
1593     atomic& operator =( const atomic& ) CPP0X(=delete);
1595     unsigned long operator =( unsigned long __v__ ) volatile
1596     { store( __v__ ); return __v__; }
1600 template<> struct atomic< long long > : atomic_llong
1602     CPP0X( atomic() = default; )
1603     CPP0X( constexpr explicit atomic( long long __v__ )
1604     : atomic_llong( __v__ ) { } )
1605     CPP0X( atomic( const atomic& ) = delete; )
1606     atomic& operator =( const atomic& ) CPP0X(=delete);
1608     long long operator =( long long __v__ ) volatile
1609     { store( __v__ ); return __v__; }
1613 template<> struct atomic< unsigned long long > : atomic_ullong
1615     CPP0X( atomic() = default; )
1616     CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1617     : atomic_ullong( __v__ ) { } )
1618     CPP0X( atomic( const atomic& ) = delete; )
1619     atomic& operator =( const atomic& ) CPP0X(=delete);
1621     unsigned long long operator =( unsigned long long __v__ ) volatile
1622     { store( __v__ ); return __v__; }
1626 template<> struct atomic< wchar_t > : atomic_wchar_t
1628     CPP0X( atomic() = default; )
1629     CPP0X( constexpr explicit atomic( wchar_t __v__ )
1630     : atomic_wchar_t( __v__ ) { } )
1631     CPP0X( atomic( const atomic& ) = delete; )
1632     atomic& operator =( const atomic& ) CPP0X(=delete);
1634     wchar_t operator =( wchar_t __v__ ) volatile
1635     { store( __v__ ); return __v__; }
/* C-style lock-free query for atomic_bool.
   NOTE(review): the function body is not visible here (apparently lost in
   extraction) -- verify the definition against the upstream header. */
1645 inline bool atomic_is_lock_free
1646 ( const volatile atomic_bool* __a__ )
1649 inline bool atomic_load_explicit
1650 ( volatile atomic_bool* __a__, memory_order __x__ )
1651 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1653 inline bool atomic_load
1654 ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1656 inline void atomic_store_explicit
1657 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1658 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1660 inline void atomic_store
1661 ( volatile atomic_bool* __a__, bool __m__ )
1662 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1664 inline bool atomic_exchange_explicit
1665 ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1666 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1668 inline bool atomic_exchange
1669 ( volatile atomic_bool* __a__, bool __m__ )
1670 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1672 inline bool atomic_compare_exchange_weak_explicit
1673 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1674 memory_order __x__, memory_order __y__ )
1675 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1677 inline bool atomic_compare_exchange_strong_explicit
1678 ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1679 memory_order __x__, memory_order __y__ )
1680 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1682 inline bool atomic_compare_exchange_weak
1683 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1684 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1685 memory_order_seq_cst, memory_order_seq_cst ); }
1687 inline bool atomic_compare_exchange_strong
1688 ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1689 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1690 memory_order_seq_cst, memory_order_seq_cst ); }
1692 inline void atomic_fence
1693 ( const volatile atomic_bool* __a__, memory_order __x__ )
1694 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style lock-free query for atomic_address.
   NOTE(review): the function body is not visible here (apparently lost in
   extraction) -- verify the definition against the upstream header. */
1697 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1700 inline void* atomic_load_explicit
1701 ( volatile atomic_address* __a__, memory_order __x__ )
1702 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1704 inline void* atomic_load( volatile atomic_address* __a__ )
1705 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1707 inline void atomic_store_explicit
1708 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1709 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1711 inline void atomic_store
1712 ( volatile atomic_address* __a__, void* __m__ )
1713 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1715 inline void* atomic_exchange_explicit
1716 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1717 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1719 inline void* atomic_exchange
1720 ( volatile atomic_address* __a__, void* __m__ )
1721 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1723 inline bool atomic_compare_exchange_weak_explicit
1724 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1725 memory_order __x__, memory_order __y__ )
1726 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1728 inline bool atomic_compare_exchange_strong_explicit
1729 ( volatile atomic_address* __a__, void** __e__, void* __m__,
1730 memory_order __x__, memory_order __y__ )
1731 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1733 inline bool atomic_compare_exchange_weak
1734 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1735 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1736 memory_order_seq_cst, memory_order_seq_cst ); }
1738 inline bool atomic_compare_exchange_strong
1739 ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1740 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1741 memory_order_seq_cst, memory_order_seq_cst ); }
1743 inline void atomic_fence
1744 ( const volatile atomic_address* __a__, memory_order __x__ )
1745 { _ATOMIC_FENCE_( __a__, __x__ ); }
/* C-style lock-free query for atomic_char.
   NOTE(review): the function body is not visible here (apparently lost in
   extraction) -- verify the definition against the upstream header. */
1748 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1751 inline char atomic_load_explicit
1752 ( volatile atomic_char* __a__, memory_order __x__ )
1753 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1755 inline char atomic_load( volatile atomic_char* __a__ )
1756 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1758 inline void atomic_store_explicit
1759 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1760 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1762 inline void atomic_store
1763 ( volatile atomic_char* __a__, char __m__ )
1764 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1766 inline char atomic_exchange_explicit
1767 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1768 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1770 inline char atomic_exchange
1771 ( volatile atomic_char* __a__, char __m__ )
1772 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1774 inline bool atomic_compare_exchange_weak_explicit
1775 ( volatile atomic_char* __a__, char* __e__, char __m__,
1776 memory_order __x__, memory_order __y__ )
1777 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1779 inline bool atomic_compare_exchange_strong_explicit
1780 ( volatile atomic_char* __a__, char* __e__, char __m__,
1781 memory_order __x__, memory_order __y__ )
1782 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1784 inline bool atomic_compare_exchange_weak
1785 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1786 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1787 memory_order_seq_cst, memory_order_seq_cst ); }
1789 inline bool atomic_compare_exchange_strong
1790 ( volatile atomic_char* __a__, char* __e__, char __m__ )
1791 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1792 memory_order_seq_cst, memory_order_seq_cst ); }
1794 inline void atomic_fence
1795 ( const volatile atomic_char* __a__, memory_order __x__ )
1796 { _ATOMIC_FENCE_( __a__, __x__ ); }
1799 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1802 inline signed char atomic_load_explicit
1803 ( volatile atomic_schar* __a__, memory_order __x__ )
1804 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1806 inline signed char atomic_load( volatile atomic_schar* __a__ )
1807 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1809 inline void atomic_store_explicit
1810 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1811 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1813 inline void atomic_store
1814 ( volatile atomic_schar* __a__, signed char __m__ )
1815 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1817 inline signed char atomic_exchange_explicit
1818 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1819 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1821 inline signed char atomic_exchange
1822 ( volatile atomic_schar* __a__, signed char __m__ )
1823 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1825 inline bool atomic_compare_exchange_weak_explicit
1826 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1827 memory_order __x__, memory_order __y__ )
1828 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1830 inline bool atomic_compare_exchange_strong_explicit
1831 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1832 memory_order __x__, memory_order __y__ )
1833 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1835 inline bool atomic_compare_exchange_weak
1836 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1837 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1838 memory_order_seq_cst, memory_order_seq_cst ); }
1840 inline bool atomic_compare_exchange_strong
1841 ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1842 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1843 memory_order_seq_cst, memory_order_seq_cst ); }
1845 inline void atomic_fence
1846 ( const volatile atomic_schar* __a__, memory_order __x__ )
1847 { _ATOMIC_FENCE_( __a__, __x__ ); }
1850 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1853 inline unsigned char atomic_load_explicit
1854 ( volatile atomic_uchar* __a__, memory_order __x__ )
1855 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1857 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1858 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1860 inline void atomic_store_explicit
1861 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1862 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1864 inline void atomic_store
1865 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1866 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1868 inline unsigned char atomic_exchange_explicit
1869 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1870 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1872 inline unsigned char atomic_exchange
1873 ( volatile atomic_uchar* __a__, unsigned char __m__ )
1874 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1876 inline bool atomic_compare_exchange_weak_explicit
1877 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1878 memory_order __x__, memory_order __y__ )
1879 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1881 inline bool atomic_compare_exchange_strong_explicit
1882 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1883 memory_order __x__, memory_order __y__ )
1884 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1886 inline bool atomic_compare_exchange_weak
1887 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1888 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1889 memory_order_seq_cst, memory_order_seq_cst ); }
1891 inline bool atomic_compare_exchange_strong
1892 ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1893 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1894 memory_order_seq_cst, memory_order_seq_cst ); }
1896 inline void atomic_fence
1897 ( const volatile atomic_uchar* __a__, memory_order __x__ )
1898 { _ATOMIC_FENCE_( __a__, __x__ ); }
1901 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1904 inline short atomic_load_explicit
1905 ( volatile atomic_short* __a__, memory_order __x__ )
1906 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1908 inline short atomic_load( volatile atomic_short* __a__ )
1909 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1911 inline void atomic_store_explicit
1912 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1913 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1915 inline void atomic_store
1916 ( volatile atomic_short* __a__, short __m__ )
1917 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1919 inline short atomic_exchange_explicit
1920 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1921 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1923 inline short atomic_exchange
1924 ( volatile atomic_short* __a__, short __m__ )
1925 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1927 inline bool atomic_compare_exchange_weak_explicit
1928 ( volatile atomic_short* __a__, short* __e__, short __m__,
1929 memory_order __x__, memory_order __y__ )
1930 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1932 inline bool atomic_compare_exchange_strong_explicit
1933 ( volatile atomic_short* __a__, short* __e__, short __m__,
1934 memory_order __x__, memory_order __y__ )
1935 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1937 inline bool atomic_compare_exchange_weak
1938 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1939 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1940 memory_order_seq_cst, memory_order_seq_cst ); }
1942 inline bool atomic_compare_exchange_strong
1943 ( volatile atomic_short* __a__, short* __e__, short __m__ )
1944 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1945 memory_order_seq_cst, memory_order_seq_cst ); }
1947 inline void atomic_fence
1948 ( const volatile atomic_short* __a__, memory_order __x__ )
1949 { _ATOMIC_FENCE_( __a__, __x__ ); }
1952 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1955 inline unsigned short atomic_load_explicit
1956 ( volatile atomic_ushort* __a__, memory_order __x__ )
1957 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1959 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1960 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1962 inline void atomic_store_explicit
1963 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1964 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1966 inline void atomic_store
1967 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1968 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1970 inline unsigned short atomic_exchange_explicit
1971 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1972 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1974 inline unsigned short atomic_exchange
1975 ( volatile atomic_ushort* __a__, unsigned short __m__ )
1976 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1978 inline bool atomic_compare_exchange_weak_explicit
1979 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1980 memory_order __x__, memory_order __y__ )
1981 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1983 inline bool atomic_compare_exchange_strong_explicit
1984 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
1985 memory_order __x__, memory_order __y__ )
1986 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1988 inline bool atomic_compare_exchange_weak
1989 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1990 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1991 memory_order_seq_cst, memory_order_seq_cst ); }
1993 inline bool atomic_compare_exchange_strong
1994 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
1995 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1996 memory_order_seq_cst, memory_order_seq_cst ); }
1998 inline void atomic_fence
1999 ( const volatile atomic_ushort* __a__, memory_order __x__ )
2000 { _ATOMIC_FENCE_( __a__, __x__ ); }
2003 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
2006 inline int atomic_load_explicit
2007 ( volatile atomic_int* __a__, memory_order __x__ )
2008 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2010 inline int atomic_load( volatile atomic_int* __a__ )
2011 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2013 inline void atomic_store_explicit
2014 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2015 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2017 inline void atomic_store
2018 ( volatile atomic_int* __a__, int __m__ )
2019 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2021 inline int atomic_exchange_explicit
2022 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2023 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2025 inline int atomic_exchange
2026 ( volatile atomic_int* __a__, int __m__ )
2027 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2029 inline bool atomic_compare_exchange_weak_explicit
2030 ( volatile atomic_int* __a__, int* __e__, int __m__,
2031 memory_order __x__, memory_order __y__ )
2032 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2034 inline bool atomic_compare_exchange_strong_explicit
2035 ( volatile atomic_int* __a__, int* __e__, int __m__,
2036 memory_order __x__, memory_order __y__ )
2037 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2039 inline bool atomic_compare_exchange_weak
2040 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2041 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2042 memory_order_seq_cst, memory_order_seq_cst ); }
2044 inline bool atomic_compare_exchange_strong
2045 ( volatile atomic_int* __a__, int* __e__, int __m__ )
2046 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2047 memory_order_seq_cst, memory_order_seq_cst ); }
2049 inline void atomic_fence
2050 ( const volatile atomic_int* __a__, memory_order __x__ )
2051 { _ATOMIC_FENCE_( __a__, __x__ ); }
2054 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
2057 inline unsigned int atomic_load_explicit
2058 ( volatile atomic_uint* __a__, memory_order __x__ )
2059 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2061 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
2062 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2064 inline void atomic_store_explicit
2065 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2066 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2068 inline void atomic_store
2069 ( volatile atomic_uint* __a__, unsigned int __m__ )
2070 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2072 inline unsigned int atomic_exchange_explicit
2073 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2074 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2076 inline unsigned int atomic_exchange
2077 ( volatile atomic_uint* __a__, unsigned int __m__ )
2078 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2080 inline bool atomic_compare_exchange_weak_explicit
2081 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2082 memory_order __x__, memory_order __y__ )
2083 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2085 inline bool atomic_compare_exchange_strong_explicit
2086 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2087 memory_order __x__, memory_order __y__ )
2088 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2090 inline bool atomic_compare_exchange_weak
2091 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2092 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2093 memory_order_seq_cst, memory_order_seq_cst ); }
2095 inline bool atomic_compare_exchange_strong
2096 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2097 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2098 memory_order_seq_cst, memory_order_seq_cst ); }
2100 inline void atomic_fence
2101 ( const volatile atomic_uint* __a__, memory_order __x__ )
2102 { _ATOMIC_FENCE_( __a__, __x__ ); }
2105 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
2108 inline long atomic_load_explicit
2109 ( volatile atomic_long* __a__, memory_order __x__ )
2110 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2112 inline long atomic_load( volatile atomic_long* __a__ )
2113 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2115 inline void atomic_store_explicit
2116 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2117 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2119 inline void atomic_store
2120 ( volatile atomic_long* __a__, long __m__ )
2121 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2123 inline long atomic_exchange_explicit
2124 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2125 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2127 inline long atomic_exchange
2128 ( volatile atomic_long* __a__, long __m__ )
2129 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2131 inline bool atomic_compare_exchange_weak_explicit
2132 ( volatile atomic_long* __a__, long* __e__, long __m__,
2133 memory_order __x__, memory_order __y__ )
2134 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2136 inline bool atomic_compare_exchange_strong_explicit
2137 ( volatile atomic_long* __a__, long* __e__, long __m__,
2138 memory_order __x__, memory_order __y__ )
2139 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2141 inline bool atomic_compare_exchange_weak
2142 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2143 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2144 memory_order_seq_cst, memory_order_seq_cst ); }
2146 inline bool atomic_compare_exchange_strong
2147 ( volatile atomic_long* __a__, long* __e__, long __m__ )
2148 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2149 memory_order_seq_cst, memory_order_seq_cst ); }
2151 inline void atomic_fence
2152 ( const volatile atomic_long* __a__, memory_order __x__ )
2153 { _ATOMIC_FENCE_( __a__, __x__ ); }
2156 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
2159 inline unsigned long atomic_load_explicit
2160 ( volatile atomic_ulong* __a__, memory_order __x__ )
2161 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2163 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
2164 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2166 inline void atomic_store_explicit
2167 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2168 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2170 inline void atomic_store
2171 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2172 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2174 inline unsigned long atomic_exchange_explicit
2175 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2176 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2178 inline unsigned long atomic_exchange
2179 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2180 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2182 inline bool atomic_compare_exchange_weak_explicit
2183 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2184 memory_order __x__, memory_order __y__ )
2185 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2187 inline bool atomic_compare_exchange_strong_explicit
2188 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2189 memory_order __x__, memory_order __y__ )
2190 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2192 inline bool atomic_compare_exchange_weak
2193 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2194 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2195 memory_order_seq_cst, memory_order_seq_cst ); }
2197 inline bool atomic_compare_exchange_strong
2198 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2199 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2200 memory_order_seq_cst, memory_order_seq_cst ); }
2202 inline void atomic_fence
2203 ( const volatile atomic_ulong* __a__, memory_order __x__ )
2204 { _ATOMIC_FENCE_( __a__, __x__ ); }
2207 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2210 inline long long atomic_load_explicit
2211 ( volatile atomic_llong* __a__, memory_order __x__ )
2212 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2214 inline long long atomic_load( volatile atomic_llong* __a__ )
2215 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2217 inline void atomic_store_explicit
2218 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2219 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2221 inline void atomic_store
2222 ( volatile atomic_llong* __a__, long long __m__ )
2223 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2225 inline long long atomic_exchange_explicit
2226 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2227 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2229 inline long long atomic_exchange
2230 ( volatile atomic_llong* __a__, long long __m__ )
2231 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2233 inline bool atomic_compare_exchange_weak_explicit
2234 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2235 memory_order __x__, memory_order __y__ )
2236 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2238 inline bool atomic_compare_exchange_strong_explicit
2239 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2240 memory_order __x__, memory_order __y__ )
2241 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2243 inline bool atomic_compare_exchange_weak
2244 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2245 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2246 memory_order_seq_cst, memory_order_seq_cst ); }
2248 inline bool atomic_compare_exchange_strong
2249 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2250 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2251 memory_order_seq_cst, memory_order_seq_cst ); }
2253 inline void atomic_fence
2254 ( const volatile atomic_llong* __a__, memory_order __x__ )
2255 { _ATOMIC_FENCE_( __a__, __x__ ); }
2258 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2261 inline unsigned long long atomic_load_explicit
2262 ( volatile atomic_ullong* __a__, memory_order __x__ )
2263 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2265 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2266 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2268 inline void atomic_store_explicit
2269 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2270 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2272 inline void atomic_store
2273 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2274 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2276 inline unsigned long long atomic_exchange_explicit
2277 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2278 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2280 inline unsigned long long atomic_exchange
2281 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2282 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2284 inline bool atomic_compare_exchange_weak_explicit
2285 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2286 memory_order __x__, memory_order __y__ )
2287 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2289 inline bool atomic_compare_exchange_strong_explicit
2290 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2291 memory_order __x__, memory_order __y__ )
2292 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2294 inline bool atomic_compare_exchange_weak
2295 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2296 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2297 memory_order_seq_cst, memory_order_seq_cst ); }
2299 inline bool atomic_compare_exchange_strong
2300 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2301 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2302 memory_order_seq_cst, memory_order_seq_cst ); }
2304 inline void atomic_fence
2305 ( const volatile atomic_ullong* __a__, memory_order __x__ )
2306 { _ATOMIC_FENCE_( __a__, __x__ ); }
2309 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2312 inline wchar_t atomic_load_explicit
2313 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2314 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2316 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2317 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2319 inline void atomic_store_explicit
2320 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2321 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2323 inline void atomic_store
2324 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2325 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2327 inline wchar_t atomic_exchange_explicit
2328 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2329 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2331 inline wchar_t atomic_exchange
2332 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2333 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2335 inline bool atomic_compare_exchange_weak_explicit
2336 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2337 memory_order __x__, memory_order __y__ )
2338 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2340 inline bool atomic_compare_exchange_strong_explicit
2341 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2342 memory_order __x__, memory_order __y__ )
2343 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2345 inline bool atomic_compare_exchange_weak
2346 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2347 { return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2348 memory_order_seq_cst, memory_order_seq_cst ); }
2350 inline bool atomic_compare_exchange_strong
2351 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2352 { return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2353 memory_order_seq_cst, memory_order_seq_cst ); }
2355 inline void atomic_fence
2356 ( const volatile atomic_wchar_t* __a__, memory_order __x__ )
2357 { _ATOMIC_FENCE_( __a__, __x__ ); }
2360 inline void* atomic_fetch_add_explicit
2361 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2363 void* volatile* __p__ = &((__a__)->__f__);
2364 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2365 model_rmw_action((void *)__p__, __x__, (uint64_t) ((char*)(*__p__) + __m__));
2368 inline void* atomic_fetch_add
2369 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2370 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2373 inline void* atomic_fetch_sub_explicit
2374 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2376 void* volatile* __p__ = &((__a__)->__f__);
2377 void* __r__ = (void *) model_rmwr_action((void *)__p__, __x__);
2378 model_rmw_action((void *)__p__, __x__, (uint64_t)((char*)(*__p__) - __m__));
2381 inline void* atomic_fetch_sub
2382 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2383 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2385 inline char atomic_fetch_add_explicit
2386 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2387 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2389 inline char atomic_fetch_add
2390 ( volatile atomic_char* __a__, char __m__ )
2391 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2394 inline char atomic_fetch_sub_explicit
2395 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2396 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2398 inline char atomic_fetch_sub
2399 ( volatile atomic_char* __a__, char __m__ )
2400 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2403 inline char atomic_fetch_and_explicit
2404 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2405 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2407 inline char atomic_fetch_and
2408 ( volatile atomic_char* __a__, char __m__ )
2409 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2412 inline char atomic_fetch_or_explicit
2413 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2414 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2416 inline char atomic_fetch_or
2417 ( volatile atomic_char* __a__, char __m__ )
2418 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2421 inline char atomic_fetch_xor_explicit
2422 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2423 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2425 inline char atomic_fetch_xor
2426 ( volatile atomic_char* __a__, char __m__ )
2427 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2430 inline signed char atomic_fetch_add_explicit
2431 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2432 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2434 inline signed char atomic_fetch_add
2435 ( volatile atomic_schar* __a__, signed char __m__ )
2436 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2439 inline signed char atomic_fetch_sub_explicit
2440 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2441 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2443 inline signed char atomic_fetch_sub
2444 ( volatile atomic_schar* __a__, signed char __m__ )
2445 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2448 inline signed char atomic_fetch_and_explicit
2449 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2450 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2452 inline signed char atomic_fetch_and
2453 ( volatile atomic_schar* __a__, signed char __m__ )
2454 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2457 inline signed char atomic_fetch_or_explicit
2458 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2459 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2461 inline signed char atomic_fetch_or
2462 ( volatile atomic_schar* __a__, signed char __m__ )
2463 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2466 inline signed char atomic_fetch_xor_explicit
2467 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2468 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2470 inline signed char atomic_fetch_xor
2471 ( volatile atomic_schar* __a__, signed char __m__ )
2472 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2475 inline unsigned char atomic_fetch_add_explicit
2476 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2477 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2479 inline unsigned char atomic_fetch_add
2480 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2481 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2484 inline unsigned char atomic_fetch_sub_explicit
2485 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2486 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2488 inline unsigned char atomic_fetch_sub
2489 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2490 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2493 inline unsigned char atomic_fetch_and_explicit
2494 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2495 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2497 inline unsigned char atomic_fetch_and
2498 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2499 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2502 inline unsigned char atomic_fetch_or_explicit
2503 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2504 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2506 inline unsigned char atomic_fetch_or
2507 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2508 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2511 inline unsigned char atomic_fetch_xor_explicit
2512 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2513 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2515 inline unsigned char atomic_fetch_xor
2516 ( volatile atomic_uchar* __a__, unsigned char __m__ )
2517 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2520 inline short atomic_fetch_add_explicit
2521 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2522 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2524 inline short atomic_fetch_add
2525 ( volatile atomic_short* __a__, short __m__ )
2526 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2529 inline short atomic_fetch_sub_explicit
2530 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2531 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2533 inline short atomic_fetch_sub
2534 ( volatile atomic_short* __a__, short __m__ )
2535 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2538 inline short atomic_fetch_and_explicit
2539 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2540 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2542 inline short atomic_fetch_and
2543 ( volatile atomic_short* __a__, short __m__ )
2544 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2547 inline short atomic_fetch_or_explicit
2548 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2549 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2551 inline short atomic_fetch_or
2552 ( volatile atomic_short* __a__, short __m__ )
2553 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2556 inline short atomic_fetch_xor_explicit
2557 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2558 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2560 inline short atomic_fetch_xor
2561 ( volatile atomic_short* __a__, short __m__ )
2562 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2565 inline unsigned short atomic_fetch_add_explicit
2566 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2567 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2569 inline unsigned short atomic_fetch_add
2570 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2571 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2574 inline unsigned short atomic_fetch_sub_explicit
2575 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2576 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2578 inline unsigned short atomic_fetch_sub
2579 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2580 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2583 inline unsigned short atomic_fetch_and_explicit
2584 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2585 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2587 inline unsigned short atomic_fetch_and
2588 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2589 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2592 inline unsigned short atomic_fetch_or_explicit
2593 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2594 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2596 inline unsigned short atomic_fetch_or
2597 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2598 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2601 inline unsigned short atomic_fetch_xor_explicit
2602 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2603 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2605 inline unsigned short atomic_fetch_xor
2606 ( volatile atomic_ushort* __a__, unsigned short __m__ )
2607 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2610 inline int atomic_fetch_add_explicit
2611 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2612 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2614 inline int atomic_fetch_add
2615 ( volatile atomic_int* __a__, int __m__ )
2616 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2619 inline int atomic_fetch_sub_explicit
2620 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2621 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2623 inline int atomic_fetch_sub
2624 ( volatile atomic_int* __a__, int __m__ )
2625 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2628 inline int atomic_fetch_and_explicit
2629 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2630 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2632 inline int atomic_fetch_and
2633 ( volatile atomic_int* __a__, int __m__ )
2634 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2637 inline int atomic_fetch_or_explicit
2638 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2639 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2641 inline int atomic_fetch_or
2642 ( volatile atomic_int* __a__, int __m__ )
2643 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2646 inline int atomic_fetch_xor_explicit
2647 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2648 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2650 inline int atomic_fetch_xor
2651 ( volatile atomic_int* __a__, int __m__ )
2652 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2655 inline unsigned int atomic_fetch_add_explicit
2656 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2657 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2659 inline unsigned int atomic_fetch_add
2660 ( volatile atomic_uint* __a__, unsigned int __m__ )
2661 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2664 inline unsigned int atomic_fetch_sub_explicit
2665 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2666 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2668 inline unsigned int atomic_fetch_sub
2669 ( volatile atomic_uint* __a__, unsigned int __m__ )
2670 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2673 inline unsigned int atomic_fetch_and_explicit
2674 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2675 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2677 inline unsigned int atomic_fetch_and
2678 ( volatile atomic_uint* __a__, unsigned int __m__ )
2679 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2682 inline unsigned int atomic_fetch_or_explicit
2683 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2684 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2686 inline unsigned int atomic_fetch_or
2687 ( volatile atomic_uint* __a__, unsigned int __m__ )
2688 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2691 inline unsigned int atomic_fetch_xor_explicit
2692 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2693 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2695 inline unsigned int atomic_fetch_xor
2696 ( volatile atomic_uint* __a__, unsigned int __m__ )
2697 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2700 inline long atomic_fetch_add_explicit
2701 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2702 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2704 inline long atomic_fetch_add
2705 ( volatile atomic_long* __a__, long __m__ )
2706 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2709 inline long atomic_fetch_sub_explicit
2710 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2711 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2713 inline long atomic_fetch_sub
2714 ( volatile atomic_long* __a__, long __m__ )
2715 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2718 inline long atomic_fetch_and_explicit
2719 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2720 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2722 inline long atomic_fetch_and
2723 ( volatile atomic_long* __a__, long __m__ )
2724 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2727 inline long atomic_fetch_or_explicit
2728 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2729 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2731 inline long atomic_fetch_or
2732 ( volatile atomic_long* __a__, long __m__ )
2733 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2736 inline long atomic_fetch_xor_explicit
2737 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2738 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2740 inline long atomic_fetch_xor
2741 ( volatile atomic_long* __a__, long __m__ )
2742 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2745 inline unsigned long atomic_fetch_add_explicit
2746 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2747 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2749 inline unsigned long atomic_fetch_add
2750 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2751 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2754 inline unsigned long atomic_fetch_sub_explicit
2755 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2756 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2758 inline unsigned long atomic_fetch_sub
2759 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2760 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2763 inline unsigned long atomic_fetch_and_explicit
2764 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2765 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2767 inline unsigned long atomic_fetch_and
2768 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2769 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2772 inline unsigned long atomic_fetch_or_explicit
2773 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2774 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2776 inline unsigned long atomic_fetch_or
2777 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2778 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2781 inline unsigned long atomic_fetch_xor_explicit
2782 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2783 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2785 inline unsigned long atomic_fetch_xor
2786 ( volatile atomic_ulong* __a__, unsigned long __m__ )
2787 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2790 inline long long atomic_fetch_add_explicit
2791 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2792 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2794 inline long long atomic_fetch_add
2795 ( volatile atomic_llong* __a__, long long __m__ )
2796 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2799 inline long long atomic_fetch_sub_explicit
2800 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2801 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2803 inline long long atomic_fetch_sub
2804 ( volatile atomic_llong* __a__, long long __m__ )
2805 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2808 inline long long atomic_fetch_and_explicit
2809 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2810 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2812 inline long long atomic_fetch_and
2813 ( volatile atomic_llong* __a__, long long __m__ )
2814 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2817 inline long long atomic_fetch_or_explicit
2818 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2819 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2821 inline long long atomic_fetch_or
2822 ( volatile atomic_llong* __a__, long long __m__ )
2823 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2826 inline long long atomic_fetch_xor_explicit
2827 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2828 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2830 inline long long atomic_fetch_xor
2831 ( volatile atomic_llong* __a__, long long __m__ )
2832 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2835 inline unsigned long long atomic_fetch_add_explicit
2836 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2837 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2839 inline unsigned long long atomic_fetch_add
2840 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2841 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2844 inline unsigned long long atomic_fetch_sub_explicit
2845 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2846 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2848 inline unsigned long long atomic_fetch_sub
2849 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2850 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2853 inline unsigned long long atomic_fetch_and_explicit
2854 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2855 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2857 inline unsigned long long atomic_fetch_and
2858 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2859 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2862 inline unsigned long long atomic_fetch_or_explicit
2863 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2864 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2866 inline unsigned long long atomic_fetch_or
2867 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2868 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2871 inline unsigned long long atomic_fetch_xor_explicit
2872 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2873 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2875 inline unsigned long long atomic_fetch_xor
2876 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2877 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2880 inline wchar_t atomic_fetch_add_explicit
2881 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2882 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2884 inline wchar_t atomic_fetch_add
2885 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2886 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2889 inline wchar_t atomic_fetch_sub_explicit
2890 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2891 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2893 inline wchar_t atomic_fetch_sub
2894 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2895 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2898 inline wchar_t atomic_fetch_and_explicit
2899 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2900 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2902 inline wchar_t atomic_fetch_and
2903 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2904 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2907 inline wchar_t atomic_fetch_or_explicit
2908 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2909 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2911 inline wchar_t atomic_fetch_or
2912 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2913 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2916 inline wchar_t atomic_fetch_xor_explicit
2917 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2918 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2920 inline wchar_t atomic_fetch_xor
2921 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2922 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2928 #define atomic_is_lock_free( __a__ ) \
2931 #define atomic_load( __a__ ) \
2932 _ATOMIC_LOAD_( __a__, memory_order_seq_cst )
2934 #define atomic_load_explicit( __a__, __x__ ) \
2935 _ATOMIC_LOAD_( __a__, __x__ )
2937 #define atomic_init( __a__, __m__ ) \
2938 _ATOMIC_INIT_( __a__, __m__ )
2940 #define atomic_store( __a__, __m__ ) \
2941 _ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )
2943 #define atomic_store_explicit( __a__, __m__, __x__ ) \
2944 _ATOMIC_STORE_( __a__, __m__, __x__ )
2946 #define atomic_exchange( __a__, __m__ ) \
2947 _ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )
2949 #define atomic_exchange_explicit( __a__, __m__, __x__ ) \
2950 _ATOMIC_MODIFY_( __a__, =, __m__, __x__ )
2952 #define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
2953 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )
2955 #define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
2956 _ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )
2958 #define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2959 _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )
2961 #define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
2962 _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
2964 #define atomic_fence( __a__, __x__ ) \
2965 ({ _ATOMIC_FENCE_( __a__, __x__ ); })
2968 #define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
2969 _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )
2971 #define atomic_fetch_add( __a__, __m__ ) \
2972 _ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )
2975 #define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
2976 _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )
2978 #define atomic_fetch_sub( __a__, __m__ ) \
2979 _ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )
2982 #define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
2983 _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )
2985 #define atomic_fetch_and( __a__, __m__ ) \
2986 _ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )
2989 #define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
2990 _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )
2992 #define atomic_fetch_or( __a__, __m__ ) \
2993 _ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )
2996 #define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
2997 _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )
2999 #define atomic_fetch_xor( __a__, __m__ ) \
3000 _ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
3009 inline bool atomic_bool::is_lock_free() const volatile
3012 inline void atomic_bool::store
3013 ( bool __m__, memory_order __x__ ) volatile
3014 { atomic_store_explicit( this, __m__, __x__ ); }
3016 inline bool atomic_bool::load
3017 ( memory_order __x__ ) volatile
3018 { return atomic_load_explicit( this, __x__ ); }
3020 inline bool atomic_bool::exchange
3021 ( bool __m__, memory_order __x__ ) volatile
3022 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3024 inline bool atomic_bool::compare_exchange_weak
3025 ( bool& __e__, bool __m__,
3026 memory_order __x__, memory_order __y__ ) volatile
3027 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3029 inline bool atomic_bool::compare_exchange_strong
3030 ( bool& __e__, bool __m__,
3031 memory_order __x__, memory_order __y__ ) volatile
3032 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3034 inline bool atomic_bool::compare_exchange_weak
3035 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3036 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3037 __x__ == memory_order_acq_rel ? memory_order_acquire :
3038 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3040 inline bool atomic_bool::compare_exchange_strong
3041 ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3042 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3043 __x__ == memory_order_acq_rel ? memory_order_acquire :
3044 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3046 inline void atomic_bool::fence
3047 ( memory_order __x__ ) const volatile
3048 { return atomic_fence( this, __x__ ); }
3051 inline bool atomic_address::is_lock_free() const volatile
3054 inline void atomic_address::store
3055 ( void* __m__, memory_order __x__ ) volatile
3056 { atomic_store_explicit( this, __m__, __x__ ); }
3058 inline void* atomic_address::load
3059 ( memory_order __x__ ) volatile
3060 { return atomic_load_explicit( this, __x__ ); }
3062 inline void* atomic_address::exchange
3063 ( void* __m__, memory_order __x__ ) volatile
3064 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3066 inline bool atomic_address::compare_exchange_weak
3067 ( void*& __e__, void* __m__,
3068 memory_order __x__, memory_order __y__ ) volatile
3069 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3071 inline bool atomic_address::compare_exchange_strong
3072 ( void*& __e__, void* __m__,
3073 memory_order __x__, memory_order __y__ ) volatile
3074 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3076 inline bool atomic_address::compare_exchange_weak
3077 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3078 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3079 __x__ == memory_order_acq_rel ? memory_order_acquire :
3080 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3082 inline bool atomic_address::compare_exchange_strong
3083 ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3084 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3085 __x__ == memory_order_acq_rel ? memory_order_acquire :
3086 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3088 inline void atomic_address::fence
3089 ( memory_order __x__ ) const volatile
3090 { return atomic_fence( this, __x__ ); }
3093 inline bool atomic_char::is_lock_free() const volatile
3096 inline void atomic_char::store
3097 ( char __m__, memory_order __x__ ) volatile
3098 { atomic_store_explicit( this, __m__, __x__ ); }
3100 inline char atomic_char::load
3101 ( memory_order __x__ ) volatile
3102 { return atomic_load_explicit( this, __x__ ); }
3104 inline char atomic_char::exchange
3105 ( char __m__, memory_order __x__ ) volatile
3106 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3108 inline bool atomic_char::compare_exchange_weak
3109 ( char& __e__, char __m__,
3110 memory_order __x__, memory_order __y__ ) volatile
3111 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3113 inline bool atomic_char::compare_exchange_strong
3114 ( char& __e__, char __m__,
3115 memory_order __x__, memory_order __y__ ) volatile
3116 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3118 inline bool atomic_char::compare_exchange_weak
3119 ( char& __e__, char __m__, memory_order __x__ ) volatile
3120 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3121 __x__ == memory_order_acq_rel ? memory_order_acquire :
3122 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3124 inline bool atomic_char::compare_exchange_strong
3125 ( char& __e__, char __m__, memory_order __x__ ) volatile
3126 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3127 __x__ == memory_order_acq_rel ? memory_order_acquire :
3128 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3130 inline void atomic_char::fence
3131 ( memory_order __x__ ) const volatile
3132 { return atomic_fence( this, __x__ ); }
3135 inline bool atomic_schar::is_lock_free() const volatile
3138 inline void atomic_schar::store
3139 ( signed char __m__, memory_order __x__ ) volatile
3140 { atomic_store_explicit( this, __m__, __x__ ); }
3142 inline signed char atomic_schar::load
3143 ( memory_order __x__ ) volatile
3144 { return atomic_load_explicit( this, __x__ ); }
3146 inline signed char atomic_schar::exchange
3147 ( signed char __m__, memory_order __x__ ) volatile
3148 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3150 inline bool atomic_schar::compare_exchange_weak
3151 ( signed char& __e__, signed char __m__,
3152 memory_order __x__, memory_order __y__ ) volatile
3153 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3155 inline bool atomic_schar::compare_exchange_strong
3156 ( signed char& __e__, signed char __m__,
3157 memory_order __x__, memory_order __y__ ) volatile
3158 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3160 inline bool atomic_schar::compare_exchange_weak
3161 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3162 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3163 __x__ == memory_order_acq_rel ? memory_order_acquire :
3164 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3166 inline bool atomic_schar::compare_exchange_strong
3167 ( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
3168 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3169 __x__ == memory_order_acq_rel ? memory_order_acquire :
3170 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3172 inline void atomic_schar::fence
3173 ( memory_order __x__ ) const volatile
3174 { return atomic_fence( this, __x__ ); }
3177 inline bool atomic_uchar::is_lock_free() const volatile
3180 inline void atomic_uchar::store
3181 ( unsigned char __m__, memory_order __x__ ) volatile
3182 { atomic_store_explicit( this, __m__, __x__ ); }
3184 inline unsigned char atomic_uchar::load
3185 ( memory_order __x__ ) volatile
3186 { return atomic_load_explicit( this, __x__ ); }
3188 inline unsigned char atomic_uchar::exchange
3189 ( unsigned char __m__, memory_order __x__ ) volatile
3190 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3192 inline bool atomic_uchar::compare_exchange_weak
3193 ( unsigned char& __e__, unsigned char __m__,
3194 memory_order __x__, memory_order __y__ ) volatile
3195 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3197 inline bool atomic_uchar::compare_exchange_strong
3198 ( unsigned char& __e__, unsigned char __m__,
3199 memory_order __x__, memory_order __y__ ) volatile
3200 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3202 inline bool atomic_uchar::compare_exchange_weak
3203 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3204 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3205 __x__ == memory_order_acq_rel ? memory_order_acquire :
3206 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3208 inline bool atomic_uchar::compare_exchange_strong
3209 ( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
3210 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3211 __x__ == memory_order_acq_rel ? memory_order_acquire :
3212 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3214 inline void atomic_uchar::fence
3215 ( memory_order __x__ ) const volatile
3216 { return atomic_fence( this, __x__ ); }
3219 inline bool atomic_short::is_lock_free() const volatile
3222 inline void atomic_short::store
3223 ( short __m__, memory_order __x__ ) volatile
3224 { atomic_store_explicit( this, __m__, __x__ ); }
3226 inline short atomic_short::load
3227 ( memory_order __x__ ) volatile
3228 { return atomic_load_explicit( this, __x__ ); }
3230 inline short atomic_short::exchange
3231 ( short __m__, memory_order __x__ ) volatile
3232 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3234 inline bool atomic_short::compare_exchange_weak
3235 ( short& __e__, short __m__,
3236 memory_order __x__, memory_order __y__ ) volatile
3237 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3239 inline bool atomic_short::compare_exchange_strong
3240 ( short& __e__, short __m__,
3241 memory_order __x__, memory_order __y__ ) volatile
3242 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3244 inline bool atomic_short::compare_exchange_weak
3245 ( short& __e__, short __m__, memory_order __x__ ) volatile
3246 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3247 __x__ == memory_order_acq_rel ? memory_order_acquire :
3248 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3250 inline bool atomic_short::compare_exchange_strong
3251 ( short& __e__, short __m__, memory_order __x__ ) volatile
3252 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3253 __x__ == memory_order_acq_rel ? memory_order_acquire :
3254 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3256 inline void atomic_short::fence
3257 ( memory_order __x__ ) const volatile
3258 { return atomic_fence( this, __x__ ); }
3261 inline bool atomic_ushort::is_lock_free() const volatile
3264 inline void atomic_ushort::store
3265 ( unsigned short __m__, memory_order __x__ ) volatile
3266 { atomic_store_explicit( this, __m__, __x__ ); }
3268 inline unsigned short atomic_ushort::load
3269 ( memory_order __x__ ) volatile
3270 { return atomic_load_explicit( this, __x__ ); }
3272 inline unsigned short atomic_ushort::exchange
3273 ( unsigned short __m__, memory_order __x__ ) volatile
3274 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3276 inline bool atomic_ushort::compare_exchange_weak
3277 ( unsigned short& __e__, unsigned short __m__,
3278 memory_order __x__, memory_order __y__ ) volatile
3279 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3281 inline bool atomic_ushort::compare_exchange_strong
3282 ( unsigned short& __e__, unsigned short __m__,
3283 memory_order __x__, memory_order __y__ ) volatile
3284 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3286 inline bool atomic_ushort::compare_exchange_weak
3287 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3288 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3289 __x__ == memory_order_acq_rel ? memory_order_acquire :
3290 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3292 inline bool atomic_ushort::compare_exchange_strong
3293 ( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
3294 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3295 __x__ == memory_order_acq_rel ? memory_order_acquire :
3296 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3298 inline void atomic_ushort::fence
3299 ( memory_order __x__ ) const volatile
3300 { return atomic_fence( this, __x__ ); }
3303 inline bool atomic_int::is_lock_free() const volatile
3306 inline void atomic_int::store
3307 ( int __m__, memory_order __x__ ) volatile
3308 { atomic_store_explicit( this, __m__, __x__ ); }
3310 inline int atomic_int::load
3311 ( memory_order __x__ ) volatile
3312 { return atomic_load_explicit( this, __x__ ); }
3314 inline int atomic_int::exchange
3315 ( int __m__, memory_order __x__ ) volatile
3316 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3318 inline bool atomic_int::compare_exchange_weak
3319 ( int& __e__, int __m__,
3320 memory_order __x__, memory_order __y__ ) volatile
3321 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3323 inline bool atomic_int::compare_exchange_strong
3324 ( int& __e__, int __m__,
3325 memory_order __x__, memory_order __y__ ) volatile
3326 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3328 inline bool atomic_int::compare_exchange_weak
3329 ( int& __e__, int __m__, memory_order __x__ ) volatile
3330 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3331 __x__ == memory_order_acq_rel ? memory_order_acquire :
3332 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3334 inline bool atomic_int::compare_exchange_strong
3335 ( int& __e__, int __m__, memory_order __x__ ) volatile
3336 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3337 __x__ == memory_order_acq_rel ? memory_order_acquire :
3338 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3340 inline void atomic_int::fence
3341 ( memory_order __x__ ) const volatile
3342 { return atomic_fence( this, __x__ ); }
3345 inline bool atomic_uint::is_lock_free() const volatile
3348 inline void atomic_uint::store
3349 ( unsigned int __m__, memory_order __x__ ) volatile
3350 { atomic_store_explicit( this, __m__, __x__ ); }
3352 inline unsigned int atomic_uint::load
3353 ( memory_order __x__ ) volatile
3354 { return atomic_load_explicit( this, __x__ ); }
3356 inline unsigned int atomic_uint::exchange
3357 ( unsigned int __m__, memory_order __x__ ) volatile
3358 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3360 inline bool atomic_uint::compare_exchange_weak
3361 ( unsigned int& __e__, unsigned int __m__,
3362 memory_order __x__, memory_order __y__ ) volatile
3363 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3365 inline bool atomic_uint::compare_exchange_strong
3366 ( unsigned int& __e__, unsigned int __m__,
3367 memory_order __x__, memory_order __y__ ) volatile
3368 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3370 inline bool atomic_uint::compare_exchange_weak
3371 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3372 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3373 __x__ == memory_order_acq_rel ? memory_order_acquire :
3374 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3376 inline bool atomic_uint::compare_exchange_strong
3377 ( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
3378 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3379 __x__ == memory_order_acq_rel ? memory_order_acquire :
3380 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3382 inline void atomic_uint::fence
3383 ( memory_order __x__ ) const volatile
3384 { return atomic_fence( this, __x__ ); }
3387 inline bool atomic_long::is_lock_free() const volatile
3390 inline void atomic_long::store
3391 ( long __m__, memory_order __x__ ) volatile
3392 { atomic_store_explicit( this, __m__, __x__ ); }
3394 inline long atomic_long::load
3395 ( memory_order __x__ ) volatile
3396 { return atomic_load_explicit( this, __x__ ); }
3398 inline long atomic_long::exchange
3399 ( long __m__, memory_order __x__ ) volatile
3400 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3402 inline bool atomic_long::compare_exchange_weak
3403 ( long& __e__, long __m__,
3404 memory_order __x__, memory_order __y__ ) volatile
3405 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3407 inline bool atomic_long::compare_exchange_strong
3408 ( long& __e__, long __m__,
3409 memory_order __x__, memory_order __y__ ) volatile
3410 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3412 inline bool atomic_long::compare_exchange_weak
3413 ( long& __e__, long __m__, memory_order __x__ ) volatile
3414 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3415 __x__ == memory_order_acq_rel ? memory_order_acquire :
3416 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3418 inline bool atomic_long::compare_exchange_strong
3419 ( long& __e__, long __m__, memory_order __x__ ) volatile
3420 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3421 __x__ == memory_order_acq_rel ? memory_order_acquire :
3422 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3424 inline void atomic_long::fence
3425 ( memory_order __x__ ) const volatile
3426 { return atomic_fence( this, __x__ ); }
3429 inline bool atomic_ulong::is_lock_free() const volatile
3432 inline void atomic_ulong::store
3433 ( unsigned long __m__, memory_order __x__ ) volatile
3434 { atomic_store_explicit( this, __m__, __x__ ); }
3436 inline unsigned long atomic_ulong::load
3437 ( memory_order __x__ ) volatile
3438 { return atomic_load_explicit( this, __x__ ); }
3440 inline unsigned long atomic_ulong::exchange
3441 ( unsigned long __m__, memory_order __x__ ) volatile
3442 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3444 inline bool atomic_ulong::compare_exchange_weak
3445 ( unsigned long& __e__, unsigned long __m__,
3446 memory_order __x__, memory_order __y__ ) volatile
3447 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3449 inline bool atomic_ulong::compare_exchange_strong
3450 ( unsigned long& __e__, unsigned long __m__,
3451 memory_order __x__, memory_order __y__ ) volatile
3452 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3454 inline bool atomic_ulong::compare_exchange_weak
3455 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3456 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3457 __x__ == memory_order_acq_rel ? memory_order_acquire :
3458 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3460 inline bool atomic_ulong::compare_exchange_strong
3461 ( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
3462 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3463 __x__ == memory_order_acq_rel ? memory_order_acquire :
3464 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3466 inline void atomic_ulong::fence
3467 ( memory_order __x__ ) const volatile
3468 { return atomic_fence( this, __x__ ); }
3471 inline bool atomic_llong::is_lock_free() const volatile
3474 inline void atomic_llong::store
3475 ( long long __m__, memory_order __x__ ) volatile
3476 { atomic_store_explicit( this, __m__, __x__ ); }
3478 inline long long atomic_llong::load
3479 ( memory_order __x__ ) volatile
3480 { return atomic_load_explicit( this, __x__ ); }
3482 inline long long atomic_llong::exchange
3483 ( long long __m__, memory_order __x__ ) volatile
3484 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3486 inline bool atomic_llong::compare_exchange_weak
3487 ( long long& __e__, long long __m__,
3488 memory_order __x__, memory_order __y__ ) volatile
3489 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3491 inline bool atomic_llong::compare_exchange_strong
3492 ( long long& __e__, long long __m__,
3493 memory_order __x__, memory_order __y__ ) volatile
3494 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3496 inline bool atomic_llong::compare_exchange_weak
3497 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3498 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3499 __x__ == memory_order_acq_rel ? memory_order_acquire :
3500 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3502 inline bool atomic_llong::compare_exchange_strong
3503 ( long long& __e__, long long __m__, memory_order __x__ ) volatile
3504 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3505 __x__ == memory_order_acq_rel ? memory_order_acquire :
3506 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3508 inline void atomic_llong::fence
3509 ( memory_order __x__ ) const volatile
3510 { return atomic_fence( this, __x__ ); }
3513 inline bool atomic_ullong::is_lock_free() const volatile
3516 inline void atomic_ullong::store
3517 ( unsigned long long __m__, memory_order __x__ ) volatile
3518 { atomic_store_explicit( this, __m__, __x__ ); }
3520 inline unsigned long long atomic_ullong::load
3521 ( memory_order __x__ ) volatile
3522 { return atomic_load_explicit( this, __x__ ); }
3524 inline unsigned long long atomic_ullong::exchange
3525 ( unsigned long long __m__, memory_order __x__ ) volatile
3526 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3528 inline bool atomic_ullong::compare_exchange_weak
3529 ( unsigned long long& __e__, unsigned long long __m__,
3530 memory_order __x__, memory_order __y__ ) volatile
3531 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3533 inline bool atomic_ullong::compare_exchange_strong
3534 ( unsigned long long& __e__, unsigned long long __m__,
3535 memory_order __x__, memory_order __y__ ) volatile
3536 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3538 inline bool atomic_ullong::compare_exchange_weak
3539 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3540 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3541 __x__ == memory_order_acq_rel ? memory_order_acquire :
3542 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3544 inline bool atomic_ullong::compare_exchange_strong
3545 ( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
3546 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3547 __x__ == memory_order_acq_rel ? memory_order_acquire :
3548 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3550 inline void atomic_ullong::fence
3551 ( memory_order __x__ ) const volatile
3552 { return atomic_fence( this, __x__ ); }
3555 inline bool atomic_wchar_t::is_lock_free() const volatile
3558 inline void atomic_wchar_t::store
3559 ( wchar_t __m__, memory_order __x__ ) volatile
3560 { atomic_store_explicit( this, __m__, __x__ ); }
3562 inline wchar_t atomic_wchar_t::load
3563 ( memory_order __x__ ) volatile
3564 { return atomic_load_explicit( this, __x__ ); }
3566 inline wchar_t atomic_wchar_t::exchange
3567 ( wchar_t __m__, memory_order __x__ ) volatile
3568 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3570 inline bool atomic_wchar_t::compare_exchange_weak
3571 ( wchar_t& __e__, wchar_t __m__,
3572 memory_order __x__, memory_order __y__ ) volatile
3573 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3575 inline bool atomic_wchar_t::compare_exchange_strong
3576 ( wchar_t& __e__, wchar_t __m__,
3577 memory_order __x__, memory_order __y__ ) volatile
3578 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3580 inline bool atomic_wchar_t::compare_exchange_weak
3581 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3582 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3583 __x__ == memory_order_acq_rel ? memory_order_acquire :
3584 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3586 inline bool atomic_wchar_t::compare_exchange_strong
3587 ( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
3588 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3589 __x__ == memory_order_acq_rel ? memory_order_acquire :
3590 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3592 inline void atomic_wchar_t::fence
3593 ( memory_order __x__ ) const volatile
3594 { return atomic_fence( this, __x__ ); }
3597 template< typename T >
3598 inline bool atomic<T>::is_lock_free() const volatile
3601 template< typename T >
3602 inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
3603 { _ATOMIC_STORE_( this, __v__, __x__ ); }
3605 template< typename T >
3606 inline T atomic<T>::load( memory_order __x__ ) volatile
3607 { return _ATOMIC_LOAD_( this, __x__ ); }
3609 template< typename T >
3610 inline T atomic<T>::exchange( T __v__, memory_order __x__ ) volatile
3611 { return _ATOMIC_MODIFY_( this, =, __v__, __x__ ); }
3613 template< typename T >
3614 inline bool atomic<T>::compare_exchange_weak
3615 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3616 { return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3618 template< typename T >
3619 inline bool atomic<T>::compare_exchange_strong
3620 ( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
3621 { return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3623 template< typename T >
3624 inline bool atomic<T>::compare_exchange_weak
3625 ( T& __r__, T __v__, memory_order __x__ ) volatile
3626 { return compare_exchange_weak( __r__, __v__, __x__,
3627 __x__ == memory_order_acq_rel ? memory_order_acquire :
3628 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3630 template< typename T >
3631 inline bool atomic<T>::compare_exchange_strong
3632 ( T& __r__, T __v__, memory_order __x__ ) volatile
3633 { return compare_exchange_strong( __r__, __v__, __x__,
3634 __x__ == memory_order_acq_rel ? memory_order_acquire :
3635 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3638 inline void* atomic_address::fetch_add
3639 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3640 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3642 inline void* atomic_address::fetch_sub
3643 ( ptrdiff_t __m__, memory_order __x__ ) volatile
3644 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3647 inline char atomic_char::fetch_add
3648 ( char __m__, memory_order __x__ ) volatile
3649 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3652 inline char atomic_char::fetch_sub
3653 ( char __m__, memory_order __x__ ) volatile
3654 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3657 inline char atomic_char::fetch_and
3658 ( char __m__, memory_order __x__ ) volatile
3659 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3662 inline char atomic_char::fetch_or
3663 ( char __m__, memory_order __x__ ) volatile
3664 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3667 inline char atomic_char::fetch_xor
3668 ( char __m__, memory_order __x__ ) volatile
3669 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3672 inline signed char atomic_schar::fetch_add
3673 ( signed char __m__, memory_order __x__ ) volatile
3674 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3677 inline signed char atomic_schar::fetch_sub
3678 ( signed char __m__, memory_order __x__ ) volatile
3679 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3682 inline signed char atomic_schar::fetch_and
3683 ( signed char __m__, memory_order __x__ ) volatile
3684 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3687 inline signed char atomic_schar::fetch_or
3688 ( signed char __m__, memory_order __x__ ) volatile
3689 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3692 inline signed char atomic_schar::fetch_xor
3693 ( signed char __m__, memory_order __x__ ) volatile
3694 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3697 inline unsigned char atomic_uchar::fetch_add
3698 ( unsigned char __m__, memory_order __x__ ) volatile
3699 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3702 inline unsigned char atomic_uchar::fetch_sub
3703 ( unsigned char __m__, memory_order __x__ ) volatile
3704 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3707 inline unsigned char atomic_uchar::fetch_and
3708 ( unsigned char __m__, memory_order __x__ ) volatile
3709 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3712 inline unsigned char atomic_uchar::fetch_or
3713 ( unsigned char __m__, memory_order __x__ ) volatile
3714 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3717 inline unsigned char atomic_uchar::fetch_xor
3718 ( unsigned char __m__, memory_order __x__ ) volatile
3719 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3722 inline short atomic_short::fetch_add
3723 ( short __m__, memory_order __x__ ) volatile
3724 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3727 inline short atomic_short::fetch_sub
3728 ( short __m__, memory_order __x__ ) volatile
3729 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3732 inline short atomic_short::fetch_and
3733 ( short __m__, memory_order __x__ ) volatile
3734 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3737 inline short atomic_short::fetch_or
3738 ( short __m__, memory_order __x__ ) volatile
3739 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3742 inline short atomic_short::fetch_xor
3743 ( short __m__, memory_order __x__ ) volatile
3744 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3747 inline unsigned short atomic_ushort::fetch_add
3748 ( unsigned short __m__, memory_order __x__ ) volatile
3749 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3752 inline unsigned short atomic_ushort::fetch_sub
3753 ( unsigned short __m__, memory_order __x__ ) volatile
3754 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3757 inline unsigned short atomic_ushort::fetch_and
3758 ( unsigned short __m__, memory_order __x__ ) volatile
3759 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3762 inline unsigned short atomic_ushort::fetch_or
3763 ( unsigned short __m__, memory_order __x__ ) volatile
3764 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3767 inline unsigned short atomic_ushort::fetch_xor
3768 ( unsigned short __m__, memory_order __x__ ) volatile
3769 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3772 inline int atomic_int::fetch_add
3773 ( int __m__, memory_order __x__ ) volatile
3774 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3777 inline int atomic_int::fetch_sub
3778 ( int __m__, memory_order __x__ ) volatile
3779 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3782 inline int atomic_int::fetch_and
3783 ( int __m__, memory_order __x__ ) volatile
3784 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3787 inline int atomic_int::fetch_or
3788 ( int __m__, memory_order __x__ ) volatile
3789 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3792 inline int atomic_int::fetch_xor
3793 ( int __m__, memory_order __x__ ) volatile
3794 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3797 inline unsigned int atomic_uint::fetch_add
3798 ( unsigned int __m__, memory_order __x__ ) volatile
3799 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3802 inline unsigned int atomic_uint::fetch_sub
3803 ( unsigned int __m__, memory_order __x__ ) volatile
3804 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3807 inline unsigned int atomic_uint::fetch_and
3808 ( unsigned int __m__, memory_order __x__ ) volatile
3809 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3812 inline unsigned int atomic_uint::fetch_or
3813 ( unsigned int __m__, memory_order __x__ ) volatile
3814 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3817 inline unsigned int atomic_uint::fetch_xor
3818 ( unsigned int __m__, memory_order __x__ ) volatile
3819 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3822 inline long atomic_long::fetch_add
3823 ( long __m__, memory_order __x__ ) volatile
3824 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3827 inline long atomic_long::fetch_sub
3828 ( long __m__, memory_order __x__ ) volatile
3829 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3832 inline long atomic_long::fetch_and
3833 ( long __m__, memory_order __x__ ) volatile
3834 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3837 inline long atomic_long::fetch_or
3838 ( long __m__, memory_order __x__ ) volatile
3839 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3842 inline long atomic_long::fetch_xor
3843 ( long __m__, memory_order __x__ ) volatile
3844 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3847 inline unsigned long atomic_ulong::fetch_add
3848 ( unsigned long __m__, memory_order __x__ ) volatile
3849 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3852 inline unsigned long atomic_ulong::fetch_sub
3853 ( unsigned long __m__, memory_order __x__ ) volatile
3854 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3857 inline unsigned long atomic_ulong::fetch_and
3858 ( unsigned long __m__, memory_order __x__ ) volatile
3859 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3862 inline unsigned long atomic_ulong::fetch_or
3863 ( unsigned long __m__, memory_order __x__ ) volatile
3864 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3867 inline unsigned long atomic_ulong::fetch_xor
3868 ( unsigned long __m__, memory_order __x__ ) volatile
3869 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3872 inline long long atomic_llong::fetch_add
3873 ( long long __m__, memory_order __x__ ) volatile
3874 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3877 inline long long atomic_llong::fetch_sub
3878 ( long long __m__, memory_order __x__ ) volatile
3879 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3882 inline long long atomic_llong::fetch_and
3883 ( long long __m__, memory_order __x__ ) volatile
3884 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3887 inline long long atomic_llong::fetch_or
3888 ( long long __m__, memory_order __x__ ) volatile
3889 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3892 inline long long atomic_llong::fetch_xor
3893 ( long long __m__, memory_order __x__ ) volatile
3894 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3897 inline unsigned long long atomic_ullong::fetch_add
3898 ( unsigned long long __m__, memory_order __x__ ) volatile
3899 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3902 inline unsigned long long atomic_ullong::fetch_sub
3903 ( unsigned long long __m__, memory_order __x__ ) volatile
3904 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3907 inline unsigned long long atomic_ullong::fetch_and
3908 ( unsigned long long __m__, memory_order __x__ ) volatile
3909 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3912 inline unsigned long long atomic_ullong::fetch_or
3913 ( unsigned long long __m__, memory_order __x__ ) volatile
3914 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3917 inline unsigned long long atomic_ullong::fetch_xor
3918 ( unsigned long long __m__, memory_order __x__ ) volatile
3919 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3922 inline wchar_t atomic_wchar_t::fetch_add
3923 ( wchar_t __m__, memory_order __x__ ) volatile
3924 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
3927 inline wchar_t atomic_wchar_t::fetch_sub
3928 ( wchar_t __m__, memory_order __x__ ) volatile
3929 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
3932 inline wchar_t atomic_wchar_t::fetch_and
3933 ( wchar_t __m__, memory_order __x__ ) volatile
3934 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
3937 inline wchar_t atomic_wchar_t::fetch_or
3938 ( wchar_t __m__, memory_order __x__ ) volatile
3939 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
3942 inline wchar_t atomic_wchar_t::fetch_xor
3943 ( wchar_t __m__, memory_order __x__ ) volatile
3944 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
3947 template< typename T >
3948 T* atomic<T*>::load( memory_order __x__ ) volatile
3949 { return static_cast<T*>( atomic_address::load( __x__ ) ); }
3951 template< typename T >
3952 T* atomic<T*>::exchange( T* __v__, memory_order __x__ ) volatile
3953 { return static_cast<T*>( atomic_address::exchange( __v__, __x__ ) ); }
3955 template< typename T >
3956 bool atomic<T*>::compare_exchange_weak
3957 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3958 { return atomic_address::compare_exchange_weak( *reinterpret_cast<void**>( &__r__ ),
3959 static_cast<void*>( __v__ ), __x__, __y__ ); }
3960 //{ return _ATOMIC_CMPSWP_WEAK_( this, &__r__, __v__, __x__ ); }
3962 template< typename T >
3963 bool atomic<T*>::compare_exchange_strong
3964 ( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
3965 { return atomic_address::compare_exchange_strong( *reinterpret_cast<void**>( &__r__ ),
3966 static_cast<void*>( __v__ ), __x__, __y__ ); }
3967 //{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }
3969 template< typename T >
3970 bool atomic<T*>::compare_exchange_weak
3971 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3972 { return compare_exchange_weak( __r__, __v__, __x__,
3973 __x__ == memory_order_acq_rel ? memory_order_acquire :
3974 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3976 template< typename T >
3977 bool atomic<T*>::compare_exchange_strong
3978 ( T*& __r__, T* __v__, memory_order __x__ ) volatile
3979 { return compare_exchange_strong( __r__, __v__, __x__,
3980 __x__ == memory_order_acq_rel ? memory_order_acquire :
3981 __x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
3983 template< typename T >
3984 T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
3985 { return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }
3987 template< typename T >
3988 T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
3989 { return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }
3999 #endif /* __IMPATOMIC_H__ */