7f4dcd4cb5ffeb8801f5f68804fb847cd9801a92
[c11tester.git] / include / impatomic.h
1 #include <stdio.h>
2 /**
3  * @file impatomic.h
4  * @brief Common header for C11/C++11 atomics
5  *
6  * Note that some features are unavailable, as they require support from a true
7  * C11/C++11 compiler.
8  */
9
10 #ifndef __IMPATOMIC_H__
11 #define __IMPATOMIC_H__
12
13 #include "memoryorder.h"
14 #include "cmodelint.h"
15
16 #ifdef __cplusplus
17 namespace std {
18 #else
19 #include <stdbool.h>
20 #endif
21
22 #define CPP0X( feature )
23
24 typedef struct atomic_flag
25 {
26 #ifdef __cplusplus
27         bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
28         void clear( memory_order = memory_order_seq_cst ) volatile;
29
30         CPP0X( atomic_flag() = default; )
31         CPP0X( atomic_flag( const atomic_flag& ) = delete; )
32         atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);
33
34         CPP0X(private:)
35 #endif
36         bool __f__;
37 } atomic_flag;
38
39 #define ATOMIC_FLAG_INIT { false }
40
41 #ifdef __cplusplus
42 extern "C" {
43 #endif
44
45 extern bool atomic_flag_test_and_set( volatile atomic_flag* );
46 extern bool atomic_flag_test_and_set_explicit
47         ( volatile atomic_flag*, memory_order );
48 extern void atomic_flag_clear( volatile atomic_flag* );
49 extern void atomic_flag_clear_explicit
50         ( volatile atomic_flag*, memory_order );
51 extern void __atomic_flag_wait__
52         ( volatile atomic_flag* );
53 extern void __atomic_flag_wait_explicit__
54         ( volatile atomic_flag*, memory_order );
55
56 #ifdef __cplusplus
57 }
58 #endif
59
60 #ifdef __cplusplus
61
62 inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
63 { return atomic_flag_test_and_set_explicit( this, __x__ ); }
64
65 inline void atomic_flag::clear( memory_order __x__ ) volatile
66 { atomic_flag_clear_explicit( this, __x__ ); }
67
68 #endif
69
70
71 /*
72         The remainder of the example implementation uses the following
73         macros. These macros exploit GNU extensions for value-returning
74         blocks (AKA statement expressions) and __typeof__.
75
76         The macros rely on data fields of atomic structs being named __f__.
77         Other symbols used are __a__=atomic, __e__=expected, __f__=field,
78         __g__=flag, __m__=modified, __o__=operation, __r__=result,
79         __p__=pointer to field, __v__=value (for single evaluation),
80         __x__=memory-ordering, and __y__=memory-ordering.
81  */
82
83 #define _ATOMIC_LOAD_( __a__, __x__ )                                         \
84         ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);   \
85                  __typeof__((__a__)->__f__) __r__ = (__typeof__((__a__)->__f__))model_read_action((void *)__p__, __x__);  \
86                  __r__; })
87
88 #define _ATOMIC_STORE_( __a__, __m__, __x__ )                                 \
89         ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);   \
90                  __typeof__(__m__) __v__ = (__m__);                            \
91                  model_write_action((void *) __p__,  __x__, (uint64_t) __v__); \
92                  __v__ = __v__; /* Silence clang (-Wunused-value) */           \
93          })
94
95
96 #define _ATOMIC_INIT_( __a__, __m__ )                                         \
97         ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);   \
98                  __typeof__(__m__) __v__ = (__m__);                            \
99                  model_init_action((void *) __p__,  (uint64_t) __v__);         \
100                  __v__ = __v__; /* Silence clang (-Wunused-value) */           \
101          })
102
103 #define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ )                         \
104         ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);   \
105                  __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__))model_rmwr_action((void *)__p__, __x__); \
106                  __typeof__(__m__) __v__ = (__m__);                                    \
107                  __typeof__((__a__)->__f__) __copy__= __old__;                         \
108                  __copy__ __o__ __v__;                                                 \
109                  model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);          \
110                  __old__ = __old__;     /* Silence clang (-Wunused-value) */               \
111          })
112
113 /* No spurious failure for now */
114 #define _ATOMIC_CMPSWP_WEAK_ _ATOMIC_CMPSWP_
115
116 #define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )                         \
117         ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);   \
118                  __typeof__(__e__) __q__ = (__e__);                            \
119                  __typeof__(__m__) __v__ = (__m__);                            \
120                  bool __r__;                                                   \
121                  __typeof__((__a__)->__f__) __t__=(__typeof__((__a__)->__f__))model_rmwrcas_action((void *)__p__, __x__, (uint64_t) *__q__, sizeof((__a__)->__f__)); \
122                  if (__t__ == *__q__ ) {;                                     \
123                                                                                                                 model_rmw_action((void *)__p__, __x__, (uint64_t) __v__); __r__ = true; } \
124                  else {  model_rmwc_action((void *)__p__, __x__); *__q__ = __t__;  __r__ = false;} \
125                  __r__; })
126
127 #define _ATOMIC_FENCE_( __x__ ) \
128         ({ model_fence_action(__x__);})
129
130
131 #define ATOMIC_CHAR_LOCK_FREE 1
132 #define ATOMIC_CHAR16_T_LOCK_FREE 1
133 #define ATOMIC_CHAR32_T_LOCK_FREE 1
134 #define ATOMIC_WCHAR_T_LOCK_FREE 1
135 #define ATOMIC_SHORT_LOCK_FREE 1
136 #define ATOMIC_INT_LOCK_FREE 1
137 #define ATOMIC_LONG_LOCK_FREE 1
138 #define ATOMIC_LLONG_LOCK_FREE 1
139 #define ATOMIC_ADDRESS_LOCK_FREE 1
140
141 typedef struct atomic_bool
142 {
143 #ifdef __cplusplus
144         bool is_lock_free() const volatile;
145         void store( bool, memory_order = memory_order_seq_cst ) volatile;
146         bool load( memory_order = memory_order_seq_cst ) volatile;
147         bool exchange( bool, memory_order = memory_order_seq_cst ) volatile;
148         bool compare_exchange_weak ( bool&, bool, memory_order, memory_order ) volatile;
149         bool compare_exchange_strong ( bool&, bool, memory_order, memory_order ) volatile;
150         bool compare_exchange_weak ( bool&, bool,
151                                                                                                                          memory_order = memory_order_seq_cst) volatile;
152         bool compare_exchange_strong ( bool&, bool,
153                                                                                                                                  memory_order = memory_order_seq_cst) volatile;
154
155         CPP0X( atomic_bool() = delete; )
156         CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) {
157                 } )
158         CPP0X( atomic_bool( const atomic_bool& ) = delete; )
159         atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);
160
161         bool operator =( bool __v__ ) volatile
162         { store( __v__ ); return __v__; }
163
164         friend void atomic_store_explicit( volatile atomic_bool*, bool,
165                                                                                                                                                  memory_order );
166         friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
167         friend bool atomic_exchange_explicit( volatile atomic_bool*, bool,
168                                                                                                                                                                 memory_order );
169         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_bool*, bool*, bool,
170                                                                                                                                                                                                                  memory_order, memory_order );
171         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_bool*, bool*, bool,
172                                                                                                                                                                                                                          memory_order, memory_order );
173
174         CPP0X(private:)
175 #endif
176         bool __f__;
177 } atomic_bool;
178
179
180 typedef struct atomic_address
181 {
182 #ifdef __cplusplus
183         bool is_lock_free() const volatile;
184         void store( void*, memory_order = memory_order_seq_cst ) volatile;
185         void* load( memory_order = memory_order_seq_cst ) volatile;
186         void* exchange( void*, memory_order = memory_order_seq_cst ) volatile;
187         bool compare_exchange_weak( void*&, void*, memory_order, memory_order ) volatile;
188         bool compare_exchange_strong( void*&, void*, memory_order, memory_order ) volatile;
189         bool compare_exchange_weak( void*&, void*,
190                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
191         bool compare_exchange_strong( void*&, void*,
192                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
193         void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
194         void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
195
196         CPP0X( atomic_address() = default; )
197         CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) {
198                 } )
199         CPP0X( atomic_address( const atomic_address& ) = delete; )
200         atomic_address& operator =( const atomic_address & ) CPP0X(=delete);
201
202         void* operator =( void* __v__ ) volatile
203         { store( __v__ ); return __v__; }
204
205         void* operator +=( ptrdiff_t __v__ ) volatile
206         { return fetch_add( __v__ ); }
207
208         void* operator -=( ptrdiff_t __v__ ) volatile
209         { return fetch_sub( __v__ ); }
210
211         friend void atomic_store_explicit( volatile atomic_address*, void*,
212                                                                                                                                                  memory_order );
213         friend void* atomic_load_explicit( volatile atomic_address*, memory_order );
214         friend void* atomic_exchange_explicit( volatile atomic_address*, void*,
215                                                                                                                                                                  memory_order );
216         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_address*,
217                                                                                                                                                                                                                  void**, void*, memory_order, memory_order );
218         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_address*,
219                                                                                                                                                                                                                          void**, void*, memory_order, memory_order );
220         friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
221                                                                                                                                                                         memory_order );
222         friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
223                                                                                                                                                                         memory_order );
224
225         CPP0X(private:)
226 #endif
227         void* __f__;
228 } atomic_address;
229
230
231 typedef struct atomic_char
232 {
233 #ifdef __cplusplus
234         bool is_lock_free() const volatile;
235         void store( char,
236                                                         memory_order = memory_order_seq_cst ) volatile;
237         char load( memory_order = memory_order_seq_cst ) volatile;
238         char exchange( char,
239                                                                  memory_order = memory_order_seq_cst ) volatile;
240         bool compare_exchange_weak( char&, char,
241                                                                                                                         memory_order, memory_order ) volatile;
242         bool compare_exchange_strong( char&, char,
243                                                                                                                                 memory_order, memory_order ) volatile;
244         bool compare_exchange_weak( char&, char,
245                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
246         bool compare_exchange_strong( char&, char,
247                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
248         char fetch_add( char,
249                                                                         memory_order = memory_order_seq_cst ) volatile;
250         char fetch_sub( char,
251                                                                         memory_order = memory_order_seq_cst ) volatile;
252         char fetch_and( char,
253                                                                         memory_order = memory_order_seq_cst ) volatile;
254         char fetch_or( char,
255                                                                  memory_order = memory_order_seq_cst ) volatile;
256         char fetch_xor( char,
257                                                                         memory_order = memory_order_seq_cst ) volatile;
258
259         CPP0X( atomic_char() = default; )
260         CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) {
261                 } )
262         CPP0X( atomic_char( const atomic_char& ) = delete; )
263         atomic_char& operator =( const atomic_char& ) CPP0X(=delete);
264
265         char operator =( char __v__ ) volatile
266         { store( __v__ ); return __v__; }
267
268         char operator ++( int ) volatile
269         { return fetch_add( 1 ); }
270
271         char operator --( int ) volatile
272         { return fetch_sub( 1 ); }
273
274         char operator ++() volatile
275         { return fetch_add( 1 ) + 1; }
276
277         char operator --() volatile
278         { return fetch_sub( 1 ) - 1; }
279
280         char operator +=( char __v__ ) volatile
281         { return fetch_add( __v__ ) + __v__; }
282
283         char operator -=( char __v__ ) volatile
284         { return fetch_sub( __v__ ) - __v__; }
285
286         char operator &=( char __v__ ) volatile
287         { return fetch_and( __v__ ) & __v__; }
288
289         char operator |=( char __v__ ) volatile
290         { return fetch_or( __v__ ) | __v__; }
291
292         char operator ^=( char __v__ ) volatile
293         { return fetch_xor( __v__ ) ^ __v__; }
294
295         friend void atomic_store_explicit( volatile atomic_char*, char,
296                                                                                                                                                  memory_order );
297         friend char atomic_load_explicit( volatile atomic_char*,
298                                                                                                                                                 memory_order );
299         friend char atomic_exchange_explicit( volatile atomic_char*,
300                                                                                                                                                                 char, memory_order );
301         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_char*,
302                                                                                                                                                                                                                  char*, char, memory_order, memory_order );
303         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_char*,
304                                                                                                                                                                                                                          char*, char, memory_order, memory_order );
305         friend char atomic_fetch_add_explicit( volatile atomic_char*,
306                                                                                                                                                                  char, memory_order );
307         friend char atomic_fetch_sub_explicit( volatile atomic_char*,
308                                                                                                                                                                  char, memory_order );
309         friend char atomic_fetch_and_explicit( volatile atomic_char*,
310                                                                                                                                                                  char, memory_order );
311         friend char atomic_fetch_or_explicit(  volatile atomic_char*,
312                                                                                                                                                                  char, memory_order );
313         friend char atomic_fetch_xor_explicit( volatile atomic_char*,
314                                                                                                                                                                  char, memory_order );
315
316         CPP0X(private:)
317 #endif
318         char __f__;
319 } atomic_char;
320
321
322 typedef struct atomic_schar
323 {
324 #ifdef __cplusplus
325         bool is_lock_free() const volatile;
326         void store( signed char,
327                                                         memory_order = memory_order_seq_cst ) volatile;
328         signed char load( memory_order = memory_order_seq_cst ) volatile;
329         signed char exchange( signed char,
330                                                                                                 memory_order = memory_order_seq_cst ) volatile;
331         bool compare_exchange_weak( signed char&, signed char,
332                                                                                                                         memory_order, memory_order ) volatile;
333         bool compare_exchange_strong( signed char&, signed char,
334                                                                                                                                 memory_order, memory_order ) volatile;
335         bool compare_exchange_weak( signed char&, signed char,
336                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
337         bool compare_exchange_strong( signed char&, signed char,
338                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
339         signed char fetch_add( signed char,
340                                                                                                  memory_order = memory_order_seq_cst ) volatile;
341         signed char fetch_sub( signed char,
342                                                                                                  memory_order = memory_order_seq_cst ) volatile;
343         signed char fetch_and( signed char,
344                                                                                                  memory_order = memory_order_seq_cst ) volatile;
345         signed char fetch_or( signed char,
346                                                                                                 memory_order = memory_order_seq_cst ) volatile;
347         signed char fetch_xor( signed char,
348                                                                                                  memory_order = memory_order_seq_cst ) volatile;
349
350         CPP0X( atomic_schar() = default; )
351         CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) {
352                 } )
353         CPP0X( atomic_schar( const atomic_schar& ) = delete; )
354         atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);
355
356         signed char operator =( signed char __v__ ) volatile
357         { store( __v__ ); return __v__; }
358
359         signed char operator ++( int ) volatile
360         { return fetch_add( 1 ); }
361
362         signed char operator --( int ) volatile
363         { return fetch_sub( 1 ); }
364
365         signed char operator ++() volatile
366         { return fetch_add( 1 ) + 1; }
367
368         signed char operator --() volatile
369         { return fetch_sub( 1 ) - 1; }
370
371         signed char operator +=( signed char __v__ ) volatile
372         { return fetch_add( __v__ ) + __v__; }
373
374         signed char operator -=( signed char __v__ ) volatile
375         { return fetch_sub( __v__ ) - __v__; }
376
377         signed char operator &=( signed char __v__ ) volatile
378         { return fetch_and( __v__ ) & __v__; }
379
380         signed char operator |=( signed char __v__ ) volatile
381         { return fetch_or( __v__ ) | __v__; }
382
383         signed char operator ^=( signed char __v__ ) volatile
384         { return fetch_xor( __v__ ) ^ __v__; }
385
386         friend void atomic_store_explicit( volatile atomic_schar*, signed char,
387                                                                                                                                                  memory_order );
388         friend signed char atomic_load_explicit( volatile atomic_schar*,
389                                                                                                                                                                          memory_order );
390         friend signed char atomic_exchange_explicit( volatile atomic_schar*,
391                                                                                                                                                                                          signed char, memory_order );
392         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_schar*,
393                                                                                                                                                                                                                  signed char*, signed char, memory_order, memory_order );
394         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_schar*,
395                                                                                                                                                                                                                          signed char*, signed char, memory_order, memory_order );
396         friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
397                                                                                                                                                                                                 signed char, memory_order );
398         friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
399                                                                                                                                                                                                 signed char, memory_order );
400         friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
401                                                                                                                                                                                                 signed char, memory_order );
402         friend signed char atomic_fetch_or_explicit(  volatile atomic_schar*,
403                                                                                                                                                                                                 signed char, memory_order );
404         friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
405                                                                                                                                                                                                 signed char, memory_order );
406
407         CPP0X(private:)
408 #endif
409         signed char __f__;
410 } atomic_schar;
411
412
413 typedef struct atomic_uchar
414 {
415 #ifdef __cplusplus
416         bool is_lock_free() const volatile;
417         void store( unsigned char,
418                                                         memory_order = memory_order_seq_cst ) volatile;
419         unsigned char load( memory_order = memory_order_seq_cst ) volatile;
420         unsigned char exchange( unsigned char,
421                                                                                                         memory_order = memory_order_seq_cst ) volatile;
422         bool compare_exchange_weak( unsigned char&, unsigned char,
423                                                                                                                         memory_order, memory_order ) volatile;
424         bool compare_exchange_strong( unsigned char&, unsigned char,
425                                                                                                                                 memory_order, memory_order ) volatile;
426         bool compare_exchange_weak( unsigned char&, unsigned char,
427                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
428         bool compare_exchange_strong( unsigned char&, unsigned char,
429                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
430         unsigned char fetch_add( unsigned char,
431                                                                                                          memory_order = memory_order_seq_cst ) volatile;
432         unsigned char fetch_sub( unsigned char,
433                                                                                                          memory_order = memory_order_seq_cst ) volatile;
434         unsigned char fetch_and( unsigned char,
435                                                                                                          memory_order = memory_order_seq_cst ) volatile;
436         unsigned char fetch_or( unsigned char,
437                                                                                                         memory_order = memory_order_seq_cst ) volatile;
438         unsigned char fetch_xor( unsigned char,
439                                                                                                          memory_order = memory_order_seq_cst ) volatile;
440
441         CPP0X( atomic_uchar() = default; )
442         CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) {
443                 } )
444         CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
445         atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);
446
447         unsigned char operator =( unsigned char __v__ ) volatile
448         { store( __v__ ); return __v__; }
449
450         unsigned char operator ++( int ) volatile
451         { return fetch_add( 1 ); }
452
453         unsigned char operator --( int ) volatile
454         { return fetch_sub( 1 ); }
455
456         unsigned char operator ++() volatile
457         { return fetch_add( 1 ) + 1; }
458
459         unsigned char operator --() volatile
460         { return fetch_sub( 1 ) - 1; }
461
462         unsigned char operator +=( unsigned char __v__ ) volatile
463         { return fetch_add( __v__ ) + __v__; }
464
465         unsigned char operator -=( unsigned char __v__ ) volatile
466         { return fetch_sub( __v__ ) - __v__; }
467
468         unsigned char operator &=( unsigned char __v__ ) volatile
469         { return fetch_and( __v__ ) & __v__; }
470
471         unsigned char operator |=( unsigned char __v__ ) volatile
472         { return fetch_or( __v__ ) | __v__; }
473
474         unsigned char operator ^=( unsigned char __v__ ) volatile
475         { return fetch_xor( __v__ ) ^ __v__; }
476
477         friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
478                                                                                                                                                  memory_order );
479         friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
480                                                                                                                                                                                  memory_order );
481         friend unsigned char atomic_exchange_explicit( volatile atomic_uchar*,
482                                                                                                                                                                                                  unsigned char, memory_order );
483         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uchar*,
484                                                                                                                                                                                                                  unsigned char*, unsigned char, memory_order, memory_order );
485         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uchar*,
486                                                                                                                                                                                                                          unsigned char*, unsigned char, memory_order, memory_order );
487         friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
488                                                                                                                                                                                                         unsigned char, memory_order );
489         friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
490                                                                                                                                                                                                         unsigned char, memory_order );
491         friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
492                                                                                                                                                                                                         unsigned char, memory_order );
493         friend unsigned char atomic_fetch_or_explicit(  volatile atomic_uchar*,
494                                                                                                                                                                                                         unsigned char, memory_order );
495         friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
496                                                                                                                                                                                                         unsigned char, memory_order );
497
498         CPP0X(private:)
499 #endif
500         unsigned char __f__;
501 } atomic_uchar;
502
503
504 typedef struct atomic_short
505 {
506 #ifdef __cplusplus
507         bool is_lock_free() const volatile;
508         void store( short,
509                                                         memory_order = memory_order_seq_cst ) volatile;
510         short load( memory_order = memory_order_seq_cst ) volatile;
511         short exchange( short,
512                                                                         memory_order = memory_order_seq_cst ) volatile;
513         bool compare_exchange_weak( short&, short,
514                                                                                                                         memory_order, memory_order ) volatile;
515         bool compare_exchange_strong( short&, short,
516                                                                                                                                 memory_order, memory_order ) volatile;
517         bool compare_exchange_weak( short&, short,
518                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
519         bool compare_exchange_strong( short&, short,
520                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
521         short fetch_add( short,
522                                                                          memory_order = memory_order_seq_cst ) volatile;
523         short fetch_sub( short,
524                                                                          memory_order = memory_order_seq_cst ) volatile;
525         short fetch_and( short,
526                                                                          memory_order = memory_order_seq_cst ) volatile;
527         short fetch_or( short,
528                                                                         memory_order = memory_order_seq_cst ) volatile;
529         short fetch_xor( short,
530                                                                          memory_order = memory_order_seq_cst ) volatile;
531
532         CPP0X( atomic_short() = default; )
533         CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) {
534                 } )
535         CPP0X( atomic_short( const atomic_short& ) = delete; )
536         atomic_short& operator =( const atomic_short& ) CPP0X(=delete);
537
538         short operator =( short __v__ ) volatile
539         { store( __v__ ); return __v__; }
540
541         short operator ++( int ) volatile
542         { return fetch_add( 1 ); }
543
544         short operator --( int ) volatile
545         { return fetch_sub( 1 ); }
546
547         short operator ++() volatile
548         { return fetch_add( 1 ) + 1; }
549
550         short operator --() volatile
551         { return fetch_sub( 1 ) - 1; }
552
553         short operator +=( short __v__ ) volatile
554         { return fetch_add( __v__ ) + __v__; }
555
556         short operator -=( short __v__ ) volatile
557         { return fetch_sub( __v__ ) - __v__; }
558
559         short operator &=( short __v__ ) volatile
560         { return fetch_and( __v__ ) & __v__; }
561
562         short operator |=( short __v__ ) volatile
563         { return fetch_or( __v__ ) | __v__; }
564
565         short operator ^=( short __v__ ) volatile
566         { return fetch_xor( __v__ ) ^ __v__; }
567
568         friend void atomic_store_explicit( volatile atomic_short*, short,
569                                                                                                                                                  memory_order );
570         friend short atomic_load_explicit( volatile atomic_short*,
571                                                                                                                                                  memory_order );
572         friend short atomic_exchange_explicit( volatile atomic_short*,
573                                                                                                                                                                  short, memory_order );
574         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_short*,
575                                                                                                                                                                                                                  short*, short, memory_order, memory_order );
576         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_short*,
577                                                                                                                                                                                                                          short*, short, memory_order, memory_order );
578         friend short atomic_fetch_add_explicit( volatile atomic_short*,
579                                                                                                                                                                         short, memory_order );
580         friend short atomic_fetch_sub_explicit( volatile atomic_short*,
581                                                                                                                                                                         short, memory_order );
582         friend short atomic_fetch_and_explicit( volatile atomic_short*,
583                                                                                                                                                                         short, memory_order );
584         friend short atomic_fetch_or_explicit(  volatile atomic_short*,
585                                                                                                                                                                         short, memory_order );
586         friend short atomic_fetch_xor_explicit( volatile atomic_short*,
587                                                                                                                                                                         short, memory_order );
588
589         CPP0X(private:)
590 #endif
591         short __f__;
592 } atomic_short;
593
594
595 typedef struct atomic_ushort
596 {
597 #ifdef __cplusplus
598         bool is_lock_free() const volatile;
599         void store( unsigned short,
600                                                         memory_order = memory_order_seq_cst ) volatile;
601         unsigned short load( memory_order = memory_order_seq_cst ) volatile;
602         unsigned short exchange( unsigned short,
603                                                                                                          memory_order = memory_order_seq_cst ) volatile;
604         bool compare_exchange_weak( unsigned short&, unsigned short,
605                                                                                                                         memory_order, memory_order ) volatile;
606         bool compare_exchange_strong( unsigned short&, unsigned short,
607                                                                                                                                 memory_order, memory_order ) volatile;
608         bool compare_exchange_weak( unsigned short&, unsigned short,
609                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
610         bool compare_exchange_strong( unsigned short&, unsigned short,
611                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
612         unsigned short fetch_add( unsigned short,
613                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
614         unsigned short fetch_sub( unsigned short,
615                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
616         unsigned short fetch_and( unsigned short,
617                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
618         unsigned short fetch_or( unsigned short,
619                                                                                                          memory_order = memory_order_seq_cst ) volatile;
620         unsigned short fetch_xor( unsigned short,
621                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
622
623         CPP0X( atomic_ushort() = default; )
624         CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) {
625                 } )
626         CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
627         atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);
628
629         unsigned short operator =( unsigned short __v__ ) volatile
630         { store( __v__ ); return __v__; }
631
632         unsigned short operator ++( int ) volatile
633         { return fetch_add( 1 ); }
634
635         unsigned short operator --( int ) volatile
636         { return fetch_sub( 1 ); }
637
638         unsigned short operator ++() volatile
639         { return fetch_add( 1 ) + 1; }
640
641         unsigned short operator --() volatile
642         { return fetch_sub( 1 ) - 1; }
643
644         unsigned short operator +=( unsigned short __v__ ) volatile
645         { return fetch_add( __v__ ) + __v__; }
646
647         unsigned short operator -=( unsigned short __v__ ) volatile
648         { return fetch_sub( __v__ ) - __v__; }
649
650         unsigned short operator &=( unsigned short __v__ ) volatile
651         { return fetch_and( __v__ ) & __v__; }
652
653         unsigned short operator |=( unsigned short __v__ ) volatile
654         { return fetch_or( __v__ ) | __v__; }
655
656         unsigned short operator ^=( unsigned short __v__ ) volatile
657         { return fetch_xor( __v__ ) ^ __v__; }
658
659         friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
660                                                                                                                                                  memory_order );
661         friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
662                                                                                                                                                                                         memory_order );
663         friend unsigned short atomic_exchange_explicit( volatile atomic_ushort*,
664                                                                                                                                                                                                         unsigned short, memory_order );
665         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ushort*,
666                                                                                                                                                                                                                  unsigned short*, unsigned short, memory_order, memory_order );
667         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ushort*,
668                                                                                                                                                                                                                          unsigned short*, unsigned short, memory_order, memory_order );
669         friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
670                                                                                                                                                                                                          unsigned short, memory_order );
671         friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
672                                                                                                                                                                                                          unsigned short, memory_order );
673         friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
674                                                                                                                                                                                                          unsigned short, memory_order );
675         friend unsigned short atomic_fetch_or_explicit(  volatile atomic_ushort*,
676                                                                                                                                                                                                          unsigned short, memory_order );
677         friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
678                                                                                                                                                                                                          unsigned short, memory_order );
679
680         CPP0X(private:)
681 #endif
682         unsigned short __f__;
683 } atomic_ushort;
684
685
686 typedef struct atomic_int
687 {
688 #ifdef __cplusplus
689         bool is_lock_free() const volatile;
690         void store( int,
691                                                         memory_order = memory_order_seq_cst ) volatile;
692         int load( memory_order = memory_order_seq_cst ) volatile;
693         int exchange( int,
694                                                                 memory_order = memory_order_seq_cst ) volatile;
695         bool compare_exchange_weak( int&, int,
696                                                                                                                         memory_order, memory_order ) volatile;
697         bool compare_exchange_strong( int&, int,
698                                                                                                                                 memory_order, memory_order ) volatile;
699         bool compare_exchange_weak( int&, int,
700                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
701         bool compare_exchange_strong( int&, int,
702                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
703         int fetch_add( int,
704                                                                  memory_order = memory_order_seq_cst ) volatile;
705         int fetch_sub( int,
706                                                                  memory_order = memory_order_seq_cst ) volatile;
707         int fetch_and( int,
708                                                                  memory_order = memory_order_seq_cst ) volatile;
709         int fetch_or( int,
710                                                                 memory_order = memory_order_seq_cst ) volatile;
711         int fetch_xor( int,
712                                                                  memory_order = memory_order_seq_cst ) volatile;
713
714         CPP0X( atomic_int() = default; )
715         CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) {
716                 } )
717         CPP0X( atomic_int( const atomic_int& ) = delete; )
718         atomic_int& operator =( const atomic_int& ) CPP0X(=delete);
719
720         int operator =( int __v__ ) volatile
721         { store( __v__ ); return __v__; }
722
723         int operator ++( int ) volatile
724         { return fetch_add( 1 ); }
725
726         int operator --( int ) volatile
727         { return fetch_sub( 1 ); }
728
729         int operator ++() volatile
730         { return fetch_add( 1 ) + 1; }
731
732         int operator --() volatile
733         { return fetch_sub( 1 ) - 1; }
734
735         int operator +=( int __v__ ) volatile
736         { return fetch_add( __v__ ) + __v__; }
737
738         int operator -=( int __v__ ) volatile
739         { return fetch_sub( __v__ ) - __v__; }
740
741         int operator &=( int __v__ ) volatile
742         { return fetch_and( __v__ ) & __v__; }
743
744         int operator |=( int __v__ ) volatile
745         { return fetch_or( __v__ ) | __v__; }
746
747         int operator ^=( int __v__ ) volatile
748         { return fetch_xor( __v__ ) ^ __v__; }
749
750         friend void atomic_store_explicit( volatile atomic_int*, int,
751                                                                                                                                                  memory_order );
752         friend int atomic_load_explicit( volatile atomic_int*,
753                                                                                                                                          memory_order );
754         friend int atomic_exchange_explicit( volatile atomic_int*,
755                                                                                                                                                          int, memory_order );
756         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_int*,
757                                                                                                                                                                                                                  int*, int, memory_order, memory_order );
758         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_int*,
759                                                                                                                                                                                                                          int*, int, memory_order, memory_order );
760         friend int atomic_fetch_add_explicit( volatile atomic_int*,
761                                                                                                                                                                 int, memory_order );
762         friend int atomic_fetch_sub_explicit( volatile atomic_int*,
763                                                                                                                                                                 int, memory_order );
764         friend int atomic_fetch_and_explicit( volatile atomic_int*,
765                                                                                                                                                                 int, memory_order );
766         friend int atomic_fetch_or_explicit(  volatile atomic_int*,
767                                                                                                                                                                 int, memory_order );
768         friend int atomic_fetch_xor_explicit( volatile atomic_int*,
769                                                                                                                                                                 int, memory_order );
770
771         CPP0X(private:)
772 #endif
773         int __f__;
774 } atomic_int;
775
776
777 typedef struct atomic_uint
778 {
779 #ifdef __cplusplus
780         bool is_lock_free() const volatile;
781         void store( unsigned int,
782                                                         memory_order = memory_order_seq_cst ) volatile;
783         unsigned int load( memory_order = memory_order_seq_cst ) volatile;
784         unsigned int exchange( unsigned int,
785                                                                                                  memory_order = memory_order_seq_cst ) volatile;
786         bool compare_exchange_weak( unsigned int&, unsigned int,
787                                                                                                                         memory_order, memory_order ) volatile;
788         bool compare_exchange_strong( unsigned int&, unsigned int,
789                                                                                                                                 memory_order, memory_order ) volatile;
790         bool compare_exchange_weak( unsigned int&, unsigned int,
791                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
792         bool compare_exchange_strong( unsigned int&, unsigned int,
793                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
794         unsigned int fetch_add( unsigned int,
795                                                                                                         memory_order = memory_order_seq_cst ) volatile;
796         unsigned int fetch_sub( unsigned int,
797                                                                                                         memory_order = memory_order_seq_cst ) volatile;
798         unsigned int fetch_and( unsigned int,
799                                                                                                         memory_order = memory_order_seq_cst ) volatile;
800         unsigned int fetch_or( unsigned int,
801                                                                                                  memory_order = memory_order_seq_cst ) volatile;
802         unsigned int fetch_xor( unsigned int,
803                                                                                                         memory_order = memory_order_seq_cst ) volatile;
804
805         CPP0X( atomic_uint() = default; )
806         CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) {
807                 } )
808         CPP0X( atomic_uint( const atomic_uint& ) = delete; )
809         atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);
810
811         unsigned int operator =( unsigned int __v__ ) volatile
812         { store( __v__ ); return __v__; }
813
814         unsigned int operator ++( int ) volatile
815         { return fetch_add( 1 ); }
816
817         unsigned int operator --( int ) volatile
818         { return fetch_sub( 1 ); }
819
820         unsigned int operator ++() volatile
821         { return fetch_add( 1 ) + 1; }
822
823         unsigned int operator --() volatile
824         { return fetch_sub( 1 ) - 1; }
825
826         unsigned int operator +=( unsigned int __v__ ) volatile
827         { return fetch_add( __v__ ) + __v__; }
828
829         unsigned int operator -=( unsigned int __v__ ) volatile
830         { return fetch_sub( __v__ ) - __v__; }
831
832         unsigned int operator &=( unsigned int __v__ ) volatile
833         { return fetch_and( __v__ ) & __v__; }
834
835         unsigned int operator |=( unsigned int __v__ ) volatile
836         { return fetch_or( __v__ ) | __v__; }
837
838         unsigned int operator ^=( unsigned int __v__ ) volatile
839         { return fetch_xor( __v__ ) ^ __v__; }
840
841         friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
842                                                                                                                                                  memory_order );
843         friend unsigned int atomic_load_explicit( volatile atomic_uint*,
844                                                                                                                                                                                 memory_order );
845         friend unsigned int atomic_exchange_explicit( volatile atomic_uint*,
846                                                                                                                                                                                                 unsigned int, memory_order );
847         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_uint*,
848                                                                                                                                                                                                                  unsigned int*, unsigned int, memory_order, memory_order );
849         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_uint*,
850                                                                                                                                                                                                                          unsigned int*, unsigned int, memory_order, memory_order );
851         friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
852                                                                                                                                                                                                  unsigned int, memory_order );
853         friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
854                                                                                                                                                                                                  unsigned int, memory_order );
855         friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
856                                                                                                                                                                                                  unsigned int, memory_order );
857         friend unsigned int atomic_fetch_or_explicit(  volatile atomic_uint*,
858                                                                                                                                                                                                  unsigned int, memory_order );
859         friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
860                                                                                                                                                                                                  unsigned int, memory_order );
861
862         CPP0X(private:)
863 #endif
864         unsigned int __f__;
865 } atomic_uint;
866
867
868 typedef struct atomic_long
869 {
870 #ifdef __cplusplus
871         bool is_lock_free() const volatile;
872         void store( long,
873                                                         memory_order = memory_order_seq_cst ) volatile;
874         long load( memory_order = memory_order_seq_cst ) volatile;
875         long exchange( long,
876                                                                  memory_order = memory_order_seq_cst ) volatile;
877         bool compare_exchange_weak( long&, long,
878                                                                                                                         memory_order, memory_order ) volatile;
879         bool compare_exchange_strong( long&, long,
880                                                                                                                                 memory_order, memory_order ) volatile;
881         bool compare_exchange_weak( long&, long,
882                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
883         bool compare_exchange_strong( long&, long,
884                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
885         long fetch_add( long,
886                                                                         memory_order = memory_order_seq_cst ) volatile;
887         long fetch_sub( long,
888                                                                         memory_order = memory_order_seq_cst ) volatile;
889         long fetch_and( long,
890                                                                         memory_order = memory_order_seq_cst ) volatile;
891         long fetch_or( long,
892                                                                  memory_order = memory_order_seq_cst ) volatile;
893         long fetch_xor( long,
894                                                                         memory_order = memory_order_seq_cst ) volatile;
895
896         CPP0X( atomic_long() = default; )
897         CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) {
898                 } )
899         CPP0X( atomic_long( const atomic_long& ) = delete; )
900         atomic_long& operator =( const atomic_long& ) CPP0X(=delete);
901
902         long operator =( long __v__ ) volatile
903         { store( __v__ ); return __v__; }
904
905         long operator ++( int ) volatile
906         { return fetch_add( 1 ); }
907
908         long operator --( int ) volatile
909         { return fetch_sub( 1 ); }
910
911         long operator ++() volatile
912         { return fetch_add( 1 ) + 1; }
913
914         long operator --() volatile
915         { return fetch_sub( 1 ) - 1; }
916
917         long operator +=( long __v__ ) volatile
918         { return fetch_add( __v__ ) + __v__; }
919
920         long operator -=( long __v__ ) volatile
921         { return fetch_sub( __v__ ) - __v__; }
922
923         long operator &=( long __v__ ) volatile
924         { return fetch_and( __v__ ) & __v__; }
925
926         long operator |=( long __v__ ) volatile
927         { return fetch_or( __v__ ) | __v__; }
928
929         long operator ^=( long __v__ ) volatile
930         { return fetch_xor( __v__ ) ^ __v__; }
931
932         friend void atomic_store_explicit( volatile atomic_long*, long,
933                                                                                                                                                  memory_order );
934         friend long atomic_load_explicit( volatile atomic_long*,
935                                                                                                                                                 memory_order );
936         friend long atomic_exchange_explicit( volatile atomic_long*,
937                                                                                                                                                                 long, memory_order );
938         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_long*,
939                                                                                                                                                                                                                  long*, long, memory_order, memory_order );
940         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_long*,
941                                                                                                                                                                                                                          long*, long, memory_order, memory_order );
942         friend long atomic_fetch_add_explicit( volatile atomic_long*,
943                                                                                                                                                                  long, memory_order );
944         friend long atomic_fetch_sub_explicit( volatile atomic_long*,
945                                                                                                                                                                  long, memory_order );
946         friend long atomic_fetch_and_explicit( volatile atomic_long*,
947                                                                                                                                                                  long, memory_order );
948         friend long atomic_fetch_or_explicit(  volatile atomic_long*,
949                                                                                                                                                                  long, memory_order );
950         friend long atomic_fetch_xor_explicit( volatile atomic_long*,
951                                                                                                                                                                  long, memory_order );
952
953         CPP0X(private:)
954 #endif
955         long __f__;
956 } atomic_long;
957
958
959 typedef struct atomic_ulong
960 {
961 #ifdef __cplusplus
962         bool is_lock_free() const volatile;
963         void store( unsigned long,
964                                                         memory_order = memory_order_seq_cst ) volatile;
965         unsigned long load( memory_order = memory_order_seq_cst ) volatile;
966         unsigned long exchange( unsigned long,
967                                                                                                         memory_order = memory_order_seq_cst ) volatile;
968         bool compare_exchange_weak( unsigned long&, unsigned long,
969                                                                                                                         memory_order, memory_order ) volatile;
970         bool compare_exchange_strong( unsigned long&, unsigned long,
971                                                                                                                                 memory_order, memory_order ) volatile;
972         bool compare_exchange_weak( unsigned long&, unsigned long,
973                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
974         bool compare_exchange_strong( unsigned long&, unsigned long,
975                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
976         unsigned long fetch_add( unsigned long,
977                                                                                                          memory_order = memory_order_seq_cst ) volatile;
978         unsigned long fetch_sub( unsigned long,
979                                                                                                          memory_order = memory_order_seq_cst ) volatile;
980         unsigned long fetch_and( unsigned long,
981                                                                                                          memory_order = memory_order_seq_cst ) volatile;
982         unsigned long fetch_or( unsigned long,
983                                                                                                         memory_order = memory_order_seq_cst ) volatile;
984         unsigned long fetch_xor( unsigned long,
985                                                                                                          memory_order = memory_order_seq_cst ) volatile;
986
987         CPP0X( atomic_ulong() = default; )
988         CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) {
989                 } )
990         CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
991         atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);
992
993         unsigned long operator =( unsigned long __v__ ) volatile
994         { store( __v__ ); return __v__; }
995
996         unsigned long operator ++( int ) volatile
997         { return fetch_add( 1 ); }
998
999         unsigned long operator --( int ) volatile
1000         { return fetch_sub( 1 ); }
1001
1002         unsigned long operator ++() volatile
1003         { return fetch_add( 1 ) + 1; }
1004
1005         unsigned long operator --() volatile
1006         { return fetch_sub( 1 ) - 1; }
1007
1008         unsigned long operator +=( unsigned long __v__ ) volatile
1009         { return fetch_add( __v__ ) + __v__; }
1010
1011         unsigned long operator -=( unsigned long __v__ ) volatile
1012         { return fetch_sub( __v__ ) - __v__; }
1013
1014         unsigned long operator &=( unsigned long __v__ ) volatile
1015         { return fetch_and( __v__ ) & __v__; }
1016
1017         unsigned long operator |=( unsigned long __v__ ) volatile
1018         { return fetch_or( __v__ ) | __v__; }
1019
1020         unsigned long operator ^=( unsigned long __v__ ) volatile
1021         { return fetch_xor( __v__ ) ^ __v__; }
1022
1023         friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
1024                                                                                                                                                  memory_order );
1025         friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
1026                                                                                                                                                                                  memory_order );
1027         friend unsigned long atomic_exchange_explicit( volatile atomic_ulong*,
1028                                                                                                                                                                                                  unsigned long, memory_order );
1029         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ulong*,
1030                                                                                                                                                                                                                  unsigned long*, unsigned long, memory_order, memory_order );
1031         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ulong*,
1032                                                                                                                                                                                                                          unsigned long*, unsigned long, memory_order, memory_order );
1033         friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
1034                                                                                                                                                                                                         unsigned long, memory_order );
1035         friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
1036                                                                                                                                                                                                         unsigned long, memory_order );
1037         friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
1038                                                                                                                                                                                                         unsigned long, memory_order );
1039         friend unsigned long atomic_fetch_or_explicit(  volatile atomic_ulong*,
1040                                                                                                                                                                                                         unsigned long, memory_order );
1041         friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
1042                                                                                                                                                                                                         unsigned long, memory_order );
1043
1044         CPP0X(private:)
1045 #endif
1046         unsigned long __f__;
1047 } atomic_ulong;
1048
1049
1050 typedef struct atomic_llong
1051 {
1052 #ifdef __cplusplus
1053         bool is_lock_free() const volatile;
1054         void store( long long,
1055                                                         memory_order = memory_order_seq_cst ) volatile;
1056         long long load( memory_order = memory_order_seq_cst ) volatile;
1057         long long exchange( long long,
1058                                                                                         memory_order = memory_order_seq_cst ) volatile;
1059         bool compare_exchange_weak( long long&, long long,
1060                                                                                                                         memory_order, memory_order ) volatile;
1061         bool compare_exchange_strong( long long&, long long,
1062                                                                                                                                 memory_order, memory_order ) volatile;
1063         bool compare_exchange_weak( long long&, long long,
1064                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
1065         bool compare_exchange_strong( long long&, long long,
1066                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1067         long long fetch_add( long long,
1068                                                                                          memory_order = memory_order_seq_cst ) volatile;
1069         long long fetch_sub( long long,
1070                                                                                          memory_order = memory_order_seq_cst ) volatile;
1071         long long fetch_and( long long,
1072                                                                                          memory_order = memory_order_seq_cst ) volatile;
1073         long long fetch_or( long long,
1074                                                                                         memory_order = memory_order_seq_cst ) volatile;
1075         long long fetch_xor( long long,
1076                                                                                          memory_order = memory_order_seq_cst ) volatile;
1077
1078         CPP0X( atomic_llong() = default; )
1079         CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) {
1080                 } )
1081         CPP0X( atomic_llong( const atomic_llong& ) = delete; )
1082         atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);
1083
1084         long long operator =( long long __v__ ) volatile
1085         { store( __v__ ); return __v__; }
1086
1087         long long operator ++( int ) volatile
1088         { return fetch_add( 1 ); }
1089
1090         long long operator --( int ) volatile
1091         { return fetch_sub( 1 ); }
1092
1093         long long operator ++() volatile
1094         { return fetch_add( 1 ) + 1; }
1095
1096         long long operator --() volatile
1097         { return fetch_sub( 1 ) - 1; }
1098
1099         long long operator +=( long long __v__ ) volatile
1100         { return fetch_add( __v__ ) + __v__; }
1101
1102         long long operator -=( long long __v__ ) volatile
1103         { return fetch_sub( __v__ ) - __v__; }
1104
1105         long long operator &=( long long __v__ ) volatile
1106         { return fetch_and( __v__ ) & __v__; }
1107
1108         long long operator |=( long long __v__ ) volatile
1109         { return fetch_or( __v__ ) | __v__; }
1110
1111         long long operator ^=( long long __v__ ) volatile
1112         { return fetch_xor( __v__ ) ^ __v__; }
1113
1114         friend void atomic_store_explicit( volatile atomic_llong*, long long,
1115                                                                                                                                                  memory_order );
1116         friend long long atomic_load_explicit( volatile atomic_llong*,
1117                                                                                                                                                                  memory_order );
1118         friend long long atomic_exchange_explicit( volatile atomic_llong*,
1119                                                                                                                                                                                  long long, memory_order );
1120         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_llong*,
1121                                                                                                                                                                                                                  long long*, long long, memory_order, memory_order );
1122         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_llong*,
1123                                                                                                                                                                                                                          long long*, long long, memory_order, memory_order );
1124         friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
1125                                                                                                                                                                                         long long, memory_order );
1126         friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
1127                                                                                                                                                                                         long long, memory_order );
1128         friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
1129                                                                                                                                                                                         long long, memory_order );
1130         friend long long atomic_fetch_or_explicit(  volatile atomic_llong*,
1131                                                                                                                                                                                         long long, memory_order );
1132         friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
1133                                                                                                                                                                                         long long, memory_order );
1134
1135         CPP0X(private:)
1136 #endif
1137         long long __f__;
1138 } atomic_llong;
1139
1140
1141 typedef struct atomic_ullong
1142 {
1143 #ifdef __cplusplus
1144         bool is_lock_free() const volatile;
1145         void store( unsigned long long,
1146                                                         memory_order = memory_order_seq_cst ) volatile;
1147         unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
1148         unsigned long long exchange( unsigned long long,
1149                                                                                                                          memory_order = memory_order_seq_cst ) volatile;
1150         bool compare_exchange_weak( unsigned long long&, unsigned long long,
1151                                                                                                                         memory_order, memory_order ) volatile;
1152         bool compare_exchange_strong( unsigned long long&, unsigned long long,
1153                                                                                                                                 memory_order, memory_order ) volatile;
1154         bool compare_exchange_weak( unsigned long long&, unsigned long long,
1155                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
1156         bool compare_exchange_strong( unsigned long long&, unsigned long long,
1157                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1158         unsigned long long fetch_add( unsigned long long,
1159                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1160         unsigned long long fetch_sub( unsigned long long,
1161                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1162         unsigned long long fetch_and( unsigned long long,
1163                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1164         unsigned long long fetch_or( unsigned long long,
1165                                                                                                                          memory_order = memory_order_seq_cst ) volatile;
1166         unsigned long long fetch_xor( unsigned long long,
1167                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1168
1169         CPP0X( atomic_ullong() = default; )
1170         CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) {
1171                 } )
1172         CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
1173         atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);
1174
1175         unsigned long long operator =( unsigned long long __v__ ) volatile
1176         { store( __v__ ); return __v__; }
1177
1178         unsigned long long operator ++( int ) volatile
1179         { return fetch_add( 1 ); }
1180
1181         unsigned long long operator --( int ) volatile
1182         { return fetch_sub( 1 ); }
1183
1184         unsigned long long operator ++() volatile
1185         { return fetch_add( 1 ) + 1; }
1186
1187         unsigned long long operator --() volatile
1188         { return fetch_sub( 1 ) - 1; }
1189
1190         unsigned long long operator +=( unsigned long long __v__ ) volatile
1191         { return fetch_add( __v__ ) + __v__; }
1192
1193         unsigned long long operator -=( unsigned long long __v__ ) volatile
1194         { return fetch_sub( __v__ ) - __v__; }
1195
1196         unsigned long long operator &=( unsigned long long __v__ ) volatile
1197         { return fetch_and( __v__ ) & __v__; }
1198
1199         unsigned long long operator |=( unsigned long long __v__ ) volatile
1200         { return fetch_or( __v__ ) | __v__; }
1201
1202         unsigned long long operator ^=( unsigned long long __v__ ) volatile
1203         { return fetch_xor( __v__ ) ^ __v__; }
1204
1205         friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
1206                                                                                                                                                  memory_order );
1207         friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
1208                                                                                                                                                                                                         memory_order );
1209         friend unsigned long long atomic_exchange_explicit( volatile atomic_ullong*,
1210                                                                                                                                                                                                                         unsigned long long, memory_order );
1211         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_ullong*,
1212                                                                                                                                                                                                                  unsigned long long*, unsigned long long, memory_order, memory_order );
1213         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_ullong*,
1214                                                                                                                                                                                                                          unsigned long long*, unsigned long long, memory_order, memory_order );
1215         friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
1216                                                                                                                                                                                                                          unsigned long long, memory_order );
1217         friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
1218                                                                                                                                                                                                                          unsigned long long, memory_order );
1219         friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
1220                                                                                                                                                                                                                          unsigned long long, memory_order );
1221         friend unsigned long long atomic_fetch_or_explicit(  volatile atomic_ullong*,
1222                                                                                                                                                                                                                          unsigned long long, memory_order );
1223         friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
1224                                                                                                                                                                                                                          unsigned long long, memory_order );
1225
1226         CPP0X(private:)
1227 #endif
1228         unsigned long long __f__;
1229 } atomic_ullong;
1230
1231
1232 typedef atomic_schar atomic_int_least8_t;
1233 typedef atomic_uchar atomic_uint_least8_t;
1234 typedef atomic_short atomic_int_least16_t;
1235 typedef atomic_ushort atomic_uint_least16_t;
1236 typedef atomic_int atomic_int_least32_t;
1237 typedef atomic_uint atomic_uint_least32_t;
1238 typedef atomic_llong atomic_int_least64_t;
1239 typedef atomic_ullong atomic_uint_least64_t;
1240
1241 typedef atomic_schar atomic_int_fast8_t;
1242 typedef atomic_uchar atomic_uint_fast8_t;
1243 typedef atomic_short atomic_int_fast16_t;
1244 typedef atomic_ushort atomic_uint_fast16_t;
1245 typedef atomic_int atomic_int_fast32_t;
1246 typedef atomic_uint atomic_uint_fast32_t;
1247 typedef atomic_llong atomic_int_fast64_t;
1248 typedef atomic_ullong atomic_uint_fast64_t;
1249
1250 typedef atomic_long atomic_intptr_t;
1251 typedef atomic_ulong atomic_uintptr_t;
1252
1253 typedef atomic_long atomic_ssize_t;
1254 typedef atomic_ulong atomic_size_t;
1255
1256 typedef atomic_long atomic_ptrdiff_t;
1257
1258 typedef atomic_llong atomic_intmax_t;
1259 typedef atomic_ullong atomic_uintmax_t;
1260
1261
1262 #ifdef __cplusplus
1263
1264
1265 typedef struct atomic_wchar_t
1266 {
1267 #ifdef __cplusplus
1268         bool is_lock_free() const volatile;
1269         void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
1270         wchar_t load( memory_order = memory_order_seq_cst ) volatile;
1271         wchar_t exchange( wchar_t,
1272                                                                                 memory_order = memory_order_seq_cst ) volatile;
1273         bool compare_exchange_weak( wchar_t&, wchar_t,
1274                                                                                                                         memory_order, memory_order ) volatile;
1275         bool compare_exchange_strong( wchar_t&, wchar_t,
1276                                                                                                                                 memory_order, memory_order ) volatile;
1277         bool compare_exchange_weak( wchar_t&, wchar_t,
1278                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
1279         bool compare_exchange_strong( wchar_t&, wchar_t,
1280                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1281         wchar_t fetch_add( wchar_t,
1282                                                                                  memory_order = memory_order_seq_cst ) volatile;
1283         wchar_t fetch_sub( wchar_t,
1284                                                                                  memory_order = memory_order_seq_cst ) volatile;
1285         wchar_t fetch_and( wchar_t,
1286                                                                                  memory_order = memory_order_seq_cst ) volatile;
1287         wchar_t fetch_or( wchar_t,
1288                                                                                 memory_order = memory_order_seq_cst ) volatile;
1289         wchar_t fetch_xor( wchar_t,
1290                                                                                  memory_order = memory_order_seq_cst ) volatile;
1291
1292         CPP0X( atomic_wchar_t() = default; )
1293         CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) {
1294                 } )
1295         CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
1296         atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);
1297
1298         wchar_t operator =( wchar_t __v__ ) volatile
1299         { store( __v__ ); return __v__; }
1300
1301         wchar_t operator ++( int ) volatile
1302         { return fetch_add( 1 ); }
1303
1304         wchar_t operator --( int ) volatile
1305         { return fetch_sub( 1 ); }
1306
1307         wchar_t operator ++() volatile
1308         { return fetch_add( 1 ) + 1; }
1309
1310         wchar_t operator --() volatile
1311         { return fetch_sub( 1 ) - 1; }
1312
1313         wchar_t operator +=( wchar_t __v__ ) volatile
1314         { return fetch_add( __v__ ) + __v__; }
1315
1316         wchar_t operator -=( wchar_t __v__ ) volatile
1317         { return fetch_sub( __v__ ) - __v__; }
1318
1319         wchar_t operator &=( wchar_t __v__ ) volatile
1320         { return fetch_and( __v__ ) & __v__; }
1321
1322         wchar_t operator |=( wchar_t __v__ ) volatile
1323         { return fetch_or( __v__ ) | __v__; }
1324
1325         wchar_t operator ^=( wchar_t __v__ ) volatile
1326         { return fetch_xor( __v__ ) ^ __v__; }
1327
1328         friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
1329                                                                                                                                                  memory_order );
1330         friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
1331                                                                                                                                                          memory_order );
1332         friend wchar_t atomic_exchange_explicit( volatile atomic_wchar_t*,
1333                                                                                                                                                                          wchar_t, memory_order );
1334         friend bool atomic_compare_exchange_weak_explicit( volatile atomic_wchar_t*,
1335                                                                                                                                                                                                                  wchar_t*, wchar_t, memory_order, memory_order );
1336         friend bool atomic_compare_exchange_strong_explicit( volatile atomic_wchar_t*,
1337                                                                                                                                                                                                                          wchar_t*, wchar_t, memory_order, memory_order );
1338         friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
1339                                                                                                                                                                                 wchar_t, memory_order );
1340         friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
1341                                                                                                                                                                                 wchar_t, memory_order );
1342         friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
1343                                                                                                                                                                                 wchar_t, memory_order );
1344         friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
1345                                                                                                                                                                          wchar_t, memory_order );
1346         friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
1347                                                                                                                                                                                 wchar_t, memory_order );
1348
1349         CPP0X(private:)
1350 #endif
1351         wchar_t __f__;
1352 } atomic_wchar_t;
1353
1354
1355 #else
1356
1357 typedef atomic_int_least16_t atomic_char16_t;
1358 typedef atomic_int_least32_t atomic_char32_t;
1359 typedef atomic_int_least32_t atomic_wchar_t;
1360
1361 #endif
1362
1363
1364 #ifdef __cplusplus
1365
1366 template< typename T >
1367 struct atomic
1368 {
1369 #ifdef __cplusplus
1370
1371         bool is_lock_free() const volatile;
1372         void store( T, memory_order = memory_order_seq_cst ) volatile;
1373         T load( memory_order = memory_order_seq_cst ) volatile;
1374         T exchange( T __v__, memory_order = memory_order_seq_cst ) volatile;
1375         bool compare_exchange_weak( T&, T, memory_order, memory_order ) volatile;
1376         bool compare_exchange_strong( T&, T, memory_order, memory_order ) volatile;
1377         bool compare_exchange_weak( T&, T, memory_order = memory_order_seq_cst ) volatile;
1378         bool compare_exchange_strong( T&, T, memory_order = memory_order_seq_cst ) volatile;
1379
1380         CPP0X( atomic() = default; )
1381         CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) {
1382                 } )
1383         CPP0X( atomic( const atomic& ) = delete; )
1384         atomic& operator =( const atomic& ) CPP0X(=delete);
1385
1386         T operator =( T __v__ ) volatile
1387         { store( __v__ ); return __v__; }
1388
1389         CPP0X(private:)
1390 #endif
1391         T __f__;
1392 };
1393
1394 #endif
1395
1396 #ifdef __cplusplus
1397
1398 template<typename T> struct atomic< T* > : atomic_address
1399 {
1400         T* load( memory_order = memory_order_seq_cst ) volatile;
1401         T* exchange( T*, memory_order = memory_order_seq_cst ) volatile;
1402         bool compare_exchange_weak( T*&, T*, memory_order, memory_order ) volatile;
1403         bool compare_exchange_strong( T*&, T*, memory_order, memory_order ) volatile;
1404         bool compare_exchange_weak( T*&, T*,
1405                                                                                                                         memory_order = memory_order_seq_cst ) volatile;
1406         bool compare_exchange_strong( T*&, T*,
1407                                                                                                                                 memory_order = memory_order_seq_cst ) volatile;
1408         T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1409         T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
1410
1411         CPP0X( atomic() = default; )
1412         CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) {
1413                 } )
1414         CPP0X( atomic( const atomic& ) = delete; )
1415         atomic& operator =( const atomic& ) CPP0X(=delete);
1416
1417         T* operator =( T* __v__ ) volatile
1418         { store( __v__ ); return __v__; }
1419
1420         T* operator ++( int ) volatile
1421         { return fetch_add( 1 ); }
1422
1423         T* operator --( int ) volatile
1424         { return fetch_sub( 1 ); }
1425
1426         T* operator ++() volatile
1427         { return fetch_add( 1 ) + 1; }
1428
1429         T* operator --() volatile
1430         { return fetch_sub( 1 ) - 1; }
1431
1432         T* operator +=( T* __v__ ) volatile
1433         { return fetch_add( __v__ ) + __v__; }
1434
1435         T* operator -=( T* __v__ ) volatile
1436         { return fetch_sub( __v__ ) - __v__; }
1437 };
1438
1439 #endif
1440
1441 #ifdef __cplusplus
1442
1443
1444 template<> struct atomic< bool > : atomic_bool
1445 {
1446         CPP0X( atomic() = default; )
1447         CPP0X( constexpr explicit atomic( bool __v__ )
1448                                          : atomic_bool( __v__ ) {
1449                 } )
1450         CPP0X( atomic( const atomic& ) = delete; )
1451         atomic& operator =( const atomic& ) CPP0X(=delete);
1452
1453         bool operator =( bool __v__ ) volatile
1454         { store( __v__ ); return __v__; }
1455 };
1456
1457
1458 template<> struct atomic< void* > : atomic_address
1459 {
1460         CPP0X( atomic() = default; )
1461         CPP0X( constexpr explicit atomic( void* __v__ )
1462                                          : atomic_address( __v__ ) {
1463                 } )
1464         CPP0X( atomic( const atomic& ) = delete; )
1465         atomic& operator =( const atomic& ) CPP0X(=delete);
1466
1467         void* operator =( void* __v__ ) volatile
1468         { store( __v__ ); return __v__; }
1469 };
1470
1471
1472 template<> struct atomic< char > : atomic_char
1473 {
1474         CPP0X( atomic() = default; )
1475         CPP0X( constexpr explicit atomic( char __v__ )
1476                                          : atomic_char( __v__ ) {
1477                 } )
1478         CPP0X( atomic( const atomic& ) = delete; )
1479         atomic& operator =( const atomic& ) CPP0X(=delete);
1480
1481         char operator =( char __v__ ) volatile
1482         { store( __v__ ); return __v__; }
1483 };
1484
1485
1486 template<> struct atomic< signed char > : atomic_schar
1487 {
1488         CPP0X( atomic() = default; )
1489         CPP0X( constexpr explicit atomic( signed char __v__ )
1490                                          : atomic_schar( __v__ ) {
1491                 } )
1492         CPP0X( atomic( const atomic& ) = delete; )
1493         atomic& operator =( const atomic& ) CPP0X(=delete);
1494
1495         signed char operator =( signed char __v__ ) volatile
1496         { store( __v__ ); return __v__; }
1497 };
1498
1499
1500 template<> struct atomic< unsigned char > : atomic_uchar
1501 {
1502         CPP0X( atomic() = default; )
1503         CPP0X( constexpr explicit atomic( unsigned char __v__ )
1504                                          : atomic_uchar( __v__ ) {
1505                 } )
1506         CPP0X( atomic( const atomic& ) = delete; )
1507         atomic& operator =( const atomic& ) CPP0X(=delete);
1508
1509         unsigned char operator =( unsigned char __v__ ) volatile
1510         { store( __v__ ); return __v__; }
1511 };
1512
1513
1514 template<> struct atomic< short > : atomic_short
1515 {
1516         CPP0X( atomic() = default; )
1517         CPP0X( constexpr explicit atomic( short __v__ )
1518                                          : atomic_short( __v__ ) {
1519                 } )
1520         CPP0X( atomic( const atomic& ) = delete; )
1521         atomic& operator =( const atomic& ) CPP0X(=delete);
1522
1523         short operator =( short __v__ ) volatile
1524         { store( __v__ ); return __v__; }
1525 };
1526
1527
1528 template<> struct atomic< unsigned short > : atomic_ushort
1529 {
1530         CPP0X( atomic() = default; )
1531         CPP0X( constexpr explicit atomic( unsigned short __v__ )
1532                                          : atomic_ushort( __v__ ) {
1533                 } )
1534         CPP0X( atomic( const atomic& ) = delete; )
1535         atomic& operator =( const atomic& ) CPP0X(=delete);
1536
1537         unsigned short operator =( unsigned short __v__ ) volatile
1538         { store( __v__ ); return __v__; }
1539 };
1540
1541
1542 template<> struct atomic< int > : atomic_int
1543 {
1544         CPP0X( atomic() = default; )
1545         CPP0X( constexpr explicit atomic( int __v__ )
1546                                          : atomic_int( __v__ ) {
1547                 } )
1548         CPP0X( atomic( const atomic& ) = delete; )
1549         atomic& operator =( const atomic& ) CPP0X(=delete);
1550
1551         int operator =( int __v__ ) volatile
1552         { store( __v__ ); return __v__; }
1553 };
1554
1555
1556 template<> struct atomic< unsigned int > : atomic_uint
1557 {
1558         CPP0X( atomic() = default; )
1559         CPP0X( constexpr explicit atomic( unsigned int __v__ )
1560                                          : atomic_uint( __v__ ) {
1561                 } )
1562         CPP0X( atomic( const atomic& ) = delete; )
1563         atomic& operator =( const atomic& ) CPP0X(=delete);
1564
1565         unsigned int operator =( unsigned int __v__ ) volatile
1566         { store( __v__ ); return __v__; }
1567 };
1568
1569
1570 template<> struct atomic< long > : atomic_long
1571 {
1572         CPP0X( atomic() = default; )
1573         CPP0X( constexpr explicit atomic( long __v__ )
1574                                          : atomic_long( __v__ ) {
1575                 } )
1576         CPP0X( atomic( const atomic& ) = delete; )
1577         atomic& operator =( const atomic& ) CPP0X(=delete);
1578
1579         long operator =( long __v__ ) volatile
1580         { store( __v__ ); return __v__; }
1581 };
1582
1583
1584 template<> struct atomic< unsigned long > : atomic_ulong
1585 {
1586         CPP0X( atomic() = default; )
1587         CPP0X( constexpr explicit atomic( unsigned long __v__ )
1588                                          : atomic_ulong( __v__ ) {
1589                 } )
1590         CPP0X( atomic( const atomic& ) = delete; )
1591         atomic& operator =( const atomic& ) CPP0X(=delete);
1592
1593         unsigned long operator =( unsigned long __v__ ) volatile
1594         { store( __v__ ); return __v__; }
1595 };
1596
1597
1598 template<> struct atomic< long long > : atomic_llong
1599 {
1600         CPP0X( atomic() = default; )
1601         CPP0X( constexpr explicit atomic( long long __v__ )
1602                                          : atomic_llong( __v__ ) {
1603                 } )
1604         CPP0X( atomic( const atomic& ) = delete; )
1605         atomic& operator =( const atomic& ) CPP0X(=delete);
1606
1607         long long operator =( long long __v__ ) volatile
1608         { store( __v__ ); return __v__; }
1609 };
1610
1611
1612 template<> struct atomic< unsigned long long > : atomic_ullong
1613 {
1614         CPP0X( atomic() = default; )
1615         CPP0X( constexpr explicit atomic( unsigned long long __v__ )
1616                                          : atomic_ullong( __v__ ) {
1617                 } )
1618         CPP0X( atomic( const atomic& ) = delete; )
1619         atomic& operator =( const atomic& ) CPP0X(=delete);
1620
1621         unsigned long long operator =( unsigned long long __v__ ) volatile
1622         { store( __v__ ); return __v__; }
1623 };
1624
1625
1626 template<> struct atomic< wchar_t > : atomic_wchar_t
1627 {
1628         CPP0X( atomic() = default; )
1629         CPP0X( constexpr explicit atomic( wchar_t __v__ )
1630                                          : atomic_wchar_t( __v__ ) {
1631                 } )
1632         CPP0X( atomic( const atomic& ) = delete; )
1633         atomic& operator =( const atomic& ) CPP0X(=delete);
1634
1635         wchar_t operator =( wchar_t __v__ ) volatile
1636         { store( __v__ ); return __v__; }
1637 };
1638
1639
1640 #endif
1641
1642
1643 #ifdef __cplusplus
1644
1645
1646 inline bool atomic_is_lock_free
1647         ( const volatile atomic_bool* __a__ )
1648 { return false; }
1649
1650 inline bool atomic_load_explicit
1651         ( volatile atomic_bool* __a__, memory_order __x__ )
1652 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1653
1654 inline bool atomic_load
1655         ( volatile atomic_bool* __a__ ) { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1656
1657 inline void atomic_init
1658         ( volatile atomic_bool* __a__, bool __m__ )
1659 { _ATOMIC_INIT_( __a__, __m__ ); }
1660
1661 inline void atomic_store_explicit
1662         ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1663 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1664
1665 inline void atomic_store
1666         ( volatile atomic_bool* __a__, bool __m__ )
1667 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1668
1669 inline bool atomic_exchange_explicit
1670         ( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
1671 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1672
1673 inline bool atomic_exchange
1674         ( volatile atomic_bool* __a__, bool __m__ )
1675 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1676
1677 inline bool atomic_compare_exchange_weak_explicit
1678         ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1679         memory_order __x__, memory_order __y__ )
1680 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1681
1682 inline bool atomic_compare_exchange_strong_explicit
1683         ( volatile atomic_bool* __a__, bool* __e__, bool __m__,
1684         memory_order __x__, memory_order __y__ )
1685 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1686
1687 inline bool atomic_compare_exchange_weak
1688         ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1689 {
1690         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1691                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
1692 }
1693
1694 inline bool atomic_compare_exchange_strong
1695         ( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
1696 {
1697         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1698                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
1699 }
1700
1701
1702 inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
1703 { return false; }
1704
1705 inline void* atomic_load_explicit
1706         ( volatile atomic_address* __a__, memory_order __x__ )
1707 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1708
1709 inline void* atomic_load( volatile atomic_address* __a__ )
1710 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1711
1712 inline void atomic_init
1713         ( volatile atomic_address* __a__, void* __m__ )
1714 { _ATOMIC_INIT_( __a__, __m__ ); }
1715
1716 inline void atomic_store_explicit
1717         ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1718 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1719
1720 inline void atomic_store
1721         ( volatile atomic_address* __a__, void* __m__ )
1722 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1723
1724 inline void* atomic_exchange_explicit
1725         ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
1726 { return _ATOMIC_MODIFY_( __a__, =, __m__,  __x__ ); }
1727
1728 inline void* atomic_exchange
1729         ( volatile atomic_address* __a__, void* __m__ )
1730 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1731
1732 inline bool atomic_compare_exchange_weak_explicit
1733         ( volatile atomic_address* __a__, void** __e__, void* __m__,
1734         memory_order __x__, memory_order __y__ )
1735 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1736
1737 inline bool atomic_compare_exchange_strong_explicit
1738         ( volatile atomic_address* __a__, void** __e__, void* __m__,
1739         memory_order __x__, memory_order __y__ )
1740 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1741
1742 inline bool atomic_compare_exchange_weak
1743         ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1744 {
1745         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1746                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
1747 }
1748
1749 inline bool atomic_compare_exchange_strong
1750         ( volatile atomic_address* __a__, void** __e__, void* __m__ )
1751 {
1752         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1753                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
1754 }
1755
1756
1757 inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
1758 { return false; }
1759
1760 inline char atomic_load_explicit
1761         ( volatile atomic_char* __a__, memory_order __x__ )
1762 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1763
1764 inline char atomic_load( volatile atomic_char* __a__ )
1765 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1766
1767 inline void atomic_init
1768         ( volatile atomic_char* __a__, char __m__ )
1769 { _ATOMIC_INIT_( __a__, __m__ ); }
1770
1771 inline void atomic_store_explicit
1772         ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1773 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1774
1775 inline void atomic_store
1776         ( volatile atomic_char* __a__, char __m__ )
1777 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1778
1779 inline char atomic_exchange_explicit
1780         ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
1781 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1782
1783 inline char atomic_exchange
1784         ( volatile atomic_char* __a__, char __m__ )
1785 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1786
1787 inline bool atomic_compare_exchange_weak_explicit
1788         ( volatile atomic_char* __a__, char* __e__, char __m__,
1789         memory_order __x__, memory_order __y__ )
1790 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1791
1792 inline bool atomic_compare_exchange_strong_explicit
1793         ( volatile atomic_char* __a__, char* __e__, char __m__,
1794         memory_order __x__, memory_order __y__ )
1795 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1796
1797 inline bool atomic_compare_exchange_weak
1798         ( volatile atomic_char* __a__, char* __e__, char __m__ )
1799 {
1800         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1801                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
1802 }
1803
1804 inline bool atomic_compare_exchange_strong
1805         ( volatile atomic_char* __a__, char* __e__, char __m__ )
1806 {
1807         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1808                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
1809 }
1810
1811
1812 inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
1813 { return false; }
1814
1815 inline signed char atomic_load_explicit
1816         ( volatile atomic_schar* __a__, memory_order __x__ )
1817 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1818
1819 inline signed char atomic_load( volatile atomic_schar* __a__ )
1820 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1821
1822 inline void atomic_init
1823         ( volatile atomic_schar* __a__, signed char __m__ )
1824 { _ATOMIC_INIT_( __a__, __m__ ); }
1825
1826 inline void atomic_store_explicit
1827         ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1828 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1829
1830 inline void atomic_store
1831         ( volatile atomic_schar* __a__, signed char __m__ )
1832 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1833
1834 inline signed char atomic_exchange_explicit
1835         ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
1836 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1837
1838 inline signed char atomic_exchange
1839         ( volatile atomic_schar* __a__, signed char __m__ )
1840 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1841
1842 inline bool atomic_compare_exchange_weak_explicit
1843         ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1844         memory_order __x__, memory_order __y__ )
1845 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1846
1847 inline bool atomic_compare_exchange_strong_explicit
1848         ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
1849         memory_order __x__, memory_order __y__ )
1850 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1851
1852 inline bool atomic_compare_exchange_weak
1853         ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1854 {
1855         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1856                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
1857 }
1858
1859 inline bool atomic_compare_exchange_strong
1860         ( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
1861 {
1862         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1863                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
1864 }
1865
1866
1867 inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
1868 { return false; }
1869
1870 inline unsigned char atomic_load_explicit
1871         ( volatile atomic_uchar* __a__, memory_order __x__ )
1872 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1873
1874 inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
1875 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1876
1877 inline void atomic_init
1878         ( volatile atomic_uchar* __a__, unsigned char __m__ )
1879 { _ATOMIC_INIT_( __a__, __m__ ); }
1880
1881 inline void atomic_store_explicit
1882         ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1883 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1884
1885 inline void atomic_store
1886         ( volatile atomic_uchar* __a__, unsigned char __m__ )
1887 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1888
1889 inline unsigned char atomic_exchange_explicit
1890         ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
1891 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1892
1893 inline unsigned char atomic_exchange
1894         ( volatile atomic_uchar* __a__, unsigned char __m__ )
1895 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1896
1897 inline bool atomic_compare_exchange_weak_explicit
1898         ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1899         memory_order __x__, memory_order __y__ )
1900 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1901
1902 inline bool atomic_compare_exchange_strong_explicit
1903         ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
1904         memory_order __x__, memory_order __y__ )
1905 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1906
1907 inline bool atomic_compare_exchange_weak
1908         ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1909 {
1910         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1911                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
1912 }
1913
1914 inline bool atomic_compare_exchange_strong
1915         ( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
1916 {
1917         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1918                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
1919 }
1920
1921
1922 inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
1923 { return false; }
1924
1925 inline short atomic_load_explicit
1926         ( volatile atomic_short* __a__, memory_order __x__ )
1927 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1928
1929 inline short atomic_load( volatile atomic_short* __a__ )
1930 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1931
1932 inline void atomic_init
1933         ( volatile atomic_short* __a__, short __m__ )
1934 { _ATOMIC_INIT_( __a__, __m__ ); }
1935
1936 inline void atomic_store_explicit
1937         ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1938 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1939
1940 inline void atomic_store
1941         ( volatile atomic_short* __a__, short __m__ )
1942 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1943
1944 inline short atomic_exchange_explicit
1945         ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
1946 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
1947
1948 inline short atomic_exchange
1949         ( volatile atomic_short* __a__, short __m__ )
1950 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
1951
1952 inline bool atomic_compare_exchange_weak_explicit
1953         ( volatile atomic_short* __a__, short* __e__, short __m__,
1954         memory_order __x__, memory_order __y__ )
1955 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
1956
1957 inline bool atomic_compare_exchange_strong_explicit
1958         ( volatile atomic_short* __a__, short* __e__, short __m__,
1959         memory_order __x__, memory_order __y__ )
1960 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
1961
1962 inline bool atomic_compare_exchange_weak
1963         ( volatile atomic_short* __a__, short* __e__, short __m__ )
1964 {
1965         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
1966                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
1967 }
1968
1969 inline bool atomic_compare_exchange_strong
1970         ( volatile atomic_short* __a__, short* __e__, short __m__ )
1971 {
1972         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
1973                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
1974 }
1975
1976
1977 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
1978 { return false; }
1979
1980 inline unsigned short atomic_load_explicit
1981         ( volatile atomic_ushort* __a__, memory_order __x__ )
1982 { return _ATOMIC_LOAD_( __a__, __x__ ); }
1983
1984 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
1985 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
1986
1987 inline void atomic_init
1988         ( volatile atomic_ushort* __a__, unsigned short __m__ )
1989 { _ATOMIC_INIT_( __a__, __m__ ); }
1990
1991 inline void atomic_store_explicit
1992         ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
1993 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
1994
1995 inline void atomic_store
1996         ( volatile atomic_ushort* __a__, unsigned short __m__ )
1997 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
1998
1999 inline unsigned short atomic_exchange_explicit
2000         ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2001 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2002
2003 inline unsigned short atomic_exchange
2004         ( volatile atomic_ushort* __a__, unsigned short __m__ )
2005 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2006
2007 inline bool atomic_compare_exchange_weak_explicit
2008         ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
2009         memory_order __x__, memory_order __y__ )
2010 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2011
2012 inline bool atomic_compare_exchange_strong_explicit
2013         ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
2014         memory_order __x__, memory_order __y__ )
2015 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2016
2017 inline bool atomic_compare_exchange_weak
2018         ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
2019 {
2020         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2021                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2022 }
2023
2024 inline bool atomic_compare_exchange_strong
2025         ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
2026 {
2027         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2028                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2029 }
2030
2031
2032 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
2033 { return false; }
2034
2035 inline int atomic_load_explicit
2036         ( volatile atomic_int* __a__, memory_order __x__ )
2037 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2038
2039 inline int atomic_load( volatile atomic_int* __a__ )
2040 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2041
2042 inline void atomic_init
2043         ( volatile atomic_int* __a__, int __m__ )
2044 { _ATOMIC_INIT_( __a__, __m__ ); }
2045
2046 inline void atomic_store_explicit
2047         ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2048 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2049
2050 inline void atomic_store
2051         ( volatile atomic_int* __a__, int __m__ )
2052 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2053
2054 inline int atomic_exchange_explicit
2055         ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2056 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2057
2058 inline int atomic_exchange
2059         ( volatile atomic_int* __a__, int __m__ )
2060 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2061
2062 inline bool atomic_compare_exchange_weak_explicit
2063         ( volatile atomic_int* __a__, int* __e__, int __m__,
2064         memory_order __x__, memory_order __y__ )
2065 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2066
2067 inline bool atomic_compare_exchange_strong_explicit
2068         ( volatile atomic_int* __a__, int* __e__, int __m__,
2069         memory_order __x__, memory_order __y__ )
2070 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2071
2072 inline bool atomic_compare_exchange_weak
2073         ( volatile atomic_int* __a__, int* __e__, int __m__ )
2074 {
2075         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2076                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2077 }
2078
2079 inline bool atomic_compare_exchange_strong
2080         ( volatile atomic_int* __a__, int* __e__, int __m__ )
2081 {
2082         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2083                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2084 }
2085
2086
2087 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
2088 { return false; }
2089
2090 inline unsigned int atomic_load_explicit
2091         ( volatile atomic_uint* __a__, memory_order __x__ )
2092 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2093
2094 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
2095 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2096
2097 inline void atomic_init
2098         ( volatile atomic_uint* __a__, unsigned int __m__ )
2099 { _ATOMIC_INIT_( __a__, __m__ ); }
2100
2101 inline void atomic_store_explicit
2102         ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2103 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2104
2105 inline void atomic_store
2106         ( volatile atomic_uint* __a__, unsigned int __m__ )
2107 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2108
2109 inline unsigned int atomic_exchange_explicit
2110         ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2111 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2112
2113 inline unsigned int atomic_exchange
2114         ( volatile atomic_uint* __a__, unsigned int __m__ )
2115 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2116
2117 inline bool atomic_compare_exchange_weak_explicit
2118         ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2119         memory_order __x__, memory_order __y__ )
2120 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2121
2122 inline bool atomic_compare_exchange_strong_explicit
2123         ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
2124         memory_order __x__, memory_order __y__ )
2125 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2126
2127 inline bool atomic_compare_exchange_weak
2128         ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2129 {
2130         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2131                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2132 }
2133
2134 inline bool atomic_compare_exchange_strong
2135         ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
2136 {
2137         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2138                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2139 }
2140
2141
2142 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
2143 { return false; }
2144
2145 inline long atomic_load_explicit
2146         ( volatile atomic_long* __a__, memory_order __x__ )
2147 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2148
2149 inline long atomic_load( volatile atomic_long* __a__ )
2150 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2151
2152 inline void atomic_init
2153         ( volatile atomic_long* __a__, long __m__ )
2154 { _ATOMIC_INIT_( __a__, __m__ ); }
2155
2156 inline void atomic_store_explicit
2157         ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2158 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2159
2160 inline void atomic_store
2161         ( volatile atomic_long* __a__, long __m__ )
2162 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2163
2164 inline long atomic_exchange_explicit
2165         ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2166 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2167
2168 inline long atomic_exchange
2169         ( volatile atomic_long* __a__, long __m__ )
2170 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2171
2172 inline bool atomic_compare_exchange_weak_explicit
2173         ( volatile atomic_long* __a__, long* __e__, long __m__,
2174         memory_order __x__, memory_order __y__ )
2175 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2176
2177 inline bool atomic_compare_exchange_strong_explicit
2178         ( volatile atomic_long* __a__, long* __e__, long __m__,
2179         memory_order __x__, memory_order __y__ )
2180 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2181
2182 inline bool atomic_compare_exchange_weak
2183         ( volatile atomic_long* __a__, long* __e__, long __m__ )
2184 {
2185         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2186                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2187 }
2188
2189 inline bool atomic_compare_exchange_strong
2190         ( volatile atomic_long* __a__, long* __e__, long __m__ )
2191 {
2192         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2193                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2194 }
2195
2196
2197 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
2198 { return false; }
2199
2200 inline unsigned long atomic_load_explicit
2201         ( volatile atomic_ulong* __a__, memory_order __x__ )
2202 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2203
2204 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
2205 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2206
2207 inline void atomic_init
2208         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2209 { _ATOMIC_INIT_( __a__, __m__ ); }
2210
2211 inline void atomic_store_explicit
2212         ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2213 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2214
2215 inline void atomic_store
2216         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2217 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2218
2219 inline unsigned long atomic_exchange_explicit
2220         ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2221 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2222
2223 inline unsigned long atomic_exchange
2224         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2225 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2226
2227 inline bool atomic_compare_exchange_weak_explicit
2228         ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2229         memory_order __x__, memory_order __y__ )
2230 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2231
2232 inline bool atomic_compare_exchange_strong_explicit
2233         ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
2234         memory_order __x__, memory_order __y__ )
2235 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2236
2237 inline bool atomic_compare_exchange_weak
2238         ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2239 {
2240         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2241                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2242 }
2243
2244 inline bool atomic_compare_exchange_strong
2245         ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
2246 {
2247         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2248                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2249 }
2250
2251
2252 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
2253 { return false; }
2254
2255 inline long long atomic_load_explicit
2256         ( volatile atomic_llong* __a__, memory_order __x__ )
2257 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2258
2259 inline long long atomic_load( volatile atomic_llong* __a__ )
2260 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2261
2262 inline void atomic_init
2263         ( volatile atomic_llong* __a__, long long __m__ )
2264 { _ATOMIC_INIT_( __a__, __m__ ); }
2265
2266 inline void atomic_store_explicit
2267         ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2268 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2269
2270 inline void atomic_store
2271         ( volatile atomic_llong* __a__, long long __m__ )
2272 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2273
2274 inline long long atomic_exchange_explicit
2275         ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2276 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2277
2278 inline long long atomic_exchange
2279         ( volatile atomic_llong* __a__, long long __m__ )
2280 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2281
2282 inline bool atomic_compare_exchange_weak_explicit
2283         ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2284         memory_order __x__, memory_order __y__ )
2285 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2286
2287 inline bool atomic_compare_exchange_strong_explicit
2288         ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
2289         memory_order __x__, memory_order __y__ )
2290 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2291
2292 inline bool atomic_compare_exchange_weak
2293         ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2294 {
2295         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2296                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2297 }
2298
2299 inline bool atomic_compare_exchange_strong
2300         ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
2301 {
2302         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2303                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2304 }
2305
2306
2307 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
2308 { return false; }
2309
2310 inline unsigned long long atomic_load_explicit
2311         ( volatile atomic_ullong* __a__, memory_order __x__ )
2312 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2313
2314 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
2315 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2316
2317 inline void atomic_init
2318         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2319 { _ATOMIC_INIT_( __a__, __m__ ); }
2320
2321 inline void atomic_store_explicit
2322         ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2323 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2324
2325 inline void atomic_store
2326         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2327 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2328
2329 inline unsigned long long atomic_exchange_explicit
2330         ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2331 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2332
2333 inline unsigned long long atomic_exchange
2334         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2335 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2336
2337 inline bool atomic_compare_exchange_weak_explicit
2338         ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2339         memory_order __x__, memory_order __y__ )
2340 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2341
2342 inline bool atomic_compare_exchange_strong_explicit
2343         ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
2344         memory_order __x__, memory_order __y__ )
2345 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2346
2347 inline bool atomic_compare_exchange_weak
2348         ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2349 {
2350         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2351                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2352 }
2353
2354 inline bool atomic_compare_exchange_strong
2355         ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
2356 {
2357         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2358                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2359 }
2360
2361
2362 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
2363 { return false; }
2364
2365 inline wchar_t atomic_load_explicit
2366         ( volatile atomic_wchar_t* __a__, memory_order __x__ )
2367 { return _ATOMIC_LOAD_( __a__, __x__ ); }
2368
2369 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
2370 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
2371
2372 inline void atomic_init
2373         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2374 { _ATOMIC_INIT_( __a__, __m__ ); }
2375
2376 inline void atomic_store_explicit
2377         ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2378 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
2379
2380 inline void atomic_store
2381         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2382 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
2383
2384 inline wchar_t atomic_exchange_explicit
2385         ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2386 { return _ATOMIC_MODIFY_( __a__, =, __m__, __x__ ); }
2387
2388 inline wchar_t atomic_exchange
2389         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2390 { return atomic_exchange_explicit( __a__, __m__, memory_order_seq_cst ); }
2391
2392 inline bool atomic_compare_exchange_weak_explicit
2393         ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2394         memory_order __x__, memory_order __y__ )
2395 { return _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ ); }
2396
2397 inline bool atomic_compare_exchange_strong_explicit
2398         ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
2399         memory_order __x__, memory_order __y__ )
2400 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
2401
2402 inline bool atomic_compare_exchange_weak
2403         ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2404 {
2405         return atomic_compare_exchange_weak_explicit( __a__, __e__, __m__,
2406                                                                                                                                                                                                 memory_order_seq_cst, memory_order_seq_cst );
2407 }
2408
2409 inline bool atomic_compare_exchange_strong
2410         ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
2411 {
2412         return atomic_compare_exchange_strong_explicit( __a__, __e__, __m__,
2413                                                                                                                                                                                                         memory_order_seq_cst, memory_order_seq_cst );
2414 }
2415
2416
2417 inline void* atomic_fetch_add_explicit
2418         ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2419 {
2420         volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);
2421         __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__))model_rmwr_action((void *)__p__, __x__);
2422         __typeof__((__a__)->__f__) __copy__= __old__;
2423         __copy__ = (void *) (((char *)__copy__) + __m__);
2424         model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);
2425         return __old__;
2426 }
2427
2428 inline void* atomic_fetch_add
2429         ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2430 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2431
2432
2433 inline void* atomic_fetch_sub_explicit
2434         ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
2435 {
2436         volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__);
2437         __typeof__((__a__)->__f__) __old__=(__typeof__((__a__)->__f__))model_rmwr_action((void *)__p__, __x__);
2438         __typeof__((__a__)->__f__) __copy__= __old__;
2439         __copy__ = (void *) (((char *)__copy__) - __m__);
2440         model_rmw_action((void *)__p__, __x__, (uint64_t) __copy__);
2441         return __old__;
2442 }
2443
2444 inline void* atomic_fetch_sub
2445         ( volatile atomic_address* __a__, ptrdiff_t __m__ )
2446 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2447
2448 inline char atomic_fetch_add_explicit
2449         ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2450 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2451
2452 inline char atomic_fetch_add
2453         ( volatile atomic_char* __a__, char __m__ )
2454 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2455
2456
2457 inline char atomic_fetch_sub_explicit
2458         ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2459 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2460
2461 inline char atomic_fetch_sub
2462         ( volatile atomic_char* __a__, char __m__ )
2463 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2464
2465
2466 inline char atomic_fetch_and_explicit
2467         ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2468 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2469
2470 inline char atomic_fetch_and
2471         ( volatile atomic_char* __a__, char __m__ )
2472 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2473
2474
2475 inline char atomic_fetch_or_explicit
2476         ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2477 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2478
2479 inline char atomic_fetch_or
2480         ( volatile atomic_char* __a__, char __m__ )
2481 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2482
2483
2484 inline char atomic_fetch_xor_explicit
2485         ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
2486 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2487
2488 inline char atomic_fetch_xor
2489         ( volatile atomic_char* __a__, char __m__ )
2490 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2491
2492
2493 inline signed char atomic_fetch_add_explicit
2494         ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2495 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2496
2497 inline signed char atomic_fetch_add
2498         ( volatile atomic_schar* __a__, signed char __m__ )
2499 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2500
2501
2502 inline signed char atomic_fetch_sub_explicit
2503         ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2504 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2505
2506 inline signed char atomic_fetch_sub
2507         ( volatile atomic_schar* __a__, signed char __m__ )
2508 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2509
2510
2511 inline signed char atomic_fetch_and_explicit
2512         ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2513 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2514
2515 inline signed char atomic_fetch_and
2516         ( volatile atomic_schar* __a__, signed char __m__ )
2517 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2518
2519
2520 inline signed char atomic_fetch_or_explicit
2521         ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2522 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2523
2524 inline signed char atomic_fetch_or
2525         ( volatile atomic_schar* __a__, signed char __m__ )
2526 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2527
2528
2529 inline signed char atomic_fetch_xor_explicit
2530         ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
2531 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2532
2533 inline signed char atomic_fetch_xor
2534         ( volatile atomic_schar* __a__, signed char __m__ )
2535 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2536
2537
2538 inline unsigned char atomic_fetch_add_explicit
2539         ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2540 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2541
2542 inline unsigned char atomic_fetch_add
2543         ( volatile atomic_uchar* __a__, unsigned char __m__ )
2544 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2545
2546
2547 inline unsigned char atomic_fetch_sub_explicit
2548         ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2549 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2550
2551 inline unsigned char atomic_fetch_sub
2552         ( volatile atomic_uchar* __a__, unsigned char __m__ )
2553 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2554
2555
2556 inline unsigned char atomic_fetch_and_explicit
2557         ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2558 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2559
2560 inline unsigned char atomic_fetch_and
2561         ( volatile atomic_uchar* __a__, unsigned char __m__ )
2562 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2563
2564
2565 inline unsigned char atomic_fetch_or_explicit
2566         ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2567 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2568
2569 inline unsigned char atomic_fetch_or
2570         ( volatile atomic_uchar* __a__, unsigned char __m__ )
2571 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2572
2573
2574 inline unsigned char atomic_fetch_xor_explicit
2575         ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
2576 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2577
2578 inline unsigned char atomic_fetch_xor
2579         ( volatile atomic_uchar* __a__, unsigned char __m__ )
2580 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2581
2582
2583 inline short atomic_fetch_add_explicit
2584         ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2585 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2586
2587 inline short atomic_fetch_add
2588         ( volatile atomic_short* __a__, short __m__ )
2589 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2590
2591
2592 inline short atomic_fetch_sub_explicit
2593         ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2594 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2595
2596 inline short atomic_fetch_sub
2597         ( volatile atomic_short* __a__, short __m__ )
2598 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2599
2600
2601 inline short atomic_fetch_and_explicit
2602         ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2603 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2604
2605 inline short atomic_fetch_and
2606         ( volatile atomic_short* __a__, short __m__ )
2607 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2608
2609
2610 inline short atomic_fetch_or_explicit
2611         ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2612 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2613
2614 inline short atomic_fetch_or
2615         ( volatile atomic_short* __a__, short __m__ )
2616 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2617
2618
2619 inline short atomic_fetch_xor_explicit
2620         ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
2621 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2622
2623 inline short atomic_fetch_xor
2624         ( volatile atomic_short* __a__, short __m__ )
2625 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2626
2627
2628 inline unsigned short atomic_fetch_add_explicit
2629         ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2630 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2631
2632 inline unsigned short atomic_fetch_add
2633         ( volatile atomic_ushort* __a__, unsigned short __m__ )
2634 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2635
2636
2637 inline unsigned short atomic_fetch_sub_explicit
2638         ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2639 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2640
2641 inline unsigned short atomic_fetch_sub
2642         ( volatile atomic_ushort* __a__, unsigned short __m__ )
2643 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2644
2645
2646 inline unsigned short atomic_fetch_and_explicit
2647         ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2648 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2649
2650 inline unsigned short atomic_fetch_and
2651         ( volatile atomic_ushort* __a__, unsigned short __m__ )
2652 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2653
2654
2655 inline unsigned short atomic_fetch_or_explicit
2656         ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2657 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2658
2659 inline unsigned short atomic_fetch_or
2660         ( volatile atomic_ushort* __a__, unsigned short __m__ )
2661 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2662
2663
2664 inline unsigned short atomic_fetch_xor_explicit
2665         ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
2666 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2667
2668 inline unsigned short atomic_fetch_xor
2669         ( volatile atomic_ushort* __a__, unsigned short __m__ )
2670 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2671
2672
2673 inline int atomic_fetch_add_explicit
2674         ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2675 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2676
2677 inline int atomic_fetch_add
2678         ( volatile atomic_int* __a__, int __m__ )
2679 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2680
2681
2682 inline int atomic_fetch_sub_explicit
2683         ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2684 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2685
2686 inline int atomic_fetch_sub
2687         ( volatile atomic_int* __a__, int __m__ )
2688 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2689
2690
2691 inline int atomic_fetch_and_explicit
2692         ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2693 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2694
2695 inline int atomic_fetch_and
2696         ( volatile atomic_int* __a__, int __m__ )
2697 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2698
2699
2700 inline int atomic_fetch_or_explicit
2701         ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2702 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2703
2704 inline int atomic_fetch_or
2705         ( volatile atomic_int* __a__, int __m__ )
2706 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2707
2708
2709 inline int atomic_fetch_xor_explicit
2710         ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
2711 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2712
2713 inline int atomic_fetch_xor
2714         ( volatile atomic_int* __a__, int __m__ )
2715 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2716
2717
2718 inline unsigned int atomic_fetch_add_explicit
2719         ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2720 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2721
2722 inline unsigned int atomic_fetch_add
2723         ( volatile atomic_uint* __a__, unsigned int __m__ )
2724 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2725
2726
2727 inline unsigned int atomic_fetch_sub_explicit
2728         ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2729 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2730
2731 inline unsigned int atomic_fetch_sub
2732         ( volatile atomic_uint* __a__, unsigned int __m__ )
2733 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2734
2735
2736 inline unsigned int atomic_fetch_and_explicit
2737         ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2738 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2739
2740 inline unsigned int atomic_fetch_and
2741         ( volatile atomic_uint* __a__, unsigned int __m__ )
2742 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2743
2744
2745 inline unsigned int atomic_fetch_or_explicit
2746         ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2747 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2748
2749 inline unsigned int atomic_fetch_or
2750         ( volatile atomic_uint* __a__, unsigned int __m__ )
2751 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2752
2753
2754 inline unsigned int atomic_fetch_xor_explicit
2755         ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
2756 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2757
2758 inline unsigned int atomic_fetch_xor
2759         ( volatile atomic_uint* __a__, unsigned int __m__ )
2760 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2761
2762
2763 inline long atomic_fetch_add_explicit
2764         ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2765 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2766
2767 inline long atomic_fetch_add
2768         ( volatile atomic_long* __a__, long __m__ )
2769 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2770
2771
2772 inline long atomic_fetch_sub_explicit
2773         ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2774 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2775
2776 inline long atomic_fetch_sub
2777         ( volatile atomic_long* __a__, long __m__ )
2778 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2779
2780
2781 inline long atomic_fetch_and_explicit
2782         ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2783 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2784
2785 inline long atomic_fetch_and
2786         ( volatile atomic_long* __a__, long __m__ )
2787 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2788
2789
2790 inline long atomic_fetch_or_explicit
2791         ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2792 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2793
2794 inline long atomic_fetch_or
2795         ( volatile atomic_long* __a__, long __m__ )
2796 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2797
2798
2799 inline long atomic_fetch_xor_explicit
2800         ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
2801 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2802
2803 inline long atomic_fetch_xor
2804         ( volatile atomic_long* __a__, long __m__ )
2805 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2806
2807
2808 inline unsigned long atomic_fetch_add_explicit
2809         ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2810 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2811
2812 inline unsigned long atomic_fetch_add
2813         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2814 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2815
2816
2817 inline unsigned long atomic_fetch_sub_explicit
2818         ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2819 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2820
2821 inline unsigned long atomic_fetch_sub
2822         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2823 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2824
2825
2826 inline unsigned long atomic_fetch_and_explicit
2827         ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2828 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2829
2830 inline unsigned long atomic_fetch_and
2831         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2832 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2833
2834
2835 inline unsigned long atomic_fetch_or_explicit
2836         ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2837 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2838
2839 inline unsigned long atomic_fetch_or
2840         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2841 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2842
2843
2844 inline unsigned long atomic_fetch_xor_explicit
2845         ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
2846 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2847
2848 inline unsigned long atomic_fetch_xor
2849         ( volatile atomic_ulong* __a__, unsigned long __m__ )
2850 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2851
2852
2853 inline long long atomic_fetch_add_explicit
2854         ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2855 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2856
2857 inline long long atomic_fetch_add
2858         ( volatile atomic_llong* __a__, long long __m__ )
2859 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2860
2861
2862 inline long long atomic_fetch_sub_explicit
2863         ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2864 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2865
2866 inline long long atomic_fetch_sub
2867         ( volatile atomic_llong* __a__, long long __m__ )
2868 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2869
2870
2871 inline long long atomic_fetch_and_explicit
2872         ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2873 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2874
2875 inline long long atomic_fetch_and
2876         ( volatile atomic_llong* __a__, long long __m__ )
2877 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2878
2879
2880 inline long long atomic_fetch_or_explicit
2881         ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2882 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2883
2884 inline long long atomic_fetch_or
2885         ( volatile atomic_llong* __a__, long long __m__ )
2886 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2887
2888
2889 inline long long atomic_fetch_xor_explicit
2890         ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
2891 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2892
2893 inline long long atomic_fetch_xor
2894         ( volatile atomic_llong* __a__, long long __m__ )
2895 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2896
2897
2898 inline unsigned long long atomic_fetch_add_explicit
2899         ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2900 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2901
2902 inline unsigned long long atomic_fetch_add
2903         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2904 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2905
2906
2907 inline unsigned long long atomic_fetch_sub_explicit
2908         ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2909 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2910
2911 inline unsigned long long atomic_fetch_sub
2912         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2913 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2914
2915
2916 inline unsigned long long atomic_fetch_and_explicit
2917         ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2918 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2919
2920 inline unsigned long long atomic_fetch_and
2921         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2922 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2923
2924
2925 inline unsigned long long atomic_fetch_or_explicit
2926         ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2927 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2928
2929 inline unsigned long long atomic_fetch_or
2930         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2931 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2932
2933
2934 inline unsigned long long atomic_fetch_xor_explicit
2935         ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
2936 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2937
2938 inline unsigned long long atomic_fetch_xor
2939         ( volatile atomic_ullong* __a__, unsigned long long __m__ )
2940 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2941
2942
2943 inline wchar_t atomic_fetch_add_explicit
2944         ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2945 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
2946
2947 inline wchar_t atomic_fetch_add
2948         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2949 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
2950
2951
2952 inline wchar_t atomic_fetch_sub_explicit
2953         ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2954 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
2955
2956 inline wchar_t atomic_fetch_sub
2957         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2958 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
2959
2960
2961 inline wchar_t atomic_fetch_and_explicit
2962         ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2963 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
2964
2965 inline wchar_t atomic_fetch_and
2966         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2967 { return atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
2968
2969
2970 inline wchar_t atomic_fetch_or_explicit
2971         ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2972 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
2973
2974 inline wchar_t atomic_fetch_or
2975         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2976 { return atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
2977
2978
2979 inline wchar_t atomic_fetch_xor_explicit
2980         ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
2981 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
2982
2983 inline wchar_t atomic_fetch_xor
2984         ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
2985 { return atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
2986
2987
2988 #else
2989
2990
2991 #define atomic_is_lock_free( __a__ ) \
2992         false
2993
2994 #define atomic_load( __a__ ) \
2995         _ATOMIC_LOAD_( __a__, memory_order_seq_cst )
2996
2997 #define atomic_load_explicit( __a__, __x__ ) \
2998         _ATOMIC_LOAD_( __a__, __x__ )
2999
3000 #define atomic_init( __a__, __m__ ) \
3001         _ATOMIC_INIT_( __a__, __m__ )
3002
3003 #define atomic_store( __a__, __m__ ) \
3004         _ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )
3005
3006 #define atomic_store_explicit( __a__, __m__, __x__ ) \
3007         _ATOMIC_STORE_( __a__, __m__, __x__ )
3008
3009 #define atomic_exchange( __a__, __m__ ) \
3010         _ATOMIC_MODIFY_( __a__, =, __m__, memory_order_seq_cst )
3011
3012 #define atomic_exchange_explicit( __a__, __m__, __x__ ) \
3013         _ATOMIC_MODIFY_( __a__, =, __m__, __x__ )
3014
3015 #define atomic_compare_exchange_weak( __a__, __e__, __m__ ) \
3016         _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, memory_order_seq_cst )
3017
3018 #define atomic_compare_exchange_strong( __a__, __e__, __m__ ) \
3019         _ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )
3020
3021 #define atomic_compare_exchange_weak_explicit( __a__, __e__, __m__, __x__, __y__ ) \
3022         _ATOMIC_CMPSWP_WEAK_( __a__, __e__, __m__, __x__ )
3023
3024 #define atomic_compare_exchange_strong_explicit( __a__, __e__, __m__, __x__, __y__ ) \
3025         _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )
3026
3027
3028 #define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
3029         _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )
3030
3031 #define atomic_fetch_add( __a__, __m__ ) \
3032         _ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )
3033
3034
3035 #define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
3036         _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )
3037
3038 #define atomic_fetch_sub( __a__, __m__ ) \
3039         _ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )
3040
3041
3042 #define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
3043         _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )
3044
3045 #define atomic_fetch_and( __a__, __m__ ) \
3046         _ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )
3047
3048
3049 #define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
3050         _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )
3051
3052 #define atomic_fetch_or( __a__, __m__ ) \
3053         _ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )
3054
3055
3056 #define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
3057         _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )
3058
3059 #define atomic_fetch_xor( __a__, __m__ ) \
3060         _ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
3061
3062
3063 #endif
3064
3065
3066 #ifdef __cplusplus
3067
3068
3069 inline bool atomic_bool::is_lock_free() const volatile
3070 { return false; }
3071
3072 inline void atomic_bool::store
3073         ( bool __m__, memory_order __x__ ) volatile
3074 { atomic_store_explicit( this, __m__, __x__ ); }
3075
3076 inline bool atomic_bool::load
3077         ( memory_order __x__ ) volatile
3078 { return atomic_load_explicit( this, __x__ ); }
3079
3080 inline bool atomic_bool::exchange
3081         ( bool __m__, memory_order __x__ ) volatile
3082 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3083
3084 inline bool atomic_bool::compare_exchange_weak
3085         ( bool& __e__, bool __m__,
3086         memory_order __x__, memory_order __y__ ) volatile
3087 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3088
3089 inline bool atomic_bool::compare_exchange_strong
3090         ( bool& __e__, bool __m__,
3091         memory_order __x__, memory_order __y__ ) volatile
3092 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3093
3094 inline bool atomic_bool::compare_exchange_weak
3095         ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3096 {
3097         return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3098                                                                                                                                                                                                 __x__ == memory_order_acq_rel ? memory_order_acquire :
3099                                                                                                                                                                                                 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3100 }
3101
3102 inline bool atomic_bool::compare_exchange_strong
3103         ( bool& __e__, bool __m__, memory_order __x__ ) volatile
3104 {
3105         return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3106                                                                                                                                                                                                         __x__ == memory_order_acq_rel ? memory_order_acquire :
3107                                                                                                                                                                                                         __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3108 }
3109
3110
3111 inline bool atomic_address::is_lock_free() const volatile
3112 { return false; }
3113
3114 inline void atomic_address::store
3115         ( void* __m__, memory_order __x__ ) volatile
3116 { atomic_store_explicit( this, __m__, __x__ ); }
3117
3118 inline void* atomic_address::load
3119         ( memory_order __x__ ) volatile
3120 { return atomic_load_explicit( this, __x__ ); }
3121
3122 inline void* atomic_address::exchange
3123         ( void* __m__, memory_order __x__ ) volatile
3124 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3125
3126 inline bool atomic_address::compare_exchange_weak
3127         ( void*& __e__, void* __m__,
3128         memory_order __x__, memory_order __y__ ) volatile
3129 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3130
3131 inline bool atomic_address::compare_exchange_strong
3132         ( void*& __e__, void* __m__,
3133         memory_order __x__, memory_order __y__ ) volatile
3134 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3135
3136 inline bool atomic_address::compare_exchange_weak
3137         ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3138 {
3139         return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3140                                                                                                                                                                                                 __x__ == memory_order_acq_rel ? memory_order_acquire :
3141                                                                                                                                                                                                 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3142 }
3143
3144 inline bool atomic_address::compare_exchange_strong
3145         ( void*& __e__, void* __m__, memory_order __x__ ) volatile
3146 {
3147         return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3148                                                                                                                                                                                                         __x__ == memory_order_acq_rel ? memory_order_acquire :
3149                                                                                                                                                                                                         __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3150 }
3151
3152
3153 inline bool atomic_char::is_lock_free() const volatile
3154 { return false; }
3155
3156 inline void atomic_char::store
3157         ( char __m__, memory_order __x__ ) volatile
3158 { atomic_store_explicit( this, __m__, __x__ ); }
3159
3160 inline char atomic_char::load
3161         ( memory_order __x__ ) volatile
3162 { return atomic_load_explicit( this, __x__ ); }
3163
3164 inline char atomic_char::exchange
3165         ( char __m__, memory_order __x__ ) volatile
3166 { return atomic_exchange_explicit( this, __m__, __x__ ); }
3167
3168 inline bool atomic_char::compare_exchange_weak
3169         ( char& __e__, char __m__,
3170         memory_order __x__, memory_order __y__ ) volatile
3171 { return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__, __y__ ); }
3172
3173 inline bool atomic_char::compare_exchange_strong
3174         ( char& __e__, char __m__,
3175         memory_order __x__, memory_order __y__ ) volatile
3176 { return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__, __y__ ); }
3177
3178 inline bool atomic_char::compare_exchange_weak
3179         ( char& __e__, char __m__, memory_order __x__ ) volatile
3180 {
3181         return atomic_compare_exchange_weak_explicit( this, &__e__, __m__, __x__,
3182                                                                                                                                                                                                 __x__ == memory_order_acq_rel ? memory_order_acquire :
3183                                                                                                                                                                                                 __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3184 }
3185
3186 inline bool atomic_char::compare_exchange_strong
3187         ( char& __e__, char __m__, memory_order __x__ ) volatile
3188 {
3189         return atomic_compare_exchange_strong_explicit( this, &__e__, __m__, __x__,
3190                                                                                                                                                                                                         __x__ == memory_order_acq_rel ? memory_order_acquire :
3191                                                                                                                                                                                                         __x__ == memory_order_release ? memory_order_relaxed : __x__ );
3192 }
3193
3194
3195 inline bool atomic_schar::is_lock_free() const volatile
3196 { return false; }
3197
3198 inline void atomic_schar::store
3199         ( signed char __m__, memory_order __x__ ) volatile
3200 { atomic_store_explicit( this, __m__, __x__ ); }
3201
3202 inline signed char atomic_schar: