// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

// ????????????????????????????????????????????????????????????????????
//
// This is a copy of the libstdc++ header, with the trivial modification
// of ignoring the c++config.h include.  If and when the top-level build is
// fixed so that target libraries can be built using the newly built
// compiler, we can delete this file.
//
// ????????????????????????????????????????????????????????????????????

/** @file include/atomic
 *  This is a Standard C++ Library header.
 */

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

// #pragma GCC system_header

// #ifndef __GXX_EXPERIMENTAL_CXX0X__
// # include <bits/c++0x_warning.h>
// #endif

// #include <bits/c++config.h>

namespace std // _GLIBCXX_VISIBILITY(default)
{
// _GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  inline memory_order
  __calculate_memory_order(memory_order __m) noexcept
  {
    const bool __cond1 = __m == memory_order_release;
    const bool __cond2 = __m == memory_order_acq_rel;
    memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
    memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
    return __mo2;
  }
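
  // Illustrative note (editorial; not in the upstream header): the helper
  // above computes the failure ordering used by the single-memory_order
  // compare_exchange overloads below.  A failed compare-exchange performs
  // no store, so orderings with release semantics are demoted:
  //
  //   __calculate_memory_order(memory_order_release) == memory_order_relaxed
  //   __calculate_memory_order(memory_order_acq_rel) == memory_order_acquire
  //   __calculate_memory_order(memory_order_seq_cst) == memory_order_seq_cst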

  inline void
  atomic_thread_fence(memory_order __m) noexcept
  {
    __atomic_thread_fence (__m);
  }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  {
    __atomic_signal_fence (__m);
  }
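
  // Illustrative sketch (editorial; not in the upstream header): fences
  // pair with relaxed operations to order non-atomic data, assuming
  // hypothetical atomics __data and __ready shared by two threads:
  //
  //   // producer:
  //   __data.store(42, memory_order_relaxed);
  //   atomic_thread_fence(memory_order_release);
  //   __ready.store(true, memory_order_relaxed);
  //
  //   // consumer:
  //   while (!__ready.load(memory_order_relaxed)) { }
  //   atomic_thread_fence(memory_order_acquire);
  //   int __v = __data.load(memory_order_relaxed);  // guaranteed to see 42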

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

  /// Lock-free Property


#define ATOMIC_BOOL_LOCK_FREE           __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE           __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE        __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE       __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE       __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE          __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE            __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE           __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE          __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE        __GCC_ATOMIC_POINTER_LOCK_FREE
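
// Editorial note (not in the upstream header): each *_LOCK_FREE macro
// expands to 0 (the type is never lock-free), 1 (sometimes lock-free,
// e.g. depending on alignment), or 2 (always lock-free).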

  // Base types for atomics.
  template<typename _ITp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char>                   atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>            atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>          atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>                  atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>         atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                    atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>           atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>                   atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>          atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>              atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>     atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>                atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>               atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>               atomic_char32_t;

 

  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>           atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>          atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>          atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>         atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>          atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>         atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>          atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>         atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>            atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>           atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>           atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>          atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>           atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>          atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>           atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>          atomic_uint_fast64_t;

  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>               atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>              atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>                 atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>               atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>              atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>              atomic_ptrdiff_t;


#define ATOMIC_VAR_INIT(_VI) { _VI }
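
// Illustrative usage (editorial; not in the upstream header):
//
//   std::atomic<int> __counter = ATOMIC_VAR_INIT(0);   // static-init form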

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;


  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  // _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    bool _M_i;
  };

  // _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { false }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      // __glibcxx_assert(__m != memory_order_consume);
      // __glibcxx_assert(__m != memory_order_acquire);
      // __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };
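
  // Illustrative sketch (editorial; not in the upstream header): a minimal
  // spin lock built on atomic_flag, assuming a hypothetical flag __lock:
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   { while (__lock.test_and_set(std::memory_order_acquire)) { } }
  //
  //   void unlock()
  //   { __lock.clear(std::memory_order_release); }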


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp      __int_type;

      __int_type        _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }


      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
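
  // Illustrative usage (editorial; not in the upstream header): the
  // integral base supplies the usual counter idioms, e.g. for a
  // hypothetical shared counter:
  //
  //   std::atomic_int __hits(0);
  //   __hits.fetch_add(1, std::memory_order_relaxed);  // count an event
  //   int __seen = __hits.load(std::memory_order_acquire);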


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*     __pointer_type;

      __pointer_type    _M_p;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, 1, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, __d, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_acquire);
        // __glibcxx_assert(__m != memory_order_acq_rel);
        // __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        // __glibcxx_assert(__m != memory_order_release);
        // __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }


      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        // __glibcxx_assert(__m2 != memory_order_release);
        // __glibcxx_assert(__m2 != memory_order_acq_rel);
        // __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, __d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, __d, __m); }
    };
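
  // Illustrative sketch (editorial; not in the upstream header): a typical
  // use of atomic pointers is an atomically swapped list head, assuming a
  // hypothetical node type:
  //
  //   struct __node { __node* __next; };
  //   std::atomic<__node*> __head(nullptr);
  //
  //   void push(__node* __n)
  //   {
  //     __n->__next = __head.load(memory_order_relaxed);
  //     while (!__head.compare_exchange_weak(__n->__next, __n,
  //                                          memory_order_release,
  //                                          memory_order_relaxed)) { }
  //   }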


  /**
   * @addtogroup atomics
   * @{
   */

  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };


  /// atomic
  /// 29.4.3, Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), &_M_i); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                               memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m, __m); }
    };
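
  // Illustrative usage (editorial; not in the upstream header): the primary
  // template also holds trivially copyable user types, assuming a
  // hypothetical struct __range:
  //
  //   struct __range { int __lo; int __hi; };
  //   std::atomic<__range> __r(__range{0, 0});
  //   __r.store(__range{1, 4});
  //   __range __cur = __r.load();   // copied out atomically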


  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*                      __pointer_type;
      typedef __atomic_base<_Tp*>       __base_type;
      __base_type                       _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }
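
      // Note (editorial): __atomic_base<_PTp*> defines only
      // compare_exchange_strong, so the weak forms below delegate to it;
      // a strong compare-exchange is a valid implementation of a weak one.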
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __calculate_memory_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };


  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool                      __integral_type;
      typedef atomic_bool               __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char                      __integral_type;
      typedef atomic_char               __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char               __integral_type;
      typedef atomic_schar              __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char             __integral_type;
      typedef atomic_uchar              __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short                     __integral_type;
      typedef atomic_short              __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short            __integral_type;
      typedef atomic_ushort             __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : atomic_int
    {
      typedef int                       __integral_type;
      typedef atomic_int                __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int              __integral_type;
      typedef atomic_uint               __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long                      __integral_type;
      typedef atomic_long               __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long             __integral_type;
      typedef atomic_ulong              __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long                 __integral_type;
      typedef atomic_llong              __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long        __integral_type;
      typedef atomic_ullong             __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t                   __integral_type;
      typedef atomic_wchar_t            __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t                  __integral_type;
      typedef atomic_char16_t           __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t                  __integral_type;
      typedef atomic_char32_t           __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }
1635
 
1636
  template
1637
    inline bool
1638
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1639
                                          _ITp* __i1, _ITp __i2,
1640
                                          memory_order __m1,
1641
                                          memory_order __m2) noexcept
1642
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1643
 
1644
  template
1645
    inline bool
1646
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1647
                                          _ITp* __i1, _ITp __i2,
1648
                                          memory_order __m1,
1649
                                          memory_order __m2) noexcept
1650
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1651
 
1652
  template
1653
    inline bool
1654
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1655
                                            _ITp* __i1, _ITp __i2,
1656
                                            memory_order __m1,
1657
                                            memory_order __m2) noexcept
1658
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1659
 
1660
  template
1661
    inline bool
1662
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1663
                                            _ITp* __i1, _ITp __i2,
1664
                                            memory_order __m1,
1665
                                            memory_order __m2) noexcept
1666
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
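
  // Usage sketch (illustrative only): release/acquire publication through
  // the _explicit free functions above.  The consume() call stands in for
  // any hypothetical reader of the published data.
  //
  //   int payload;
  //   std::atomic<bool> ready(false);
  //
  //   // Producer: write the data, then publish it with a release store.
  //   payload = 42;
  //   std::atomic_store_explicit(&ready, true, std::memory_order_release);
  //
  //   // Consumer: an acquire load that observes true guarantees the
  //   // write to payload is also visible.
  //   if (std::atomic_load_explicit(&ready, std::memory_order_acquire))
  //     consume(payload);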
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
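
  // Usage sketch (illustrative only): the classic compare-exchange retry
  // loop, written against the seq_cst convenience functions above.  On
  // failure the expected value is reloaded with the current contents, so
  // the loop simply retries; the weak form may also fail spuriously,
  // which the loop absorbs.
  //
  //   std::atomic<int> v(1);
  //
  //   // Atomically double v, retrying if another thread intervenes.
  //   int expected = std::atomic_load(&v);
  //   while (!std::atomic_compare_exchange_weak(&v, &expected,
  //                                             expected * 2))
  //     { }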

  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
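
  // Usage sketch (illustrative only): a shared event counter built on the
  // fetch-and-add functions above.  record_hit() and total_hits() are
  // hypothetical helpers; relaxed ordering suffices when only the count
  // itself matters and it orders nothing else.
  //
  //   std::atomic<unsigned long> hits(0);
  //
  //   void
  //   record_hit()
  //   { std::atomic_fetch_add_explicit(&hits, 1UL,
  //                                    std::memory_order_relaxed); }
  //
  //   unsigned long
  //   total_hits()
  //   { return std::atomic_load(&hits); }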
  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
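
  // Usage sketch (illustrative only): for the pointer specializations
  // above, the ptrdiff_t argument counts elements, not bytes, exactly as
  // in built-in pointer arithmetic.
  //
  //   int buf[8];
  //   std::atomic<int*> cursor(buf);
  //
  //   // Claim the next element; concurrent callers get distinct slots.
  //   // fetch_add returns the old pointer, advancing cursor by one int.
  //   int* slot = std::atomic_fetch_add(&cursor, 1);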

  // @} group atomics

// _GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif
