OpenCores
URL https://opencores.org/ocsvn/altor32/altor32/trunk

Subversion Repositories altor32

[/] [altor32/] [trunk/] [gcc-x64/] [or1knd-elf/] [or1knd-elf/] [include/] [c++/] [4.8.0/] [atomic] - Blame information for rev 35

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 35 ultra_embe
// -*- C++ -*- header.
2
 
3
// Copyright (C) 2008-2012 Free Software Foundation, Inc.
4
//
5
// This file is part of the GNU ISO C++ Library.  This library is free
6
// software; you can redistribute it and/or modify it under the
7
// terms of the GNU General Public License as published by the
8
// Free Software Foundation; either version 3, or (at your option)
9
// any later version.
10
 
11
// This library is distributed in the hope that it will be useful,
12
// but WITHOUT ANY WARRANTY; without even the implied warranty of
13
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
// GNU General Public License for more details.
15
 
16
// Under Section 7 of GPL version 3, you are granted additional
17
// permissions described in the GCC Runtime Library Exception, version
18
// 3.1, as published by the Free Software Foundation.
19
 
20
// You should have received a copy of the GNU General Public License and
21
// a copy of the GCC Runtime Library Exception along with this program;
22
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
23
// <http://www.gnu.org/licenses/>.
24
 
25
/** @file include/atomic
26
 *  This is a Standard C++ Library header.
27
 */
28
 
29
// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31
 
32
#ifndef _GLIBCXX_ATOMIC
33
#define _GLIBCXX_ATOMIC 1
34
 
35
#pragma GCC system_header
36
 
37
#if __cplusplus < 201103L
38
# include 
39
#endif
40
 
41
#include 
42
 
43
namespace std _GLIBCXX_VISIBILITY(default)
44
{
45
_GLIBCXX_BEGIN_NAMESPACE_VERSION
46
 
47
  /**
48
   * @addtogroup atomics
49
   * @{
50
   */
51
 
52
  /// atomic_bool
53
  // NB: No operators or fetch-operations for this type.
54
  struct atomic_bool
55
  {
56
  private:
57
    __atomic_base       _M_base;
58
 
59
  public:
60
    atomic_bool() noexcept = default;
61
    ~atomic_bool() noexcept = default;
62
    atomic_bool(const atomic_bool&) = delete;
63
    atomic_bool& operator=(const atomic_bool&) = delete;
64
    atomic_bool& operator=(const atomic_bool&) volatile = delete;
65
 
66
    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }
67
 
68
    bool
69
    operator=(bool __i) noexcept
70
    { return _M_base.operator=(__i); }
71
 
72
    operator bool() const noexcept
73
    { return _M_base.load(); }
74
 
75
    operator bool() const volatile noexcept
76
    { return _M_base.load(); }
77
 
78
    bool
79
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }
80
 
81
    bool
82
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
83
 
84
    void
85
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
86
    { _M_base.store(__i, __m); }
87
 
88
    void
89
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
90
    { _M_base.store(__i, __m); }
91
 
92
    bool
93
    load(memory_order __m = memory_order_seq_cst) const noexcept
94
    { return _M_base.load(__m); }
95
 
96
    bool
97
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
98
    { return _M_base.load(__m); }
99
 
100
    bool
101
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
102
    { return _M_base.exchange(__i, __m); }
103
 
104
    bool
105
    exchange(bool __i,
106
             memory_order __m = memory_order_seq_cst) volatile noexcept
107
    { return _M_base.exchange(__i, __m); }
108
 
109
    bool
110
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
111
                          memory_order __m2) noexcept
112
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
113
 
114
    bool
115
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
116
                          memory_order __m2) volatile noexcept
117
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
118
 
119
    bool
120
    compare_exchange_weak(bool& __i1, bool __i2,
121
                          memory_order __m = memory_order_seq_cst) noexcept
122
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
123
 
124
    bool
125
    compare_exchange_weak(bool& __i1, bool __i2,
126
                     memory_order __m = memory_order_seq_cst) volatile noexcept
127
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
128
 
129
    bool
130
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
131
                            memory_order __m2) noexcept
132
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
133
 
134
    bool
135
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
136
                            memory_order __m2) volatile noexcept
137
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
138
 
139
    bool
140
    compare_exchange_strong(bool& __i1, bool __i2,
141
                            memory_order __m = memory_order_seq_cst) noexcept
142
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
143
 
144
    bool
145
    compare_exchange_strong(bool& __i1, bool __i2,
146
                    memory_order __m = memory_order_seq_cst) volatile noexcept
147
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
148
  };
149
 
150
 
151
  /**
152
   *  @brief Generic atomic type, primary class template.
153
   *
154
   *  @tparam _Tp  Type to be made atomic, must be trivally copyable.
155
   */
156
  template
157
    struct atomic
158
    {
159
    private:
160
      _Tp _M_i;
161
 
162
    public:
163
      atomic() noexcept = default;
164
      ~atomic() noexcept = default;
165
      atomic(const atomic&) = delete;
166
      atomic& operator=(const atomic&) = delete;
167
      atomic& operator=(const atomic&) volatile = delete;
168
 
169
      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
170
 
171
      operator _Tp() const noexcept
172
      { return load(); }
173
 
174
      operator _Tp() const volatile noexcept
175
      { return load(); }
176
 
177
      _Tp
178
      operator=(_Tp __i) noexcept
179
      { store(__i); return __i; }
180
 
181
      _Tp
182
      operator=(_Tp __i) volatile noexcept
183
      { store(__i); return __i; }
184
 
185
      bool
186
      is_lock_free() const noexcept
187
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
188
 
189
      bool
190
      is_lock_free() const volatile noexcept
191
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
192
 
193
      void
194
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
195
      { __atomic_store(&_M_i, &__i, _m); }
196
 
197
      void
198
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
199
      { __atomic_store(&_M_i, &__i, _m); }
200
 
201
      _Tp
202
      load(memory_order _m = memory_order_seq_cst) const noexcept
203
      {
204
        _Tp tmp;
205
        __atomic_load(&_M_i, &tmp, _m);
206
        return tmp;
207
      }
208
 
209
      _Tp
210
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
211
      {
212
        _Tp tmp;
213
        __atomic_load(&_M_i, &tmp, _m);
214
        return tmp;
215
      }
216
 
217
      _Tp
218
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
219
      {
220
        _Tp tmp;
221
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
222
        return tmp;
223
      }
224
 
225
      _Tp
226
      exchange(_Tp __i,
227
               memory_order _m = memory_order_seq_cst) volatile noexcept
228
      {
229
        _Tp tmp;
230
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
231
        return tmp;
232
      }
233
 
234
      bool
235
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
236
                            memory_order __f) noexcept
237
      {
238
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
239
      }
240
 
241
      bool
242
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
243
                            memory_order __f) volatile noexcept
244
      {
245
        return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
246
      }
247
 
248
      bool
249
      compare_exchange_weak(_Tp& __e, _Tp __i,
250
                            memory_order __m = memory_order_seq_cst) noexcept
251
      { return compare_exchange_weak(__e, __i, __m, __m); }
252
 
253
      bool
254
      compare_exchange_weak(_Tp& __e, _Tp __i,
255
                     memory_order __m = memory_order_seq_cst) volatile noexcept
256
      { return compare_exchange_weak(__e, __i, __m, __m); }
257
 
258
      bool
259
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
260
                              memory_order __f) noexcept
261
      {
262
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
263
      }
264
 
265
      bool
266
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
267
                              memory_order __f) volatile noexcept
268
      {
269
        return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
270
      }
271
 
272
      bool
273
      compare_exchange_strong(_Tp& __e, _Tp __i,
274
                               memory_order __m = memory_order_seq_cst) noexcept
275
      { return compare_exchange_strong(__e, __i, __m, __m); }
276
 
277
      bool
278
      compare_exchange_strong(_Tp& __e, _Tp __i,
279
                     memory_order __m = memory_order_seq_cst) volatile noexcept
280
      { return compare_exchange_strong(__e, __i, __m, __m); }
281
    };
282
 
283
 
284
  /// Partial specialization for pointer types.
285
  template
286
    struct atomic<_Tp*>
287
    {
288
      typedef _Tp*                      __pointer_type;
289
      typedef __atomic_base<_Tp*>     __base_type;
290
      __base_type                       _M_b;
291
 
292
      atomic() noexcept = default;
293
      ~atomic() noexcept = default;
294
      atomic(const atomic&) = delete;
295
      atomic& operator=(const atomic&) = delete;
296
      atomic& operator=(const atomic&) volatile = delete;
297
 
298
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
299
 
300
      operator __pointer_type() const noexcept
301
      { return __pointer_type(_M_b); }
302
 
303
      operator __pointer_type() const volatile noexcept
304
      { return __pointer_type(_M_b); }
305
 
306
      __pointer_type
307
      operator=(__pointer_type __p) noexcept
308
      { return _M_b.operator=(__p); }
309
 
310
      __pointer_type
311
      operator=(__pointer_type __p) volatile noexcept
312
      { return _M_b.operator=(__p); }
313
 
314
      __pointer_type
315
      operator++(int) noexcept
316
      { return _M_b++; }
317
 
318
      __pointer_type
319
      operator++(int) volatile noexcept
320
      { return _M_b++; }
321
 
322
      __pointer_type
323
      operator--(int) noexcept
324
      { return _M_b--; }
325
 
326
      __pointer_type
327
      operator--(int) volatile noexcept
328
      { return _M_b--; }
329
 
330
      __pointer_type
331
      operator++() noexcept
332
      { return ++_M_b; }
333
 
334
      __pointer_type
335
      operator++() volatile noexcept
336
      { return ++_M_b; }
337
 
338
      __pointer_type
339
      operator--() noexcept
340
      { return --_M_b; }
341
 
342
      __pointer_type
343
      operator--() volatile noexcept
344
      { return --_M_b; }
345
 
346
      __pointer_type
347
      operator+=(ptrdiff_t __d) noexcept
348
      { return _M_b.operator+=(__d); }
349
 
350
      __pointer_type
351
      operator+=(ptrdiff_t __d) volatile noexcept
352
      { return _M_b.operator+=(__d); }
353
 
354
      __pointer_type
355
      operator-=(ptrdiff_t __d) noexcept
356
      { return _M_b.operator-=(__d); }
357
 
358
      __pointer_type
359
      operator-=(ptrdiff_t __d) volatile noexcept
360
      { return _M_b.operator-=(__d); }
361
 
362
      bool
363
      is_lock_free() const noexcept
364
      { return _M_b.is_lock_free(); }
365
 
366
      bool
367
      is_lock_free() const volatile noexcept
368
      { return _M_b.is_lock_free(); }
369
 
370
      void
371
      store(__pointer_type __p,
372
            memory_order __m = memory_order_seq_cst) noexcept
373
      { return _M_b.store(__p, __m); }
374
 
375
      void
376
      store(__pointer_type __p,
377
            memory_order __m = memory_order_seq_cst) volatile noexcept
378
      { return _M_b.store(__p, __m); }
379
 
380
      __pointer_type
381
      load(memory_order __m = memory_order_seq_cst) const noexcept
382
      { return _M_b.load(__m); }
383
 
384
      __pointer_type
385
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
386
      { return _M_b.load(__m); }
387
 
388
      __pointer_type
389
      exchange(__pointer_type __p,
390
               memory_order __m = memory_order_seq_cst) noexcept
391
      { return _M_b.exchange(__p, __m); }
392
 
393
      __pointer_type
394
      exchange(__pointer_type __p,
395
               memory_order __m = memory_order_seq_cst) volatile noexcept
396
      { return _M_b.exchange(__p, __m); }
397
 
398
      bool
399
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
400
                            memory_order __m1, memory_order __m2) noexcept
401
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
402
 
403
      bool
404
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
405
                            memory_order __m1,
406
                            memory_order __m2) volatile noexcept
407
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
408
 
409
      bool
410
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
411
                            memory_order __m = memory_order_seq_cst) noexcept
412
      {
413
        return compare_exchange_weak(__p1, __p2, __m,
414
                                     __cmpexch_failure_order(__m));
415
      }
416
 
417
      bool
418
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
419
                    memory_order __m = memory_order_seq_cst) volatile noexcept
420
      {
421
        return compare_exchange_weak(__p1, __p2, __m,
422
                                     __cmpexch_failure_order(__m));
423
      }
424
 
425
      bool
426
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
427
                              memory_order __m1, memory_order __m2) noexcept
428
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
429
 
430
      bool
431
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
432
                              memory_order __m1,
433
                              memory_order __m2) volatile noexcept
434
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
435
 
436
      bool
437
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
438
                              memory_order __m = memory_order_seq_cst) noexcept
439
      {
440
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
441
                                            __cmpexch_failure_order(__m));
442
      }
443
 
444
      bool
445
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
446
                    memory_order __m = memory_order_seq_cst) volatile noexcept
447
      {
448
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
449
                                            __cmpexch_failure_order(__m));
450
      }
451
 
452
      __pointer_type
453
      fetch_add(ptrdiff_t __d,
454
                memory_order __m = memory_order_seq_cst) noexcept
455
      { return _M_b.fetch_add(__d, __m); }
456
 
457
      __pointer_type
458
      fetch_add(ptrdiff_t __d,
459
                memory_order __m = memory_order_seq_cst) volatile noexcept
460
      { return _M_b.fetch_add(__d, __m); }
461
 
462
      __pointer_type
463
      fetch_sub(ptrdiff_t __d,
464
                memory_order __m = memory_order_seq_cst) noexcept
465
      { return _M_b.fetch_sub(__d, __m); }
466
 
467
      __pointer_type
468
      fetch_sub(ptrdiff_t __d,
469
                memory_order __m = memory_order_seq_cst) volatile noexcept
470
      { return _M_b.fetch_sub(__d, __m); }
471
    };
472
 
473
 
474
  /// Explicit specialization for bool.
475
  template<>
476
    struct atomic : public atomic_bool
477
    {
478
      typedef bool                      __integral_type;
479
      typedef atomic_bool               __base_type;
480
 
481
      atomic() noexcept = default;
482
      ~atomic() noexcept = default;
483
      atomic(const atomic&) = delete;
484
      atomic& operator=(const atomic&) = delete;
485
      atomic& operator=(const atomic&) volatile = delete;
486
 
487
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
488
 
489
      using __base_type::operator __integral_type;
490
      using __base_type::operator=;
491
    };
492
 
493
  /// Explicit specialization for char.
494
  template<>
495
    struct atomic : public atomic_char
496
    {
497
      typedef char                      __integral_type;
498
      typedef atomic_char               __base_type;
499
 
500
      atomic() noexcept = default;
501
      ~atomic() noexcept = default;
502
      atomic(const atomic&) = delete;
503
      atomic& operator=(const atomic&) = delete;
504
      atomic& operator=(const atomic&) volatile = delete;
505
 
506
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
507
 
508
      using __base_type::operator __integral_type;
509
      using __base_type::operator=;
510
    };
511
 
512
  /// Explicit specialization for signed char.
513
  template<>
514
    struct atomic : public atomic_schar
515
    {
516
      typedef signed char               __integral_type;
517
      typedef atomic_schar              __base_type;
518
 
519
      atomic() noexcept= default;
520
      ~atomic() noexcept = default;
521
      atomic(const atomic&) = delete;
522
      atomic& operator=(const atomic&) = delete;
523
      atomic& operator=(const atomic&) volatile = delete;
524
 
525
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
526
 
527
      using __base_type::operator __integral_type;
528
      using __base_type::operator=;
529
    };
530
 
531
  /// Explicit specialization for unsigned char.
532
  template<>
533
    struct atomic : public atomic_uchar
534
    {
535
      typedef unsigned char             __integral_type;
536
      typedef atomic_uchar              __base_type;
537
 
538
      atomic() noexcept= default;
539
      ~atomic() noexcept = default;
540
      atomic(const atomic&) = delete;
541
      atomic& operator=(const atomic&) = delete;
542
      atomic& operator=(const atomic&) volatile = delete;
543
 
544
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
545
 
546
      using __base_type::operator __integral_type;
547
      using __base_type::operator=;
548
    };
549
 
550
  /// Explicit specialization for short.
551
  template<>
552
    struct atomic : public atomic_short
553
    {
554
      typedef short                     __integral_type;
555
      typedef atomic_short              __base_type;
556
 
557
      atomic() noexcept = default;
558
      ~atomic() noexcept = default;
559
      atomic(const atomic&) = delete;
560
      atomic& operator=(const atomic&) = delete;
561
      atomic& operator=(const atomic&) volatile = delete;
562
 
563
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
564
 
565
      using __base_type::operator __integral_type;
566
      using __base_type::operator=;
567
    };
568
 
569
  /// Explicit specialization for unsigned short.
570
  template<>
571
    struct atomic : public atomic_ushort
572
    {
573
      typedef unsigned short            __integral_type;
574
      typedef atomic_ushort             __base_type;
575
 
576
      atomic() noexcept = default;
577
      ~atomic() noexcept = default;
578
      atomic(const atomic&) = delete;
579
      atomic& operator=(const atomic&) = delete;
580
      atomic& operator=(const atomic&) volatile = delete;
581
 
582
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
583
 
584
      using __base_type::operator __integral_type;
585
      using __base_type::operator=;
586
    };
587
 
588
  /// Explicit specialization for int.
589
  template<>
590
    struct atomic : atomic_int
591
    {
592
      typedef int                       __integral_type;
593
      typedef atomic_int                __base_type;
594
 
595
      atomic() noexcept = default;
596
      ~atomic() noexcept = default;
597
      atomic(const atomic&) = delete;
598
      atomic& operator=(const atomic&) = delete;
599
      atomic& operator=(const atomic&) volatile = delete;
600
 
601
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
602
 
603
      using __base_type::operator __integral_type;
604
      using __base_type::operator=;
605
    };
606
 
607
  /// Explicit specialization for unsigned int.
608
  template<>
609
    struct atomic : public atomic_uint
610
    {
611
      typedef unsigned int              __integral_type;
612
      typedef atomic_uint               __base_type;
613
 
614
      atomic() noexcept = default;
615
      ~atomic() noexcept = default;
616
      atomic(const atomic&) = delete;
617
      atomic& operator=(const atomic&) = delete;
618
      atomic& operator=(const atomic&) volatile = delete;
619
 
620
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
621
 
622
      using __base_type::operator __integral_type;
623
      using __base_type::operator=;
624
    };
625
 
626
  /// Explicit specialization for long.
627
  template<>
628
    struct atomic : public atomic_long
629
    {
630
      typedef long                      __integral_type;
631
      typedef atomic_long               __base_type;
632
 
633
      atomic() noexcept = default;
634
      ~atomic() noexcept = default;
635
      atomic(const atomic&) = delete;
636
      atomic& operator=(const atomic&) = delete;
637
      atomic& operator=(const atomic&) volatile = delete;
638
 
639
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
640
 
641
      using __base_type::operator __integral_type;
642
      using __base_type::operator=;
643
    };
644
 
645
  /// Explicit specialization for unsigned long.
646
  template<>
647
    struct atomic : public atomic_ulong
648
    {
649
      typedef unsigned long             __integral_type;
650
      typedef atomic_ulong              __base_type;
651
 
652
      atomic() noexcept = default;
653
      ~atomic() noexcept = default;
654
      atomic(const atomic&) = delete;
655
      atomic& operator=(const atomic&) = delete;
656
      atomic& operator=(const atomic&) volatile = delete;
657
 
658
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
659
 
660
      using __base_type::operator __integral_type;
661
      using __base_type::operator=;
662
    };
663
 
664
  /// Explicit specialization for long long.
665
  template<>
666
    struct atomic : public atomic_llong
667
    {
668
      typedef long long                 __integral_type;
669
      typedef atomic_llong              __base_type;
670
 
671
      atomic() noexcept = default;
672
      ~atomic() noexcept = default;
673
      atomic(const atomic&) = delete;
674
      atomic& operator=(const atomic&) = delete;
675
      atomic& operator=(const atomic&) volatile = delete;
676
 
677
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
678
 
679
      using __base_type::operator __integral_type;
680
      using __base_type::operator=;
681
    };
682
 
683
  /// Explicit specialization for unsigned long long.
684
  template<>
685
    struct atomic : public atomic_ullong
686
    {
687
      typedef unsigned long long        __integral_type;
688
      typedef atomic_ullong             __base_type;
689
 
690
      atomic() noexcept = default;
691
      ~atomic() noexcept = default;
692
      atomic(const atomic&) = delete;
693
      atomic& operator=(const atomic&) = delete;
694
      atomic& operator=(const atomic&) volatile = delete;
695
 
696
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
697
 
698
      using __base_type::operator __integral_type;
699
      using __base_type::operator=;
700
    };
701
 
702
  /// Explicit specialization for wchar_t.
703
  template<>
704
    struct atomic : public atomic_wchar_t
705
    {
706
      typedef wchar_t                   __integral_type;
707
      typedef atomic_wchar_t            __base_type;
708
 
709
      atomic() noexcept = default;
710
      ~atomic() noexcept = default;
711
      atomic(const atomic&) = delete;
712
      atomic& operator=(const atomic&) = delete;
713
      atomic& operator=(const atomic&) volatile = delete;
714
 
715
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
716
 
717
      using __base_type::operator __integral_type;
718
      using __base_type::operator=;
719
    };
720
 
721
  /// Explicit specialization for char16_t.
722
  template<>
723
    struct atomic : public atomic_char16_t
724
    {
725
      typedef char16_t                  __integral_type;
726
      typedef atomic_char16_t           __base_type;
727
 
728
      atomic() noexcept = default;
729
      ~atomic() noexcept = default;
730
      atomic(const atomic&) = delete;
731
      atomic& operator=(const atomic&) = delete;
732
      atomic& operator=(const atomic&) volatile = delete;
733
 
734
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
735
 
736
      using __base_type::operator __integral_type;
737
      using __base_type::operator=;
738
    };
739
 
740
  /// Explicit specialization for char32_t.
741
  template<>
742
    struct atomic : public atomic_char32_t
743
    {
744
      typedef char32_t                  __integral_type;
745
      typedef atomic_char32_t           __base_type;
746
 
747
      atomic() noexcept = default;
748
      ~atomic() noexcept = default;
749
      atomic(const atomic&) = delete;
750
      atomic& operator=(const atomic&) = delete;
751
      atomic& operator=(const atomic&) volatile = delete;
752
 
753
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
754
 
755
      using __base_type::operator __integral_type;
756
      using __base_type::operator=;
757
    };
758
 
759
 
760
  // Function definitions, atomic_flag operations.
761
  inline bool
762
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
763
                                    memory_order __m) noexcept
764
  { return __a->test_and_set(__m); }
765
 
766
  inline bool
767
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
768
                                    memory_order __m) noexcept
769
  { return __a->test_and_set(__m); }
770
 
771
  inline void
772
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
773
  { __a->clear(__m); }
774
 
775
  inline void
776
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
777
                             memory_order __m) noexcept
778
  { __a->clear(__m); }
779
 
780
  inline bool
781
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
782
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
783
 
784
  inline bool
785
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
786
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
787
 
788
  inline void
789
  atomic_flag_clear(atomic_flag* __a) noexcept
790
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
791
 
792
  inline void
793
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
794
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
795
 
796
 
797
  // Function templates generally applicable to atomic types.
798
  template
799
    inline bool
800
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
801
    { return __a->is_lock_free(); }
802
 
803
  template
804
    inline bool
805
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
806
    { return __a->is_lock_free(); }
807
 
808
  template
809
    inline void
810
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
811
 
812
  template
813
    inline void
814
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
815
 
816
  template
817
    inline void
818
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
819
                          memory_order __m) noexcept
820
    { __a->store(__i, __m); }
821
 
822
  template
823
    inline void
824
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
825
                          memory_order __m) noexcept
826
    { __a->store(__i, __m); }
827
 
828
  template
829
    inline _ITp
830
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
831
    { return __a->load(__m); }
832
 
833
  template
834
    inline _ITp
835
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
836
                         memory_order __m) noexcept
837
    { return __a->load(__m); }
838
 
839
  template
840
    inline _ITp
841
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
842
                             memory_order __m) noexcept
843
    { return __a->exchange(__i, __m); }
844
 
845
  template
846
    inline _ITp
847
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
848
                             memory_order __m) noexcept
849
    { return __a->exchange(__i, __m); }
850
 
851
  template
852
    inline bool
853
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
854
                                          _ITp* __i1, _ITp __i2,
855
                                          memory_order __m1,
856
                                          memory_order __m2) noexcept
857
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
858
 
859
  template
860
    inline bool
861
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
862
                                          _ITp* __i1, _ITp __i2,
863
                                          memory_order __m1,
864
                                          memory_order __m2) noexcept
865
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
866
 
867
  template
868
    inline bool
869
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
870
                                            _ITp* __i1, _ITp __i2,
871
                                            memory_order __m1,
872
                                            memory_order __m2) noexcept
873
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
874
 
875
  template
876
    inline bool
877
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
878
                                            _ITp* __i1, _ITp __i2,
879
                                            memory_order __m1,
880
                                            memory_order __m2) noexcept
881
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
882
 
883
 
884
  template
885
    inline void
886
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
887
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
888
 
889
  template
890
    inline void
891
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
892
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
893
 
894
  /// Load the value of *__a with sequentially-consistent ordering.
  template<typename _ITp>
    inline _ITp
    atomic_load(const std::atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, std::memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile std::atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, std::memory_order_seq_cst); }
903
 
904
  /// Atomically replace the value of *__a with __i (seq_cst ordering);
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange(std::atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, std::memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile std::atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, std::memory_order_seq_cst); }
913
 
914
  /// Weak compare-and-exchange using sequentially-consistent ordering for
  /// both the success and failure cases.  May fail spuriously.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(std::atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   std::memory_order_seq_cst,
                                                   std::memory_order_seq_cst);
    }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile std::atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   std::memory_order_seq_cst,
                                                   std::memory_order_seq_cst);
    }
933
 
934
  /// Strong compare-and-exchange using sequentially-consistent ordering
  /// for both the success and failure cases.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(std::atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     std::memory_order_seq_cst,
                                                     std::memory_order_seq_cst);
    }

  /// Overload for volatile-qualified atomic objects.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile std::atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     std::memory_order_seq_cst,
                                                     std::memory_order_seq_cst);
    }
953
 
954
  // Function templates for atomic_integral operations only, using
  // __atomic_base.  Template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.
958
  /// Atomically add __i to *__a with memory order __m; returns the value
  /// held immediately before the addition (integral __atomic_base form).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }
969
 
970
  /// Atomically subtract __i from *__a with memory order __m; returns the
  /// value held immediately before the subtraction.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }
981
 
982
  /// Atomically AND __i into *__a with memory order __m; returns the value
  /// held immediately before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }
993
 
994
  /// Atomically OR __i into *__a with memory order __m; returns the value
  /// held immediately before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(std::__atomic_base<_ITp>* __a, _ITp __i,
                             std::memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile std::__atomic_base<_ITp>* __a, _ITp __i,
                             std::memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }
1005
 
1006
  /// Atomically XOR __i into *__a with memory order __m; returns the value
  /// held immediately before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile std::__atomic_base<_ITp>* __a, _ITp __i,
                              std::memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
1017
 
1018
  /// Atomically add __i to *__a with seq_cst ordering; returns the value
  /// held immediately before the addition.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, std::memory_order_seq_cst); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, std::memory_order_seq_cst); }
1027
 
1028
  /// Atomically subtract __i from *__a with seq_cst ordering; returns the
  /// value held immediately before the subtraction.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, std::memory_order_seq_cst); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, std::memory_order_seq_cst); }
1037
 
1038
  /// Atomically AND __i into *__a with seq_cst ordering; returns the value
  /// held immediately before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, std::memory_order_seq_cst); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, std::memory_order_seq_cst); }
1047
 
1048
  /// Atomically OR __i into *__a with seq_cst ordering; returns the value
  /// held immediately before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, std::memory_order_seq_cst); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, std::memory_order_seq_cst); }
1057
 
1058
  /// Atomically XOR __i into *__a with seq_cst ordering; returns the value
  /// held immediately before the operation.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, std::memory_order_seq_cst); }

  /// Overload for volatile-qualified objects.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile std::__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, std::memory_order_seq_cst); }
1067
 
1068
 
1069
  // Partial specializations for pointers.

  /// Atomically advance the pointer in *__a by __d elements with memory
  /// order __m; returns the pointer value held before the addition.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(std::atomic<_ITp*>* __a, std::ptrdiff_t __d,
                              std::memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  /// Overload for volatile-qualified atomic pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile std::atomic<_ITp*>* __a,
                              std::ptrdiff_t __d,
                              std::memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }
1081
 
1082
  /// Atomically advance the pointer in *__a by __d elements (seq_cst);
  /// returns the pointer value held before the addition.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile std::atomic<_ITp*>* __a,
                     std::ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  /// Overload for non-volatile atomic pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(std::atomic<_ITp*>* __a, std::ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }
1091
 
1092
  /// Atomically move the pointer in *__a back by __d elements with memory
  /// order __m; returns the pointer value held before the subtraction.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile std::atomic<_ITp*>* __a,
                              std::ptrdiff_t __d,
                              std::memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  /// Overload for non-volatile atomic pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(std::atomic<_ITp*>* __a, std::ptrdiff_t __d,
                              std::memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }
1103
 
1104
  /// Atomically move the pointer in *__a back by __d elements (seq_cst);
  /// returns the pointer value held before the subtraction.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile std::atomic<_ITp*>* __a,
                     std::ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  /// Overload for non-volatile atomic pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(std::atomic<_ITp*>* __a, std::ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
1113
  // @} group atomics
1114
 
1115
_GLIBCXX_END_NAMESPACE_VERSION
1116
} // namespace
1117
 
1118
#endif

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.