File: include/std/atomic (libstdc++), revision 1.1.1.5.4.2 — exported from a source cross-reference viewer; navigation chrome removed.
      1 // -*- C++ -*- header.
      2 
      3 // Copyright (C) 2008-2018 Free Software Foundation, Inc.
      4 //
      5 // This file is part of the GNU ISO C++ Library.  This library is free
      6 // software; you can redistribute it and/or modify it under the
      7 // terms of the GNU General Public License as published by the
      8 // Free Software Foundation; either version 3, or (at your option)
      9 // any later version.
     10 
     11 // This library is distributed in the hope that it will be useful,
     12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
     13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     14 // GNU General Public License for more details.
     15 
     16 // Under Section 7 of GPL version 3, you are granted additional
     17 // permissions described in the GCC Runtime Library Exception, version
     18 // 3.1, as published by the Free Software Foundation.
     19 
     20 // You should have received a copy of the GNU General Public License and
     21 // a copy of the GCC Runtime Library Exception along with this program;
     22 // see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
     23 // <http://www.gnu.org/licenses/>.
     24 
     25 /** @file include/atomic
     26  *  This is a Standard C++ Library header.
     27  */
     28 
     29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
     30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
     31 
     32 #ifndef _GLIBCXX_ATOMIC
     33 #define _GLIBCXX_ATOMIC 1
     34 
     35 #pragma GCC system_header
     36 
     37 #if __cplusplus < 201103L
     38 # include <bits/c++0x_warning.h>
     39 #else
     40 
     41 #include <bits/atomic_base.h>
     42 #include <bits/move.h>
     43 
     44 namespace std _GLIBCXX_VISIBILITY(default)
     45 {
     46 _GLIBCXX_BEGIN_NAMESPACE_VERSION
     47 
     48   /**
     49    * @addtogroup atomics
     50    * @{
     51    */
     52 
#if __cplusplus > 201402L
  // C++17 feature-test macro for std::atomic<T>::is_always_lock_free.
# define __cpp_lib_atomic_is_always_lock_free 201603
#endif

  // Forward declaration of the primary template, needed because the
  // atomic<bool> explicit specialization below precedes the primary
  // template's definition.
  template<typename _Tp>
    struct atomic;
     59 
  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
  struct atomic<bool>
  {
  private:
    // All operations delegate to this base object, which wraps the
    // compiler's __atomic builtins for a plain bool.
    __atomic_base<bool>	_M_base;

  public:
    // Trivial construction/destruction; copying and copy-assignment are
    // deleted so an atomic object can never be duplicated non-atomically.
    atomic() noexcept = default;
    ~atomic() noexcept = default;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    // constexpr: permits constant (static) initialization from a bool.
    constexpr atomic(bool __i) noexcept : _M_base(__i) { }

    // Plain assignment forwards to the base's atomic assignment and
    // yields the stored value.
    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    // Implicit conversion performs an atomic load via the base's
    // default memory order.
    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    // True if operations on this object are implemented without a lock.
    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus > 201402L
    // C++17: compile-time constant; true iff the platform macro reports
    // bool operations are always (2), not merely sometimes, lock-free.
    static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    // Atomically replace the value, returning the previous one.
    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
	     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    // Compare-exchange: __i1 is the expected value (updated on failure),
    // __i2 the desired value; __m1/__m2 are the success/failure orders.
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			  memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    // Single-order overloads: the base derives a legal failure order.
    bool
    compare_exchange_weak(bool& __i1, bool __i2,
			  memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
    166 
    167 
  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      // (sizeof & (sizeof-1)) != 0 detects a non-power-of-two size; such
      // types, and types larger than 16 bytes, get no extra alignment (0).
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      // Final alignment: the larger of the natural alignment and the
      // size-based minimum above, so small types can use lock-free insns.
      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i;

      // Requirements imposed by [atomics.types.generic].
      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

    public:
      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      // Implicit conversion performs a seq_cst load (load()'s default).
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      // Plain assignment is a seq_cst store; returns the argument.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer: the value
	// -__alignof(_M_i) encodes only the guaranteed alignment, not a
	// real address, for the builtin's per-object lock-free query.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-__alignof(_M_i)));
      }

#if __cplusplus > 201402L
      // C++17: a null second argument asks the builtin to assume the
      // type's typical alignment, giving a compile-time answer.
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	// Load into a raw aligned buffer: _Tp is only required to be
	// trivially copyable, not default-constructible.
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, __m);
	return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, __m);
	return *__ptr;
      }

      // Atomically replace the value, returning the previous one
      // (received into the same kind of raw buffer as load()).
      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, __m);
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, __m);
	return *__ptr;
      }

      // The builtin's fourth argument selects the weak (true) or strong
      // (false) form; __e is updated with the observed value on failure.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, __s, __f);
      }

      // Single-order overloads: the failure order is derived from __m by
      // __cmpexch_failure_order (declared in <bits/atomic_base.h>).
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			       memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
		     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
    348 
    349 
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp* 			__pointer_type;
      typedef __atomic_base<_Tp*>	__base_type;
      // Public base object; all operations delegate to it.  Pointer
      // arithmetic below is in units of ptrdiff_t (element offsets are
      // the base's concern).
      __base_type			_M_b;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      // Implicit conversion performs an atomic load via the base.
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Post-increment/decrement: returns the pre-update pointer.
      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      // Pre-increment/decrement: returns the updated pointer.
      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform pointer macro.
      static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // NOTE: the weak forms forward to the base's compare_exchange_strong
      // (the base apparently provides only the strong form).  This is
      // conforming: compare_exchange_weak is permitted to fail spuriously,
      // so a strong implementation satisfies the weak contract.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      // Single-order overloads derive the failure order from __m.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      // fetch_add/fetch_sub return the pre-update pointer; __d is an
      // element count, as for built-in pointer arithmetic.
      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
    542 
    543 
  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char 			__integral_type;
      typedef __atomic_base<char> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform macro.
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
    566 
  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char 		__integral_type;
      typedef __atomic_base<signed char> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: signed char shares the plain-char lock-free macro.
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
    589 
  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char 		__integral_type;
      typedef __atomic_base<unsigned char> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: unsigned char shares the plain-char lock-free macro.
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
    612 
  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short 			__integral_type;
      typedef __atomic_base<short> 		__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform macro.
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
    635 
  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short 	      	__integral_type;
      typedef __atomic_base<unsigned short> 		__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: shares the short lock-free macro.
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
    658 
  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int 			__integral_type;
      typedef __atomic_base<int> 		__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform macro.
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
    681 
  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int		__integral_type;
      typedef __atomic_base<unsigned int> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: shares the int lock-free macro.
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
    704 
  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long 			__integral_type;
      typedef __atomic_base<long> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform macro.
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
    727 
  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long 		__integral_type;
      typedef __atomic_base<unsigned long> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: shares the long lock-free macro.
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
    750 
  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long 		__integral_type;
      typedef __atomic_base<long long> 		__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform macro.
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
    773 
  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long       	__integral_type;
      typedef __atomic_base<unsigned long long> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: shares the long long lock-free macro.
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
    796 
  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t 			__integral_type;
      typedef __atomic_base<wchar_t> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform macro.
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };
    819 
  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t 			__integral_type;
      typedef __atomic_base<char16_t> 	__base_type;

      // Non-copyable; trivial construction/destruction.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // constexpr: permits constant (static) initialization.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Re-expose the base's conversion and assignment, which the deleted
      // copy-assignment above would otherwise hide.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      // C++17: compile-time constant from the platform macro.
      static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };
    842 
    843   /// Explicit specialization for char32_t.
    844   template<>
    845     struct atomic<char32_t> : __atomic_base<char32_t>
    846     {
    847       typedef char32_t 			__integral_type;
    848       typedef __atomic_base<char32_t> 	__base_type;
    849 
    850       atomic() noexcept = default;
    851       ~atomic() noexcept = default;
    852       atomic(const atomic&) = delete;
    853       atomic& operator=(const atomic&) = delete;
    854       atomic& operator=(const atomic&) volatile = delete;
    855 
    856       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    857 
    858       using __base_type::operator __integral_type;
    859       using __base_type::operator=;
    860 
    861 #if __cplusplus > 201402L
    862     static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
    863 #endif
    864     };
    865 
    866 
  // Convenience typedefs naming atomic<T> for each builtin integral
  // type, as required by the <atomic> header synopsis ([atomics.syn]).

  /// atomic_bool
  typedef atomic<bool>			atomic_bool;

  /// atomic_char
  typedef atomic<char>			atomic_char;

  /// atomic_schar
  typedef atomic<signed char>		atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char>		atomic_uchar;

  /// atomic_short
  typedef atomic<short>			atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short>	atomic_ushort;

  /// atomic_int
  typedef atomic<int>			atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int>		atomic_uint;

  /// atomic_long
  typedef atomic<long>			atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long>		atomic_ulong;

  /// atomic_llong
  typedef atomic<long long>		atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long>	atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t>		atomic_wchar_t;

  /// atomic_char16_t
  typedef atomic<char16_t>		atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t>		atomic_char32_t;
    911 
#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided
  // Typedefs for the <cstdint> exact-width, least-width and fast-width
  // integer types; only provided when the target supplies <stdint.h>.

  /// atomic_int8_t
  typedef atomic<int8_t>		atomic_int8_t;

  /// atomic_uint8_t
  typedef atomic<uint8_t>		atomic_uint8_t;

  /// atomic_int16_t
  typedef atomic<int16_t>		atomic_int16_t;

  /// atomic_uint16_t
  typedef atomic<uint16_t>		atomic_uint16_t;

  /// atomic_int32_t
  typedef atomic<int32_t>		atomic_int32_t;

  /// atomic_uint32_t
  typedef atomic<uint32_t>		atomic_uint32_t;

  /// atomic_int64_t
  typedef atomic<int64_t>		atomic_int64_t;

  /// atomic_uint64_t
  typedef atomic<uint64_t>		atomic_uint64_t;


  /// atomic_int_least8_t
  typedef atomic<int_least8_t>		atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t>		atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t>		atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t>	atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t>		atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t>	atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t>		atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t>	atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t>		atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t>		atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t>		atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t>		atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t>		atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t>		atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t>		atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t>		atomic_uint_fast64_t;
#endif
    990 
    991 
  // Typedefs for the pointer-sized, size/difference and maximum-width
  // integer types.

  /// atomic_intptr_t
  typedef atomic<intptr_t>		atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t>		atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t>		atomic_size_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t>		atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  /// atomic_intmax_t
  typedef atomic<intmax_t>		atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t>		atomic_uintmax_t;
#endif
   1011 
   1012   // Function definitions, atomic_flag operations.
   1013   inline bool
   1014   atomic_flag_test_and_set_explicit(atomic_flag* __a,
   1015 				    memory_order __m) noexcept
   1016   { return __a->test_and_set(__m); }
   1017 
   1018   inline bool
   1019   atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
   1020 				    memory_order __m) noexcept
   1021   { return __a->test_and_set(__m); }
   1022 
   1023   inline void
   1024   atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
   1025   { __a->clear(__m); }
   1026 
   1027   inline void
   1028   atomic_flag_clear_explicit(volatile atomic_flag* __a,
   1029 			     memory_order __m) noexcept
   1030   { __a->clear(__m); }
   1031 
   1032   inline bool
   1033   atomic_flag_test_and_set(atomic_flag* __a) noexcept
   1034   { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
   1035 
   1036   inline bool
   1037   atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
   1038   { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
   1039 
   1040   inline void
   1041   atomic_flag_clear(atomic_flag* __a) noexcept
   1042   { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
   1043 
   1044   inline void
   1045   atomic_flag_clear(volatile atomic_flag* __a) noexcept
   1046   { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
   1047 
   1048 
   1049   // Function templates generally applicable to atomic types.
   1050   template<typename _ITp>
   1051     inline bool
   1052     atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
   1053     { return __a->is_lock_free(); }
   1054 
   1055   template<typename _ITp>
   1056     inline bool
   1057     atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
   1058     { return __a->is_lock_free(); }
   1059 
   1060   template<typename _ITp>
   1061     inline void
   1062     atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
   1063     { __a->store(__i, memory_order_relaxed); }
   1064 
   1065   template<typename _ITp>
   1066     inline void
   1067     atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
   1068     { __a->store(__i, memory_order_relaxed); }
   1069 
   1070   template<typename _ITp>
   1071     inline void
   1072     atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
   1073 			  memory_order __m) noexcept
   1074     { __a->store(__i, __m); }
   1075 
   1076   template<typename _ITp>
   1077     inline void
   1078     atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
   1079 			  memory_order __m) noexcept
   1080     { __a->store(__i, __m); }
   1081 
   1082   template<typename _ITp>
   1083     inline _ITp
   1084     atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
   1085     { return __a->load(__m); }
   1086 
   1087   template<typename _ITp>
   1088     inline _ITp
   1089     atomic_load_explicit(const volatile atomic<_ITp>* __a,
   1090 			 memory_order __m) noexcept
   1091     { return __a->load(__m); }
   1092 
   1093   template<typename _ITp>
   1094     inline _ITp
   1095     atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
   1096 			     memory_order __m) noexcept
   1097     { return __a->exchange(__i, __m); }
   1098 
   1099   template<typename _ITp>
   1100     inline _ITp
   1101     atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
   1102 			     memory_order __m) noexcept
   1103     { return __a->exchange(__i, __m); }
   1104 
   1105   template<typename _ITp>
   1106     inline bool
   1107     atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
   1108 					  _ITp* __i1, _ITp __i2,
   1109 					  memory_order __m1,
   1110 					  memory_order __m2) noexcept
   1111     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
   1112 
   1113   template<typename _ITp>
   1114     inline bool
   1115     atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
   1116 					  _ITp* __i1, _ITp __i2,
   1117 					  memory_order __m1,
   1118 					  memory_order __m2) noexcept
   1119     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
   1120 
   1121   template<typename _ITp>
   1122     inline bool
   1123     atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
   1124 					    _ITp* __i1, _ITp __i2,
   1125 					    memory_order __m1,
   1126 					    memory_order __m2) noexcept
   1127     { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
   1128 
   1129   template<typename _ITp>
   1130     inline bool
   1131     atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
   1132 					    _ITp* __i1, _ITp __i2,
   1133 					    memory_order __m1,
   1134 					    memory_order __m2) noexcept
   1135     { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
   1136 
   1137 
   1138   template<typename _ITp>
   1139     inline void
   1140     atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
   1141     { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
   1142 
   1143   template<typename _ITp>
   1144     inline void
   1145     atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
   1146     { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
   1147 
   1148   template<typename _ITp>
   1149     inline _ITp
   1150     atomic_load(const atomic<_ITp>* __a) noexcept
   1151     { return atomic_load_explicit(__a, memory_order_seq_cst); }
   1152 
   1153   template<typename _ITp>
   1154     inline _ITp
   1155     atomic_load(const volatile atomic<_ITp>* __a) noexcept
   1156     { return atomic_load_explicit(__a, memory_order_seq_cst); }
   1157 
   1158   template<typename _ITp>
   1159     inline _ITp
   1160     atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
   1161     { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
   1162 
   1163   template<typename _ITp>
   1164     inline _ITp
   1165     atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
   1166     { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
   1167 
   1168   template<typename _ITp>
   1169     inline bool
   1170     atomic_compare_exchange_weak(atomic<_ITp>* __a,
   1171 				 _ITp* __i1, _ITp __i2) noexcept
   1172     {
   1173       return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
   1174 						   memory_order_seq_cst,
   1175 						   memory_order_seq_cst);
   1176     }
   1177 
   1178   template<typename _ITp>
   1179     inline bool
   1180     atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
   1181 				 _ITp* __i1, _ITp __i2) noexcept
   1182     {
   1183       return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
   1184 						   memory_order_seq_cst,
   1185 						   memory_order_seq_cst);
   1186     }
   1187 
   1188   template<typename _ITp>
   1189     inline bool
   1190     atomic_compare_exchange_strong(atomic<_ITp>* __a,
   1191 				   _ITp* __i1, _ITp __i2) noexcept
   1192     {
   1193       return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
   1194 						     memory_order_seq_cst,
   1195 						     memory_order_seq_cst);
   1196     }
   1197 
   1198   template<typename _ITp>
   1199     inline bool
   1200     atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
   1201 				   _ITp* __i1, _ITp __i2) noexcept
   1202     {
   1203       return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
   1204 						     memory_order_seq_cst,
   1205 						     memory_order_seq_cst);
   1206     }
   1207 
  // Function templates for atomic_integral operations only, using
  // __atomic_base. The template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.

  /// Atomically add __i to *__a with ordering __m; returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Atomically add __i to *__a (volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Atomically subtract __i from *__a; returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Atomically subtract __i from *__a (volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Atomically AND __i into *__a; returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Atomically AND __i into *__a (volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Atomically OR __i into *__a; returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Atomically OR __i into *__a (volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Atomically XOR __i into *__a; returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  /// Atomically XOR __i into *__a (volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
   1271 
  // seq_cst convenience wrappers for the integral fetch-and-modify
  // operations above.

  /// Atomically add __i to *__a (seq_cst); returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically add __i to *__a (seq_cst, volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically subtract __i from *__a (seq_cst); returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically subtract __i from *__a (seq_cst, volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically AND __i into *__a (seq_cst); returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically AND __i into *__a (seq_cst, volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically OR __i into *__a (seq_cst); returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically OR __i into *__a (seq_cst, volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically XOR __i into *__a (seq_cst); returns the old value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically XOR __i into *__a (seq_cst, volatile overload).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
   1321 
   1322 
   1323   // Partial specializations for pointers.
   1324   template<typename _ITp>
   1325     inline _ITp*
   1326     atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
   1327 			      memory_order __m) noexcept
   1328     { return __a->fetch_add(__d, __m); }
   1329 
   1330   template<typename _ITp>
   1331     inline _ITp*
   1332     atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
   1333 			      memory_order __m) noexcept
   1334     { return __a->fetch_add(__d, __m); }
   1335 
   1336   template<typename _ITp>
   1337     inline _ITp*
   1338     atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
   1339     { return __a->fetch_add(__d); }
   1340 
   1341   template<typename _ITp>
   1342     inline _ITp*
   1343     atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
   1344     { return __a->fetch_add(__d); }
   1345 
   1346   template<typename _ITp>
   1347     inline _ITp*
   1348     atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
   1349 			      ptrdiff_t __d, memory_order __m) noexcept
   1350     { return __a->fetch_sub(__d, __m); }
   1351 
   1352   template<typename _ITp>
   1353     inline _ITp*
   1354     atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
   1355 			      memory_order __m) noexcept
   1356     { return __a->fetch_sub(__d, __m); }
   1357 
   1358   template<typename _ITp>
   1359     inline _ITp*
   1360     atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
   1361     { return __a->fetch_sub(__d); }
   1362 
   1363   template<typename _ITp>
   1364     inline _ITp*
   1365     atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
   1366     { return __a->fetch_sub(__d); }
   1367   // @} group atomics
   1368 
   1369 _GLIBCXX_END_NAMESPACE_VERSION
   1370 } // namespace
   1371 
   1372 #endif // C++11
   1373 
   1374 #endif // _GLIBCXX_ATOMIC
   1375