// Source-browser provenance (was non-comment viewer chrome; commented out so the
// file remains valid C++): include/std/atomic, revision 1.1.1.13.2.1.
      1 // -*- C++ -*- header.
      2 
      3 // Copyright (C) 2008-2022 Free Software Foundation, Inc.
      4 //
      5 // This file is part of the GNU ISO C++ Library.  This library is free
      6 // software; you can redistribute it and/or modify it under the
      7 // terms of the GNU General Public License as published by the
      8 // Free Software Foundation; either version 3, or (at your option)
      9 // any later version.
     10 
     11 // This library is distributed in the hope that it will be useful,
     12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
     13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
     14 // GNU General Public License for more details.
     15 
     16 // Under Section 7 of GPL version 3, you are granted additional
     17 // permissions described in the GCC Runtime Library Exception, version
     18 // 3.1, as published by the Free Software Foundation.
     19 
     20 // You should have received a copy of the GNU General Public License and
     21 // a copy of the GCC Runtime Library Exception along with this program;
     22 // see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
     23 // <http://www.gnu.org/licenses/>.
     24 
     25 /** @file include/atomic
     26  *  This is a Standard C++ Library header.
     27  */
     28 
     29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
     30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
     31 
     32 #ifndef _GLIBCXX_ATOMIC
     33 #define _GLIBCXX_ATOMIC 1
     34 
     35 #pragma GCC system_header
     36 
     37 #if __cplusplus < 201103L
     38 # include <bits/c++0x_warning.h>
     39 #else
     40 
     41 #include <bits/atomic_base.h>
     42 
     43 namespace std _GLIBCXX_VISIBILITY(default)
     44 {
     45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
     46 
     47   /**
     48    * @addtogroup atomics
     49    * @{
     50    */
     51 
     52 #if __cplusplus >= 201703L
     53 # define __cpp_lib_atomic_is_always_lock_free 201603L
     54 #endif
     55 
     56   template<typename _Tp>
     57     struct atomic;
     58 
     59   /// atomic<bool>
     60   // NB: No operators or fetch-operations for this type.
     61   template<>
     62   struct atomic<bool>
     63   {
     64     using value_type = bool;
     65 
     66   private:
     67     __atomic_base<bool>	_M_base;
     68 
     69   public:
     70     atomic() noexcept = default;
     71     ~atomic() noexcept = default;
     72     atomic(const atomic&) = delete;
     73     atomic& operator=(const atomic&) = delete;
     74     atomic& operator=(const atomic&) volatile = delete;
     75 
     76     constexpr atomic(bool __i) noexcept : _M_base(__i) { }
     77 
     78     bool
     79     operator=(bool __i) noexcept
     80     { return _M_base.operator=(__i); }
     81 
     82     bool
     83     operator=(bool __i) volatile noexcept
     84     { return _M_base.operator=(__i); }
     85 
     86     operator bool() const noexcept
     87     { return _M_base.load(); }
     88 
     89     operator bool() const volatile noexcept
     90     { return _M_base.load(); }
     91 
     92     bool
     93     is_lock_free() const noexcept { return _M_base.is_lock_free(); }
     94 
     95     bool
     96     is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
     97 
     98 #if __cplusplus >= 201703L
     99     static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
    100 #endif
    101 
    102     void
    103     store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    104     { _M_base.store(__i, __m); }
    105 
    106     void
    107     store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    108     { _M_base.store(__i, __m); }
    109 
    110     bool
    111     load(memory_order __m = memory_order_seq_cst) const noexcept
    112     { return _M_base.load(__m); }
    113 
    114     bool
    115     load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    116     { return _M_base.load(__m); }
    117 
    118     bool
    119     exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    120     { return _M_base.exchange(__i, __m); }
    121 
    122     bool
    123     exchange(bool __i,
    124 	     memory_order __m = memory_order_seq_cst) volatile noexcept
    125     { return _M_base.exchange(__i, __m); }
    126 
    127     bool
    128     compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
    129 			  memory_order __m2) noexcept
    130     { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
    131 
    132     bool
    133     compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
    134 			  memory_order __m2) volatile noexcept
    135     { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
    136 
    137     bool
    138     compare_exchange_weak(bool& __i1, bool __i2,
    139 			  memory_order __m = memory_order_seq_cst) noexcept
    140     { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
    141 
    142     bool
    143     compare_exchange_weak(bool& __i1, bool __i2,
    144 		     memory_order __m = memory_order_seq_cst) volatile noexcept
    145     { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
    146 
    147     bool
    148     compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
    149 			    memory_order __m2) noexcept
    150     { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
    151 
    152     bool
    153     compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
    154 			    memory_order __m2) volatile noexcept
    155     { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
    156 
    157     bool
    158     compare_exchange_strong(bool& __i1, bool __i2,
    159 			    memory_order __m = memory_order_seq_cst) noexcept
    160     { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
    161 
    162     bool
    163     compare_exchange_strong(bool& __i1, bool __i2,
    164 		    memory_order __m = memory_order_seq_cst) volatile noexcept
    165     { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
    166 
    167 #if __cpp_lib_atomic_wait
    168     void
    169     wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
    170     { _M_base.wait(__old, __m); }
    171 
    172     // TODO add const volatile overload
    173 
    174     void
    175     notify_one() noexcept
    176     { _M_base.notify_one(); }
    177 
    178     void
    179     notify_all() noexcept
    180     { _M_base.notify_all(); }
    181 #endif // __cpp_lib_atomic_wait
    182   };
    183 
    184 /// @cond undocumented
    185 #if __cpp_lib_atomic_value_initialization
    186 # define _GLIBCXX20_INIT(I) = I
    187 #else
    188 # define _GLIBCXX20_INIT(I)
    189 #endif
    190 /// @endcond
    191 
    192   /**
    193    *  @brief Generic atomic type, primary class template.
    194    *
    195    *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
    196    */
    197   template<typename _Tp>
    198     struct atomic
    199     {
    200       using value_type = _Tp;
    201 
    202     private:
    203       // Align 1/2/4/8/16-byte types to at least their size.
    204       static constexpr int _S_min_alignment
    205 	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
    206 	? 0 : sizeof(_Tp);
    207 
    208       static constexpr int _S_alignment
    209         = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
    210 
    211       alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
    212 
    213       static_assert(__is_trivially_copyable(_Tp),
    214 		    "std::atomic requires a trivially copyable type");
    215 
    216       static_assert(sizeof(_Tp) > 0,
    217 		    "Incomplete or zero-sized types are not supported");
    218 
    219 #if __cplusplus > 201703L
    220       static_assert(is_copy_constructible_v<_Tp>);
    221       static_assert(is_move_constructible_v<_Tp>);
    222       static_assert(is_copy_assignable_v<_Tp>);
    223       static_assert(is_move_assignable_v<_Tp>);
    224 #endif
    225 
    226     public:
    227       atomic() = default;
    228       ~atomic() noexcept = default;
    229       atomic(const atomic&) = delete;
    230       atomic& operator=(const atomic&) = delete;
    231       atomic& operator=(const atomic&) volatile = delete;
    232 
    233       constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
    234 
    235       operator _Tp() const noexcept
    236       { return load(); }
    237 
    238       operator _Tp() const volatile noexcept
    239       { return load(); }
    240 
    241       _Tp
    242       operator=(_Tp __i) noexcept
    243       { store(__i); return __i; }
    244 
    245       _Tp
    246       operator=(_Tp __i) volatile noexcept
    247       { store(__i); return __i; }
    248 
    249       bool
    250       is_lock_free() const noexcept
    251       {
    252 	// Produce a fake, minimally aligned pointer.
    253 	return __atomic_is_lock_free(sizeof(_M_i),
    254 	    reinterpret_cast<void *>(-_S_alignment));
    255       }
    256 
    257       bool
    258       is_lock_free() const volatile noexcept
    259       {
    260 	// Produce a fake, minimally aligned pointer.
    261 	return __atomic_is_lock_free(sizeof(_M_i),
    262 	    reinterpret_cast<void *>(-_S_alignment));
    263       }
    264 
    265 #if __cplusplus >= 201703L
    266       static constexpr bool is_always_lock_free
    267 	= __atomic_always_lock_free(sizeof(_M_i), 0);
    268 #endif
    269 
    270       void
    271       store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
    272       {
    273 	__atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
    274       }
    275 
    276       void
    277       store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    278       {
    279 	__atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
    280       }
    281 
    282       _Tp
    283       load(memory_order __m = memory_order_seq_cst) const noexcept
    284       {
    285 	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
    286 	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
    287 	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
    288 	return *__ptr;
    289       }
    290 
    291       _Tp
    292       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    293       {
    294         alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
    295 	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
    296 	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
    297 	return *__ptr;
    298       }
    299 
    300       _Tp
    301       exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
    302       {
    303         alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
    304 	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
    305 	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
    306 			  __ptr, int(__m));
    307 	return *__ptr;
    308       }
    309 
    310       _Tp
    311       exchange(_Tp __i,
    312 	       memory_order __m = memory_order_seq_cst) volatile noexcept
    313       {
    314         alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
    315 	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
    316 	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
    317 			  __ptr, int(__m));
    318 	return *__ptr;
    319       }
    320 
    321       bool
    322       compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
    323 			    memory_order __f) noexcept
    324       {
    325 	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
    326 
    327 	return __atomic_compare_exchange(std::__addressof(_M_i),
    328 					 std::__addressof(__e),
    329 					 std::__addressof(__i),
    330 					 true, int(__s), int(__f));
    331       }
    332 
    333       bool
    334       compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
    335 			    memory_order __f) volatile noexcept
    336       {
    337 	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
    338 
    339 	return __atomic_compare_exchange(std::__addressof(_M_i),
    340 					 std::__addressof(__e),
    341 					 std::__addressof(__i),
    342 					 true, int(__s), int(__f));
    343       }
    344 
    345       bool
    346       compare_exchange_weak(_Tp& __e, _Tp __i,
    347 			    memory_order __m = memory_order_seq_cst) noexcept
    348       { return compare_exchange_weak(__e, __i, __m,
    349                                      __cmpexch_failure_order(__m)); }
    350 
    351       bool
    352       compare_exchange_weak(_Tp& __e, _Tp __i,
    353 		     memory_order __m = memory_order_seq_cst) volatile noexcept
    354       { return compare_exchange_weak(__e, __i, __m,
    355                                      __cmpexch_failure_order(__m)); }
    356 
    357       bool
    358       compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
    359 			      memory_order __f) noexcept
    360       {
    361 	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
    362 
    363 	return __atomic_compare_exchange(std::__addressof(_M_i),
    364 					 std::__addressof(__e),
    365 					 std::__addressof(__i),
    366 					 false, int(__s), int(__f));
    367       }
    368 
    369       bool
    370       compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
    371 			      memory_order __f) volatile noexcept
    372       {
    373 	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
    374 
    375 	return __atomic_compare_exchange(std::__addressof(_M_i),
    376 					 std::__addressof(__e),
    377 					 std::__addressof(__i),
    378 					 false, int(__s), int(__f));
    379       }
    380 
    381       bool
    382       compare_exchange_strong(_Tp& __e, _Tp __i,
    383 			       memory_order __m = memory_order_seq_cst) noexcept
    384       { return compare_exchange_strong(__e, __i, __m,
    385                                        __cmpexch_failure_order(__m)); }
    386 
    387       bool
    388       compare_exchange_strong(_Tp& __e, _Tp __i,
    389 		     memory_order __m = memory_order_seq_cst) volatile noexcept
    390       { return compare_exchange_strong(__e, __i, __m,
    391                                        __cmpexch_failure_order(__m)); }
    392 
    393 #if __cpp_lib_atomic_wait
    394     void
    395     wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
    396     {
    397       std::__atomic_wait_address_v(&_M_i, __old,
    398 			 [__m, this] { return this->load(__m); });
    399     }
    400 
    401     // TODO add const volatile overload
    402 
    403     void
    404     notify_one() noexcept
    405     { std::__atomic_notify_address(&_M_i, false); }
    406 
    407     void
    408     notify_all() noexcept
    409     { std::__atomic_notify_address(&_M_i, true); }
    410 #endif // __cpp_lib_atomic_wait
    411 
    412     };
    413 #undef _GLIBCXX20_INIT
    414 
    415   /// Partial specialization for pointer types.
    416   template<typename _Tp>
    417     struct atomic<_Tp*>
    418     {
    419       using value_type = _Tp*;
    420       using difference_type = ptrdiff_t;
    421 
    422       typedef _Tp* 			__pointer_type;
    423       typedef __atomic_base<_Tp*>	__base_type;
    424       __base_type			_M_b;
    425 
    426       atomic() noexcept = default;
    427       ~atomic() noexcept = default;
    428       atomic(const atomic&) = delete;
    429       atomic& operator=(const atomic&) = delete;
    430       atomic& operator=(const atomic&) volatile = delete;
    431 
    432       constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
    433 
    434       operator __pointer_type() const noexcept
    435       { return __pointer_type(_M_b); }
    436 
    437       operator __pointer_type() const volatile noexcept
    438       { return __pointer_type(_M_b); }
    439 
    440       __pointer_type
    441       operator=(__pointer_type __p) noexcept
    442       { return _M_b.operator=(__p); }
    443 
    444       __pointer_type
    445       operator=(__pointer_type __p) volatile noexcept
    446       { return _M_b.operator=(__p); }
    447 
    448       __pointer_type
    449       operator++(int) noexcept
    450       {
    451 #if __cplusplus >= 201703L
    452 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    453 #endif
    454 	return _M_b++;
    455       }
    456 
    457       __pointer_type
    458       operator++(int) volatile noexcept
    459       {
    460 #if __cplusplus >= 201703L
    461 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    462 #endif
    463 	return _M_b++;
    464       }
    465 
    466       __pointer_type
    467       operator--(int) noexcept
    468       {
    469 #if __cplusplus >= 201703L
    470 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    471 #endif
    472 	return _M_b--;
    473       }
    474 
    475       __pointer_type
    476       operator--(int) volatile noexcept
    477       {
    478 #if __cplusplus >= 201703L
    479 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    480 #endif
    481 	return _M_b--;
    482       }
    483 
    484       __pointer_type
    485       operator++() noexcept
    486       {
    487 #if __cplusplus >= 201703L
    488 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    489 #endif
    490 	return ++_M_b;
    491       }
    492 
    493       __pointer_type
    494       operator++() volatile noexcept
    495       {
    496 #if __cplusplus >= 201703L
    497 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    498 #endif
    499 	return ++_M_b;
    500       }
    501 
    502       __pointer_type
    503       operator--() noexcept
    504       {
    505 #if __cplusplus >= 201703L
    506 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    507 #endif
    508 	return --_M_b;
    509       }
    510 
    511       __pointer_type
    512       operator--() volatile noexcept
    513       {
    514 #if __cplusplus >= 201703L
    515 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    516 #endif
    517 	return --_M_b;
    518       }
    519 
    520       __pointer_type
    521       operator+=(ptrdiff_t __d) noexcept
    522       {
    523 #if __cplusplus >= 201703L
    524 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    525 #endif
    526 	return _M_b.operator+=(__d);
    527       }
    528 
    529       __pointer_type
    530       operator+=(ptrdiff_t __d) volatile noexcept
    531       {
    532 #if __cplusplus >= 201703L
    533 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    534 #endif
    535 	return _M_b.operator+=(__d);
    536       }
    537 
    538       __pointer_type
    539       operator-=(ptrdiff_t __d) noexcept
    540       {
    541 #if __cplusplus >= 201703L
    542 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    543 #endif
    544 	return _M_b.operator-=(__d);
    545       }
    546 
    547       __pointer_type
    548       operator-=(ptrdiff_t __d) volatile noexcept
    549       {
    550 #if __cplusplus >= 201703L
    551 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    552 #endif
    553 	return _M_b.operator-=(__d);
    554       }
    555 
    556       bool
    557       is_lock_free() const noexcept
    558       { return _M_b.is_lock_free(); }
    559 
    560       bool
    561       is_lock_free() const volatile noexcept
    562       { return _M_b.is_lock_free(); }
    563 
    564 #if __cplusplus >= 201703L
    565       static constexpr bool is_always_lock_free
    566 	= ATOMIC_POINTER_LOCK_FREE == 2;
    567 #endif
    568 
    569       void
    570       store(__pointer_type __p,
    571 	    memory_order __m = memory_order_seq_cst) noexcept
    572       { return _M_b.store(__p, __m); }
    573 
    574       void
    575       store(__pointer_type __p,
    576 	    memory_order __m = memory_order_seq_cst) volatile noexcept
    577       { return _M_b.store(__p, __m); }
    578 
    579       __pointer_type
    580       load(memory_order __m = memory_order_seq_cst) const noexcept
    581       { return _M_b.load(__m); }
    582 
    583       __pointer_type
    584       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    585       { return _M_b.load(__m); }
    586 
    587       __pointer_type
    588       exchange(__pointer_type __p,
    589 	       memory_order __m = memory_order_seq_cst) noexcept
    590       { return _M_b.exchange(__p, __m); }
    591 
    592       __pointer_type
    593       exchange(__pointer_type __p,
    594 	       memory_order __m = memory_order_seq_cst) volatile noexcept
    595       { return _M_b.exchange(__p, __m); }
    596 
    597       bool
    598       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
    599 			    memory_order __m1, memory_order __m2) noexcept
    600       { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
    601 
    602       bool
    603       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
    604 			    memory_order __m1,
    605 			    memory_order __m2) volatile noexcept
    606       { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
    607 
    608       bool
    609       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
    610 			    memory_order __m = memory_order_seq_cst) noexcept
    611       {
    612 	return compare_exchange_weak(__p1, __p2, __m,
    613 				     __cmpexch_failure_order(__m));
    614       }
    615 
    616       bool
    617       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
    618 		    memory_order __m = memory_order_seq_cst) volatile noexcept
    619       {
    620 	return compare_exchange_weak(__p1, __p2, __m,
    621 				     __cmpexch_failure_order(__m));
    622       }
    623 
    624       bool
    625       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
    626 			      memory_order __m1, memory_order __m2) noexcept
    627       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
    628 
    629       bool
    630       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
    631 			      memory_order __m1,
    632 			      memory_order __m2) volatile noexcept
    633       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
    634 
    635       bool
    636       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
    637 			      memory_order __m = memory_order_seq_cst) noexcept
    638       {
    639 	return _M_b.compare_exchange_strong(__p1, __p2, __m,
    640 					    __cmpexch_failure_order(__m));
    641       }
    642 
    643       bool
    644       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
    645 		    memory_order __m = memory_order_seq_cst) volatile noexcept
    646       {
    647 	return _M_b.compare_exchange_strong(__p1, __p2, __m,
    648 					    __cmpexch_failure_order(__m));
    649       }
    650 
    651 #if __cpp_lib_atomic_wait
    652     void
    653     wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
    654     { _M_b.wait(__old, __m); }
    655 
    656     // TODO add const volatile overload
    657 
    658     void
    659     notify_one() noexcept
    660     { _M_b.notify_one(); }
    661 
    662     void
    663     notify_all() noexcept
    664     { _M_b.notify_all(); }
    665 #endif // __cpp_lib_atomic_wait
    666 
    667       __pointer_type
    668       fetch_add(ptrdiff_t __d,
    669 		memory_order __m = memory_order_seq_cst) noexcept
    670       {
    671 #if __cplusplus >= 201703L
    672 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    673 #endif
    674 	return _M_b.fetch_add(__d, __m);
    675       }
    676 
    677       __pointer_type
    678       fetch_add(ptrdiff_t __d,
    679 		memory_order __m = memory_order_seq_cst) volatile noexcept
    680       {
    681 #if __cplusplus >= 201703L
    682 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    683 #endif
    684 	return _M_b.fetch_add(__d, __m);
    685       }
    686 
    687       __pointer_type
    688       fetch_sub(ptrdiff_t __d,
    689 		memory_order __m = memory_order_seq_cst) noexcept
    690       {
    691 #if __cplusplus >= 201703L
    692 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    693 #endif
    694 	return _M_b.fetch_sub(__d, __m);
    695       }
    696 
    697       __pointer_type
    698       fetch_sub(ptrdiff_t __d,
    699 		memory_order __m = memory_order_seq_cst) volatile noexcept
    700       {
    701 #if __cplusplus >= 201703L
    702 	static_assert( is_object<_Tp>::value, "pointer to object type" );
    703 #endif
    704 	return _M_b.fetch_sub(__d, __m);
    705       }
    706     };
    707 
    708 
    709   /// Explicit specialization for char.
    710   template<>
    711     struct atomic<char> : __atomic_base<char>
    712     {
    713       typedef char 			__integral_type;
    714       typedef __atomic_base<char> 	__base_type;
    715 
    716       atomic() noexcept = default;
    717       ~atomic() noexcept = default;
    718       atomic(const atomic&) = delete;
    719       atomic& operator=(const atomic&) = delete;
    720       atomic& operator=(const atomic&) volatile = delete;
    721 
    722       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    723 
    724       using __base_type::operator __integral_type;
    725       using __base_type::operator=;
    726 
    727 #if __cplusplus >= 201703L
    728       static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
    729 #endif
    730     };
    731 
    732   /// Explicit specialization for signed char.
    733   template<>
    734     struct atomic<signed char> : __atomic_base<signed char>
    735     {
    736       typedef signed char 		__integral_type;
    737       typedef __atomic_base<signed char> 	__base_type;
    738 
    739       atomic() noexcept= default;
    740       ~atomic() noexcept = default;
    741       atomic(const atomic&) = delete;
    742       atomic& operator=(const atomic&) = delete;
    743       atomic& operator=(const atomic&) volatile = delete;
    744 
    745       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    746 
    747       using __base_type::operator __integral_type;
    748       using __base_type::operator=;
    749 
    750 #if __cplusplus >= 201703L
    751       static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
    752 #endif
    753     };
    754 
    755   /// Explicit specialization for unsigned char.
    756   template<>
    757     struct atomic<unsigned char> : __atomic_base<unsigned char>
    758     {
    759       typedef unsigned char 		__integral_type;
    760       typedef __atomic_base<unsigned char> 	__base_type;
    761 
    762       atomic() noexcept= default;
    763       ~atomic() noexcept = default;
    764       atomic(const atomic&) = delete;
    765       atomic& operator=(const atomic&) = delete;
    766       atomic& operator=(const atomic&) volatile = delete;
    767 
    768       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    769 
    770       using __base_type::operator __integral_type;
    771       using __base_type::operator=;
    772 
    773 #if __cplusplus >= 201703L
    774       static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
    775 #endif
    776     };
    777 
    778   /// Explicit specialization for short.
    779   template<>
    780     struct atomic<short> : __atomic_base<short>
    781     {
    782       typedef short 			__integral_type;
    783       typedef __atomic_base<short> 		__base_type;
    784 
    785       atomic() noexcept = default;
    786       ~atomic() noexcept = default;
    787       atomic(const atomic&) = delete;
    788       atomic& operator=(const atomic&) = delete;
    789       atomic& operator=(const atomic&) volatile = delete;
    790 
    791       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    792 
    793       using __base_type::operator __integral_type;
    794       using __base_type::operator=;
    795 
    796 #if __cplusplus >= 201703L
    797       static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
    798 #endif
    799     };
    800 
    801   /// Explicit specialization for unsigned short.
    802   template<>
    803     struct atomic<unsigned short> : __atomic_base<unsigned short>
    804     {
    805       typedef unsigned short 	      	__integral_type;
    806       typedef __atomic_base<unsigned short> 		__base_type;
    807 
    808       atomic() noexcept = default;
    809       ~atomic() noexcept = default;
    810       atomic(const atomic&) = delete;
    811       atomic& operator=(const atomic&) = delete;
    812       atomic& operator=(const atomic&) volatile = delete;
    813 
    814       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    815 
    816       using __base_type::operator __integral_type;
    817       using __base_type::operator=;
    818 
    819 #if __cplusplus >= 201703L
    820       static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
    821 #endif
    822     };
    823 
    824   /// Explicit specialization for int.
    825   template<>
    826     struct atomic<int> : __atomic_base<int>
    827     {
    828       typedef int 			__integral_type;
    829       typedef __atomic_base<int> 		__base_type;
    830 
    831       atomic() noexcept = default;
    832       ~atomic() noexcept = default;
    833       atomic(const atomic&) = delete;
    834       atomic& operator=(const atomic&) = delete;
    835       atomic& operator=(const atomic&) volatile = delete;
    836 
    837       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    838 
    839       using __base_type::operator __integral_type;
    840       using __base_type::operator=;
    841 
    842 #if __cplusplus >= 201703L
    843       static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
    844 #endif
    845     };
    846 
    847   /// Explicit specialization for unsigned int.
    848   template<>
    849     struct atomic<unsigned int> : __atomic_base<unsigned int>
    850     {
    851       typedef unsigned int		__integral_type;
    852       typedef __atomic_base<unsigned int> 	__base_type;
    853 
    854       atomic() noexcept = default;
    855       ~atomic() noexcept = default;
    856       atomic(const atomic&) = delete;
    857       atomic& operator=(const atomic&) = delete;
    858       atomic& operator=(const atomic&) volatile = delete;
    859 
    860       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
    861 
    862       using __base_type::operator __integral_type;
    863       using __base_type::operator=;
    864 
    865 #if __cplusplus >= 201703L
    866       static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
    867 #endif
    868     };
    869 
  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      // Same pattern as the atomic<int> specialization above.
      typedef long 			__integral_type;
      typedef __atomic_base<long> 	__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // C++17: true iff objects of this type are always lock-free.
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
    892 
  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      // Same pattern as the atomic<int> specialization above.
      typedef unsigned long 		__integral_type;
      typedef __atomic_base<unsigned long> 	__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // C++17: true iff objects of this type are always lock-free.
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
    915 
  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      // Same pattern as the atomic<int> specialization above.
      typedef long long 		__integral_type;
      typedef __atomic_base<long long> 		__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // C++17: true iff objects of this type are always lock-free.
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
    938 
  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      // Same pattern as the atomic<int> specialization above.
      typedef unsigned long long       	__integral_type;
      typedef __atomic_base<unsigned long long> 	__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // C++17: true iff objects of this type are always lock-free.
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
    961 
  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      // Same pattern as the atomic<int> specialization above.
      typedef wchar_t 			__integral_type;
      typedef __atomic_base<wchar_t> 	__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // C++17: true iff objects of this type are always lock-free.
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };
    984 
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      // Same pattern as the atomic<int> specialization above.  Only present
      // when char8_t is a distinct type (C++20 or -fchar8_t).
      typedef char8_t 			__integral_type;
      typedef __atomic_base<char8_t> 	__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

      // NOTE(review): guard is "> 201402L" rather than ">= 201703L" as in
      // the sibling specializations; equivalent for released standards, and
      // also covers interim pre-C++17 dialect values of __cplusplus.
#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif
   1010 
  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      // Same pattern as the atomic<int> specialization above.
      typedef char16_t 			__integral_type;
      typedef __atomic_base<char16_t> 	__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // C++17: true iff objects of this type are always lock-free.
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };
   1034 
  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      // Same pattern as the atomic<int> specialization above.
      typedef char32_t 			__integral_type;
      typedef __atomic_base<char32_t> 	__base_type;

      // Non-copyable; copying cannot be done atomically.
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Constant initialization from a plain value.
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      // Expose the base's conversion and assignment operators.
      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      // C++17: true iff objects of this type are always lock-free.
      static constexpr bool is_always_lock_free
	= ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
   1058 
   1059 
  // Convenience typedefs for the atomic forms of the builtin and <cstdint>
  // integer types, with the names required by the standard (matching C11's
  // <stdatomic.h> naming).

  /// atomic_bool
  typedef atomic<bool>			atomic_bool;

  /// atomic_char
  typedef atomic<char>			atomic_char;

  /// atomic_schar
  typedef atomic<signed char>		atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char>		atomic_uchar;

  /// atomic_short
  typedef atomic<short>			atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short>	atomic_ushort;

  /// atomic_int
  typedef atomic<int>			atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int>		atomic_uint;

  /// atomic_long
  typedef atomic<long>			atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long>		atomic_ulong;

  /// atomic_llong
  typedef atomic<long long>		atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long>	atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t>		atomic_wchar_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t>		atomic_char8_t;
#endif

  /// atomic_char16_t
  typedef atomic<char16_t>		atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t>		atomic_char32_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  /// atomic_int8_t
  typedef atomic<int8_t>		atomic_int8_t;

  /// atomic_uint8_t
  typedef atomic<uint8_t>		atomic_uint8_t;

  /// atomic_int16_t
  typedef atomic<int16_t>		atomic_int16_t;

  /// atomic_uint16_t
  typedef atomic<uint16_t>		atomic_uint16_t;

  /// atomic_int32_t
  typedef atomic<int32_t>		atomic_int32_t;

  /// atomic_uint32_t
  typedef atomic<uint32_t>		atomic_uint32_t;

  /// atomic_int64_t
  typedef atomic<int64_t>		atomic_int64_t;

  /// atomic_uint64_t
  typedef atomic<uint64_t>		atomic_uint64_t;


  /// atomic_int_least8_t
  typedef atomic<int_least8_t>		atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t>		atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t>		atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t>	atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t>		atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t>	atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t>		atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t>	atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t>		atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t>		atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t>		atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t>		atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t>		atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t>		atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t>		atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t>		atomic_uint_fast64_t;
#endif


  /// atomic_intptr_t
  typedef atomic<intptr_t>		atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t>		atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t>		atomic_size_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t>		atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  /// atomic_intmax_t
  typedef atomic<intmax_t>		atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t>		atomic_uintmax_t;
#endif
   1209 
  // Function definitions, atomic_flag operations.
  // These are the C-compatible free-function forms; each simply forwards to
  // the corresponding atomic_flag member function.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

#if __cpp_lib_atomic_flag_test
  // C++20: non-modifying read of the flag's value.
  inline bool
  atomic_flag_test(const atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test(const volatile atomic_flag* __a) noexcept
  { return __a->test(); }

  inline bool
  atomic_flag_test_explicit(const atomic_flag* __a,
			    memory_order __m) noexcept
  { return __a->test(__m); }

  inline bool
  atomic_flag_test_explicit(const volatile atomic_flag* __a,
			    memory_order __m) noexcept
  { return __a->test(__m); }
#endif
   1240 
  // Free-function forms of atomic_flag::clear, with an explicit order.
  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  // Non-"_explicit" forms use sequentially-consistent ordering, per the
  // standard's default.
  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
   1265 
#if __cpp_lib_atomic_wait
  // C++20 wait/notify free functions for atomic_flag; each forwards to the
  // corresponding member function.
  // NOTE(review): only non-volatile overloads are provided here; the C++20
  // standard also declares volatile ones — confirm against upstream.
  inline void
  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
  { __a->wait(__old); }

  inline void
  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
                                memory_order __m) noexcept
  { __a->wait(__old, __m); }

  inline void
  atomic_flag_notify_one(atomic_flag* __a) noexcept
  { __a->notify_one(); }

  inline void
  atomic_flag_notify_all(atomic_flag* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
   1284 
  /// @cond undocumented
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
  // __atomic_val_t wraps _Tp in a non-deduced context, so the value
  // arguments of the non-member functions below do not participate in
  // template argument deduction; only the atomic<_Tp>* argument deduces _Tp.
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  // The difference type used by fetch_add/fetch_sub (e.g. ptrdiff_t for
  // atomic pointer types).
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  /// @endcond
   1293 
  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.
  // Each one forwards to the corresponding member function; the value
  // parameters use __atomic_val_t so they are non-deduced (LWG 3220).
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  // atomic_init: non-atomic initialization in name only; implemented here
  // as a relaxed store.
  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }
   1351 
  // Compare-exchange free functions with explicit success (__m1) and
  // failure (__m2) orderings.  __i1 points at the expected value and is
  // updated with the observed value on failure.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
   1387 
   1388 
  // Non-"_explicit" forms: forward to the explicit forms with
  // sequentially-consistent ordering.
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
   1419 
  // Compare-exchange with the default (seq_cst) ordering for both the
  // success and failure cases.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
   1463 
   1464 
#if __cpp_lib_atomic_wait
  // C++20 wait/notify free functions for atomic<T>; each forwards to the
  // corresponding member function.
  // NOTE(review): only non-volatile overloads are provided here — confirm
  // against upstream whether volatile overloads are intended.
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
	        typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
			 typename std::atomic<_Tp>::value_type __old,
			 std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
   1489 
  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.

  // fetch_add/fetch_sub take __atomic_diff_t (the atomic's difference_type),
  // so they also apply to atomic pointer types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }
   1521 
  // Bitwise fetch operations take __atomic_base<_ITp>*, restricting them to
  // atomic integral types (atomic pointer/floating types do not derive from
  // a matching __atomic_base specialization with these members).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
   1563 
  // Non-"_explicit" arithmetic fetch operations: seq_cst ordering.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
   1587 
  // Non-"_explicit" bitwise fetch operations: seq_cst ordering; integral
  // atomics only (see the __atomic_base parameter note above the explicit
  // forms).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
   1623 
#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L
  /// Explicit specialization for float (C++20, P0020).
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      // Constant initialization from a plain value.
      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      // Non-copyable; copying cannot be done atomically.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Re-export assignment-from-value from the base.
      using __atomic_float<float>::operator=;
    };
   1640 
  /// Explicit specialization for double (C++20, P0020).
  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      // Constant initialization from a plain value.
      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      // Non-copyable; copying cannot be done atomically.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Re-export assignment-from-value from the base.
      using __atomic_float<double>::operator=;
    };
   1655 
  /// Explicit specialization for long double (C++20, P0020).
  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      // Constant initialization from a plain value.
      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      // Non-copyable; copying cannot be done atomically.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Re-export assignment-from-value from the base.
      using __atomic_float<long double>::operator=;
    };
   1670 
#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  // The referenced object must outlive the atomic_ref, and while any
  // atomic_ref to it exists the object must be accessed only through
  // atomic_ref instances; all operations are implemented in __atomic_ref.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      // Binds this atomic_ref to __t.
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      // Assignment would be ambiguous (rebind vs. store); deleted.  Copying
      // is allowed: both copies refer to the same object.
      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      // Assignment from a plain _Tp value performs an atomic store.
      using __atomic_ref<_Tp>::operator=;
    };
   1687 
   1688 #endif // C++2a
   1689 
   1690   /// @} group atomics
   1691 
   1692 _GLIBCXX_END_NAMESPACE_VERSION
   1693 } // namespace
   1694 
   1695 #endif // C++11
   1696 
   1697 #endif // _GLIBCXX_ATOMIC
   1698