3 // Copyright (C) 2008-2022 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file include/atomic
26 * This is a Standard C++ Library header.
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
43 namespace std _GLIBCXX_VISIBILITY(default)
45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
52 #if __cplusplus >= 201703L
53 # define __cpp_lib_atomic_is_always_lock_free 201603L
// Explicit specialization of std::atomic for bool.  Every member forwards
// to the wrapped __atomic_base<bool> _M_base.  NOTE(review): the class
// head, braces and the return-type lines are elided from this listing.
56	template<typename _Tp>
60    // NB: No operators or fetch-operations for this type.
64      using value_type = bool;
// The actual atomic storage: all operations delegate to this member.
67      __atomic_base<bool>	_M_base;
// Default-constructible and destructible, but never copyable.
70      atomic() noexcept = default;
71      ~atomic() noexcept = default;
72      atomic(const atomic&) = delete;
73      atomic& operator=(const atomic&) = delete;
74      atomic& operator=(const atomic&) volatile = delete;
76      constexpr atomic(bool __i) noexcept : _M_base(__i) { }
// Assignment and conversion forward to the base object (plain and
// volatile overloads).
79      operator=(bool __i) noexcept
80      { return _M_base.operator=(__i); }
83      operator=(bool __i) volatile noexcept
84      { return _M_base.operator=(__i); }
86      operator bool() const noexcept
87      { return _M_base.load(); }
89      operator bool() const volatile noexcept
90      { return _M_base.load(); }
93      is_lock_free() const noexcept { return _M_base.is_lock_free(); }
96      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
98  #if __cplusplus >= 201703L
// C++17: compile-time lock-freedom query, derived from the platform macro.
99      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
// store/load/exchange take an explicit memory order, seq_cst by default;
// each has a volatile-qualified twin.
103      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
104      { _M_base.store(__i, __m); }
107      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
108      { _M_base.store(__i, __m); }
111      load(memory_order __m = memory_order_seq_cst) const noexcept
112      { return _M_base.load(__m); }
115      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
116      { return _M_base.load(__m); }
119      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
120      { return _M_base.exchange(__i, __m); }
124		 memory_order __m = memory_order_seq_cst) volatile noexcept
125      { return _M_base.exchange(__i, __m); }
// compare_exchange_{weak,strong}: two-order, default-order, and volatile
// variants, all forwarding to _M_base.
128      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
129			    memory_order __m2) noexcept
130      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
133      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
134			    memory_order __m2) volatile noexcept
135      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
138      compare_exchange_weak(bool& __i1, bool __i2,
139			    memory_order __m = memory_order_seq_cst) noexcept
140      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
143      compare_exchange_weak(bool& __i1, bool __i2,
144			    memory_order __m = memory_order_seq_cst) volatile noexcept
145      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
148      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
149			      memory_order __m2) noexcept
150      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
153      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
154			      memory_order __m2) volatile noexcept
155      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
158      compare_exchange_strong(bool& __i1, bool __i2,
159			      memory_order __m = memory_order_seq_cst) noexcept
160      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
163      compare_exchange_strong(bool& __i1, bool __i2,
164			      memory_order __m = memory_order_seq_cst) volatile noexcept
165      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
167  #if __cpp_lib_atomic_wait
// C++20 atomic waiting/notifying API, gated on the library feature macro.
169      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
170      { _M_base.wait(__old, __m); }
172      // TODO add const volatile overload
175      notify_one() noexcept
176      { _M_base.notify_one(); }
179      notify_all() noexcept
180      { _M_base.notify_all(); }
181  #endif // __cpp_lib_atomic_wait
// _GLIBCXX20_INIT(I): expands to nothing before C++20, and to "= I" in
// C++20 and later, where atomic's default constructor value-initializes
// the contained object (P0883).  #undef'd after the primary template.
184 #if __cplusplus <= 201703L
185 # define _GLIBCXX20_INIT(I)
187 # define _GLIBCXX20_INIT(I) = I
191   * @brief Generic atomic type, primary class template.
193   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
195   template<typename _Tp>
198       using value_type = _Tp;
201       // Align 1/2/4/8/16-byte types to at least their size.
// NOTE(review): sizeof & (sizeof-1) tests for non-power-of-two sizes; the
// selected-value lines of this conditional are elided from this listing.
202       static constexpr int _S_min_alignment
203	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
// Final alignment is the larger of the minimum above and natural alignof.
206       static constexpr int _S_alignment
207	= _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
// The stored value; value-initialized by the default constructor in C++20.
209       alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
211       static_assert(__is_trivially_copyable(_Tp),
212		    "std::atomic requires a trivially copyable type");
214       static_assert(sizeof(_Tp) > 0,
215		    "Incomplete or zero-sized types are not supported");
217 #if __cplusplus > 201703L
// C++20 additionally requires copy/move constructible and assignable (P0883).
218       static_assert(is_copy_constructible_v<_Tp>);
219       static_assert(is_move_constructible_v<_Tp>);
220       static_assert(is_copy_assignable_v<_Tp>);
221       static_assert(is_move_assignable_v<_Tp>);
226       ~atomic() noexcept = default;
227       atomic(const atomic&) = delete;
228       atomic& operator=(const atomic&) = delete;
229       atomic& operator=(const atomic&) volatile = delete;
231       constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
233       operator _Tp() const noexcept
236       operator _Tp() const volatile noexcept
// Assignment stores and, per the standard, returns the stored value.
240       operator=(_Tp __i) noexcept
241       { store(__i); return __i; }
244       operator=(_Tp __i) volatile noexcept
245       { store(__i); return __i; }
248       is_lock_free() const noexcept
250	// Produce a fake, minimally aligned pointer.
251	return __atomic_is_lock_free(sizeof(_M_i),
252	    reinterpret_cast<void *>(-_S_alignment));
256       is_lock_free() const volatile noexcept
258	// Produce a fake, minimally aligned pointer.
259	return __atomic_is_lock_free(sizeof(_M_i),
260	    reinterpret_cast<void *>(-_S_alignment));
263 #if __cplusplus >= 201703L
264       static constexpr bool is_always_lock_free
265	= __atomic_always_lock_free(sizeof(_M_i), 0);
// store/load/exchange map directly onto the __atomic_* compiler builtins.
269       store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
271	__atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
275       store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
277	__atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m));
281       load(memory_order __m = memory_order_seq_cst) const noexcept
// Load into a suitably aligned local buffer (return statement elided in
// this listing).
283	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
284	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
285	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
290       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
292	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
293	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
294	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
299       exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
301	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
302	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
303	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
310		 memory_order __m = memory_order_seq_cst) volatile noexcept
312	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
313	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
314	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
// CAS family: the debug assertion rejects invalid failure orderings; the
// fourth builtin argument selects weak (true) vs strong (false) CAS.
320       compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
321			    memory_order __f) noexcept
323	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
325	return __atomic_compare_exchange(std::__addressof(_M_i),
326					 std::__addressof(__e),
327					 std::__addressof(__i),
328					 true, int(__s), int(__f));
332       compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
333			    memory_order __f) volatile noexcept
335	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
337	return __atomic_compare_exchange(std::__addressof(_M_i),
338					 std::__addressof(__e),
339					 std::__addressof(__i),
340					 true, int(__s), int(__f));
// Single-order overloads derive the failure order from the success order.
344       compare_exchange_weak(_Tp& __e, _Tp __i,
345			    memory_order __m = memory_order_seq_cst) noexcept
346       { return compare_exchange_weak(__e, __i, __m,
347				     __cmpexch_failure_order(__m)); }
350       compare_exchange_weak(_Tp& __e, _Tp __i,
351	     memory_order __m = memory_order_seq_cst) volatile noexcept
352       { return compare_exchange_weak(__e, __i, __m,
353				     __cmpexch_failure_order(__m)); }
356       compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
357			      memory_order __f) noexcept
359	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
361	return __atomic_compare_exchange(std::__addressof(_M_i),
362					 std::__addressof(__e),
363					 std::__addressof(__i),
364					 false, int(__s), int(__f));
368       compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
369			      memory_order __f) volatile noexcept
371	__glibcxx_assert(__is_valid_cmpexch_failure_order(__f));
373	return __atomic_compare_exchange(std::__addressof(_M_i),
374					 std::__addressof(__e),
375					 std::__addressof(__i),
376					 false, int(__s), int(__f));
380       compare_exchange_strong(_Tp& __e, _Tp __i,
381			      memory_order __m = memory_order_seq_cst) noexcept
382       { return compare_exchange_strong(__e, __i, __m,
383				       __cmpexch_failure_order(__m)); }
386       compare_exchange_strong(_Tp& __e, _Tp __i,
387	     memory_order __m = memory_order_seq_cst) volatile noexcept
388       { return compare_exchange_strong(__e, __i, __m,
389				       __cmpexch_failure_order(__m)); }
391 #if __cpp_lib_atomic_wait
// C++20 wait: blocks until the value observed by load(__m) differs from
// __old, via the internal __atomic_wait_address_v helper.
393       wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
395	std::__atomic_wait_address_v(&_M_i, __old,
396				     [__m, this] { return this->load(__m); });
399       // TODO add const volatile overload
// notify_one/notify_all: second argument false = wake one, true = wake all.
402       notify_one() noexcept
403       { std::__atomic_notify_address(&_M_i, false); }
406       notify_all() noexcept
407       { std::__atomic_notify_address(&_M_i, true); }
408 #endif // __cpp_lib_atomic_wait
411 #undef _GLIBCXX20_INIT
413   /// Partial specialization for pointer types.
// State is held in an __atomic_base<_Tp*> member _M_b — presumably
// declared on a line elided from this listing (it is used throughout).
414   template<typename _Tp>
417       using value_type = _Tp*;
// Pointer arithmetic operates in units of ptrdiff_t.
418       using difference_type = ptrdiff_t;
420       typedef _Tp* 			__pointer_type;
421       typedef __atomic_base<_Tp*>	__base_type;
424       atomic() noexcept = default;
425       ~atomic() noexcept = default;
426       atomic(const atomic&) = delete;
427       atomic& operator=(const atomic&) = delete;
428       atomic& operator=(const atomic&) volatile = delete;
430       constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
432       operator __pointer_type() const noexcept
433       { return __pointer_type(_M_b); }
435       operator __pointer_type() const volatile noexcept
436       { return __pointer_type(_M_b); }
439       operator=(__pointer_type __p) noexcept
440       { return _M_b.operator=(__p); }
443       operator=(__pointer_type __p) volatile noexcept
444       { return _M_b.operator=(__p); }
// Increment/decrement and += / -= require a pointer to object type: the
// C++17 static_assert diagnoses e.g. atomic<void*> arithmetic.  Bodies
// are partially elided from this listing.
447       operator++(int) noexcept
449 #if __cplusplus >= 201703L
450	static_assert( is_object<_Tp>::value, "pointer to object type" );
456       operator++(int) volatile noexcept
458 #if __cplusplus >= 201703L
459	static_assert( is_object<_Tp>::value, "pointer to object type" );
465       operator--(int) noexcept
467 #if __cplusplus >= 201703L
468	static_assert( is_object<_Tp>::value, "pointer to object type" );
474       operator--(int) volatile noexcept
476 #if __cplusplus >= 201703L
477	static_assert( is_object<_Tp>::value, "pointer to object type" );
483       operator++() noexcept
485 #if __cplusplus >= 201703L
486	static_assert( is_object<_Tp>::value, "pointer to object type" );
492       operator++() volatile noexcept
494 #if __cplusplus >= 201703L
495	static_assert( is_object<_Tp>::value, "pointer to object type" );
501       operator--() noexcept
503 #if __cplusplus >= 201703L
504	static_assert( is_object<_Tp>::value, "pointer to object type" );
510       operator--() volatile noexcept
512 #if __cplusplus >= 201703L
513	static_assert( is_object<_Tp>::value, "pointer to object type" );
519       operator+=(ptrdiff_t __d) noexcept
521 #if __cplusplus >= 201703L
522	static_assert( is_object<_Tp>::value, "pointer to object type" );
524	return _M_b.operator+=(__d);
528       operator+=(ptrdiff_t __d) volatile noexcept
530 #if __cplusplus >= 201703L
531	static_assert( is_object<_Tp>::value, "pointer to object type" );
533	return _M_b.operator+=(__d);
537       operator-=(ptrdiff_t __d) noexcept
539 #if __cplusplus >= 201703L
540	static_assert( is_object<_Tp>::value, "pointer to object type" );
542	return _M_b.operator-=(__d);
546       operator-=(ptrdiff_t __d) volatile noexcept
548 #if __cplusplus >= 201703L
549	static_assert( is_object<_Tp>::value, "pointer to object type" );
551	return _M_b.operator-=(__d);
555       is_lock_free() const noexcept
556       { return _M_b.is_lock_free(); }
559       is_lock_free() const volatile noexcept
560       { return _M_b.is_lock_free(); }
562 #if __cplusplus >= 201703L
563       static constexpr bool is_always_lock_free
564	= ATOMIC_POINTER_LOCK_FREE == 2;
// store/load/exchange forward to the base, seq_cst by default.
568       store(__pointer_type __p,
569	    memory_order __m = memory_order_seq_cst) noexcept
570       { return _M_b.store(__p, __m); }
573       store(__pointer_type __p,
574	    memory_order __m = memory_order_seq_cst) volatile noexcept
575       { return _M_b.store(__p, __m); }
578       load(memory_order __m = memory_order_seq_cst) const noexcept
579       { return _M_b.load(__m); }
582       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
583       { return _M_b.load(__m); }
586       exchange(__pointer_type __p,
587	       memory_order __m = memory_order_seq_cst) noexcept
588       { return _M_b.exchange(__p, __m); }
591       exchange(__pointer_type __p,
592	       memory_order __m = memory_order_seq_cst) volatile noexcept
593       { return _M_b.exchange(__p, __m); }
// CAS family; single-order overloads derive the failure order via
// __cmpexch_failure_order.
596       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
597			    memory_order __m1, memory_order __m2) noexcept
598       { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
601       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
603			    memory_order __m2) volatile noexcept
604       { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
607       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
608			    memory_order __m = memory_order_seq_cst) noexcept
610	return compare_exchange_weak(__p1, __p2, __m,
611				     __cmpexch_failure_order(__m));
615       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
616		    memory_order __m = memory_order_seq_cst) volatile noexcept
618	return compare_exchange_weak(__p1, __p2, __m,
619				     __cmpexch_failure_order(__m));
623       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
624			      memory_order __m1, memory_order __m2) noexcept
625       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
628       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
630			      memory_order __m2) volatile noexcept
631       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
634       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
635			      memory_order __m = memory_order_seq_cst) noexcept
637	return _M_b.compare_exchange_strong(__p1, __p2, __m,
638					    __cmpexch_failure_order(__m));
642       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
643		    memory_order __m = memory_order_seq_cst) volatile noexcept
645	return _M_b.compare_exchange_strong(__p1, __p2, __m,
646					    __cmpexch_failure_order(__m));
649 #if __cpp_lib_atomic_wait
// C++20 wait/notify members, forwarded to the base.
651       wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
652       { _M_b.wait(__old, __m); }
654       // TODO add const volatile overload
657       notify_one() noexcept
658       { _M_b.notify_one(); }
661       notify_all() noexcept
662       { _M_b.notify_all(); }
663 #endif // __cpp_lib_atomic_wait
// fetch_add/fetch_sub: same pointer-to-object requirement as the
// arithmetic operators above.
666       fetch_add(ptrdiff_t __d,
667		memory_order __m = memory_order_seq_cst) noexcept
669 #if __cplusplus >= 201703L
670	static_assert( is_object<_Tp>::value, "pointer to object type" );
672	return _M_b.fetch_add(__d, __m);
676       fetch_add(ptrdiff_t __d,
677		memory_order __m = memory_order_seq_cst) volatile noexcept
679 #if __cplusplus >= 201703L
680	static_assert( is_object<_Tp>::value, "pointer to object type" );
682	return _M_b.fetch_add(__d, __m);
686       fetch_sub(ptrdiff_t __d,
687		memory_order __m = memory_order_seq_cst) noexcept
689 #if __cplusplus >= 201703L
690	static_assert( is_object<_Tp>::value, "pointer to object type" );
692	return _M_b.fetch_sub(__d, __m);
696       fetch_sub(ptrdiff_t __d,
697		memory_order __m = memory_order_seq_cst) volatile noexcept
699 #if __cplusplus >= 201703L
700	static_assert( is_object<_Tp>::value, "pointer to object type" );
702	return _M_b.fetch_sub(__d, __m);
707   /// Explicit specialization for char.
// Inherits all operations from __atomic_base<char>, re-exporting its
// conversion and assignment operators; only the special members and the
// C++17 is_always_lock_free constant are declared here.
709   struct atomic<char> : __atomic_base<char>
711     typedef char 			__integral_type;
712     typedef __atomic_base<char> 	__base_type;
714     atomic() noexcept = default;
715     ~atomic() noexcept = default;
716     atomic(const atomic&) = delete;
717     atomic& operator=(const atomic&) = delete;
718     atomic& operator=(const atomic&) volatile = delete;
720     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
722     using __base_type::operator __integral_type;
723     using __base_type::operator=;
725 #if __cplusplus >= 201703L
726     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
730   /// Explicit specialization for signed char.
// Same shape as the other integral specializations: inherit from
// __atomic_base<signed char> and re-export its conversion/assignment
// operators.  Fixed "noexcept= default" -> "noexcept = default" to match
// every sibling specialization in this file.
732   struct atomic<signed char> : __atomic_base<signed char>
734     typedef signed char 		__integral_type;
735     typedef __atomic_base<signed char> 	__base_type;
737     atomic() noexcept = default;
738     ~atomic() noexcept = default;
739     atomic(const atomic&) = delete;
740     atomic& operator=(const atomic&) = delete;
741     atomic& operator=(const atomic&) volatile = delete;
743     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
745     using __base_type::operator __integral_type;
746     using __base_type::operator=;
748 #if __cplusplus >= 201703L
// signed char shares the char lock-free macro.
749     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
753   /// Explicit specialization for unsigned char.
// Same shape as the other integral specializations: inherit from
// __atomic_base<unsigned char> and re-export its conversion/assignment
// operators.  Fixed "noexcept= default" -> "noexcept = default" to match
// every sibling specialization in this file.
755   struct atomic<unsigned char> : __atomic_base<unsigned char>
757     typedef unsigned char 		__integral_type;
758     typedef __atomic_base<unsigned char> 	__base_type;
760     atomic() noexcept = default;
761     ~atomic() noexcept = default;
762     atomic(const atomic&) = delete;
763     atomic& operator=(const atomic&) = delete;
764     atomic& operator=(const atomic&) volatile = delete;
766     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
768     using __base_type::operator __integral_type;
769     using __base_type::operator=;
771 #if __cplusplus >= 201703L
// unsigned char shares the char lock-free macro.
772     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
// Explicit specializations for the remaining builtin integral types.
// All are identical in shape: inherit from __atomic_base<T>, re-export
// its conversion and assignment operators, and define the C++17
// is_always_lock_free constant from the matching ATOMIC_*_LOCK_FREE
// macro.  Closing braces are elided from this listing.
776   /// Explicit specialization for short.
778   struct atomic<short> : __atomic_base<short>
780     typedef short 			__integral_type;
781     typedef __atomic_base<short> 		__base_type;
783     atomic() noexcept = default;
784     ~atomic() noexcept = default;
785     atomic(const atomic&) = delete;
786     atomic& operator=(const atomic&) = delete;
787     atomic& operator=(const atomic&) volatile = delete;
789     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
791     using __base_type::operator __integral_type;
792     using __base_type::operator=;
794 #if __cplusplus >= 201703L
795     static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
799   /// Explicit specialization for unsigned short.
801   struct atomic<unsigned short> : __atomic_base<unsigned short>
803     typedef unsigned short 	      	__integral_type;
804     typedef __atomic_base<unsigned short>	      __base_type;
806     atomic() noexcept = default;
807     ~atomic() noexcept = default;
808     atomic(const atomic&) = delete;
809     atomic& operator=(const atomic&) = delete;
810     atomic& operator=(const atomic&) volatile = delete;
812     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
814     using __base_type::operator __integral_type;
815     using __base_type::operator=;
817 #if __cplusplus >= 201703L
818     static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
822   /// Explicit specialization for int.
824   struct atomic<int> : __atomic_base<int>
826     typedef int 			__integral_type;
827     typedef __atomic_base<int> 		__base_type;
829     atomic() noexcept = default;
830     ~atomic() noexcept = default;
831     atomic(const atomic&) = delete;
832     atomic& operator=(const atomic&) = delete;
833     atomic& operator=(const atomic&) volatile = delete;
835     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
837     using __base_type::operator __integral_type;
838     using __base_type::operator=;
840 #if __cplusplus >= 201703L
841     static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
845   /// Explicit specialization for unsigned int.
847   struct atomic<unsigned int> : __atomic_base<unsigned int>
849     typedef unsigned int		__integral_type;
850     typedef __atomic_base<unsigned int> 	__base_type;
852     atomic() noexcept = default;
853     ~atomic() noexcept = default;
854     atomic(const atomic&) = delete;
855     atomic& operator=(const atomic&) = delete;
856     atomic& operator=(const atomic&) volatile = delete;
858     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
860     using __base_type::operator __integral_type;
861     using __base_type::operator=;
863 #if __cplusplus >= 201703L
864     static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
868   /// Explicit specialization for long.
870   struct atomic<long> : __atomic_base<long>
872     typedef long 			__integral_type;
873     typedef __atomic_base<long> 	__base_type;
875     atomic() noexcept = default;
876     ~atomic() noexcept = default;
877     atomic(const atomic&) = delete;
878     atomic& operator=(const atomic&) = delete;
879     atomic& operator=(const atomic&) volatile = delete;
881     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
883     using __base_type::operator __integral_type;
884     using __base_type::operator=;
886 #if __cplusplus >= 201703L
887     static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
891   /// Explicit specialization for unsigned long.
893   struct atomic<unsigned long> : __atomic_base<unsigned long>
895     typedef unsigned long 		__integral_type;
896     typedef __atomic_base<unsigned long> 	__base_type;
898     atomic() noexcept = default;
899     ~atomic() noexcept = default;
900     atomic(const atomic&) = delete;
901     atomic& operator=(const atomic&) = delete;
902     atomic& operator=(const atomic&) volatile = delete;
904     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
906     using __base_type::operator __integral_type;
907     using __base_type::operator=;
909 #if __cplusplus >= 201703L
910     static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
914   /// Explicit specialization for long long.
916   struct atomic<long long> : __atomic_base<long long>
918     typedef long long 		__integral_type;
919     typedef __atomic_base<long long> 	__base_type;
921     atomic() noexcept = default;
922     ~atomic() noexcept = default;
923     atomic(const atomic&) = delete;
924     atomic& operator=(const atomic&) = delete;
925     atomic& operator=(const atomic&) volatile = delete;
927     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
929     using __base_type::operator __integral_type;
930     using __base_type::operator=;
932 #if __cplusplus >= 201703L
933     static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
937   /// Explicit specialization for unsigned long long.
939   struct atomic<unsigned long long> : __atomic_base<unsigned long long>
941     typedef unsigned long long       	__integral_type;
942     typedef __atomic_base<unsigned long long> 	__base_type;
944     atomic() noexcept = default;
945     ~atomic() noexcept = default;
946     atomic(const atomic&) = delete;
947     atomic& operator=(const atomic&) = delete;
948     atomic& operator=(const atomic&) volatile = delete;
950     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
952     using __base_type::operator __integral_type;
953     using __base_type::operator=;
955 #if __cplusplus >= 201703L
956     static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
960   /// Explicit specialization for wchar_t.
962   struct atomic<wchar_t> : __atomic_base<wchar_t>
964     typedef wchar_t 			__integral_type;
965     typedef __atomic_base<wchar_t> 	__base_type;
967     atomic() noexcept = default;
968     ~atomic() noexcept = default;
969     atomic(const atomic&) = delete;
970     atomic& operator=(const atomic&) = delete;
971     atomic& operator=(const atomic&) volatile = delete;
973     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
975     using __base_type::operator __integral_type;
976     using __base_type::operator=;
978 #if __cplusplus >= 201703L
979     static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
983 #ifdef _GLIBCXX_USE_CHAR8_T
984   /// Explicit specialization for char8_t.
// Only provided when char8_t support is enabled.  Same shape as the
// other integral specializations.  The is_always_lock_free guard used
// "__cplusplus > 201402L" while every sibling uses ">= 201703L"; the two
// are equivalent for standard __cplusplus values — changed for
// consistency with the rest of the file.
986   struct atomic<char8_t> : __atomic_base<char8_t>
988     typedef char8_t 			__integral_type;
989     typedef __atomic_base<char8_t> 	__base_type;
991     atomic() noexcept = default;
992     ~atomic() noexcept = default;
993     atomic(const atomic&) = delete;
994     atomic& operator=(const atomic&) = delete;
995     atomic& operator=(const atomic&) volatile = delete;
997     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
999     using __base_type::operator __integral_type;
1000     using __base_type::operator=;
1002 #if __cplusplus >= 201703L
1003     static constexpr bool is_always_lock_free
1004       = ATOMIC_CHAR8_T_LOCK_FREE == 2;
1009   /// Explicit specialization for char16_t.
// Same shape as the other integral specializations: inherit from the
// corresponding __atomic_base and re-export its operators.
1011   struct atomic<char16_t> : __atomic_base<char16_t>
1013     typedef char16_t 			__integral_type;
1014     typedef __atomic_base<char16_t> 	__base_type;
1016     atomic() noexcept = default;
1017     ~atomic() noexcept = default;
1018     atomic(const atomic&) = delete;
1019     atomic& operator=(const atomic&) = delete;
1020     atomic& operator=(const atomic&) volatile = delete;
1022     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1024     using __base_type::operator __integral_type;
1025     using __base_type::operator=;
1027 #if __cplusplus >= 201703L
1028     static constexpr bool is_always_lock_free
1029       = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1033   /// Explicit specialization for char32_t.
1035   struct atomic<char32_t> : __atomic_base<char32_t>
1037     typedef char32_t 			__integral_type;
1038     typedef __atomic_base<char32_t> 	__base_type;
1040     atomic() noexcept = default;
1041     ~atomic() noexcept = default;
1042     atomic(const atomic&) = delete;
1043     atomic& operator=(const atomic&) = delete;
1044     atomic& operator=(const atomic&) volatile = delete;
1046     constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1048     using __base_type::operator __integral_type;
1049     using __base_type::operator=;
1051 #if __cplusplus >= 201703L
1052     static constexpr bool is_always_lock_free
1053       = ATOMIC_CHAR32_T_LOCK_FREE == 2;
// Convenience typedefs for the atomic specializations ([atomics.syn]).
// Several "/// atomic_*" doc lines and the matching #endif directives are
// elided from this listing.
1059   typedef atomic<bool>			atomic_bool;
1062   typedef atomic<char>			atomic_char;
1065   typedef atomic<signed char>		atomic_schar;
1068   typedef atomic<unsigned char>		atomic_uchar;
1071   typedef atomic<short>			atomic_short;
1074   typedef atomic<unsigned short>	atomic_ushort;
1077   typedef atomic<int>			atomic_int;
1080   typedef atomic<unsigned int>		atomic_uint;
1083   typedef atomic<long>			atomic_long;
1086   typedef atomic<unsigned long>		atomic_ulong;
1089   typedef atomic<long long>		atomic_llong;
1092   typedef atomic<unsigned long long>	atomic_ullong;
1095   typedef atomic<wchar_t>		atomic_wchar_t;
1097 #ifdef _GLIBCXX_USE_CHAR8_T
1099   typedef atomic<char8_t>		atomic_char8_t;
1103   typedef atomic<char16_t>		atomic_char16_t;
1106   typedef atomic<char32_t>		atomic_char32_t;
1108 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1109   // _GLIBCXX_RESOLVE_LIB_DEFECTS
1110   // 2441. Exact-width atomic typedefs should be provided
// Exact-width typedefs (only when <cstdint> exact-width types exist).
1113   typedef atomic<int8_t>		atomic_int8_t;
1116   typedef atomic<uint8_t>		atomic_uint8_t;
1119   typedef atomic<int16_t>		atomic_int16_t;
1122   typedef atomic<uint16_t>		atomic_uint16_t;
1125   typedef atomic<int32_t>		atomic_int32_t;
1128   typedef atomic<uint32_t>		atomic_uint32_t;
1131   typedef atomic<int64_t>		atomic_int64_t;
1134   typedef atomic<uint64_t>		atomic_uint64_t;
// Least-width typedefs.
1137   /// atomic_int_least8_t
1138   typedef atomic<int_least8_t>		atomic_int_least8_t;
1140   /// atomic_uint_least8_t
1141   typedef atomic<uint_least8_t>		atomic_uint_least8_t;
1143   /// atomic_int_least16_t
1144   typedef atomic<int_least16_t>		atomic_int_least16_t;
1146   /// atomic_uint_least16_t
1147   typedef atomic<uint_least16_t>	atomic_uint_least16_t;
1149   /// atomic_int_least32_t
1150   typedef atomic<int_least32_t>		atomic_int_least32_t;
1152   /// atomic_uint_least32_t
1153   typedef atomic<uint_least32_t>	atomic_uint_least32_t;
1155   /// atomic_int_least64_t
1156   typedef atomic<int_least64_t>		atomic_int_least64_t;
1158   /// atomic_uint_least64_t
1159   typedef atomic<uint_least64_t>	atomic_uint_least64_t;
// Fast-width typedefs.
1162   /// atomic_int_fast8_t
1163   typedef atomic<int_fast8_t>		atomic_int_fast8_t;
1165   /// atomic_uint_fast8_t
1166   typedef atomic<uint_fast8_t>		atomic_uint_fast8_t;
1168   /// atomic_int_fast16_t
1169   typedef atomic<int_fast16_t>		atomic_int_fast16_t;
1171   /// atomic_uint_fast16_t
1172   typedef atomic<uint_fast16_t>		atomic_uint_fast16_t;
1174   /// atomic_int_fast32_t
1175   typedef atomic<int_fast32_t>		atomic_int_fast32_t;
1177   /// atomic_uint_fast32_t
1178   typedef atomic<uint_fast32_t>		atomic_uint_fast32_t;
1180   /// atomic_int_fast64_t
1181   typedef atomic<int_fast64_t>		atomic_int_fast64_t;
1183   /// atomic_uint_fast64_t
1184   typedef atomic<uint_fast64_t>		atomic_uint_fast64_t;
// Pointer-sized and miscellaneous typedefs.
1189   typedef atomic<intptr_t>		atomic_intptr_t;
1191   /// atomic_uintptr_t
1192   typedef atomic<uintptr_t>		atomic_uintptr_t;
1195   typedef atomic<size_t>		atomic_size_t;
1197   /// atomic_ptrdiff_t
1198   typedef atomic<ptrdiff_t>		atomic_ptrdiff_t;
1200 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1202   typedef atomic<intmax_t>		atomic_intmax_t;
1204   /// atomic_uintmax_t
1205   typedef atomic<uintmax_t>		atomic_uintmax_t;
1208   // Function definitions, atomic_flag operations.
// C-compatible free functions for atomic_flag.  The *_explicit forms
// take a memory order; the plain forms call them with seq_cst.  Each has
// a volatile overload.  Return-type lines are elided from this listing.
1210   atomic_flag_test_and_set_explicit(atomic_flag* __a,
1211				    memory_order __m) noexcept
1212   { return __a->test_and_set(__m); }
1215   atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1216				    memory_order __m) noexcept
1217   { return __a->test_and_set(__m); }
1220   atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1221   { __a->clear(__m); }
1224   atomic_flag_clear_explicit(volatile atomic_flag* __a,
1225			     memory_order __m) noexcept
1226   { __a->clear(__m); }
1229   atomic_flag_test_and_set(atomic_flag* __a) noexcept
1230   { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1233   atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1234   { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1237   atomic_flag_clear(atomic_flag* __a) noexcept
1238   { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1241   atomic_flag_clear(volatile atomic_flag* __a) noexcept
1242   { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1244   // _GLIBCXX_RESOLVE_LIB_DEFECTS
1245   // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
// __atomic_val_t wraps _Tp in __type_identity_t so the value parameters
// of the non-member functions below are a non-deduced context: only the
// atomic<_Tp>* argument drives deduction (the LWG 3220 resolution).
1246   template<typename _Tp>
1247     using __atomic_val_t = __type_identity_t<_Tp>;
// __atomic_diff_t names the specialization's difference_type (e.g.
// ptrdiff_t for atomic<_Tp*>).
1248   template<typename _Tp>
1249     using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1251   // [atomics.nonmembers] Non-member functions.
1252   // Function templates generally applicable to atomic types.
// Thin forwarding wrappers over the corresponding members; each has a
// volatile overload.  Return-type lines are elided from this listing.
1253   template<typename _ITp>
1255     atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1256     { return __a->is_lock_free(); }
1258   template<typename _ITp>
1260     atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1261     { return __a->is_lock_free(); }
// atomic_init is a relaxed store (no synchronization with other threads).
1263   template<typename _ITp>
1265     atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1266     { __a->store(__i, memory_order_relaxed); }
1268   template<typename _ITp>
1270     atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1271     { __a->store(__i, memory_order_relaxed); }
1273   template<typename _ITp>
1275     atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1276			  memory_order __m) noexcept
1277     { __a->store(__i, __m); }
1279   template<typename _ITp>
1281     atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1282			  memory_order __m) noexcept
1283     { __a->store(__i, __m); }
1285   template<typename _ITp>
1287     atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1288     { return __a->load(__m); }
1290   template<typename _ITp>
1292     atomic_load_explicit(const volatile atomic<_ITp>* __a,
1293			 memory_order __m) noexcept
1294     { return __a->load(__m); }
1296   template<typename _ITp>
1298     atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1299			     memory_order __m) noexcept
1300     { return __a->exchange(__i, __m); }
1302   template<typename _ITp>
1304     atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1305			     __atomic_val_t<_ITp> __i,
1306			     memory_order __m) noexcept
1307     { return __a->exchange(__i, __m); }
// CAS wrappers: the expected value is passed by pointer (C compatibility)
// and dereferenced into the member's reference parameter.  Lines carrying
// the __m1 parameter are elided from this listing.
1309   template<typename _ITp>
1311     atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1312					  __atomic_val_t<_ITp>* __i1,
1313					  __atomic_val_t<_ITp> __i2,
1315					  memory_order __m2) noexcept
1316     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1318   template<typename _ITp>
1320     atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1321					  __atomic_val_t<_ITp>* __i1,
1322					  __atomic_val_t<_ITp> __i2,
1324					  memory_order __m2) noexcept
1325     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1327   template<typename _ITp>
1329     atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1330					    __atomic_val_t<_ITp>* __i1,
1331					    __atomic_val_t<_ITp> __i2,
1333					    memory_order __m2) noexcept
1334     { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1336 template<typename _ITp>
1338 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1339 __atomic_val_t<_ITp>* __i1,
1340 __atomic_val_t<_ITp> __i2,
1342 memory_order __m2) noexcept
1343 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1346 template<typename _ITp>
1348 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1349 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1351 template<typename _ITp>
1353 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1354 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1356 template<typename _ITp>
1358 atomic_load(const atomic<_ITp>* __a) noexcept
1359 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1361 template<typename _ITp>
1363 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1364 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1366 template<typename _ITp>
1368 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1369 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1371 template<typename _ITp>
1373 atomic_exchange(volatile atomic<_ITp>* __a,
1374 __atomic_val_t<_ITp> __i) noexcept
1375 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1377 template<typename _ITp>
1379 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1380 __atomic_val_t<_ITp>* __i1,
1381 __atomic_val_t<_ITp> __i2) noexcept
1383 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1384 memory_order_seq_cst,
1385 memory_order_seq_cst);
1388 template<typename _ITp>
1390 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1391 __atomic_val_t<_ITp>* __i1,
1392 __atomic_val_t<_ITp> __i2) noexcept
1394 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1395 memory_order_seq_cst,
1396 memory_order_seq_cst);
1399 template<typename _ITp>
1401 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1402 __atomic_val_t<_ITp>* __i1,
1403 __atomic_val_t<_ITp> __i2) noexcept
1405 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1406 memory_order_seq_cst,
1407 memory_order_seq_cst);
1410 template<typename _ITp>
1412 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1413 __atomic_val_t<_ITp>* __i1,
1414 __atomic_val_t<_ITp> __i2) noexcept
1416 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1417 memory_order_seq_cst,
1418 memory_order_seq_cst);
#if __cpp_lib_atomic_wait
  /// Block until the atomic object's value differs from __old.
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
		typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  /// As atomic_wait, with an explicit memory order for the loads.
  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
			 typename std::atomic<_Tp>::value_type __old,
			 std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  /// Wake at least one thread blocked in atomic_wait on *__a.
  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  /// Wake all threads blocked in atomic_wait on *__a.
  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1447 // Function templates for atomic_integral and atomic_pointer operations only.
1448 // Some operations (and, or, xor) are only available for atomic integrals,
1449 // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
1451 template<typename _ITp>
1453 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1454 __atomic_diff_t<_ITp> __i,
1455 memory_order __m) noexcept
1456 { return __a->fetch_add(__i, __m); }
1458 template<typename _ITp>
1460 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1461 __atomic_diff_t<_ITp> __i,
1462 memory_order __m) noexcept
1463 { return __a->fetch_add(__i, __m); }
1465 template<typename _ITp>
1467 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1468 __atomic_diff_t<_ITp> __i,
1469 memory_order __m) noexcept
1470 { return __a->fetch_sub(__i, __m); }
1472 template<typename _ITp>
1474 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1475 __atomic_diff_t<_ITp> __i,
1476 memory_order __m) noexcept
1477 { return __a->fetch_sub(__i, __m); }
1479 template<typename _ITp>
1481 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1482 __atomic_val_t<_ITp> __i,
1483 memory_order __m) noexcept
1484 { return __a->fetch_and(__i, __m); }
1486 template<typename _ITp>
1488 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1489 __atomic_val_t<_ITp> __i,
1490 memory_order __m) noexcept
1491 { return __a->fetch_and(__i, __m); }
1493 template<typename _ITp>
1495 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1496 __atomic_val_t<_ITp> __i,
1497 memory_order __m) noexcept
1498 { return __a->fetch_or(__i, __m); }
1500 template<typename _ITp>
1502 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1503 __atomic_val_t<_ITp> __i,
1504 memory_order __m) noexcept
1505 { return __a->fetch_or(__i, __m); }
1507 template<typename _ITp>
1509 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1510 __atomic_val_t<_ITp> __i,
1511 memory_order __m) noexcept
1512 { return __a->fetch_xor(__i, __m); }
1514 template<typename _ITp>
1516 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1517 __atomic_val_t<_ITp> __i,
1518 memory_order __m) noexcept
1519 { return __a->fetch_xor(__i, __m); }
1521 template<typename _ITp>
1523 atomic_fetch_add(atomic<_ITp>* __a,
1524 __atomic_diff_t<_ITp> __i) noexcept
1525 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1527 template<typename _ITp>
1529 atomic_fetch_add(volatile atomic<_ITp>* __a,
1530 __atomic_diff_t<_ITp> __i) noexcept
1531 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1533 template<typename _ITp>
1535 atomic_fetch_sub(atomic<_ITp>* __a,
1536 __atomic_diff_t<_ITp> __i) noexcept
1537 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1539 template<typename _ITp>
1541 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1542 __atomic_diff_t<_ITp> __i) noexcept
1543 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1545 template<typename _ITp>
1547 atomic_fetch_and(__atomic_base<_ITp>* __a,
1548 __atomic_val_t<_ITp> __i) noexcept
1549 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1551 template<typename _ITp>
1553 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1554 __atomic_val_t<_ITp> __i) noexcept
1555 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1557 template<typename _ITp>
1559 atomic_fetch_or(__atomic_base<_ITp>* __a,
1560 __atomic_val_t<_ITp> __i) noexcept
1561 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1563 template<typename _ITp>
1565 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1566 __atomic_val_t<_ITp> __i) noexcept
1567 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1569 template<typename _ITp>
1571 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1572 __atomic_val_t<_ITp> __i) noexcept
1573 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1575 template<typename _ITp>
1577 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1578 __atomic_val_t<_ITp> __i) noexcept
1579 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L

  /// Explicit specialization of std::atomic for float.
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  /// Explicit specialization of std::atomic for double.
  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  /// Explicit specialization of std::atomic for long double.
  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };

#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };

#endif // C++2a
1647 /// @} group atomics
1649 _GLIBCXX_END_NAMESPACE_VERSION
1654 #endif // _GLIBCXX_ATOMIC