3 // Copyright (C) 2008-2015 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file include/atomic
26 * This is a Standard C++ Library header.
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
43 namespace std _GLIBCXX_VISIBILITY(default)
45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
// NOTE(review): this SOURCE is a line-numbered excerpt; the embedded numbers
// are original file line numbers and show gaps.  The enclosing declarations
// (the primary template's 'struct atomic;' line, the 'template<> struct
// atomic<bool>' header, braces, access specifiers, and the 'bool' return-type
// lines) are elided from view, so code below is kept byte-identical and only
// comments are added.
//
// Forward declaration of the primary template (its 'struct atomic;' line is
// among the elided lines — confirm against the full header).
52 template<typename _Tp>
// atomic<bool>: a thin wrapper around a single __atomic_base<bool> member.
// Copy construction/assignment are deleted; every operation forwards to
// _M_base.
56 // NB: No operators or fetch-operations for this type.
61 __atomic_base<bool> _M_base;
64 atomic() noexcept = default;
65 ~atomic() noexcept = default;
// Atomics are non-copyable; the volatile assignment overload is deleted too.
66 atomic(const atomic&) = delete;
67 atomic& operator=(const atomic&) = delete;
68 atomic& operator=(const atomic&) volatile = delete;
70 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
// Assignment forwards to __atomic_base<bool>::operator= (plain and volatile).
73 operator=(bool __i) noexcept
74 { return _M_base.operator=(__i); }
77 operator=(bool __i) volatile noexcept
78 { return _M_base.operator=(__i); }
// Implicit conversion performs a load with the base's default memory order.
80 operator bool() const noexcept
81 { return _M_base.load(); }
83 operator bool() const volatile noexcept
84 { return _M_base.load(); }
87 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
90 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
93 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
94 { _M_base.store(__i, __m); }
97 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
98 { _M_base.store(__i, __m); }
101 load(memory_order __m = memory_order_seq_cst) const noexcept
102 { return _M_base.load(__m); }
105 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
106 { return _M_base.load(__m); }
109 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
110 { return _M_base.exchange(__i, __m); }
// The opening line of this volatile exchange overload (function name and its
// first parameter) is among the elided lines.
114 memory_order __m = memory_order_seq_cst) volatile noexcept
115 { return _M_base.exchange(__i, __m); }
// compare_exchange_{weak,strong}: explicit success/failure-order forms plus
// single-order convenience forms, all forwarding to _M_base.
118 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
119 memory_order __m2) noexcept
120 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
123 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
124 memory_order __m2) volatile noexcept
125 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
128 compare_exchange_weak(bool& __i1, bool __i2,
129 memory_order __m = memory_order_seq_cst) noexcept
130 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
133 compare_exchange_weak(bool& __i1, bool __i2,
134 memory_order __m = memory_order_seq_cst) volatile noexcept
135 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
138 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
139 memory_order __m2) noexcept
140 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
143 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
144 memory_order __m2) volatile noexcept
145 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
148 compare_exchange_strong(bool& __i1, bool __i2,
149 memory_order __m = memory_order_seq_cst) noexcept
150 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
153 compare_exchange_strong(bool& __i1, bool __i2,
154 memory_order __m = memory_order_seq_cst) volatile noexcept
155 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
// Primary template atomic<_Tp>: stores a suitably-aligned _Tp and implements
// every operation directly on the GCC __atomic builtins.  This is a numbered
// excerpt — the 'struct atomic' header line, braces, access specifiers, and
// the _Tp/bool return-type lines are elided; code lines are kept byte-identical.
160 * @brief Generic atomic type, primary class template.
162 * @tparam _Tp Type to be made atomic, must be trivially copyable.
164 template<typename _Tp>
168 // Align 1/2/4/8/16-byte types to at least their size.
// _S_min_alignment: non-zero only for power-of-two sizes up to 16 bytes.
// The conditional's result operands (the '? ... : ...' lines) are elided
// here — presumably 0 for the true branch and sizeof(_Tp) otherwise; confirm
// against the full header.
169 static constexpr int _S_min_alignment
170 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
// Final alignment is the larger of the size-based minimum and alignof(_Tp).
173 static constexpr int _S_alignment
174 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
176 alignas(_S_alignment) _Tp _M_i;
// Compile-time contract checks on the stored type.
178 static_assert(__is_trivially_copyable(_Tp),
179 "std::atomic requires a trivially copyable type");
181 static_assert(sizeof(_Tp) > 0,
182 "Incomplete or zero-sized types are not supported");
185 atomic() noexcept = default;
186 ~atomic() noexcept = default;
187 atomic(const atomic&) = delete;
188 atomic& operator=(const atomic&) = delete;
189 atomic& operator=(const atomic&) volatile = delete;
191 constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
// Conversion operators (bodies elided in this excerpt; they call load()).
193 operator _Tp() const noexcept
196 operator _Tp() const volatile noexcept
// Assignment stores the new value and, per the standard, returns __i (not a
// reference to *this).
200 operator=(_Tp __i) noexcept
201 { store(__i); return __i; }
204 operator=(_Tp __i) volatile noexcept
205 { store(__i); return __i; }
208 is_lock_free() const noexcept
210 // Produce a fake, minimally aligned pointer.
211 return __atomic_is_lock_free(sizeof(_M_i),
212 reinterpret_cast<void *>(-__alignof(_M_i)));
216 is_lock_free() const volatile noexcept
218 // Produce a fake, minimally aligned pointer.
219 return __atomic_is_lock_free(sizeof(_M_i),
220 reinterpret_cast<void *>(-__alignof(_M_i)));
// store/load/exchange map directly onto the generic (any-size) __atomic
// builtins, which take pointers so non-integral _Tp works too.
224 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
225 { __atomic_store(&_M_i, &__i, __m); }
228 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
229 { __atomic_store(&_M_i, &__i, __m); }
// load() reads into a properly aligned local buffer; the trailing
// 'return *__ptr;' line is elided in this excerpt.
232 load(memory_order __m = memory_order_seq_cst) const noexcept
234 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
235 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
236 __atomic_load(&_M_i, __ptr, __m);
241 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
243 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
244 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
245 __atomic_load(&_M_i, __ptr, __m);
250 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
252 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
253 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
254 __atomic_exchange(&_M_i, &__i, __ptr, __m);
// Opening line of the volatile exchange overload (name + first parameter)
// is elided.
260 memory_order __m = memory_order_seq_cst) volatile noexcept
262 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
263 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
264 __atomic_exchange(&_M_i, &__i, __ptr, __m);
// CAS: the builtin's 4th argument selects weak (true) vs strong (false).
269 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
270 memory_order __f) noexcept
272 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
276 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
277 memory_order __f) volatile noexcept
279 return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
// Single-order convenience forms derive the failure order from the success
// order via __cmpexch_failure_order (declared in <bits/atomic_base.h>).
283 compare_exchange_weak(_Tp& __e, _Tp __i,
284 memory_order __m = memory_order_seq_cst) noexcept
285 { return compare_exchange_weak(__e, __i, __m,
286 __cmpexch_failure_order(__m)); }
289 compare_exchange_weak(_Tp& __e, _Tp __i,
290 memory_order __m = memory_order_seq_cst) volatile noexcept
291 { return compare_exchange_weak(__e, __i, __m,
292 __cmpexch_failure_order(__m)); }
295 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
296 memory_order __f) noexcept
298 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
302 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
303 memory_order __f) volatile noexcept
305 return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
309 compare_exchange_strong(_Tp& __e, _Tp __i,
310 memory_order __m = memory_order_seq_cst) noexcept
311 { return compare_exchange_strong(__e, __i, __m,
312 __cmpexch_failure_order(__m)); }
315 compare_exchange_strong(_Tp& __e, _Tp __i,
316 memory_order __m = memory_order_seq_cst) volatile noexcept
317 { return compare_exchange_strong(__e, __i, __m,
318 __cmpexch_failure_order(__m)); }
// atomic<_Tp*>: partial specialization wrapping a __atomic_base<_Tp*> member
// (_M_b, whose declaration line is elided in this excerpt).  Adds pointer
// arithmetic (++/--/+=/-=/fetch_add/fetch_sub in units of _Tp objects).
// Return-type lines and braces are elided; code is kept byte-identical.
322 /// Partial specialization for pointer types.
323 template<typename _Tp>
326 typedef _Tp* __pointer_type;
327 typedef __atomic_base<_Tp*> __base_type;
330 atomic() noexcept = default;
331 ~atomic() noexcept = default;
332 atomic(const atomic&) = delete;
333 atomic& operator=(const atomic&) = delete;
334 atomic& operator=(const atomic&) volatile = delete;
336 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
// Conversion uses __atomic_base's own conversion operator (a load).
338 operator __pointer_type() const noexcept
339 { return __pointer_type(_M_b); }
341 operator __pointer_type() const volatile noexcept
342 { return __pointer_type(_M_b); }
345 operator=(__pointer_type __p) noexcept
346 { return _M_b.operator=(__p); }
349 operator=(__pointer_type __p) volatile noexcept
350 { return _M_b.operator=(__p); }
// Pre/post increment and decrement: only the declarations are visible here;
// the forwarding bodies (calls into _M_b) are among the elided lines.
353 operator++(int) noexcept
357 operator++(int) volatile noexcept
361 operator--(int) noexcept
365 operator--(int) volatile noexcept
369 operator++() noexcept
373 operator++() volatile noexcept
377 operator--() noexcept
381 operator--() volatile noexcept
385 operator+=(ptrdiff_t __d) noexcept
386 { return _M_b.operator+=(__d); }
389 operator+=(ptrdiff_t __d) volatile noexcept
390 { return _M_b.operator+=(__d); }
393 operator-=(ptrdiff_t __d) noexcept
394 { return _M_b.operator-=(__d); }
397 operator-=(ptrdiff_t __d) volatile noexcept
398 { return _M_b.operator-=(__d); }
401 is_lock_free() const noexcept
402 { return _M_b.is_lock_free(); }
405 is_lock_free() const volatile noexcept
406 { return _M_b.is_lock_free(); }
409 store(__pointer_type __p,
410 memory_order __m = memory_order_seq_cst) noexcept
411 { return _M_b.store(__p, __m); }
414 store(__pointer_type __p,
415 memory_order __m = memory_order_seq_cst) volatile noexcept
416 { return _M_b.store(__p, __m); }
419 load(memory_order __m = memory_order_seq_cst) const noexcept
420 { return _M_b.load(__m); }
423 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
424 { return _M_b.load(__m); }
427 exchange(__pointer_type __p,
428 memory_order __m = memory_order_seq_cst) noexcept
429 { return _M_b.exchange(__p, __m); }
432 exchange(__pointer_type __p,
433 memory_order __m = memory_order_seq_cst) volatile noexcept
434 { return _M_b.exchange(__p, __m); }
// NOTE(review): compare_exchange_weak forwards to the base's *strong* CAS.
// This looks deliberate (a weak form is allowed to behave like strong, and
// __atomic_base's pointer interface may not expose a weak variant) — confirm
// against <bits/atomic_base.h> rather than "fixing" it.
437 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
438 memory_order __m1, memory_order __m2) noexcept
439 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
// The 'memory_order __m1,' parameter line of this volatile overload is
// elided in this excerpt (the body references __m1).
442 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
444 memory_order __m2) volatile noexcept
445 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
448 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
449 memory_order __m = memory_order_seq_cst) noexcept
451 return compare_exchange_weak(__p1, __p2, __m,
452 __cmpexch_failure_order(__m));
456 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
457 memory_order __m = memory_order_seq_cst) volatile noexcept
459 return compare_exchange_weak(__p1, __p2, __m,
460 __cmpexch_failure_order(__m));
464 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
465 memory_order __m1, memory_order __m2) noexcept
466 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
// As above: the '__m1' parameter line of the volatile overload is elided.
469 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
471 memory_order __m2) volatile noexcept
472 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
475 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
476 memory_order __m = memory_order_seq_cst) noexcept
478 return _M_b.compare_exchange_strong(__p1, __p2, __m,
479 __cmpexch_failure_order(__m));
483 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
484 memory_order __m = memory_order_seq_cst) volatile noexcept
486 return _M_b.compare_exchange_strong(__p1, __p2, __m,
487 __cmpexch_failure_order(__m));
// fetch_add/fetch_sub offsets are in ptrdiff_t units of _Tp elements,
// delegated to the base class.
491 fetch_add(ptrdiff_t __d,
492 memory_order __m = memory_order_seq_cst) noexcept
493 { return _M_b.fetch_add(__d, __m); }
496 fetch_add(ptrdiff_t __d,
497 memory_order __m = memory_order_seq_cst) volatile noexcept
498 { return _M_b.fetch_add(__d, __m); }
501 fetch_sub(ptrdiff_t __d,
502 memory_order __m = memory_order_seq_cst) noexcept
503 { return _M_b.fetch_sub(__d, __m); }
506 fetch_sub(ptrdiff_t __d,
507 memory_order __m = memory_order_seq_cst) volatile noexcept
508 { return _M_b.fetch_sub(__d, __m); }
// atomic<char>: inherits every operation from __atomic_base<char>; defines
// only ctors, deleted copy ops, and using-declarations.  ('template<>' and
// braces are elided in this numbered excerpt.)
512 /// Explicit specialization for char.
514 struct atomic<char> : __atomic_base<char>
516 typedef char __integral_type;
517 typedef __atomic_base<char> __base_type;
519 atomic() noexcept = default;
520 ~atomic() noexcept = default;
521 atomic(const atomic&) = delete;
522 atomic& operator=(const atomic&) = delete;
523 atomic& operator=(const atomic&) volatile = delete;
525 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
527 using __base_type::operator __integral_type;
528 using __base_type::operator=;
// atomic<signed char>: inherits all operations from __atomic_base<signed
// char>.  The "noexcept= default" spacing below is preserved byte-for-byte.
// ('template<>' and braces elided in this numbered excerpt.)
531 /// Explicit specialization for signed char.
533 struct atomic<signed char> : __atomic_base<signed char>
535 typedef signed char __integral_type;
536 typedef __atomic_base<signed char> __base_type;
538 atomic() noexcept= default;
539 ~atomic() noexcept = default;
540 atomic(const atomic&) = delete;
541 atomic& operator=(const atomic&) = delete;
542 atomic& operator=(const atomic&) volatile = delete;
544 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
546 using __base_type::operator __integral_type;
547 using __base_type::operator=;
// atomic<unsigned char>: inherits all operations from __atomic_base<unsigned
// char>.  The "noexcept= default" spacing below is preserved byte-for-byte.
// ('template<>' and braces elided in this numbered excerpt.)
550 /// Explicit specialization for unsigned char.
552 struct atomic<unsigned char> : __atomic_base<unsigned char>
554 typedef unsigned char __integral_type;
555 typedef __atomic_base<unsigned char> __base_type;
557 atomic() noexcept= default;
558 ~atomic() noexcept = default;
559 atomic(const atomic&) = delete;
560 atomic& operator=(const atomic&) = delete;
561 atomic& operator=(const atomic&) volatile = delete;
563 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
565 using __base_type::operator __integral_type;
566 using __base_type::operator=;
// atomic<short>: inherits all operations from __atomic_base<short>.
// ('template<>' and braces elided in this numbered excerpt.)
569 /// Explicit specialization for short.
571 struct atomic<short> : __atomic_base<short>
573 typedef short __integral_type;
574 typedef __atomic_base<short> __base_type;
576 atomic() noexcept = default;
577 ~atomic() noexcept = default;
578 atomic(const atomic&) = delete;
579 atomic& operator=(const atomic&) = delete;
580 atomic& operator=(const atomic&) volatile = delete;
582 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
584 using __base_type::operator __integral_type;
585 using __base_type::operator=;
// atomic<unsigned short>: inherits all operations from __atomic_base<unsigned
// short>.  ('template<>' and braces elided in this numbered excerpt.)
588 /// Explicit specialization for unsigned short.
590 struct atomic<unsigned short> : __atomic_base<unsigned short>
592 typedef unsigned short __integral_type;
593 typedef __atomic_base<unsigned short> __base_type;
595 atomic() noexcept = default;
596 ~atomic() noexcept = default;
597 atomic(const atomic&) = delete;
598 atomic& operator=(const atomic&) = delete;
599 atomic& operator=(const atomic&) volatile = delete;
601 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
603 using __base_type::operator __integral_type;
604 using __base_type::operator=;
// atomic<int>: inherits all operations from __atomic_base<int>.
// ('template<>' and braces elided in this numbered excerpt.)
607 /// Explicit specialization for int.
609 struct atomic<int> : __atomic_base<int>
611 typedef int __integral_type;
612 typedef __atomic_base<int> __base_type;
614 atomic() noexcept = default;
615 ~atomic() noexcept = default;
616 atomic(const atomic&) = delete;
617 atomic& operator=(const atomic&) = delete;
618 atomic& operator=(const atomic&) volatile = delete;
620 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
622 using __base_type::operator __integral_type;
623 using __base_type::operator=;
// atomic<unsigned int>: inherits all operations from __atomic_base<unsigned
// int>.  ('template<>' and braces elided in this numbered excerpt.)
626 /// Explicit specialization for unsigned int.
628 struct atomic<unsigned int> : __atomic_base<unsigned int>
630 typedef unsigned int __integral_type;
631 typedef __atomic_base<unsigned int> __base_type;
633 atomic() noexcept = default;
634 ~atomic() noexcept = default;
635 atomic(const atomic&) = delete;
636 atomic& operator=(const atomic&) = delete;
637 atomic& operator=(const atomic&) volatile = delete;
639 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
641 using __base_type::operator __integral_type;
642 using __base_type::operator=;
// atomic<long>: inherits all operations from __atomic_base<long>.
// ('template<>' and braces elided in this numbered excerpt.)
645 /// Explicit specialization for long.
647 struct atomic<long> : __atomic_base<long>
649 typedef long __integral_type;
650 typedef __atomic_base<long> __base_type;
652 atomic() noexcept = default;
653 ~atomic() noexcept = default;
654 atomic(const atomic&) = delete;
655 atomic& operator=(const atomic&) = delete;
656 atomic& operator=(const atomic&) volatile = delete;
658 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
660 using __base_type::operator __integral_type;
661 using __base_type::operator=;
// atomic<unsigned long>: inherits all operations from __atomic_base<unsigned
// long>.  ('template<>' and braces elided in this numbered excerpt.)
664 /// Explicit specialization for unsigned long.
666 struct atomic<unsigned long> : __atomic_base<unsigned long>
668 typedef unsigned long __integral_type;
669 typedef __atomic_base<unsigned long> __base_type;
671 atomic() noexcept = default;
672 ~atomic() noexcept = default;
673 atomic(const atomic&) = delete;
674 atomic& operator=(const atomic&) = delete;
675 atomic& operator=(const atomic&) volatile = delete;
677 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
679 using __base_type::operator __integral_type;
680 using __base_type::operator=;
// atomic<long long>: inherits all operations from __atomic_base<long long>.
// ('template<>' and braces elided in this numbered excerpt.)
683 /// Explicit specialization for long long.
685 struct atomic<long long> : __atomic_base<long long>
687 typedef long long __integral_type;
688 typedef __atomic_base<long long> __base_type;
690 atomic() noexcept = default;
691 ~atomic() noexcept = default;
692 atomic(const atomic&) = delete;
693 atomic& operator=(const atomic&) = delete;
694 atomic& operator=(const atomic&) volatile = delete;
696 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
698 using __base_type::operator __integral_type;
699 using __base_type::operator=;
// atomic<unsigned long long>: inherits all operations from
// __atomic_base<unsigned long long>.  ('template<>' and braces elided in this
// numbered excerpt.)
702 /// Explicit specialization for unsigned long long.
704 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
706 typedef unsigned long long __integral_type;
707 typedef __atomic_base<unsigned long long> __base_type;
709 atomic() noexcept = default;
710 ~atomic() noexcept = default;
711 atomic(const atomic&) = delete;
712 atomic& operator=(const atomic&) = delete;
713 atomic& operator=(const atomic&) volatile = delete;
715 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
717 using __base_type::operator __integral_type;
718 using __base_type::operator=;
// atomic<wchar_t>: inherits all operations from __atomic_base<wchar_t>.
// ('template<>' and braces elided in this numbered excerpt.)
721 /// Explicit specialization for wchar_t.
723 struct atomic<wchar_t> : __atomic_base<wchar_t>
725 typedef wchar_t __integral_type;
726 typedef __atomic_base<wchar_t> __base_type;
728 atomic() noexcept = default;
729 ~atomic() noexcept = default;
730 atomic(const atomic&) = delete;
731 atomic& operator=(const atomic&) = delete;
732 atomic& operator=(const atomic&) volatile = delete;
734 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
736 using __base_type::operator __integral_type;
737 using __base_type::operator=;
// atomic<char16_t>: inherits all operations from __atomic_base<char16_t>.
// ('template<>' and braces elided in this numbered excerpt.)
740 /// Explicit specialization for char16_t.
742 struct atomic<char16_t> : __atomic_base<char16_t>
744 typedef char16_t __integral_type;
745 typedef __atomic_base<char16_t> __base_type;
747 atomic() noexcept = default;
748 ~atomic() noexcept = default;
749 atomic(const atomic&) = delete;
750 atomic& operator=(const atomic&) = delete;
751 atomic& operator=(const atomic&) volatile = delete;
753 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
755 using __base_type::operator __integral_type;
756 using __base_type::operator=;
// atomic<char32_t>: inherits all operations from __atomic_base<char32_t>.
// ('template<>' and braces elided in this numbered excerpt.)
759 /// Explicit specialization for char32_t.
761 struct atomic<char32_t> : __atomic_base<char32_t>
763 typedef char32_t __integral_type;
764 typedef __atomic_base<char32_t> __base_type;
766 atomic() noexcept = default;
767 ~atomic() noexcept = default;
768 atomic(const atomic&) = delete;
769 atomic& operator=(const atomic&) = delete;
770 atomic& operator=(const atomic&) volatile = delete;
772 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
774 using __base_type::operator __integral_type;
775 using __base_type::operator=;
// Standard library typedef names for the atomic specializations above.
// The individual '/// atomic_xxx' doc lines for the first group are among
// the lines elided from this numbered excerpt.
//
// Builtin-type atomics:
780 typedef atomic<bool> atomic_bool;
783 typedef atomic<char> atomic_char;
786 typedef atomic<signed char> atomic_schar;
789 typedef atomic<unsigned char> atomic_uchar;
792 typedef atomic<short> atomic_short;
795 typedef atomic<unsigned short> atomic_ushort;
798 typedef atomic<int> atomic_int;
801 typedef atomic<unsigned int> atomic_uint;
804 typedef atomic<long> atomic_long;
807 typedef atomic<unsigned long> atomic_ulong;
810 typedef atomic<long long> atomic_llong;
813 typedef atomic<unsigned long long> atomic_ullong;
816 typedef atomic<wchar_t> atomic_wchar_t;
819 typedef atomic<char16_t> atomic_char16_t;
822 typedef atomic<char32_t> atomic_char32_t;
// <cstdint> least-width integer atomics:
825 /// atomic_int_least8_t
826 typedef atomic<int_least8_t> atomic_int_least8_t;
828 /// atomic_uint_least8_t
829 typedef atomic<uint_least8_t> atomic_uint_least8_t;
831 /// atomic_int_least16_t
832 typedef atomic<int_least16_t> atomic_int_least16_t;
834 /// atomic_uint_least16_t
835 typedef atomic<uint_least16_t> atomic_uint_least16_t;
837 /// atomic_int_least32_t
838 typedef atomic<int_least32_t> atomic_int_least32_t;
840 /// atomic_uint_least32_t
841 typedef atomic<uint_least32_t> atomic_uint_least32_t;
843 /// atomic_int_least64_t
844 typedef atomic<int_least64_t> atomic_int_least64_t;
846 /// atomic_uint_least64_t
847 typedef atomic<uint_least64_t> atomic_uint_least64_t;
// <cstdint> fast-width integer atomics:
850 /// atomic_int_fast8_t
851 typedef atomic<int_fast8_t> atomic_int_fast8_t;
853 /// atomic_uint_fast8_t
854 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
856 /// atomic_int_fast16_t
857 typedef atomic<int_fast16_t> atomic_int_fast16_t;
859 /// atomic_uint_fast16_t
860 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
862 /// atomic_int_fast32_t
863 typedef atomic<int_fast32_t> atomic_int_fast32_t;
865 /// atomic_uint_fast32_t
866 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
868 /// atomic_int_fast64_t
869 typedef atomic<int_fast64_t> atomic_int_fast64_t;
871 /// atomic_uint_fast64_t
872 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
// Pointer-sized / maximum-width / size-related atomics:
876 typedef atomic<intptr_t> atomic_intptr_t;
879 typedef atomic<uintptr_t> atomic_uintptr_t;
882 typedef atomic<size_t> atomic_size_t;
885 typedef atomic<intmax_t> atomic_intmax_t;
888 typedef atomic<uintmax_t> atomic_uintmax_t;
891 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
// C-compatible free functions over std::atomic_flag.  Return-type lines
// ('inline bool' / 'inline void') are elided from this numbered excerpt, as
// are the '{ __a->clear(__m); }' bodies of the two clear_explicit overloads.
894 // Function definitions, atomic_flag operations.
896 atomic_flag_test_and_set_explicit(atomic_flag* __a,
897 memory_order __m) noexcept
898 { return __a->test_and_set(__m); }
901 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
902 memory_order __m) noexcept
903 { return __a->test_and_set(__m); }
906 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
910 atomic_flag_clear_explicit(volatile atomic_flag* __a,
911 memory_order __m) noexcept
// seq_cst convenience wrappers for the _explicit forms above.
915 atomic_flag_test_and_set(atomic_flag* __a) noexcept
916 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
919 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
920 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
923 atomic_flag_clear(atomic_flag* __a) noexcept
924 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
927 atomic_flag_clear(volatile atomic_flag* __a) noexcept
928 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
// C-compatible free-function interface applicable to every atomic<_ITp>.
// The return-type lines ('inline bool', 'inline void', 'inline _ITp') are
// elided throughout this numbered excerpt; code lines are byte-identical.
931 // Function templates generally applicable to atomic types.
932 template<typename _ITp>
934 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
935 { return __a->is_lock_free(); }
937 template<typename _ITp>
939 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
940 { return __a->is_lock_free(); }
// atomic_init: non-atomic initialization, hence the relaxed store.
942 template<typename _ITp>
944 atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
945 { __a->store(__i, memory_order_relaxed); }
947 template<typename _ITp>
949 atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
950 { __a->store(__i, memory_order_relaxed); }
951 
952 template<typename _ITp>
954 atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
955 memory_order __m) noexcept
956 { __a->store(__i, __m); }
958 template<typename _ITp>
960 atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
961 memory_order __m) noexcept
962 { __a->store(__i, __m); }
964 template<typename _ITp>
966 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
967 { return __a->load(__m); }
969 template<typename _ITp>
971 atomic_load_explicit(const volatile atomic<_ITp>* __a,
972 memory_order __m) noexcept
973 { return __a->load(__m); }
975 template<typename _ITp>
977 atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
978 memory_order __m) noexcept
979 { return __a->exchange(__i, __m); }
981 template<typename _ITp>
983 atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
984 memory_order __m) noexcept
985 { return __a->exchange(__i, __m); }
// The 'memory_order __m1,' parameter lines of the four CAS-explicit
// signatures below are elided from this excerpt (their bodies use __m1).
987 template<typename _ITp>
989 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
990 _ITp* __i1, _ITp __i2,
992 memory_order __m2) noexcept
993 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
995 template<typename _ITp>
997 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
998 _ITp* __i1, _ITp __i2,
1000 memory_order __m2) noexcept
1001 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1003 template<typename _ITp>
1005 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1006 _ITp* __i1, _ITp __i2,
1008 memory_order __m2) noexcept
1009 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1011 template<typename _ITp>
1013 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1014 _ITp* __i1, _ITp __i2,
1016 memory_order __m2) noexcept
1017 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
// seq_cst convenience wrappers forwarding to the _explicit forms.
1020 template<typename _ITp>
1022 atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
1023 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1025 template<typename _ITp>
1027 atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1028 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1030 template<typename _ITp>
1032 atomic_load(const atomic<_ITp>* __a) noexcept
1033 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1035 template<typename _ITp>
1037 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1038 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1040 template<typename _ITp>
1042 atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
1043 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1045 template<typename _ITp>
1047 atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
1048 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1050 template<typename _ITp>
1052 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1053 _ITp* __i1, _ITp __i2) noexcept
1055 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1056 memory_order_seq_cst,
1057 memory_order_seq_cst);
1060 template<typename _ITp>
1062 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1063 _ITp* __i1, _ITp __i2) noexcept
1065 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1066 memory_order_seq_cst,
1067 memory_order_seq_cst);
1070 template<typename _ITp>
1072 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1073 _ITp* __i1, _ITp __i2) noexcept
1075 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1076 memory_order_seq_cst,
1077 memory_order_seq_cst);
1080 template<typename _ITp>
1082 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1083 _ITp* __i1, _ITp __i2) noexcept
1085 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1086 memory_order_seq_cst,
1087 memory_order_seq_cst);
// Free fetch-op functions for the integral atomics, taking the __atomic_base
// layer directly so only the integral specializations (which derive from it)
// match.  Return-type lines ('inline _ITp') are elided from this excerpt.
1090 // Function templates for atomic_integral operations only, using
1091 // __atomic_base. Template argument should be constrained to
1092 // integral types as specified in the standard, excluding address
1094 template<typename _ITp>
1096 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1097 memory_order __m) noexcept
1098 { return __a->fetch_add(__i, __m); }
1100 template<typename _ITp>
1102 atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1103 memory_order __m) noexcept
1104 { return __a->fetch_add(__i, __m); }
1106 template<typename _ITp>
1108 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1109 memory_order __m) noexcept
1110 { return __a->fetch_sub(__i, __m); }
1112 template<typename _ITp>
1114 atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1115 memory_order __m) noexcept
1116 { return __a->fetch_sub(__i, __m); }
1118 template<typename _ITp>
1120 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1121 memory_order __m) noexcept
1122 { return __a->fetch_and(__i, __m); }
1124 template<typename _ITp>
1126 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1127 memory_order __m) noexcept
1128 { return __a->fetch_and(__i, __m); }
1130 template<typename _ITp>
1132 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1133 memory_order __m) noexcept
1134 { return __a->fetch_or(__i, __m); }
1136 template<typename _ITp>
1138 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1139 memory_order __m) noexcept
1140 { return __a->fetch_or(__i, __m); }
1142 template<typename _ITp>
1144 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
1145 memory_order __m) noexcept
1146 { return __a->fetch_xor(__i, __m); }
1148 template<typename _ITp>
1150 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
1151 memory_order __m) noexcept
1152 { return __a->fetch_xor(__i, __m); }
// seq_cst convenience wrappers forwarding to the _explicit forms above.
1154 template<typename _ITp>
1156 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1157 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1159 template<typename _ITp>
1161 atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1162 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1164 template<typename _ITp>
1166 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1167 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1169 template<typename _ITp>
1171 atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1172 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1174 template<typename _ITp>
1176 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1177 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1179 template<typename _ITp>
1181 atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1182 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1184 template<typename _ITp>
1186 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1187 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1189 template<typename _ITp>
1191 atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1192 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1194 template<typename _ITp>
1196 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
1197 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1199 template<typename _ITp>
1201 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
1202 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
// Free fetch_add/fetch_sub overloads for atomic<_Tp*>: displacement is a
// ptrdiff_t counted in _Tp elements, forwarded to the member functions.
// Return-type lines ('inline _ITp*') are elided from this numbered excerpt.
1205 // Partial specializations for pointers.
1206 template<typename _ITp>
1208 atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1209 memory_order __m) noexcept
1210 { return __a->fetch_add(__d, __m); }
1212 template<typename _ITp>
1214 atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
1215 memory_order __m) noexcept
1216 { return __a->fetch_add(__d, __m); }
// seq_cst forms rely on the member function's default memory-order argument.
1218 template<typename _ITp>
1220 atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1221 { return __a->fetch_add(__d); }
1223 template<typename _ITp>
1225 atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1226 { return __a->fetch_add(__d); }
1228 template<typename _ITp>
1230 atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
1231 ptrdiff_t __d, memory_order __m) noexcept
1232 { return __a->fetch_sub(__d, __m); }
1234 template<typename _ITp>
1236 atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
1237 memory_order __m) noexcept
1238 { return __a->fetch_sub(__d, __m); }
1240 template<typename _ITp>
1242 atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1243 { return __a->fetch_sub(__d); }
1245 template<typename _ITp>
1247 atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
1248 { return __a->fetch_sub(__d); }
1251 _GLIBCXX_END_NAMESPACE_VERSION
1256 #endif // _GLIBCXX_ATOMIC