libstdc++
atomic
1 // -*- C++ -*- header.
2 
3 // Copyright (C) 2008-2025 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file include/atomic
26  * This is a Standard C++ Library header.
27  */
28 
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31 
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
34 
35 #ifdef _GLIBCXX_SYSHDR
36 #pragma GCC system_header
37 #endif
38 
39 #if __cplusplus < 201103L
40 # include <bits/c++0x_warning.h>
41 #else
42 
43 #define __glibcxx_want_atomic_is_always_lock_free
44 #define __glibcxx_want_atomic_flag_test
45 #define __glibcxx_want_atomic_float
46 #define __glibcxx_want_atomic_ref
47 #define __glibcxx_want_atomic_lock_free_type_aliases
48 #define __glibcxx_want_atomic_value_initialization
49 #define __glibcxx_want_atomic_wait
50 #include <bits/version.h>
51 
52 #include <bits/atomic_base.h>
53 #include <cstdint>
54 #include <type_traits>
55 
56 namespace std _GLIBCXX_VISIBILITY(default)
57 {
58 _GLIBCXX_BEGIN_NAMESPACE_VERSION
59 
60  /**
61  * @addtogroup atomics
62  * @{
63  */
64 
65  template<typename _Tp>
66  struct atomic;
67 
68  /// atomic<bool>
69  // NB: No operators or fetch-operations for this type.
70  template<>
71  struct atomic<bool>
72  {
73  using value_type = bool;
74 
75  private:
76  __atomic_base<bool> _M_base;
77 
78  public:
79  atomic() noexcept = default;
80  ~atomic() noexcept = default;
81  atomic(const atomic&) = delete;
82  atomic& operator=(const atomic&) = delete;
83  atomic& operator=(const atomic&) volatile = delete;
84 
85  constexpr atomic(bool __i) noexcept : _M_base(__i) { }
86 
87  bool
88  operator=(bool __i) noexcept
89  { return _M_base.operator=(__i); }
90 
91  bool
92  operator=(bool __i) volatile noexcept
93  { return _M_base.operator=(__i); }
94 
95  operator bool() const noexcept
96  { return _M_base.load(); }
97 
98  operator bool() const volatile noexcept
99  { return _M_base.load(); }
100 
101  bool
102  is_lock_free() const noexcept { return _M_base.is_lock_free(); }
103 
104  bool
105  is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
106 
107 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
108  static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
109 #endif
110 
111  void
112  store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
113  { _M_base.store(__i, __m); }
114 
115  void
116  store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
117  { _M_base.store(__i, __m); }
118 
119  bool
120  load(memory_order __m = memory_order_seq_cst) const noexcept
121  { return _M_base.load(__m); }
122 
123  bool
124  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
125  { return _M_base.load(__m); }
126 
127  bool
128  exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
129  { return _M_base.exchange(__i, __m); }
130 
131  bool
132  exchange(bool __i,
133  memory_order __m = memory_order_seq_cst) volatile noexcept
134  { return _M_base.exchange(__i, __m); }
135 
136  bool
137  compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
138  memory_order __m2) noexcept
139  { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
140 
141  bool
142  compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
143  memory_order __m2) volatile noexcept
144  { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
145 
146  bool
147  compare_exchange_weak(bool& __i1, bool __i2,
148  memory_order __m = memory_order_seq_cst) noexcept
149  { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
150 
151  bool
152  compare_exchange_weak(bool& __i1, bool __i2,
153  memory_order __m = memory_order_seq_cst) volatile noexcept
154  { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
155 
156  bool
157  compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
158  memory_order __m2) noexcept
159  { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
160 
161  bool
162  compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
163  memory_order __m2) volatile noexcept
164  { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
165 
166  bool
167  compare_exchange_strong(bool& __i1, bool __i2,
168  memory_order __m = memory_order_seq_cst) noexcept
169  { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
170 
171  bool
172  compare_exchange_strong(bool& __i1, bool __i2,
173  memory_order __m = memory_order_seq_cst) volatile noexcept
174  { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
175 
176 #if __cpp_lib_atomic_wait
177  void
178  wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
179  { _M_base.wait(__old, __m); }
180 
181  // TODO add const volatile overload
182 
183  void
184  notify_one() noexcept
185  { _M_base.notify_one(); }
186 
187  void
188  notify_all() noexcept
189  { _M_base.notify_all(); }
190 #endif // __cpp_lib_atomic_wait
191  };
192 
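A minimal usage sketch for this specialization (editorial example, not part of the header; the names ready, publish and spin_until_ready are illustrative): one thread publishes a boolean flag with release ordering and another polls it with acquire ordering.

    #include <atomic>

    std::atomic<bool> ready{false};

    void publish()                                      // writer thread
    { ready.store(true, std::memory_order_release); }

    void spin_until_ready()                             // reader thread
    { while (!ready.load(std::memory_order_acquire)) { } }
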
193  /**
194  * @brief Generic atomic type, primary class template.
195  *
196  * @tparam _Tp Type to be made atomic, must be trivially copyable.
197  */
198  template<typename _Tp>
199  struct atomic
200  {
201  using value_type = _Tp;
202 
203  private:
204  // Align 1/2/4/8/16-byte types to at least their size.
205  static constexpr int _S_min_alignment
206  = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
207  ? 0 : sizeof(_Tp);
208 
209  static constexpr int _S_alignment
210  = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
211 
212  alignas(_S_alignment) _Tp _M_i;
213 
214  static_assert(__is_trivially_copyable(_Tp),
215  "std::atomic requires a trivially copyable type");
216 
217  static_assert(sizeof(_Tp) > 0,
218  "Incomplete or zero-sized types are not supported");
219 
220 #if __cplusplus > 201703L
221  static_assert(is_copy_constructible_v<_Tp>);
222  static_assert(is_move_constructible_v<_Tp>);
223  static_assert(is_copy_assignable_v<_Tp>);
224  static_assert(is_move_assignable_v<_Tp>);
225 #endif
226 
227  public:
228 #if __cpp_lib_atomic_value_initialization
229  // _GLIBCXX_RESOLVE_LIB_DEFECTS
230  // 4169. std::atomic<T>'s default constructor should be constrained
231  constexpr atomic() noexcept(is_nothrow_default_constructible_v<_Tp>)
232  requires is_default_constructible_v<_Tp>
233  : _M_i()
234  {}
235 #else
236  atomic() = default;
237 #endif
238 
239  ~atomic() noexcept = default;
240  atomic(const atomic&) = delete;
241  atomic& operator=(const atomic&) = delete;
242  atomic& operator=(const atomic&) volatile = delete;
243 
244  constexpr atomic(_Tp __i) noexcept : _M_i(__i)
245  {
246 #if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
247  if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
248  __builtin_clear_padding(std::__addressof(_M_i));
249 #endif
250  }
251 
252  operator _Tp() const noexcept
253  { return load(); }
254 
255  operator _Tp() const volatile noexcept
256  { return load(); }
257 
258  _Tp
259  operator=(_Tp __i) noexcept
260  { store(__i); return __i; }
261 
262  _Tp
263  operator=(_Tp __i) volatile noexcept
264  { store(__i); return __i; }
265 
266  bool
267  is_lock_free() const noexcept
268  {
269  // Produce a fake, minimally aligned pointer.
270  return __atomic_is_lock_free(sizeof(_M_i),
271  reinterpret_cast<void *>(-_S_alignment));
272  }
273 
274  bool
275  is_lock_free() const volatile noexcept
276  {
277  // Produce a fake, minimally aligned pointer.
278  return __atomic_is_lock_free(sizeof(_M_i),
279  reinterpret_cast<void *>(-_S_alignment));
280  }
281 
282 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
283  static constexpr bool is_always_lock_free
284  = __atomic_always_lock_free(sizeof(_M_i), 0);
285 #endif
286 
287  void
288  store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
289  {
290  __atomic_store(std::__addressof(_M_i),
291  __atomic_impl::__clear_padding(__i),
292  int(__m));
293  }
294 
295  void
296  store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
297  {
298  __atomic_store(std::__addressof(_M_i),
299  __atomic_impl::__clear_padding(__i),
300  int(__m));
301  }
302 
303  _Tp
304  load(memory_order __m = memory_order_seq_cst) const noexcept
305  {
306  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
307  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
308  __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
309  return *__ptr;
310  }
311 
312  _Tp
313  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
314  {
315  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
316  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
317  __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
318  return *__ptr;
319  }
320 
321  _Tp
322  exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
323  {
324  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
325  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
326  __atomic_exchange(std::__addressof(_M_i),
327  __atomic_impl::__clear_padding(__i),
328  __ptr, int(__m));
329  return *__ptr;
330  }
331 
332  _Tp
333  exchange(_Tp __i,
334  memory_order __m = memory_order_seq_cst) volatile noexcept
335  {
336  alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
337  _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
338  __atomic_exchange(std::__addressof(_M_i),
339  __atomic_impl::__clear_padding(__i),
340  __ptr, int(__m));
341  return *__ptr;
342  }
343 
344  bool
345  compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
346  memory_order __f) noexcept
347  {
348  return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
349  __s, __f);
350  }
351 
352  bool
353  compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
354  memory_order __f) volatile noexcept
355  {
356  return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
357  __s, __f);
358  }
359 
360  bool
361  compare_exchange_weak(_Tp& __e, _Tp __i,
362  memory_order __m = memory_order_seq_cst) noexcept
363  { return compare_exchange_weak(__e, __i, __m,
364  __cmpexch_failure_order(__m)); }
365 
366  bool
367  compare_exchange_weak(_Tp& __e, _Tp __i,
368  memory_order __m = memory_order_seq_cst) volatile noexcept
369  { return compare_exchange_weak(__e, __i, __m,
370  __cmpexch_failure_order(__m)); }
371 
372  bool
373  compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
374  memory_order __f) noexcept
375  {
376  return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
377  __s, __f);
378  }
379 
380  bool
381  compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
382  memory_order __f) volatile noexcept
383  {
384  return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
385  __s, __f);
386  }
387 
388  bool
389  compare_exchange_strong(_Tp& __e, _Tp __i,
390  memory_order __m = memory_order_seq_cst) noexcept
391  { return compare_exchange_strong(__e, __i, __m,
392  __cmpexch_failure_order(__m)); }
393 
394  bool
395  compare_exchange_strong(_Tp& __e, _Tp __i,
396  memory_order __m = memory_order_seq_cst) volatile noexcept
397  { return compare_exchange_strong(__e, __i, __m,
398  __cmpexch_failure_order(__m)); }
399 
400 #if __cpp_lib_atomic_wait // C++ >= 20
401  void
402  wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
403  {
404  std::__atomic_wait_address_v(&_M_i, __old,
405  [__m, this] { return this->load(__m); });
406  }
407 
408  // TODO add const volatile overload
409 
410  void
411  notify_one() noexcept
412  { std::__atomic_notify_address(&_M_i, false); }
413 
414  void
415  notify_all() noexcept
416  { std::__atomic_notify_address(&_M_i, true); }
417 #endif // __cpp_lib_atomic_wait
418 
419  };
420 
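A sketch of how the primary template is typically used (editorial example, not part of the header; the Point type is illustrative): any trivially copyable type can be stored, and a read-modify-write step is expressed as a compare_exchange loop.

    #include <atomic>

    struct Point { int x, y; };                 // trivially copyable

    std::atomic<Point> position{Point{0, 0}};

    void move_right()
    {
      Point expected = position.load();
      Point desired;
      do
      {
        desired = expected;
        ++desired.x;
      } while (!position.compare_exchange_weak(expected, desired));
    }
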
421  /// Partial specialization for pointer types.
422  template<typename _Tp>
423  struct atomic<_Tp*>
424  {
425  using value_type = _Tp*;
426  using difference_type = ptrdiff_t;
427 
428  typedef _Tp* __pointer_type;
429  typedef __atomic_base<_Tp*> __base_type;
430  __base_type _M_b;
431 
432  atomic() noexcept = default;
433  ~atomic() noexcept = default;
434  atomic(const atomic&) = delete;
435  atomic& operator=(const atomic&) = delete;
436  atomic& operator=(const atomic&) volatile = delete;
437 
438  constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
439 
440  operator __pointer_type() const noexcept
441  { return __pointer_type(_M_b); }
442 
443  operator __pointer_type() const volatile noexcept
444  { return __pointer_type(_M_b); }
445 
446  __pointer_type
447  operator=(__pointer_type __p) noexcept
448  { return _M_b.operator=(__p); }
449 
450  __pointer_type
451  operator=(__pointer_type __p) volatile noexcept
452  { return _M_b.operator=(__p); }
453 
454  __pointer_type
455  operator++(int) noexcept
456  {
457 #if __cplusplus >= 201703L
458  static_assert( is_object_v<_Tp>, "pointer to object type" );
459 #endif
460  return _M_b++;
461  }
462 
463  __pointer_type
464  operator++(int) volatile noexcept
465  {
466 #if __cplusplus >= 201703L
467  static_assert( is_object_v<_Tp>, "pointer to object type" );
468 #endif
469  return _M_b++;
470  }
471 
472  __pointer_type
473  operator--(int) noexcept
474  {
475 #if __cplusplus >= 201703L
476  static_assert( is_object_v<_Tp>, "pointer to object type" );
477 #endif
478  return _M_b--;
479  }
480 
481  __pointer_type
482  operator--(int) volatile noexcept
483  {
484 #if __cplusplus >= 201703L
485  static_assert( is_object_v<_Tp>, "pointer to object type" );
486 #endif
487  return _M_b--;
488  }
489 
490  __pointer_type
491  operator++() noexcept
492  {
493 #if __cplusplus >= 201703L
494  static_assert( is_object_v<_Tp>, "pointer to object type" );
495 #endif
496  return ++_M_b;
497  }
498 
499  __pointer_type
500  operator++() volatile noexcept
501  {
502 #if __cplusplus >= 201703L
503  static_assert( is_object_v<_Tp>, "pointer to object type" );
504 #endif
505  return ++_M_b;
506  }
507 
508  __pointer_type
509  operator--() noexcept
510  {
511 #if __cplusplus >= 201703L
512  static_assert( is_object_v<_Tp>, "pointer to object type" );
513 #endif
514  return --_M_b;
515  }
516 
517  __pointer_type
518  operator--() volatile noexcept
519  {
520 #if __cplusplus >= 201703L
521  static_assert( is_object_v<_Tp>, "pointer to object type" );
522 #endif
523  return --_M_b;
524  }
525 
526  __pointer_type
527  operator+=(ptrdiff_t __d) noexcept
528  {
529 #if __cplusplus >= 201703L
530  static_assert( is_object_v<_Tp>, "pointer to object type" );
531 #endif
532  return _M_b.operator+=(__d);
533  }
534 
535  __pointer_type
536  operator+=(ptrdiff_t __d) volatile noexcept
537  {
538 #if __cplusplus >= 201703L
539  static_assert( is_object_v<_Tp>, "pointer to object type" );
540 #endif
541  return _M_b.operator+=(__d);
542  }
543 
544  __pointer_type
545  operator-=(ptrdiff_t __d) noexcept
546  {
547 #if __cplusplus >= 201703L
548  static_assert( is_object_v<_Tp>, "pointer to object type" );
549 #endif
550  return _M_b.operator-=(__d);
551  }
552 
553  __pointer_type
554  operator-=(ptrdiff_t __d) volatile noexcept
555  {
556 #if __cplusplus >= 201703L
557  static_assert( is_object_v<_Tp>, "pointer to object type" );
558 #endif
559  return _M_b.operator-=(__d);
560  }
561 
562  bool
563  is_lock_free() const noexcept
564  { return _M_b.is_lock_free(); }
565 
566  bool
567  is_lock_free() const volatile noexcept
568  { return _M_b.is_lock_free(); }
569 
570 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
571  static constexpr bool is_always_lock_free
572  = ATOMIC_POINTER_LOCK_FREE == 2;
573 #endif
574 
575  void
576  store(__pointer_type __p,
577  memory_order __m = memory_order_seq_cst) noexcept
578  { return _M_b.store(__p, __m); }
579 
580  void
581  store(__pointer_type __p,
582  memory_order __m = memory_order_seq_cst) volatile noexcept
583  { return _M_b.store(__p, __m); }
584 
585  __pointer_type
586  load(memory_order __m = memory_order_seq_cst) const noexcept
587  { return _M_b.load(__m); }
588 
589  __pointer_type
590  load(memory_order __m = memory_order_seq_cst) const volatile noexcept
591  { return _M_b.load(__m); }
592 
593  __pointer_type
594  exchange(__pointer_type __p,
595  memory_order __m = memory_order_seq_cst) noexcept
596  { return _M_b.exchange(__p, __m); }
597 
598  __pointer_type
599  exchange(__pointer_type __p,
600  memory_order __m = memory_order_seq_cst) volatile noexcept
601  { return _M_b.exchange(__p, __m); }
602 
603  bool
604  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
605  memory_order __m1, memory_order __m2) noexcept
606  { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
607 
608  bool
609  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
610  memory_order __m1,
611  memory_order __m2) volatile noexcept
612  { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
613 
614  bool
615  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
616  memory_order __m = memory_order_seq_cst) noexcept
617  {
618  return compare_exchange_weak(__p1, __p2, __m,
619  __cmpexch_failure_order(__m));
620  }
621 
622  bool
623  compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
624  memory_order __m = memory_order_seq_cst) volatile noexcept
625  {
626  return compare_exchange_weak(__p1, __p2, __m,
627  __cmpexch_failure_order(__m));
628  }
629 
630  bool
631  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
632  memory_order __m1, memory_order __m2) noexcept
633  { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
634 
635  bool
636  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
637  memory_order __m1,
638  memory_order __m2) volatile noexcept
639  { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
640 
641  bool
642  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
643  memory_order __m = memory_order_seq_cst) noexcept
644  {
645  return _M_b.compare_exchange_strong(__p1, __p2, __m,
646  __cmpexch_failure_order(__m));
647  }
648 
649  bool
650  compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
651  memory_order __m = memory_order_seq_cst) volatile noexcept
652  {
653  return _M_b.compare_exchange_strong(__p1, __p2, __m,
654  __cmpexch_failure_order(__m));
655  }
656 
657 #if __cpp_lib_atomic_wait
658  void
659  wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
660  { _M_b.wait(__old, __m); }
661 
662  // TODO add const volatile overload
663 
664  void
665  notify_one() noexcept
666  { _M_b.notify_one(); }
667 
668  void
669  notify_all() noexcept
670  { _M_b.notify_all(); }
671 #endif // __cpp_lib_atomic_wait
672 
673  __pointer_type
674  fetch_add(ptrdiff_t __d,
675  memory_order __m = memory_order_seq_cst) noexcept
676  {
677 #if __cplusplus >= 201703L
678  static_assert( is_object_v<_Tp>, "pointer to object type" );
679 #endif
680  return _M_b.fetch_add(__d, __m);
681  }
682 
683  __pointer_type
684  fetch_add(ptrdiff_t __d,
685  memory_order __m = memory_order_seq_cst) volatile noexcept
686  {
687 #if __cplusplus >= 201703L
688  static_assert( is_object_v<_Tp>, "pointer to object type" );
689 #endif
690  return _M_b.fetch_add(__d, __m);
691  }
692 
693  __pointer_type
694  fetch_sub(ptrdiff_t __d,
695  memory_order __m = memory_order_seq_cst) noexcept
696  {
697 #if __cplusplus >= 201703L
698  static_assert( is_object_v<_Tp>, "pointer to object type" );
699 #endif
700  return _M_b.fetch_sub(__d, __m);
701  }
702 
703  __pointer_type
704  fetch_sub(ptrdiff_t __d,
705  memory_order __m = memory_order_seq_cst) volatile noexcept
706  {
707 #if __cplusplus >= 201703L
708  static_assert( is_object_v<_Tp>, "pointer to object type" );
709 #endif
710  return _M_b.fetch_sub(__d, __m);
711  }
712  };
713 
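A usage sketch for the pointer specialization (editorial example, not part of the header): fetch_add advances the pointer by whole elements, so it can hand out distinct slots of an array to concurrent callers.

    #include <atomic>

    int slots[64];
    std::atomic<int*> next_slot{slots};

    int* claim_slot()                               // each caller receives a distinct element
    { return next_slot.fetch_add(1, std::memory_order_relaxed); }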
714 
715  /// Explicit specialization for char.
716  template<>
717  struct atomic<char> : __atomic_base<char>
718  {
719  typedef char __integral_type;
720  typedef __atomic_base<char> __base_type;
721 
722  atomic() noexcept = default;
723  ~atomic() noexcept = default;
724  atomic(const atomic&) = delete;
725  atomic& operator=(const atomic&) = delete;
726  atomic& operator=(const atomic&) volatile = delete;
727 
728  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
729 
730  using __base_type::operator __integral_type;
731  using __base_type::operator=;
732 
733 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
734  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
735 #endif
736  };
737 
738  /// Explicit specialization for signed char.
739  template<>
740  struct atomic<signed char> : __atomic_base<signed char>
741  {
742  typedef signed char __integral_type;
743  typedef __atomic_base<signed char> __base_type;
744 
745  atomic() noexcept = default;
746  ~atomic() noexcept = default;
747  atomic(const atomic&) = delete;
748  atomic& operator=(const atomic&) = delete;
749  atomic& operator=(const atomic&) volatile = delete;
750 
751  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
752 
753  using __base_type::operator __integral_type;
754  using __base_type::operator=;
755 
756 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
757  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
758 #endif
759  };
760 
761  /// Explicit specialization for unsigned char.
762  template<>
763  struct atomic<unsigned char> : __atomic_base<unsigned char>
764  {
765  typedef unsigned char __integral_type;
766  typedef __atomic_base<unsigned char> __base_type;
767 
768  atomic() noexcept = default;
769  ~atomic() noexcept = default;
770  atomic(const atomic&) = delete;
771  atomic& operator=(const atomic&) = delete;
772  atomic& operator=(const atomic&) volatile = delete;
773 
774  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
775 
776  using __base_type::operator __integral_type;
777  using __base_type::operator=;
778 
779 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
780  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
781 #endif
782  };
783 
784  /// Explicit specialization for short.
785  template<>
786  struct atomic<short> : __atomic_base<short>
787  {
788  typedef short __integral_type;
789  typedef __atomic_base<short> __base_type;
790 
791  atomic() noexcept = default;
792  ~atomic() noexcept = default;
793  atomic(const atomic&) = delete;
794  atomic& operator=(const atomic&) = delete;
795  atomic& operator=(const atomic&) volatile = delete;
796 
797  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
798 
799  using __base_type::operator __integral_type;
800  using __base_type::operator=;
801 
802 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
803  static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
804 #endif
805  };
806 
807  /// Explicit specialization for unsigned short.
808  template<>
809  struct atomic<unsigned short> : __atomic_base<unsigned short>
810  {
811  typedef unsigned short __integral_type;
812  typedef __atomic_base<unsigned short> __base_type;
813 
814  atomic() noexcept = default;
815  ~atomic() noexcept = default;
816  atomic(const atomic&) = delete;
817  atomic& operator=(const atomic&) = delete;
818  atomic& operator=(const atomic&) volatile = delete;
819 
820  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
821 
822  using __base_type::operator __integral_type;
823  using __base_type::operator=;
824 
825 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
826  static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
827 #endif
828  };
829 
830  /// Explicit specialization for int.
831  template<>
832  struct atomic<int> : __atomic_base<int>
833  {
834  typedef int __integral_type;
835  typedef __atomic_base<int> __base_type;
836 
837  atomic() noexcept = default;
838  ~atomic() noexcept = default;
839  atomic(const atomic&) = delete;
840  atomic& operator=(const atomic&) = delete;
841  atomic& operator=(const atomic&) volatile = delete;
842 
843  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
844 
845  using __base_type::operator __integral_type;
846  using __base_type::operator=;
847 
848 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
849  static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
850 #endif
851  };
852 
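A usage sketch for the integral specializations (editorial example, not part of the header): a shared event counter incremented with relaxed ordering and read with the default sequentially consistent load.

    #include <atomic>

    std::atomic<int> events{0};

    void record_event()
    { events.fetch_add(1, std::memory_order_relaxed); }   // or simply ++events

    int events_so_far()
    { return events.load(); }
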
853  /// Explicit specialization for unsigned int.
854  template<>
855  struct atomic<unsigned int> : __atomic_base<unsigned int>
856  {
857  typedef unsigned int __integral_type;
858  typedef __atomic_base<unsigned int> __base_type;
859 
860  atomic() noexcept = default;
861  ~atomic() noexcept = default;
862  atomic(const atomic&) = delete;
863  atomic& operator=(const atomic&) = delete;
864  atomic& operator=(const atomic&) volatile = delete;
865 
866  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
867 
868  using __base_type::operator __integral_type;
869  using __base_type::operator=;
870 
871 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
872  static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
873 #endif
874  };
875 
876  /// Explicit specialization for long.
877  template<>
878  struct atomic<long> : __atomic_base<long>
879  {
880  typedef long __integral_type;
881  typedef __atomic_base<long> __base_type;
882 
883  atomic() noexcept = default;
884  ~atomic() noexcept = default;
885  atomic(const atomic&) = delete;
886  atomic& operator=(const atomic&) = delete;
887  atomic& operator=(const atomic&) volatile = delete;
888 
889  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
890 
891  using __base_type::operator __integral_type;
892  using __base_type::operator=;
893 
894 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
895  static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
896 #endif
897  };
898 
899  /// Explicit specialization for unsigned long.
900  template<>
901  struct atomic<unsigned long> : __atomic_base<unsigned long>
902  {
903  typedef unsigned long __integral_type;
904  typedef __atomic_base<unsigned long> __base_type;
905 
906  atomic() noexcept = default;
907  ~atomic() noexcept = default;
908  atomic(const atomic&) = delete;
909  atomic& operator=(const atomic&) = delete;
910  atomic& operator=(const atomic&) volatile = delete;
911 
912  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
913 
914  using __base_type::operator __integral_type;
915  using __base_type::operator=;
916 
917 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
918  static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
919 #endif
920  };
921 
922  /// Explicit specialization for long long.
923  template<>
924  struct atomic<long long> : __atomic_base<long long>
925  {
926  typedef long long __integral_type;
927  typedef __atomic_base<long long> __base_type;
928 
929  atomic() noexcept = default;
930  ~atomic() noexcept = default;
931  atomic(const atomic&) = delete;
932  atomic& operator=(const atomic&) = delete;
933  atomic& operator=(const atomic&) volatile = delete;
934 
935  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
936 
937  using __base_type::operator __integral_type;
938  using __base_type::operator=;
939 
940 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
941  static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
942 #endif
943  };
944 
945  /// Explicit specialization for unsigned long long.
946  template<>
947  struct atomic<unsigned long long> : __atomic_base<unsigned long long>
948  {
949  typedef unsigned long long __integral_type;
950  typedef __atomic_base<unsigned long long> __base_type;
951 
952  atomic() noexcept = default;
953  ~atomic() noexcept = default;
954  atomic(const atomic&) = delete;
955  atomic& operator=(const atomic&) = delete;
956  atomic& operator=(const atomic&) volatile = delete;
957 
958  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
959 
960  using __base_type::operator __integral_type;
961  using __base_type::operator=;
962 
963 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
964  static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
965 #endif
966  };
967 
968  /// Explicit specialization for wchar_t.
969  template<>
970  struct atomic<wchar_t> : __atomic_base<wchar_t>
971  {
972  typedef wchar_t __integral_type;
973  typedef __atomic_base<wchar_t> __base_type;
974 
975  atomic() noexcept = default;
976  ~atomic() noexcept = default;
977  atomic(const atomic&) = delete;
978  atomic& operator=(const atomic&) = delete;
979  atomic& operator=(const atomic&) volatile = delete;
980 
981  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
982 
983  using __base_type::operator __integral_type;
984  using __base_type::operator=;
985 
986 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
987  static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
988 #endif
989  };
990 
991 #ifdef _GLIBCXX_USE_CHAR8_T
992  /// Explicit specialization for char8_t.
993  template<>
994  struct atomic<char8_t> : __atomic_base<char8_t>
995  {
996  typedef char8_t __integral_type;
997  typedef __atomic_base<char8_t> __base_type;
998 
999  atomic() noexcept = default;
1000  ~atomic() noexcept = default;
1001  atomic(const atomic&) = delete;
1002  atomic& operator=(const atomic&) = delete;
1003  atomic& operator=(const atomic&) volatile = delete;
1004 
1005  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1006 
1007  using __base_type::operator __integral_type;
1008  using __base_type::operator=;
1009 
1010 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1011  static constexpr bool is_always_lock_free
1012  = ATOMIC_CHAR8_T_LOCK_FREE == 2;
1013 #endif
1014  };
1015 #endif
1016 
1017  /// Explicit specialization for char16_t.
1018  template<>
1019  struct atomic<char16_t> : __atomic_base<char16_t>
1020  {
1021  typedef char16_t __integral_type;
1022  typedef __atomic_base<char16_t> __base_type;
1023 
1024  atomic() noexcept = default;
1025  ~atomic() noexcept = default;
1026  atomic(const atomic&) = delete;
1027  atomic& operator=(const atomic&) = delete;
1028  atomic& operator=(const atomic&) volatile = delete;
1029 
1030  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1031 
1032  using __base_type::operator __integral_type;
1033  using __base_type::operator=;
1034 
1035 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1036  static constexpr bool is_always_lock_free
1037  = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1038 #endif
1039  };
1040 
1041  /// Explicit specialization for char32_t.
1042  template<>
1043  struct atomic<char32_t> : __atomic_base<char32_t>
1044  {
1045  typedef char32_t __integral_type;
1046  typedef __atomic_base<char32_t> __base_type;
1047 
1048  atomic() noexcept = default;
1049  ~atomic() noexcept = default;
1050  atomic(const atomic&) = delete;
1051  atomic& operator=(const atomic&) = delete;
1052  atomic& operator=(const atomic&) volatile = delete;
1053 
1054  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1055 
1056  using __base_type::operator __integral_type;
1057  using __base_type::operator=;
1058 
1059 #ifdef __cpp_lib_atomic_is_always_lock_free // C++ >= 17
1060  static constexpr bool is_always_lock_free
1061  = ATOMIC_CHAR32_T_LOCK_FREE == 2;
1062 #endif
1063  };
1064 
1065 
1066  /// atomic_bool
1067  typedef atomic<bool> atomic_bool;
1068 
1069  /// atomic_char
1070  typedef atomic<char> atomic_char;
1071 
1072  /// atomic_schar
1073  typedef atomic<signed char> atomic_schar;
1074 
1075  /// atomic_uchar
1076  typedef atomic<unsigned char> atomic_uchar;
1077 
1078  /// atomic_short
1079  typedef atomic<short> atomic_short;
1080 
1081  /// atomic_ushort
1082  typedef atomic<unsigned short> atomic_ushort;
1083 
1084  /// atomic_int
1085  typedef atomic<int> atomic_int;
1086 
1087  /// atomic_uint
1088  typedef atomic<unsigned int> atomic_uint;
1089 
1090  /// atomic_long
1091  typedef atomic<long> atomic_long;
1092 
1093  /// atomic_ulong
1094  typedef atomic<unsigned long> atomic_ulong;
1095 
1096  /// atomic_llong
1097  typedef atomic<long long> atomic_llong;
1098 
1099  /// atomic_ullong
1100  typedef atomic<unsigned long long> atomic_ullong;
1101 
1102  /// atomic_wchar_t
1103  typedef atomic<wchar_t> atomic_wchar_t;
1104 
1105 #ifdef _GLIBCXX_USE_CHAR8_T
1106  /// atomic_char8_t
1107  typedef atomic<char8_t> atomic_char8_t;
1108 #endif
1109 
1110  /// atomic_char16_t
1111  typedef atomic<char16_t> atomic_char16_t;
1112 
1113  /// atomic_char32_t
1114  typedef atomic<char32_t> atomic_char32_t;
1115 
1116 #ifdef _GLIBCXX_USE_C99_STDINT
1117  // _GLIBCXX_RESOLVE_LIB_DEFECTS
1118  // 2441. Exact-width atomic typedefs should be provided
1119 
1120  /// atomic_int8_t
1121  typedef atomic<int8_t> atomic_int8_t;
1122 
1123  /// atomic_uint8_t
1124  typedef atomic<uint8_t> atomic_uint8_t;
1125 
1126  /// atomic_int16_t
1127  typedef atomic<int16_t> atomic_int16_t;
1128 
1129  /// atomic_uint16_t
1130  typedef atomic<uint16_t> atomic_uint16_t;
1131 
1132  /// atomic_int32_t
1133  typedef atomic<int32_t> atomic_int32_t;
1134 
1135  /// atomic_uint32_t
1136  typedef atomic<uint32_t> atomic_uint32_t;
1137 
1138  /// atomic_int64_t
1139  typedef atomic<int64_t> atomic_int64_t;
1140 
1141  /// atomic_uint64_t
1142  typedef atomic<uint64_t> atomic_uint64_t;
1143 #endif
1144 
1145  /// atomic_int_least8_t
1146  typedef atomic<int_least8_t> atomic_int_least8_t;
1147 
1148  /// atomic_uint_least8_t
1149  typedef atomic<uint_least8_t> atomic_uint_least8_t;
1150 
1151  /// atomic_int_least16_t
1152  typedef atomic<int_least16_t> atomic_int_least16_t;
1153 
1154  /// atomic_uint_least16_t
1155  typedef atomic<uint_least16_t> atomic_uint_least16_t;
1156 
1157  /// atomic_int_least32_t
1158  typedef atomic<int_least32_t> atomic_int_least32_t;
1159 
1160  /// atomic_uint_least32_t
1161  typedef atomic<uint_least32_t> atomic_uint_least32_t;
1162 
1163  /// atomic_int_least64_t
1164  typedef atomic<int_least64_t> atomic_int_least64_t;
1165 
1166  /// atomic_uint_least64_t
1167  typedef atomic<uint_least64_t> atomic_uint_least64_t;
1168 
1169 
1170  /// atomic_int_fast8_t
1171  typedef atomic<int_fast8_t> atomic_int_fast8_t;
1172 
1173  /// atomic_uint_fast8_t
1174  typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1175 
1176  /// atomic_int_fast16_t
1177  typedef atomic<int_fast16_t> atomic_int_fast16_t;
1178 
1179  /// atomic_uint_fast16_t
1180  typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1181 
1182  /// atomic_int_fast32_t
1183  typedef atomic<int_fast32_t> atomic_int_fast32_t;
1184 
1185  /// atomic_uint_fast32_t
1186  typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1187 
1188  /// atomic_int_fast64_t
1189  typedef atomic<int_fast64_t> atomic_int_fast64_t;
1190 
1191  /// atomic_uint_fast64_t
1192  typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
1193 
1194 
1195  /// atomic_intptr_t
1196  typedef atomic<intptr_t> atomic_intptr_t;
1197 
1198  /// atomic_uintptr_t
1199  typedef atomic<uintptr_t> atomic_uintptr_t;
1200 
1201  /// atomic_size_t
1202  typedef atomic<size_t> atomic_size_t;
1203 
1204  /// atomic_ptrdiff_t
1205  typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1206 
1207  /// atomic_intmax_t
1208  typedef atomic<intmax_t> atomic_intmax_t;
1209 
1210  /// atomic_uintmax_t
1211  typedef atomic<uintmax_t> atomic_uintmax_t;
1212 
1213  // Function definitions, atomic_flag operations.
1214  inline bool
1215  atomic_flag_test_and_set_explicit(atomic_flag* __a,
1216  memory_order __m) noexcept
1217  { return __a->test_and_set(__m); }
1218 
1219  inline bool
1220  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1221  memory_order __m) noexcept
1222  { return __a->test_and_set(__m); }
1223 
1224 #if __cpp_lib_atomic_flag_test
1225  inline bool
1226  atomic_flag_test(const atomic_flag* __a) noexcept
1227  { return __a->test(); }
1228 
1229  inline bool
1230  atomic_flag_test(const volatile atomic_flag* __a) noexcept
1231  { return __a->test(); }
1232 
1233  inline bool
1234  atomic_flag_test_explicit(const atomic_flag* __a,
1235  memory_order __m) noexcept
1236  { return __a->test(__m); }
1237 
1238  inline bool
1239  atomic_flag_test_explicit(const volatile atomic_flag* __a,
1240  memory_order __m) noexcept
1241  { return __a->test(__m); }
1242 #endif
1243 
1244  inline void
1245  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1246  { __a->clear(__m); }
1247 
1248  inline void
1249  atomic_flag_clear_explicit(volatile atomic_flag* __a,
1250  memory_order __m) noexcept
1251  { __a->clear(__m); }
1252 
1253  inline bool
1254  atomic_flag_test_and_set(atomic_flag* __a) noexcept
1255  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1256 
1257  inline bool
1258  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1259  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1260 
1261  inline void
1262  atomic_flag_clear(atomic_flag* __a) noexcept
1263  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1264 
1265  inline void
1266  atomic_flag_clear(volatile atomic_flag* __a) noexcept
1267  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1268 
1269 #if __cpp_lib_atomic_wait
1270  inline void
1271  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
1272  { __a->wait(__old); }
1273 
1274  inline void
1275  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
1276  memory_order __m) noexcept
1277  { __a->wait(__old, __m); }
1278 
1279  inline void
1280  atomic_flag_notify_one(atomic_flag* __a) noexcept
1281  { __a->notify_one(); }
1282 
1283  inline void
1284  atomic_flag_notify_all(atomic_flag* __a) noexcept
1285  { __a->notify_all(); }
1286 #endif // __cpp_lib_atomic_wait
1287 
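A sketch of the classic use of these free functions (editorial example, not part of the header): a spinlock built from std::atomic_flag.

    #include <atomic>

    std::atomic_flag busy = ATOMIC_FLAG_INIT;   // since C++20 the default constructor also starts clear

    void lock()
    { while (std::atomic_flag_test_and_set_explicit(&busy, std::memory_order_acquire)) { } }

    void unlock()
    { std::atomic_flag_clear_explicit(&busy, std::memory_order_release); }
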
1288  /// @cond undocumented
1289  // _GLIBCXX_RESOLVE_LIB_DEFECTS
1290  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
1291  template<typename _Tp>
1292  using __atomic_val_t = __type_identity_t<_Tp>;
1293  template<typename _Tp>
1294  using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1295  /// @endcond
1296 
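A short illustration of why the value parameters below use __atomic_val_t rather than _Tp directly (editorial example, not part of the header): with the identity alias only the atomic<_Tp>* argument takes part in template argument deduction, so the value argument is simply converted instead of producing a deduction conflict.

    #include <atomic>

    std::atomic<long> counter{0};

    void demo()
    { std::atomic_store(&counter, 1); }   // OK: the int argument converts to long;
                                          // deducing _Tp from both arguments would fail
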
1297  // [atomics.nonmembers] Non-member functions.
1298  // Function templates generally applicable to atomic types.
1299  template<typename _ITp>
1300  inline bool
1301  atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1302  { return __a->is_lock_free(); }
1303 
1304  template<typename _ITp>
1305  inline bool
1306  atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1307  { return __a->is_lock_free(); }
1308 
1309  template<typename _ITp>
1310  inline void
1311  atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1312  { __a->store(__i, memory_order_relaxed); }
1313 
1314  template<typename _ITp>
1315  inline void
1316  atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1317  { __a->store(__i, memory_order_relaxed); }
1318 
1319  template<typename _ITp>
1320  inline void
1321  atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1322  memory_order __m) noexcept
1323  { __a->store(__i, __m); }
1324 
1325  template<typename _ITp>
1326  inline void
1327  atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1328  memory_order __m) noexcept
1329  { __a->store(__i, __m); }
1330 
1331  template<typename _ITp>
1332  inline _ITp
1333  atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1334  { return __a->load(__m); }
1335 
1336  template<typename _ITp>
1337  inline _ITp
1338  atomic_load_explicit(const volatile atomic<_ITp>* __a,
1339  memory_order __m) noexcept
1340  { return __a->load(__m); }
1341 
1342  template<typename _ITp>
1343  inline _ITp
1344  atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1345  memory_order __m) noexcept
1346  { return __a->exchange(__i, __m); }
1347 
1348  template<typename _ITp>
1349  inline _ITp
1350  atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1351  __atomic_val_t<_ITp> __i,
1352  memory_order __m) noexcept
1353  { return __a->exchange(__i, __m); }
1354 
1355  template<typename _ITp>
1356  inline bool
1357  atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1358  __atomic_val_t<_ITp>* __i1,
1359  __atomic_val_t<_ITp> __i2,
1360  memory_order __m1,
1361  memory_order __m2) noexcept
1362  { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1363 
1364  template<typename _ITp>
1365  inline bool
1366  atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1367  __atomic_val_t<_ITp>* __i1,
1368  __atomic_val_t<_ITp> __i2,
1369  memory_order __m1,
1370  memory_order __m2) noexcept
1371  { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1372 
1373  template<typename _ITp>
1374  inline bool
1375  atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1376  __atomic_val_t<_ITp>* __i1,
1377  __atomic_val_t<_ITp> __i2,
1378  memory_order __m1,
1379  memory_order __m2) noexcept
1380  { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1381 
1382  template<typename _ITp>
1383  inline bool
1384  atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1385  __atomic_val_t<_ITp>* __i1,
1386  __atomic_val_t<_ITp> __i2,
1387  memory_order __m1,
1388  memory_order __m2) noexcept
1389  { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1390 
1391 
1392  template<typename _ITp>
1393  inline void
1394  atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1395  { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1396 
1397  template<typename _ITp>
1398  inline void
1399  atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1400  { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1401 
1402  template<typename _ITp>
1403  inline _ITp
1404  atomic_load(const atomic<_ITp>* __a) noexcept
1405  { return atomic_load_explicit(__a, memory_order_seq_cst); }
1406 
1407  template<typename _ITp>
1408  inline _ITp
1409  atomic_load(const volatile atomic<_ITp>* __a) noexcept
1410  { return atomic_load_explicit(__a, memory_order_seq_cst); }
1411 
1412  template<typename _ITp>
1413  inline _ITp
1414  atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1415  { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1416 
1417  template<typename _ITp>
1418  inline _ITp
1419  atomic_exchange(volatile atomic<_ITp>* __a,
1420  __atomic_val_t<_ITp> __i) noexcept
1421  { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1422 
1423  template<typename _ITp>
1424  inline bool
1425  atomic_compare_exchange_weak(atomic<_ITp>* __a,
1426  __atomic_val_t<_ITp>* __i1,
1427  __atomic_val_t<_ITp> __i2) noexcept
1428  {
1429  return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1430  memory_order_seq_cst,
1431  memory_order_seq_cst);
1432  }
1433 
1434  template<typename _ITp>
1435  inline bool
1436  atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1437  __atomic_val_t<_ITp>* __i1,
1438  __atomic_val_t<_ITp> __i2) noexcept
1439  {
1440  return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1441  memory_order_seq_cst,
1442  memory_order_seq_cst);
1443  }
1444 
1445  template<typename _ITp>
1446  inline bool
1447  atomic_compare_exchange_strong(atomic<_ITp>* __a,
1448  __atomic_val_t<_ITp>* __i1,
1449  __atomic_val_t<_ITp> __i2) noexcept
1450  {
1451  return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1452  memory_order_seq_cst,
1453  memory_order_seq_cst);
1454  }
1455 
1456  template<typename _ITp>
1457  inline bool
1458  atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1459  __atomic_val_t<_ITp>* __i1,
1460  __atomic_val_t<_ITp> __i2) noexcept
1461  {
1462  return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1463  memory_order_seq_cst,
1464  memory_order_seq_cst);
1465  }
1466 
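A usage sketch for the seq_cst convenience wrappers above (editorial example, not part of the header): the free functions mirror the member functions and exist largely for compatibility with C-style code.

    #include <atomic>

    std::atomic<long> value{0};

    bool try_publish()
    {
      std::atomic_store(&value, 42L);
      long expected = 42L;
      return std::atomic_compare_exchange_strong(&value, &expected, 100L);
    }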
1467 
1468 #if __cpp_lib_atomic_wait
1469  template<typename _Tp>
1470  inline void
1471  atomic_wait(const atomic<_Tp>* __a,
1472  typename std::atomic<_Tp>::value_type __old) noexcept
1473  { __a->wait(__old); }
1474 
1475  template<typename _Tp>
1476  inline void
1477  atomic_wait_explicit(const atomic<_Tp>* __a,
1478  typename std::atomic<_Tp>::value_type __old,
1479  std::memory_order __m) noexcept
1480  { __a->wait(__old, __m); }
1481 
1482  template<typename _Tp>
1483  inline void
1484  atomic_notify_one(atomic<_Tp>* __a) noexcept
1485  { __a->notify_one(); }
1486 
1487  template<typename _Tp>
1488  inline void
1489  atomic_notify_all(atomic<_Tp>* __a) noexcept
1490  { __a->notify_all(); }
1491 #endif // __cpp_lib_atomic_wait
1492 
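A usage sketch for the wait/notify free templates (editorial example, not part of the header; requires C++20): one thread blocks in atomic_wait until another thread changes the value and notifies.

    #include <atomic>

    std::atomic<int> stage{0};

    void wait_for_stage_one()
    { std::atomic_wait(&stage, 0); }            // blocks while stage still equals 0

    void advance_to_stage_one()
    {
      stage.store(1);
      std::atomic_notify_all(&stage);
    }
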
1493  // Function templates for atomic_integral and atomic_pointer operations only.
1494  // Some operations (and, or, xor) are only available for atomic integrals,
1495  // a restriction implemented by taking a parameter of type __atomic_base<_ITp>*.
1496 
1497  template<typename _ITp>
1498  inline _ITp
1499  atomic_fetch_add_explicit(atomic<_ITp>* __a,
1500  __atomic_diff_t<_ITp> __i,
1501  memory_order __m) noexcept
1502  { return __a->fetch_add(__i, __m); }
1503 
1504  template<typename _ITp>
1505  inline _ITp
1506  atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1507  __atomic_diff_t<_ITp> __i,
1508  memory_order __m) noexcept
1509  { return __a->fetch_add(__i, __m); }
1510 
1511  template<typename _ITp>
1512  inline _ITp
1513  atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1514  __atomic_diff_t<_ITp> __i,
1515  memory_order __m) noexcept
1516  { return __a->fetch_sub(__i, __m); }
1517 
1518  template<typename _ITp>
1519  inline _ITp
1520  atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1521  __atomic_diff_t<_ITp> __i,
1522  memory_order __m) noexcept
1523  { return __a->fetch_sub(__i, __m); }
1524 
1525  template<typename _ITp>
1526  inline _ITp
1527  atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1528  __atomic_val_t<_ITp> __i,
1529  memory_order __m) noexcept
1530  { return __a->fetch_and(__i, __m); }
1531 
1532  template<typename _ITp>
1533  inline _ITp
1534  atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1535  __atomic_val_t<_ITp> __i,
1536  memory_order __m) noexcept
1537  { return __a->fetch_and(__i, __m); }
1538 
1539  template<typename _ITp>
1540  inline _ITp
1541  atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1542  __atomic_val_t<_ITp> __i,
1543  memory_order __m) noexcept
1544  { return __a->fetch_or(__i, __m); }
1545 
1546  template<typename _ITp>
1547  inline _ITp
1548  atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1549  __atomic_val_t<_ITp> __i,
1550  memory_order __m) noexcept
1551  { return __a->fetch_or(__i, __m); }
1552 
1553  template<typename _ITp>
1554  inline _ITp
1555  atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1556  __atomic_val_t<_ITp> __i,
1557  memory_order __m) noexcept
1558  { return __a->fetch_xor(__i, __m); }
1559 
1560  template<typename _ITp>
1561  inline _ITp
1562  atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1563  __atomic_val_t<_ITp> __i,
1564  memory_order __m) noexcept
1565  { return __a->fetch_xor(__i, __m); }
1566 
1567  template<typename _ITp>
1568  inline _ITp
1569  atomic_fetch_add(atomic<_ITp>* __a,
1570  __atomic_diff_t<_ITp> __i) noexcept
1571  { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1572 
1573  template<typename _ITp>
1574  inline _ITp
1575  atomic_fetch_add(volatile atomic<_ITp>* __a,
1576  __atomic_diff_t<_ITp> __i) noexcept
1577  { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1578 
1579  template<typename _ITp>
1580  inline _ITp
1581  atomic_fetch_sub(atomic<_ITp>* __a,
1582  __atomic_diff_t<_ITp> __i) noexcept
1583  { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1584 
1585  template<typename _ITp>
1586  inline _ITp
1587  atomic_fetch_sub(volatile atomic<_ITp>* __a,
1588  __atomic_diff_t<_ITp> __i) noexcept
1589  { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1590 
1591  template<typename _ITp>
1592  inline _ITp
1593  atomic_fetch_and(__atomic_base<_ITp>* __a,
1594  __atomic_val_t<_ITp> __i) noexcept
1595  { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1596 
1597  template<typename _ITp>
1598  inline _ITp
1599  atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1600  __atomic_val_t<_ITp> __i) noexcept
1601  { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1602 
1603  template<typename _ITp>
1604  inline _ITp
1605  atomic_fetch_or(__atomic_base<_ITp>* __a,
1606  __atomic_val_t<_ITp> __i) noexcept
1607  { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1608 
1609  template<typename _ITp>
1610  inline _ITp
1611  atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1612  __atomic_val_t<_ITp> __i) noexcept
1613  { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1614 
1615  template<typename _ITp>
1616  inline _ITp
1617  atomic_fetch_xor(__atomic_base<_ITp>* __a,
1618  __atomic_val_t<_ITp> __i) noexcept
1619  { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1620 
1621  template<typename _ITp>
1622  inline _ITp
1623  atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1624  __atomic_val_t<_ITp> __i) noexcept
1625  { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1626 
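A usage sketch for the bitwise fetch operations (editorial example, not part of the header): maintaining a set of flag bits in an atomic unsigned integer.

    #include <atomic>

    std::atomic<unsigned> flag_bits{0};

    void set_flag(unsigned bit)
    { std::atomic_fetch_or(&flag_bits, 1u << bit); }

    void clear_flag(unsigned bit)
    { std::atomic_fetch_and(&flag_bits, ~(1u << bit)); }

    bool flag_is_set(unsigned bit)
    { return std::atomic_load(&flag_bits) & (1u << bit); }
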
1627 #ifdef __cpp_lib_atomic_float
1628  template<>
1629  struct atomic<float> : __atomic_float<float>
1630  {
1631  atomic() noexcept = default;
1632 
1633  constexpr
1634  atomic(float __fp) noexcept : __atomic_float<float>(__fp)
1635  { }
1636 
1637  atomic& operator=(const atomic&) volatile = delete;
1638  atomic& operator=(const atomic&) = delete;
1639 
1640  using __atomic_float<float>::operator=;
1641  };
1642 
1643  template<>
1644  struct atomic<double> : __atomic_float<double>
1645  {
1646  atomic() noexcept = default;
1647 
1648  constexpr
1649  atomic(double __fp) noexcept : __atomic_float<double>(__fp)
1650  { }
1651 
1652  atomic& operator=(const atomic&) volatile = delete;
1653  atomic& operator=(const atomic&) = delete;
1654 
1655  using __atomic_float<double>::operator=;
1656  };
1657 
1658  template<>
1659  struct atomic<long double> : __atomic_float<long double>
1660  {
1661  atomic() noexcept = default;
1662 
1663  constexpr
1664  atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
1665  { }
1666 
1667  atomic& operator=(const atomic&) volatile = delete;
1668  atomic& operator=(const atomic&) = delete;
1669 
1670  using __atomic_float<long double>::operator=;
1671  };
1672 
1673 #ifdef __STDCPP_FLOAT16_T__
1674  template<>
1675  struct atomic<_Float16> : __atomic_float<_Float16>
1676  {
1677  atomic() noexcept = default;
1678 
1679  constexpr
1680  atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
1681  { }
1682 
1683  atomic& operator=(const atomic&) volatile = delete;
1684  atomic& operator=(const atomic&) = delete;
1685 
1686  using __atomic_float<_Float16>::operator=;
1687  };
1688 #endif
1689 
1690 #ifdef __STDCPP_FLOAT32_T__
1691  template<>
1692  struct atomic<_Float32> : __atomic_float<_Float32>
1693  {
1694  atomic() noexcept = default;
1695 
1696  constexpr
1697  atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
1698  { }
1699 
1700  atomic& operator=(const atomic&) volatile = delete;
1701  atomic& operator=(const atomic&) = delete;
1702 
1703  using __atomic_float<_Float32>::operator=;
1704  };
1705 #endif
1706 
1707 #ifdef __STDCPP_FLOAT64_T__
1708  template<>
1709  struct atomic<_Float64> : __atomic_float<_Float64>
1710  {
1711  atomic() noexcept = default;
1712 
1713  constexpr
1714  atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
1715  { }
1716 
1717  atomic& operator=(const atomic&) volatile = delete;
1718  atomic& operator=(const atomic&) = delete;
1719 
1720  using __atomic_float<_Float64>::operator=;
1721  };
1722 #endif
1723 
1724 #ifdef __STDCPP_FLOAT128_T__
1725  template<>
1726  struct atomic<_Float128> : __atomic_float<_Float128>
1727  {
1728  atomic() noexcept = default;
1729 
1730  constexpr
1731  atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
1732  { }
1733 
1734  atomic& operator=(const atomic&) volatile = delete;
1735  atomic& operator=(const atomic&) = delete;
1736 
1737  using __atomic_float<_Float128>::operator=;
1738  };
1739 #endif
1740 
1741 #ifdef __STDCPP_BFLOAT16_T__
1742  template<>
1743  struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
1744  {
1745  atomic() noexcept = default;
1746 
1747  constexpr
1748  atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
1749  { }
1750 
1751  atomic& operator=(const atomic&) volatile = delete;
1752  atomic& operator=(const atomic&) = delete;
1753 
1754  using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
1755  };
1756 #endif
1757 #endif // __cpp_lib_atomic_float
1758 
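A usage sketch for the floating-point specializations (editorial example, not part of the header; requires C++20): fetch_add and fetch_sub are available, so a running sum no longer needs a hand-written compare_exchange loop.

    #include <atomic>

    std::atomic<double> total{0.0};

    void add_sample(double x)
    { total.fetch_add(x, std::memory_order_relaxed); }
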
1759 #ifdef __cpp_lib_atomic_ref
1760  /// Class template to provide atomic operations on a non-atomic variable.
1761  template<typename _Tp>
1762  struct atomic_ref : __atomic_ref<_Tp>
1763  {
1764  explicit
1765  atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
1766  { }
1767 
1768  atomic_ref& operator=(const atomic_ref&) = delete;
1769 
1770  atomic_ref(const atomic_ref&) = default;
1771 
1772  using __atomic_ref<_Tp>::operator=;
1773  };
1774 #endif // __cpp_lib_atomic_ref
1775 
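A usage sketch for atomic_ref (editorial example, not part of the header; requires C++20): it performs atomic operations on an ordinary object, and while any atomic_ref to that object exists the object must be accessed only through atomic_ref.

    #include <atomic>

    int hits = 0;                               // plain, non-atomic object

    void record_hit()
    { std::atomic_ref<int>(hits).fetch_add(1, std::memory_order_relaxed); }
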
1776 #ifdef __cpp_lib_atomic_lock_free_type_aliases
1777 # ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
1778  using atomic_signed_lock_free
1779  = atomic<make_signed_t<__detail::__platform_wait_t>>;
1780  using atomic_unsigned_lock_free
1781  = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
1782 # elif ATOMIC_INT_LOCK_FREE == 2
1783  using atomic_signed_lock_free = atomic<signed int>;
1784  using atomic_unsigned_lock_free = atomic<unsigned int>;
1785 # elif ATOMIC_LONG_LOCK_FREE == 2
1786  using atomic_signed_lock_free = atomic<signed long>;
1787  using atomic_unsigned_lock_free = atomic<unsigned long>;
1788 # elif ATOMIC_CHAR_LOCK_FREE == 2
1789  using atomic_signed_lock_free = atomic<signed char>;
1790  using atomic_unsigned_lock_free = atomic<unsigned char>;
1791 # else
1792 # error "libstdc++ bug: no lock-free atomics but they were emitted in <version>"
1793 # endif
1794 #endif
1795 
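A sketch of what these aliases are for (editorial example, not part of the header; requires C++20, and the underlying integer type is implementation-defined): they name an atomic integer type that is guaranteed lock-free and well suited to wait/notify-based signalling.

    #include <atomic>

    std::atomic_unsigned_lock_free generation{0};

    void wait_for_next_generation(unsigned seen)
    { generation.wait(seen); }                  // blocks while the value still equals seen

    void bump_generation()
    {
      generation.fetch_add(1);
      generation.notify_all();
    }
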
1796  /// @} group atomics
1797 
1798 _GLIBCXX_END_NAMESPACE_VERSION
1799 } // namespace
1800 
1801 #endif // C++11
1802 
1803 #endif // _GLIBCXX_ATOMIC