libstdc++
shared_ptr_atomic.h
1 // shared_ptr atomic access -*- C++ -*-
2 
3 // Copyright (C) 2014-2025 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file bits/shared_ptr_atomic.h
26  * This is an internal header file, included by other library headers.
27  * Do not attempt to use it directly. @headername{memory}
28  */
29 
30 #ifndef _SHARED_PTR_ATOMIC_H
31 #define _SHARED_PTR_ATOMIC_H 1
32 
33 #include <bits/atomic_base.h>
34 #include <bits/shared_ptr.h>
35 
36 // Annotations for the custom locking in atomic<shared_ptr<T>>.
37 #if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
38 #include <sanitizer/tsan_interface.h>
39 #define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
40  __tsan_mutex_destroy(X, __tsan_mutex_not_static)
41 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
42  __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
43 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
44  __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
45 #define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
46  __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
47 #define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
48 #define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
49 #define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
50 #define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
51 #else
52 #define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
53 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
54 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
55 #define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
56 #define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
57 #define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
58 #define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
59 #define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
60 #endif
61 
62 namespace std _GLIBCXX_VISIBILITY(default)
63 {
64 _GLIBCXX_BEGIN_NAMESPACE_VERSION
65 
66  /**
67  * @addtogroup pointer_abstractions
68  * @relates shared_ptr
69  * @{
70  */
71 
72  /// @cond undocumented
73 
74  struct _Sp_locker
75  {
76  _Sp_locker(const _Sp_locker&) = delete;
77  _Sp_locker& operator=(const _Sp_locker&) = delete;
78 
79 #ifdef __GTHREADS
80  explicit
81  _Sp_locker(const void*) noexcept;
82  _Sp_locker(const void*, const void*) noexcept;
83  ~_Sp_locker();
84 
85  private:
86  unsigned char _M_key1;
87  unsigned char _M_key2;
88 #else
89  explicit _Sp_locker(const void*, const void* = nullptr) { }
90 #endif
91  };
92 
93  /// @endcond
94 
95  /**
96  * @brief Report whether shared_ptr atomic operations are lock-free.
97  * @param __p A non-null pointer to a shared_ptr object.
98  * @return True if atomic access to @c *__p is lock-free, false otherwise.
99  * @{
100  */
101  template<typename _Tp, _Lock_policy _Lp>
102  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
103  inline bool
104  atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>*)
105  {
106 #ifdef __GTHREADS
107  return __gthread_active_p() == 0;
108 #else
109  return true;
110 #endif
111  }
112 
113  template<typename _Tp>
114  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
115  inline bool
116  atomic_is_lock_free(const shared_ptr<_Tp>* __p)
117  { return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
118 
119  /// @}
120 
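  // Usage sketch (editor's addition, not part of this header).  With the
  // lock-based implementation above, this query typically reports
  // "not lock-free" once the program is actually multi-threaded.  Assumes
  // C++11 and <memory>:
  //
  //   std::shared_ptr<int> p = std::make_shared<int>(42);
  //   bool lf = std::atomic_is_lock_free(&p);   // usually false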
121  /**
122  * @brief Atomic load for shared_ptr objects.
123  * @param __p A non-null pointer to a shared_ptr object.
124  * @return @c *__p
125  *
126  * The memory order shall not be `memory_order_release` or
127  * `memory_order_acq_rel`.
128  * @{
129  */
130  template<typename _Tp>
131  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
132  inline shared_ptr<_Tp>
133  atomic_load_explicit(const shared_ptr<_Tp>* __p, memory_order)
134  {
135  _Sp_locker __lock{__p};
136  return *__p;
137  }
138 
139  template<typename _Tp>
140  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
141  inline shared_ptr<_Tp>
142  atomic_load(const shared_ptr<_Tp>* __p)
143  { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
144 
145  template<typename _Tp, _Lock_policy _Lp>
146  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
147  inline __shared_ptr<_Tp, _Lp>
148  atomic_load_explicit(const __shared_ptr<_Tp, _Lp>* __p, memory_order)
149  {
150  _Sp_locker __lock{__p};
151  return *__p;
152  }
153 
154  template<typename _Tp, _Lock_policy _Lp>
155  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
156  inline __shared_ptr<_Tp, _Lp>
157  atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
158  { return std::atomic_load_explicit(__p, memory_order_seq_cst); }
159  /// @}
160 
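  // Usage sketch (editor's addition, not part of this header): taking a
  // consistent snapshot of a shared_ptr that another thread may replace
  // concurrently.  Assumes C++11 and <memory>; `g_data` is a hypothetical
  // global shared with a writer thread:
  //
  //   std::shared_ptr<std::string> g_data =
  //       std::make_shared<std::string>("initial");
  //
  //   void reader()
  //   {
  //     // Copies the pointer and bumps the use count under the internal lock.
  //     std::shared_ptr<std::string> snap = std::atomic_load(&g_data);
  //     std::size_t n = snap->size();  // valid even if g_data is replaced now
  //   }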
161  /**
162  * @brief Atomic store for shared_ptr objects.
163  * @param __p A non-null pointer to a shared_ptr object.
164  * @param __r The value to store.
165  *
166  * The memory order shall not be `memory_order_acquire` or
167  * `memory_order_acq_rel`.
168  * @{
169  */
170  template<typename _Tp>
171  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
172  inline void
173  atomic_store_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
174  memory_order)
175  {
176  _Sp_locker __lock{__p};
177  __p->swap(__r); // use swap so that **__p not destroyed while lock held
178  }
179 
180  template<typename _Tp>
181  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
182  inline void
183  atomic_store(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
184  { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
185 
186  template<typename _Tp, _Lock_policy _Lp>
187  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
188  inline void
189  atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
190  __shared_ptr<_Tp, _Lp> __r,
191  memory_order)
192  {
193  _Sp_locker __lock{__p};
194  __p->swap(__r); // use swap so that **__p not destroyed while lock held
195  }
196 
197  template<typename _Tp, _Lock_policy _Lp>
198  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
199  inline void
200  atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
201  { std::atomic_store_explicit(__p, std::move(__r), memory_order_seq_cst); }
202  /// @}
203 
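  // Usage sketch (editor's addition, not part of this header): publishing a
  // replacement value.  The previous object is destroyed only after the
  // internal lock has been released (hence the swap above).  Assumes C++11,
  // <memory>, and the hypothetical `g_data` from the previous sketch:
  //
  //   void writer()
  //   {
  //     auto fresh = std::make_shared<std::string>("updated");
  //     std::atomic_store(&g_data, std::move(fresh));   // seq_cst store
  //   }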
204  /**
205  * @brief Atomic exchange for shared_ptr objects.
206  * @param __p A non-null pointer to a shared_ptr object.
207  * @param __r New value to store in `*__p`.
208  * @return The original value of `*__p`
209  * @{
210  */
211  template<typename _Tp>
212  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
213  inline shared_ptr<_Tp>
214  atomic_exchange_explicit(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r,
215  memory_order)
216  {
217  _Sp_locker __lock{__p};
218  __p->swap(__r);
219  return __r;
220  }
221 
222  template<typename _Tp>
223  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
224  inline shared_ptr<_Tp>
225  atomic_exchange(shared_ptr<_Tp>* __p, shared_ptr<_Tp> __r)
226  {
227  return std::atomic_exchange_explicit(__p, std::move(__r),
228  memory_order_seq_cst);
229  }
230 
231  template<typename _Tp, _Lock_policy _Lp>
232  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
233  inline __shared_ptr<_Tp, _Lp>
234  atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
235  __shared_ptr<_Tp, _Lp> __r,
236  memory_order)
237  {
238  _Sp_locker __lock{__p};
239  __p->swap(__r);
240  return __r;
241  }
242 
243  template<typename _Tp, _Lock_policy _Lp>
244  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
245  inline __shared_ptr<_Tp, _Lp>
246  atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
247  {
248  return std::atomic_exchange_explicit(__p, std::move(__r),
249  memory_order_seq_cst);
250  }
251  /// @}
252 
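  // Usage sketch (editor's addition, not part of this header): atomically
  // replacing the current value while keeping the old one, e.g. draining a
  // single-element hand-off slot.  Assumes C++11 and <memory>; `g_slot` is
  // hypothetical:
  //
  //   std::shared_ptr<int> g_slot;
  //
  //   std::shared_ptr<int> take()
  //   {
  //     // Leaves g_slot empty and returns whatever it held.
  //     return std::atomic_exchange(&g_slot, std::shared_ptr<int>{});
  //   }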
253  /**
254  * @brief Atomic compare-and-swap for shared_ptr objects.
255  * @param __p A non-null pointer to a shared_ptr object.
256  * @param __v A non-null pointer to a shared_ptr object.
257  * @param __w A non-null pointer to a shared_ptr object.
258  * @return True if `*__p` was equivalent to `*__v`, false otherwise.
259  *
260  * The memory order for failure shall not be `memory_order_release` or
261  * `memory_order_acq_rel`.
262  * @{
263  */
264  template<typename _Tp>
265  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
266  bool
267  atomic_compare_exchange_strong_explicit(shared_ptr<_Tp>* __p,
268  shared_ptr<_Tp>* __v,
269  shared_ptr<_Tp> __w,
270  memory_order,
271  memory_order)
272  {
273  shared_ptr<_Tp> __x; // goes out of scope after __lock
274  _Sp_locker __lock{__p, __v};
 275  owner_less<shared_ptr<_Tp>> __less;
 276  if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
277  {
278  __x = std::move(*__p);
279  *__p = std::move(__w);
280  return true;
281  }
282  __x = std::move(*__v);
283  *__v = *__p;
284  return false;
285  }
286 
287  template<typename _Tp>
288  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
289  inline bool
290  atomic_compare_exchange_strong(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
291  shared_ptr<_Tp> __w)
292  {
293  return std::atomic_compare_exchange_strong_explicit(__p, __v,
294  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
295  }
296 
297  template<typename _Tp>
298  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
299  inline bool
300  atomic_compare_exchange_weak_explicit(shared_ptr<_Tp>* __p,
301  shared_ptr<_Tp>* __v,
302  shared_ptr<_Tp> __w,
303  memory_order __success,
304  memory_order __failure)
305  {
306  return std::atomic_compare_exchange_strong_explicit(__p, __v,
307  std::move(__w), __success, __failure);
308  }
309 
310  template<typename _Tp>
311  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
312  inline bool
313  atomic_compare_exchange_weak(shared_ptr<_Tp>* __p, shared_ptr<_Tp>* __v,
314  shared_ptr<_Tp> __w)
315  {
316  return std::atomic_compare_exchange_weak_explicit(__p, __v,
317  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
318  }
319 
320  template<typename _Tp, _Lock_policy _Lp>
321  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
322  bool
323  atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
324  __shared_ptr<_Tp, _Lp>* __v,
325  __shared_ptr<_Tp, _Lp> __w,
326  memory_order,
327  memory_order)
328  {
329  __shared_ptr<_Tp, _Lp> __x; // goes out of scope after __lock
330  _Sp_locker __lock{__p, __v};
 331  owner_less<__shared_ptr<_Tp, _Lp>> __less;
 332  if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
333  {
334  __x = std::move(*__p);
335  *__p = std::move(__w);
336  return true;
337  }
338  __x = std::move(*__v);
339  *__v = *__p;
340  return false;
341  }
342 
343  template<typename _Tp, _Lock_policy _Lp>
344  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
345  inline bool
346  atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
347  __shared_ptr<_Tp, _Lp>* __v,
348  __shared_ptr<_Tp, _Lp> __w)
349  {
350  return std::atomic_compare_exchange_strong_explicit(__p, __v,
351  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
352  }
353 
354  template<typename _Tp, _Lock_policy _Lp>
355  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
356  inline bool
357  atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
358  __shared_ptr<_Tp, _Lp>* __v,
359  __shared_ptr<_Tp, _Lp> __w,
360  memory_order __success,
361  memory_order __failure)
362  {
363  return std::atomic_compare_exchange_strong_explicit(__p, __v,
364  std::move(__w), __success, __failure);
365  }
366 
367  template<typename _Tp, _Lock_policy _Lp>
368  _GLIBCXX20_DEPRECATED_SUGGEST("std::atomic<std::shared_ptr<T>>")
369  inline bool
370  atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
371  __shared_ptr<_Tp, _Lp>* __v,
372  __shared_ptr<_Tp, _Lp> __w)
373  {
374  return std::atomic_compare_exchange_weak_explicit(__p, __v,
375  std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
376  }
377  /// @}
378 
379  /// @} group pointer_abstractions
380 
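  // Usage sketch (editor's addition, not part of this header): a classic
  // read-copy-update loop built on the deprecated free functions.  Note that
  // the comparison above succeeds only when the two shared_ptrs store the
  // same pointer and also share ownership of the same control block.
  // Assumes C++11 and <memory>; `g_counter` is hypothetical:
  //
  //   std::shared_ptr<int> g_counter = std::make_shared<int>(0);
  //
  //   void increment()
  //   {
  //     std::shared_ptr<int> expected = std::atomic_load(&g_counter);
  //     std::shared_ptr<int> desired;
  //     do
  //       desired = std::make_shared<int>(*expected + 1);
  //     while (!std::atomic_compare_exchange_weak(&g_counter, &expected,
  //                                               desired));
  //     // On failure `expected` has been refreshed from g_counter, so the
  //     // next iteration recomputes `desired` from the current value.
  //   }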
381 #ifdef __glibcxx_atomic_shared_ptr // C++ >= 20 && HOSTED
382  template<typename _Tp>
383  struct atomic;
384 
385  /**
386  * @addtogroup pointer_abstractions
387  * @relates shared_ptr
388  * @{
389  */
390 
391  template<typename _Tp>
392  class _Sp_atomic
393  {
394  using value_type = _Tp;
395 
396  friend struct atomic<_Tp>;
397 
398  // An atomic version of __shared_count<> and __weak_count<>.
399  // Stores a _Sp_counted_base<>* but uses the LSB as a lock.
400  struct _Atomic_count
401  {
402  // Either __shared_count<> or __weak_count<>
403  using __count_type = decltype(_Tp::_M_refcount);
404  using uintptr_t = __UINTPTR_TYPE__;
405 
406  // _Sp_counted_base<>*
407  using pointer = decltype(__count_type::_M_pi);
408 
409  // Ensure we can use the LSB as the lock bit.
410  static_assert(alignof(remove_pointer_t<pointer>) > 1);
411 
412  constexpr _Atomic_count() noexcept = default;
413 
414  explicit
415  _Atomic_count(__count_type&& __c) noexcept
416  : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
417  {
418  __c._M_pi = nullptr;
419  }
420 
421  ~_Atomic_count()
422  {
423  auto __val = _M_val.load(memory_order_relaxed);
424  _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
425  __glibcxx_assert(!(__val & _S_lock_bit));
426  if (auto __pi = reinterpret_cast<pointer>(__val))
427  {
428  if constexpr (__is_shared_ptr<_Tp>)
429  __pi->_M_release();
430  else
431  __pi->_M_weak_release();
432  }
433  }
434 
435  _Atomic_count(const _Atomic_count&) = delete;
436  _Atomic_count& operator=(const _Atomic_count&) = delete;
437 
438  // Precondition: Caller does not hold lock!
439  // Returns the raw pointer value without the lock bit set.
440  pointer
441  lock(memory_order __o) const noexcept
442  {
443  // To acquire the lock we flip the LSB from 0 to 1.
444 
445  auto __current = _M_val.load(memory_order_relaxed);
446  while (__current & _S_lock_bit)
447  {
448 #if __glibcxx_atomic_wait
449  __detail::__thread_relax();
450 #endif
451  __current = _M_val.load(memory_order_relaxed);
452  }
453 
454  _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
455 
456  while (!_M_val.compare_exchange_strong(__current,
457  __current | _S_lock_bit,
458  __o,
459  memory_order_relaxed))
460  {
461  _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
462 #if __glibcxx_atomic_wait
463  __detail::__thread_relax();
464 #endif
465  __current = __current & ~_S_lock_bit;
466  _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
467  }
468  _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
469  return reinterpret_cast<pointer>(__current);
470  }
471 
472  // Precondition: caller holds lock!
473  void
474  unlock(memory_order __o) const noexcept
475  {
476  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
477  _M_val.fetch_sub(1, __o);
478  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
479  }
480 
481  // Swaps the values of *this and __c, and unlocks *this.
482  // Precondition: caller holds lock!
483  void
484  _M_swap_unlock(__count_type& __c, memory_order __o) noexcept
485  {
486  if (__o != memory_order_seq_cst)
487  __o = memory_order_release;
488  auto __x = reinterpret_cast<uintptr_t>(__c._M_pi);
489  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
490  __x = _M_val.exchange(__x, __o);
491  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
492  __c._M_pi = reinterpret_cast<pointer>(__x & ~_S_lock_bit);
493  }
494 
495 #if __glibcxx_atomic_wait
496  // Precondition: caller holds lock!
497  void
498  _M_wait_unlock(memory_order __o) const noexcept
499  {
500  _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
501  auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
502  _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
503  _M_val.wait(__v & ~_S_lock_bit, __o);
504  }
505 
506  void
507  notify_one() noexcept
508  {
509  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
510  _M_val.notify_one();
511  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
512  }
513 
514  void
515  notify_all() noexcept
516  {
517  _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
518  _M_val.notify_all();
519  _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
520  }
521 #endif
522 
523  private:
524  mutable __atomic_base<uintptr_t> _M_val{0};
525  static constexpr uintptr_t _S_lock_bit{1};
526  };
527 
528  typename _Tp::element_type* _M_ptr = nullptr;
529  _Atomic_count _M_refcount;
530 
531  static typename _Atomic_count::pointer
532  _S_add_ref(typename _Atomic_count::pointer __p)
533  {
534  if (__p)
535  {
536  if constexpr (__is_shared_ptr<_Tp>)
537  __p->_M_add_ref_copy();
538  else
539  __p->_M_weak_add_ref();
540  }
541  return __p;
542  }
543 
544  constexpr _Sp_atomic() noexcept = default;
545 
546  explicit
547  _Sp_atomic(value_type __r) noexcept
548  : _M_ptr(__r._M_ptr), _M_refcount(std::move(__r._M_refcount))
549  { }
550 
551  ~_Sp_atomic() = default;
552 
553  _Sp_atomic(const _Sp_atomic&) = delete;
554  void operator=(const _Sp_atomic&) = delete;
555 
556  value_type
557  load(memory_order __o) const noexcept
558  {
559  __glibcxx_assert(__o != memory_order_release
560  && __o != memory_order_acq_rel);
561  // Ensure that the correct value of _M_ptr is visible after locking,
562  // by upgrading relaxed or consume to acquire.
563  if (__o != memory_order_seq_cst)
564  __o = memory_order_acquire;
565 
566  value_type __ret;
567  auto __pi = _M_refcount.lock(__o);
568  __ret._M_ptr = _M_ptr;
569  __ret._M_refcount._M_pi = _S_add_ref(__pi);
570  _M_refcount.unlock(memory_order_relaxed);
571  return __ret;
572  }
573 
574  void
575  swap(value_type& __r, memory_order __o) noexcept
576  {
577  _M_refcount.lock(memory_order_acquire);
578  std::swap(_M_ptr, __r._M_ptr);
579  _M_refcount._M_swap_unlock(__r._M_refcount, __o);
580  }
581 
582  bool
583  compare_exchange_strong(value_type& __expected, value_type __desired,
584  memory_order __o, memory_order __o2) noexcept
585  {
586  bool __result = true;
587  auto __pi = _M_refcount.lock(memory_order_acquire);
588  if (_M_ptr == __expected._M_ptr
589  && __pi == __expected._M_refcount._M_pi)
590  {
591  _M_ptr = __desired._M_ptr;
592  _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
593  }
594  else
595  {
596  _Tp __sink = std::move(__expected);
597  __expected._M_ptr = _M_ptr;
598  __expected._M_refcount._M_pi = _S_add_ref(__pi);
599  _M_refcount.unlock(__o2);
600  __result = false;
601  }
602  return __result;
603  }
604 
605 #if __glibcxx_atomic_wait
606  void
607  wait(value_type __old, memory_order __o) const noexcept
608  {
609  auto __pi = _M_refcount.lock(memory_order_acquire);
610  if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
611  _M_refcount._M_wait_unlock(__o);
612  else
613  _M_refcount.unlock(memory_order_relaxed);
614  }
615 
616  void
617  notify_one() noexcept
618  {
619  _M_refcount.notify_one();
620  }
621 
622  void
623  notify_all() noexcept
624  {
625  _M_refcount.notify_all();
626  }
627 #endif
628  };
629 
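  // Technique sketch (editor's addition, not part of this header).  The
  // _Atomic_count above packs a control-block pointer and a one-bit spinlock
  // into a single atomic word, relying on the static_assert that the pointee
  // type is aligned to more than one byte, so the LSB of a valid pointer is
  // always zero.  A minimal stand-alone illustration of the same idea,
  // assuming C++17, <atomic> and <cstdint>:
  //
  //   template<typename T>
  //   class tagged_ptr_spinlock
  //   {
  //     std::atomic<std::uintptr_t> val{0};
  //     static constexpr std::uintptr_t lock_bit = 1;
  //     static_assert(alignof(T) > 1, "LSB must be free to hold the lock");
  //
  //   public:
  //     T* lock() noexcept               // spin until the LSB goes 0 -> 1
  //     {
  //       auto cur = val.load(std::memory_order_relaxed);
  //       for (;;)
  //       {
  //         cur &= ~lock_bit;            // only attempt from the unlocked state
  //         if (val.compare_exchange_weak(cur, cur | lock_bit,
  //                                       std::memory_order_acquire,
  //                                       std::memory_order_relaxed))
  //           return reinterpret_cast<T*>(cur);
  //       }
  //     }
  //
  //     void unlock() noexcept           // clear the LSB again
  //     { val.fetch_sub(lock_bit, std::memory_order_release); }
  //   };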
630  template<typename _Tp>
631  struct atomic<shared_ptr<_Tp>>
632  {
633  public:
634  using value_type = shared_ptr<_Tp>;
635 
636  static constexpr bool is_always_lock_free = false;
637 
638  bool
639  is_lock_free() const noexcept
640  { return false; }
641 
642  constexpr atomic() noexcept = default;
643 
644  // _GLIBCXX_RESOLVE_LIB_DEFECTS
645  // 3661. constinit atomic<shared_ptr<T>> a(nullptr); should work
646  constexpr atomic(nullptr_t) noexcept : atomic() { }
647 
648  atomic(shared_ptr<_Tp> __r) noexcept
649  : _M_impl(std::move(__r))
650  { }
651 
652  atomic(const atomic&) = delete;
653  void operator=(const atomic&) = delete;
654 
655  shared_ptr<_Tp>
656  load(memory_order __o = memory_order_seq_cst) const noexcept
657  { return _M_impl.load(__o); }
658 
659  operator shared_ptr<_Tp>() const noexcept
660  { return _M_impl.load(memory_order_seq_cst); }
661 
662  void
663  store(shared_ptr<_Tp> __desired,
664  memory_order __o = memory_order_seq_cst) noexcept
665  { _M_impl.swap(__desired, __o); }
666 
667  void
668  operator=(shared_ptr<_Tp> __desired) noexcept
669  { _M_impl.swap(__desired, memory_order_seq_cst); }
670 
671  // _GLIBCXX_RESOLVE_LIB_DEFECTS
672  // 3893. LWG 3661 broke atomic<shared_ptr<T>> a; a = nullptr;
673  void
674  operator=(nullptr_t) noexcept
675  { store(nullptr); }
676 
677  shared_ptr<_Tp>
678  exchange(shared_ptr<_Tp> __desired,
679  memory_order __o = memory_order_seq_cst) noexcept
680  {
681  _M_impl.swap(__desired, __o);
682  return __desired;
683  }
684 
685  bool
686  compare_exchange_strong(shared_ptr<_Tp>& __expected,
687  shared_ptr<_Tp> __desired,
688  memory_order __o, memory_order __o2) noexcept
689  {
690  return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
691  }
692 
693  bool
694  compare_exchange_strong(value_type& __expected, value_type __desired,
695  memory_order __o = memory_order_seq_cst) noexcept
696  {
697  memory_order __o2;
698  switch (__o)
699  {
700  case memory_order_acq_rel:
701  __o2 = memory_order_acquire;
702  break;
703  case memory_order_release:
704  __o2 = memory_order_relaxed;
705  break;
706  default:
707  __o2 = __o;
708  }
709  return compare_exchange_strong(__expected, std::move(__desired),
710  __o, __o2);
711  }
712 
713  bool
714  compare_exchange_weak(value_type& __expected, value_type __desired,
715  memory_order __o, memory_order __o2) noexcept
716  {
717  return compare_exchange_strong(__expected, std::move(__desired),
718  __o, __o2);
719  }
720 
721  bool
722  compare_exchange_weak(value_type& __expected, value_type __desired,
723  memory_order __o = memory_order_seq_cst) noexcept
724  {
725  return compare_exchange_strong(__expected, std::move(__desired), __o);
726  }
727 
728 #if __glibcxx_atomic_wait
729  void
730  wait(value_type __old,
731  memory_order __o = memory_order_seq_cst) const noexcept
732  {
733  _M_impl.wait(std::move(__old), __o);
734  }
735 
736  void
737  notify_one() noexcept
738  {
739  _M_impl.notify_one();
740  }
741 
742  void
743  notify_all() noexcept
744  {
745  _M_impl.notify_all();
746  }
747 #endif
748 
749  private:
750  _Sp_atomic<shared_ptr<_Tp>> _M_impl;
751  };
752 
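  // Usage sketch (editor's addition, not part of this header): the C++20
  // replacement for the deprecated free functions above.  Assumes <memory>
  // with __glibcxx_atomic_shared_ptr available; names are hypothetical:
  //
  //   std::atomic<std::shared_ptr<int>> g_value;   // default: null shared_ptr
  //
  //   void publish(int i)
  //   { g_value.store(std::make_shared<int>(i)); }     // seq_cst by default
  //
  //   int read_or(int fallback)
  //   {
  //     if (auto p = g_value.load())
  //       return *p;
  //     return fallback;
  //   }
  //
  //   // Where __glibcxx_atomic_wait is defined, waiting also works:
  //   //   g_value.wait(old_snapshot);  g_value.notify_one();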
753  template<typename _Tp>
754  struct atomic<weak_ptr<_Tp>>
755  {
756  public:
757  using value_type = weak_ptr<_Tp>;
758 
759  static constexpr bool is_always_lock_free = false;
760 
761  bool
762  is_lock_free() const noexcept
763  { return false; }
764 
765  constexpr atomic() noexcept = default;
766 
767  atomic(weak_ptr<_Tp> __r) noexcept
768  : _M_impl(move(__r))
769  { }
770 
771  atomic(const atomic&) = delete;
772  void operator=(const atomic&) = delete;
773 
774  weak_ptr<_Tp>
775  load(memory_order __o = memory_order_seq_cst) const noexcept
776  { return _M_impl.load(__o); }
777 
778  operator weak_ptr<_Tp>() const noexcept
779  { return _M_impl.load(memory_order_seq_cst); }
780 
781  void
782  store(weak_ptr<_Tp> __desired,
783  memory_order __o = memory_order_seq_cst) noexcept
784  { _M_impl.swap(__desired, __o); }
785 
786  void
787  operator=(weak_ptr<_Tp> __desired) noexcept
788  { _M_impl.swap(__desired, memory_order_seq_cst); }
789 
790  weak_ptr<_Tp>
791  exchange(weak_ptr<_Tp> __desired,
792  memory_order __o = memory_order_seq_cst) noexcept
793  {
794  _M_impl.swap(__desired, __o);
795  return __desired;
796  }
797 
798  bool
799  compare_exchange_strong(weak_ptr<_Tp>& __expected,
800  weak_ptr<_Tp> __desired,
801  memory_order __o, memory_order __o2) noexcept
802  {
803  return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
804  }
805 
806  bool
807  compare_exchange_strong(value_type& __expected, value_type __desired,
808  memory_order __o = memory_order_seq_cst) noexcept
809  {
810  memory_order __o2;
811  switch (__o)
812  {
813  case memory_order_acq_rel:
814  __o2 = memory_order_acquire;
815  break;
816  case memory_order_release:
817  __o2 = memory_order_relaxed;
818  break;
819  default:
820  __o2 = __o;
821  }
822  return compare_exchange_strong(__expected, std::move(__desired),
823  __o, __o2);
824  }
825 
826  bool
827  compare_exchange_weak(value_type& __expected, value_type __desired,
828  memory_order __o, memory_order __o2) noexcept
829  {
830  return compare_exchange_strong(__expected, std::move(__desired),
831  __o, __o2);
832  }
833 
834  bool
835  compare_exchange_weak(value_type& __expected, value_type __desired,
836  memory_order __o = memory_order_seq_cst) noexcept
837  {
838  return compare_exchange_strong(__expected, std::move(__desired), __o);
839  }
840 
841 #if __glibcxx_atomic_wait
842  void
843  wait(value_type __old,
844  memory_order __o = memory_order_seq_cst) const noexcept
845  {
846  _M_impl.wait(std::move(__old), __o);
847  }
848 
849  void
850  notify_one() noexcept
851  {
852  _M_impl.notify_one();
853  }
854 
855  void
856  notify_all() noexcept
857  {
858  _M_impl.notify_all();
859  }
860 #endif
861 
862  private:
863  _Sp_atomic<weak_ptr<_Tp>> _M_impl;
864  };
865  /// @} group pointer_abstractions
866 #endif // C++20
867 
868 _GLIBCXX_END_NAMESPACE_VERSION
869 } // namespace
870 
871 #endif // _SHARED_PTR_ATOMIC_H
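// Usage sketch (editor's addition, not part of this header):
// std::atomic<std::weak_ptr<T>> suits a shared cache slot that must not keep
// its object alive.  Assumes C++20 and <memory>; names are hypothetical:
//
//   std::atomic<std::weak_ptr<std::string>> g_cache;
//
//   std::shared_ptr<std::string> get_cached()
//   {
//     if (auto sp = g_cache.load().lock())     // atomically read, then promote
//       return sp;
//     auto sp = std::make_shared<std::string>("expensive to build");
//     g_cache.store(sp);                       // publish for other threads
//     return sp;
//   }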