libstdc++
<generator>
Source listing of this header file.
1 // <generator> -*- C++ -*-
2 
3 // Copyright (C) 2023-2025 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file include/generator
26  * This is a Standard C++ Library header.
27  */
28 
29 #ifndef _GLIBCXX_GENERATOR
30 #define _GLIBCXX_GENERATOR
31 
32 #include <ranges>
33 #ifdef _GLIBCXX_SYSHDR
34 #pragma GCC system_header
35 #endif
36 
37 #include <bits/c++config.h>
38 
39 #define __glibcxx_want_generator
40 #include <bits/version.h>
41 
42 #ifdef __cpp_lib_generator // C++ >= 23 && __glibcxx_coroutine
43 #include <new>
44 #include <bits/move.h>
45 #include <bits/ranges_util.h>
46 #include <bits/elements_of.h>
47 #include <bits/uses_allocator.h>
48 #include <bits/exception_ptr.h>
49 #include <cstddef>
50 #include <cstdint>
51 #include <cstring>
52 #include <coroutine>
53 
54 #include <type_traits>
55 #include <variant>
56 #include <concepts>
57 
58 #if _GLIBCXX_HOSTED
59 # include <bits/memory_resource.h>
60 #endif // HOSTED
61 
62 namespace std _GLIBCXX_VISIBILITY(default)
63 {
64 _GLIBCXX_BEGIN_NAMESPACE_VERSION
65 
66  /**
67  * @defgroup generator_coros Range generator coroutines
68  * @addtogroup ranges
69  * @since C++23
70  * @{
71  */
72 
  /** @brief A range specified using a yielding coroutine.
   *
   * `std::generator` is a utility class for defining ranges using coroutines
   * that yield elements as a range.  Generator coroutines are synchronous.
   *
   * @tparam _Ref    The yielded reference type (or the value type, when
   *                 `_Val` is `void`).
   * @tparam _Val    The value type, or `void` to derive it from `_Ref`.
   * @tparam _Alloc  The allocator type, or `void` for type-erased allocation.
   *
   * @headerfile generator
   * @since C++23
   */
  template<typename _Ref, typename _Val = void, typename _Alloc = void>
    class generator;
83 
84  /// @cond undocumented
85  namespace __gen
86  {
    /// _Reference type for a generator whose reference (first argument) and
    /// value (second argument) types are _Ref and _Val.
    /// When _Val is void the reference type is _Ref&&, otherwise _Ref itself.
    template<typename _Ref, typename _Val>
      using _Reference_t = __conditional_t<is_void_v<_Val>,
                                           _Ref&&, _Ref>;

    /// Type yielded by a generator whose _Reference type is _Reference.
    /// Non-reference _Reference types are yielded by const lvalue reference.
    template<typename _Reference>
      using _Yield_t = __conditional_t<is_reference_v<_Reference>,
                                       _Reference,
                                       const _Reference&>;

    /// _Yield_t composed with _Reference_t: the yielded type of a generator
    /// with reference type _Ref and value type _Val.
    template<typename _Ref, typename _Val>
      using _Yield2_t = _Yield_t<_Reference_t<_Ref, _Val>>;
102 
    /// True iff the type is a specialization of std::generator.
    template<typename> constexpr bool __is_generator = false;
    template<typename _Val, typename _Ref, typename _Alloc>
      constexpr bool __is_generator<std::generator<_Val, _Ref, _Alloc>> = true;
106 
    /// Allocator and value type erased generator promise type.
    /// \tparam _Yielded The corresponding generator's yielded type.
    template<typename _Yielded>
      class _Promise_erased
      {
        static_assert(is_reference_v<_Yielded>);
        using _Yielded_deref = remove_reference_t<_Yielded>;
        using _Yielded_decvref = remove_cvref_t<_Yielded>;
        using _ValuePtr = add_pointer_t<_Yielded>;
        using _Coro_handle = std::coroutine_handle<_Promise_erased>;

        template<typename, typename, typename>
          friend class std::generator;

        template<typename _Gen>
          struct _Recursive_awaiter;
        template<typename>
          friend struct _Recursive_awaiter;
        struct _Copy_awaiter;
        struct _Subyield_state;
        struct _Final_awaiter;
      public:
        // Generators are lazy: suspend before running any body code.
        suspend_always
        initial_suspend() const noexcept
        { return {}; }

        // co_yield of a value convertible to the yielded reference type:
        // publish the address of the (suspended-frame-owned) value.
        suspend_always
        yield_value(_Yielded __val) noexcept
        {
          _M_bottom_value() = ::std::addressof(__val);
          return {};
        }

        // co_yield of an lvalue when the yielded type is an rvalue
        // reference: materialize a copy inside the awaiter and yield a
        // reference to that copy.
        auto
        yield_value(const _Yielded_deref& __val)
        noexcept (is_nothrow_constructible_v<_Yielded_decvref,
                                             const _Yielded_deref&>)
        requires (is_rvalue_reference_v<_Yielded>
                  && constructible_from<_Yielded_decvref,
                                        const _Yielded_deref&>)
        { return _Copy_awaiter(_Yielded_decvref(__val), _M_bottom_value()); }

        // co_yield ranges::elements_of(nested-generator): push the nested
        // generator onto the coroutine stack and run it to exhaustion.
        template<typename _R2, typename _V2, typename _A2, typename _U2>
          requires std::same_as<_Yield2_t<_R2, _V2>, _Yielded>
          auto
          yield_value(ranges::elements_of<generator<_R2, _V2, _A2>&&, _U2> __r)
          noexcept
          { return _Recursive_awaiter { std::move(__r.range) }; }

        // co_yield ranges::elements_of(arbitrary-input-range): wrap the
        // range in an ad-hoc generator coroutine, then recurse into it.
        template<ranges::input_range _R, typename _Alloc>
          requires convertible_to<ranges::range_reference_t<_R>, _Yielded>
          auto
          yield_value(ranges::elements_of<_R, _Alloc> __r)
          {
            auto __n = [] (allocator_arg_t, _Alloc,
                           ranges::iterator_t<_R> __i,
                           ranges::sentinel_t<_R> __s)
              -> generator<_Yielded, ranges::range_value_t<_R>, _Alloc> {
              for (; __i != __s; ++__i)
                co_yield static_cast<_Yielded>(*__i);
            };
            return yield_value(ranges::elements_of(__n(allocator_arg,
                                                       __r.allocator,
                                                       ranges::begin(__r.range),
                                                       ranges::end(__r.range))));
          }

        // On completion, pop this frame and transfer to the parent (if any).
        _Final_awaiter
        final_suspend() noexcept
        { return {}; }

        void
        unhandled_exception()
        {
          // To get to this point, this coroutine must have been active. In that
          // case, it must be the top of the stack. The current coroutine is
          // the sole entry of the stack iff it is both the top and the bottom. As
          // it is the top implicitly in this context it will be the sole entry iff
          // it is the bottom.
          if (_M_nest._M_is_bottom())
            throw;
          else
            this->_M_except = std::current_exception();
        }

        // Generators never co_await anything else.
        void await_transform() = delete;
        void return_void() const noexcept {}

      private:
        // The yielded-value slot, which always lives on the bottom frame
        // of the generator stack.
        _ValuePtr&
        _M_bottom_value() noexcept
        { return _M_nest._M_bottom_value(*this); }

        _ValuePtr&
        _M_value() noexcept
        { return _M_nest._M_value(*this); }

        _Subyield_state _M_nest;       // nested-generator stack bookkeeping
        std::exception_ptr _M_except;  // exception captured by a nested frame
      };
208 
  // Bookkeeping for the stack of nested generators created by yielding
  // ranges::elements_of(generator).  Each promise holds one of these;
  // together they form an intrusive stack of coroutine frames with O(1)
  // access to the top and bottom entries.
  template<typename _Yielded>
    struct _Promise_erased<_Yielded>::_Subyield_state
    {
      // State of a frame that was yielded from another generator.
      struct _Frame
      {
        _Coro_handle _M_bottom;  // the bottom frame of this stack
        _Coro_handle _M_parent;  // the frame this one was yielded from
      };

      // State of a bottom (root) frame.
      struct _Bottom_frame
      {
        _Coro_handle _M_top;           // currently-active coroutine
        _ValuePtr _M_value = nullptr;  // slot holding the yielded value
      };

      std::variant<
        _Bottom_frame,
        _Frame
      > _M_stack;

      bool
      _M_is_bottom() const noexcept
      { return !std::holds_alternative<_Frame>(this->_M_stack); }

      // Top of the stack this frame belongs to; the handle is stored on
      // the bottom frame, so non-bottom frames forward through it.
      _Coro_handle&
      _M_top() noexcept
      {
        if (auto __f = std::get_if<_Frame>(&this->_M_stack))
          return __f->_M_bottom.promise()._M_nest._M_top();

        auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
        __glibcxx_assert(__bf);
        return __bf->_M_top;
      }

      // Make __subyield the new top of the stack currently topped by
      // __current.
      void
      _M_push(_Coro_handle __current, _Coro_handle __subyield) noexcept
      {
        __glibcxx_assert(&__current.promise()._M_nest == this);
        __glibcxx_assert(this->_M_top() == __current);

        __subyield.promise()._M_nest._M_jump_in(__current, __subyield);
      }

      // Pop this frame off the stack, returning the coroutine that should
      // be resumed next (the parent, or a noop coroutine at the bottom).
      std::coroutine_handle<>
      _M_pop() noexcept
      {
        if (auto __f = std::get_if<_Frame>(&this->_M_stack))
          {
            // We aren't a bottom coroutine. Restore the parent to the top
            // and resume.
            auto __p = this->_M_top() = __f->_M_parent;
            return __p;
          }
        else
          // Otherwise, there's nothing to resume.
          return std::noop_coroutine();
      }

      // Attach this (necessarily bottom) frame beneath __rest, turning it
      // into a _Frame that records the stack's bottom and its parent.
      void
      _M_jump_in(_Coro_handle __rest, _Coro_handle __new) noexcept
      {
        __glibcxx_assert(&__new.promise()._M_nest == this);
        __glibcxx_assert(this->_M_is_bottom());
        // We're bottom. We're also top if top is unset (note that this is
        // not true if something was added to the coro stack and then popped,
        // but in that case we can't possibly be yielded from, as it would
        // require rerunning begin()).
        __glibcxx_assert(!this->_M_top());

        auto& __rn = __rest.promise()._M_nest;
        __rn._M_top() = __new;

        // Presume we're the second frame...
        auto __bott = __rest;
        if (auto __f = std::get_if<_Frame>(&__rn._M_stack))
          // But, if we aren't, get the actual bottom. We're only the second
          // frame if our parent is the bottom frame, i.e. it doesn't have a
          // _Frame member.
          __bott = __f->_M_bottom;

        this->_M_stack = _Frame {
          ._M_bottom = __bott,
          ._M_parent = __rest
        };
      }

      // Value slot of the stack this frame belongs to; lives on the
      // bottom frame, so non-bottom frames forward through it.
      _ValuePtr&
      _M_bottom_value(_Promise_erased& __current) noexcept
      {
        __glibcxx_assert(&__current._M_nest == this);
        if (auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack))
          return __bf->_M_value;
        auto __f = std::get_if<_Frame>(&this->_M_stack);
        __glibcxx_assert(__f);
        auto& __p = __f->_M_bottom.promise();
        return __p._M_nest._M_value(__p);
      }

      // Direct access to the value slot; only valid on a bottom frame.
      _ValuePtr&
      _M_value(_Promise_erased& __current) noexcept
      {
        __glibcxx_assert(&__current._M_nest == this);
        auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
        __glibcxx_assert(__bf);
        return __bf->_M_value;
      }
    };
317 
318  template<typename _Yielded>
319  struct _Promise_erased<_Yielded>::_Final_awaiter
320  {
321  bool await_ready() noexcept
322  { return false; }
323 
324  template<typename _Promise>
325  auto await_suspend(std::coroutine_handle<_Promise> __c) noexcept
326  {
327 #ifdef __glibcxx_is_pointer_interconvertible
328  static_assert(is_pointer_interconvertible_base_of_v<
329  _Promise_erased, _Promise>);
330 #endif
331 
332  auto& __n = __c.promise()._M_nest;
333  return __n._M_pop();
334  }
335 
336  void await_resume() noexcept {}
337  };
338 
  // Awaiter that stores a copy of a yielded lvalue and publishes the
  // address of that copy as the generator's current value.  Constructed
  // by aggregate initialization in yield_value, so member order matters.
  template<typename _Yielded>
    struct _Promise_erased<_Yielded>::_Copy_awaiter
    {
      _Yielded_decvref _M_value;   // the materialized copy being yielded
      _ValuePtr& _M_bottom_value;  // value slot on the stack's bottom frame

      constexpr bool await_ready() noexcept
      { return false; }

      // Publish the copy's address; the awaiter (and so the copy) lives
      // in the suspended frame until the consumer advances.
      template<typename _Promise>
        void await_suspend(std::coroutine_handle<_Promise>) noexcept
        {
#ifdef __glibcxx_is_pointer_interconvertible
          static_assert(is_pointer_interconvertible_base_of_v<
                          _Promise_erased, _Promise>);
#endif
          _M_bottom_value = ::std::addressof(_M_value);
        }

      constexpr void
      await_resume() const noexcept
      {}
    };
362 
  // Awaiter for co_yield ranges::elements_of(nested-generator): pushes
  // the nested generator onto the active generator stack and transfers
  // control to it until it is exhausted.
  template<typename _Yielded>
    template<typename _Gen>
      struct _Promise_erased<_Yielded>::_Recursive_awaiter
      {
        _Gen _M_gen;  // owns the nested generator for the await's duration
        static_assert(__is_generator<_Gen>);
        static_assert(std::same_as<typename _Gen::yielded, _Yielded>);

        // Take ownership and mark the nested generator as started, so
        // begin() cannot be called on it again.
        _Recursive_awaiter(_Gen __gen) noexcept
        : _M_gen(std::move(__gen))
        { this->_M_gen._M_mark_as_started(); }

        constexpr bool
        await_ready() const noexcept
        { return false; }

        // Push the nested coroutine onto the stack and symmetrically
        // transfer control to it.
        template<typename _Promise>
          std::coroutine_handle<>
          await_suspend(std::coroutine_handle<_Promise> __p) noexcept
          {
#ifdef __glibcxx_is_pointer_interconvertible
            static_assert(is_pointer_interconvertible_base_of_v<
                            _Promise_erased, _Promise>);
#endif

            auto __c = _Coro_handle::from_address(__p.address());
            auto __t = _Coro_handle::from_address(this->_M_gen._M_coro.address());
            __p.promise()._M_nest._M_push(__c, __t);
            return __t;
          }

        // Propagate any exception captured by the nested generator.
        void await_resume()
        {
          if (auto __e = _M_gen._M_coro.promise()._M_except)
            std::rethrow_exception(__e);
        }
      };
401 
402  struct _Alloc_block
403  {
404  alignas(__STDCPP_DEFAULT_NEW_ALIGNMENT__)
405  char _M_data[__STDCPP_DEFAULT_NEW_ALIGNMENT__];
406 
407  static auto
408  _M_cnt(std::size_t __sz) noexcept
409  {
410  auto __blksz = sizeof(_Alloc_block);
411  return (__sz + __blksz - 1) / __blksz;
412  }
413  };
414 
  /// An allocator that is default-initializable and whose instances all
  /// compare equal, so no copy of it needs to be stored with the frame.
  template<typename _All>
    concept _Stateless_alloc = (allocator_traits<_All>::is_always_equal::value
                                && default_initializable<_All>);
418 
  // Frame allocation for a generator with a statically known allocator
  // type.  Stateless allocators cost nothing extra; stateful allocators
  // are stored after the coroutine frame so operator delete can find them.
  template<typename _Allocator>
    class _Promise_alloc
    {
      using _Rebound = __alloc_rebind<_Allocator, _Alloc_block>;
      using _Rebound_ATr = allocator_traits<_Rebound>;
      static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                    "Must use allocators for true pointers with generators");

      // Address of the stored allocator: just past the frame (__fn is the
      // frame address, __fsz its size), rounded up to the allocator's
      // alignment.
      static auto
      _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
      {
        auto __an = __fn + __fsz;
        auto __ba = alignof(_Rebound);
        return reinterpret_cast<_Rebound*>(((__an + __ba - 1) / __ba) * __ba);
      }

      // Bytes needed for a frame of __csz bytes plus the stored allocator.
      static auto
      _M_alloc_size(std::size_t __csz) noexcept
      {
        auto __ba = alignof(_Rebound);
        // Our desired layout is placing the coroutine frame, then pad out to
        // align, then place the allocator. The total size of that is the
        // size of the coroutine frame, plus up to __ba bytes, plus the size
        // of the allocator.
        return __csz + __ba + sizeof(_Rebound);
      }

      static void*
      _M_allocate(_Rebound __b, std::size_t __csz)
      {
        if constexpr (_Stateless_alloc<_Rebound>)
          // Only need room for the coroutine.
          return __b.allocate(_Alloc_block::_M_cnt(__csz));
        else
          {
            // Allocate frame + allocator, then move the allocator into its
            // slot behind the frame.
            auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
            auto __f = __b.allocate(__nsz);
            auto __fn = reinterpret_cast<std::uintptr_t>(__f);
            auto __an = _M_alloc_address(__fn, __csz);
            ::new (__an) _Rebound(std::move(__b));
            return __f;
          }
      }

    public:
      void*
      operator new(std::size_t __sz)
      requires default_initializable<_Rebound> // _Allocator is non-void
      { return _M_allocate({}, __sz); }

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // 3900. The allocator_arg_t overloads of promise_type::operator new
      // should not be constrained
      template<typename _Alloc, typename... _Args>
        void*
        operator new(std::size_t __sz,
                     allocator_arg_t, const _Alloc& __a,
                     const _Args&...)
        {
          static_assert(convertible_to<const _Alloc&, _Allocator>,
                        "the allocator argument to the coroutine must be "
                        "convertible to the generator's allocator type");
          return _M_allocate(_Rebound(_Allocator(__a)), __sz);
        }

      // Same, for member coroutines: the implicit object parameter comes
      // before allocator_arg.
      template<typename _This, typename _Alloc, typename... _Args>
        void*
        operator new(std::size_t __sz,
                     const _This&,
                     allocator_arg_t, const _Alloc& __a,
                     const _Args&...)
        {
          static_assert(convertible_to<const _Alloc&, _Allocator>,
                        "the allocator argument to the coroutine must be "
                        "convertible to the generator's allocator type");
          return _M_allocate(_Rebound(_Allocator(__a)), __sz);
        }

      void
      operator delete(void* __ptr, std::size_t __csz) noexcept
      {
        if constexpr (_Stateless_alloc<_Rebound>)
          {
            _Rebound __b;
            return __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr),
                                  _Alloc_block::_M_cnt(__csz));
          }
        else
          {
            // Recover the stored allocator, destroy the stored copy, then
            // use the recovered copy to free the whole block.
            auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
            auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
            auto __an = _M_alloc_address(__fn, __csz);
            _Rebound __b(std::move(*__an));
            __an->~_Rebound();
            __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nsz);
          }
      }
    };
517 
  // Frame allocation for a type-erased allocator (generator's _Alloc is
  // void): the frame is followed by a deallocation-function pointer and,
  // for stateful allocators, a copy of the allocator itself.
  template<>
    class _Promise_alloc<void>
    {
      using _Dealloc_fn = void (*)(void*, std::size_t);

      // Address of the stored deallocation-function pointer: just past
      // the frame, rounded up to the pointer's alignment.
      static auto
      _M_dealloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
      {
        auto __an = __fn + __fsz;
        auto __ba = alignof(_Dealloc_fn);
        auto __aligned = ((__an + __ba - 1) / __ba) * __ba;
        return reinterpret_cast<_Dealloc_fn*>(__aligned);
      }

      // Address of the stored allocator: after the deallocation-function
      // pointer, rounded up to the allocator's alignment.  Only stateful
      // allocators are stored.
      template<typename _Rebound>
        static auto
        _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
        requires (!_Stateless_alloc<_Rebound>)
        {
          auto __ba = alignof(_Rebound);
          auto __da = _M_dealloc_address(__fn, __fsz);
          auto __aan = reinterpret_cast<std::uintptr_t>(__da);
          __aan += sizeof(_Dealloc_fn);
          auto __aligned = ((__aan + __ba - 1) / __ba) * __ba;
          return reinterpret_cast<_Rebound*>(__aligned);
        }

      // Bytes needed for a frame of __csz bytes plus the deallocator
      // pointer and, when stateful, the allocator copy.
      template<typename _Rebound>
        static auto
        _M_alloc_size(std::size_t __csz) noexcept
        {
          // This time, we want the coroutine frame, then the deallocator
          // pointer, then the allocator itself, if any.
          std::size_t __aa = 0;
          std::size_t __as = 0;
          if constexpr (!std::same_as<_Rebound, void>)
            {
              __aa = alignof(_Rebound);
              __as = sizeof(_Rebound);
            }
          auto __ba = __aa + alignof(_Dealloc_fn);
          return __csz + __ba + __as + sizeof(_Dealloc_fn);
        }

      // The type-erased deallocation routine whose address is stored
      // next to the frame at allocation time.
      template<typename _Rebound>
        static void
        _M_deallocator(void* __ptr, std::size_t __csz) noexcept
        {
          auto __asz = _M_alloc_size<_Rebound>(__csz);
          auto __nblk = _Alloc_block::_M_cnt(__asz);

          if constexpr (_Stateless_alloc<_Rebound>)
            {
              _Rebound __b;
              __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
            }
          else
            {
              // Recover the stored allocator copy before freeing.
              auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
              auto __an = _M_alloc_address<_Rebound>(__fn, __csz);
              _Rebound __b(std::move(*__an));
              __an->~_Rebound();
              __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
            }
        }

      template<typename _Alloc>
        static void*
        _M_allocate(const _Alloc& __a, std::size_t __csz)
        {
          using _Rebound = __alloc_rebind<_Alloc, _Alloc_block>;
          using _Rebound_ATr = allocator_traits<_Rebound>;

          static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                        "Must use allocators for true pointers with generators");

          // Store the matching type-erased deallocator next to the frame,
          // and the allocator copy after it if it has state.
          _Dealloc_fn __d = &_M_deallocator<_Rebound>;
          auto __b = static_cast<_Rebound>(__a);
          auto __asz = _M_alloc_size<_Rebound>(__csz);
          auto __nblk = _Alloc_block::_M_cnt(__asz);
          void* __p = __b.allocate(__nblk);
          auto __pn = reinterpret_cast<std::uintptr_t>(__p);
          *_M_dealloc_address(__pn, __csz) = __d;
          if constexpr (!_Stateless_alloc<_Rebound>)
            {
              auto __an = _M_alloc_address<_Rebound>(__pn, __csz);
              ::new (__an) _Rebound(std::move(__b));
            }
          return __p;
        }
    public:
      // Default: global operator new, paired with a lambda deallocator.
      void*
      operator new(std::size_t __sz)
      {
        auto __nsz = _M_alloc_size<void>(__sz);
        _Dealloc_fn __d = [] (void* __ptr, std::size_t __sz)
        {
          ::operator delete(__ptr, _M_alloc_size<void>(__sz));
        };
        auto __p = ::operator new(__nsz);
        auto __pn = reinterpret_cast<uintptr_t>(__p);
        *_M_dealloc_address(__pn, __sz) = __d;
        return __p;
      }

      template<typename _Alloc, typename... _Args>
        void*
        operator new(std::size_t __sz,
                     allocator_arg_t, const _Alloc& __a,
                     const _Args&...)
        { return _M_allocate(__a, __sz); }

      // Member-coroutine form: implicit object parameter precedes
      // allocator_arg.
      template<typename _This, typename _Alloc, typename... _Args>
        void*
        operator new(std::size_t __sz,
                     const _This&,
                     allocator_arg_t, const _Alloc& __a,
                     const _Args&...)
        { return _M_allocate(__a, __sz); }

      // Dispatch through the deallocation function stored at allocation
      // time.
      void
      operator delete(void* __ptr, std::size_t __sz) noexcept
      {
        _Dealloc_fn __d;
        auto __pn = reinterpret_cast<uintptr_t>(__ptr);
        __d = *_M_dealloc_address(__pn, __sz);
        __d(__ptr, __sz);
      }
    };
647 
  /// A cv-unqualified object type, as required of generator value and
  /// non-reference reference types.
  template<typename _Tp>
    concept _Cv_unqualified_object = is_object_v<_Tp>
      && same_as<_Tp, remove_cv_t<_Tp>>;
651  } // namespace __gen
652  /// @endcond
653 
  // Primary definition of std::generator.
  template<typename _Ref, typename _Val, typename _Alloc>
    class generator
    : public ranges::view_interface<generator<_Ref, _Val, _Alloc>>
    {
      // Value type: _Val when supplied, otherwise derived from _Ref.
      using _Value = __conditional_t<is_void_v<_Val>,
                                     remove_cvref_t<_Ref>,
                                     _Val>;
      static_assert(__gen::_Cv_unqualified_object<_Value>,
                    "Generator value must be a cv-unqualified object type");
      using _Reference = __gen::_Reference_t<_Ref, _Val>;
      static_assert(is_reference_v<_Reference>
                    || (__gen::_Cv_unqualified_object<_Reference>
                        && copy_constructible<_Reference>),
                    "Generator reference type must be either a cv-unqualified "
                    "object type that is trivially constructible or a "
                    "reference type");

      using _RRef = __conditional_t<
        is_reference_v<_Reference>,
        remove_reference_t<_Reference>&&,
        _Reference>;

      /* Required to model indirectly_readable, and input_iterator. */
      static_assert(common_reference_with<_Reference&&, _Value&&>);
      static_assert(common_reference_with<_Reference&&, _RRef&&>);
      static_assert(common_reference_with<_RRef&&, const _Value&>);

      using _Yielded = __gen::_Yield_t<_Reference>;
      using _Erased_promise = __gen::_Promise_erased<_Yielded>;

      struct _Iterator;

      friend _Erased_promise;
      friend struct _Erased_promise::_Subyield_state;
    public:
      using yielded = _Yielded;

      // Promise: erased yield/resume machinery plus allocation policy.
      struct promise_type : _Erased_promise, __gen::_Promise_alloc<_Alloc>
      {
        generator get_return_object() noexcept
        { return { coroutine_handle<promise_type>::from_promise(*this) }; }
      };

#ifdef __glibcxx_is_pointer_interconvertible
      static_assert(is_pointer_interconvertible_base_of_v<_Erased_promise,
                                                          promise_type>);
#endif

      generator(const generator&) = delete;

      generator(generator&& __other) noexcept
      : _M_coro(std::__exchange(__other._M_coro, nullptr)),
        _M_began(std::__exchange(__other._M_began, false))
      {}

      ~generator()
      {
        if (auto& __c = this->_M_coro)
          __c.destroy();
      }

      // Move assignment implemented as copy-and-swap on the by-value
      // parameter; the old frame is destroyed when __other goes away.
      generator&
      operator=(generator __other) noexcept
      {
        swap(__other._M_coro, this->_M_coro);
        swap(__other._M_began, this->_M_began);
        return *this;
      }

      // Start the coroutine and return an iterator over its values.
      // _M_mark_as_started asserts (debug mode) this happens only once.
      _Iterator
      begin()
      {
        this->_M_mark_as_started();
        auto __h = _Coro_handle::from_promise(_M_coro.promise());
        __h.promise()._M_nest._M_top() = __h;
        return { __h };
      }

      default_sentinel_t
      end() const noexcept
      { return default_sentinel; }

    private:
      using _Coro_handle = std::coroutine_handle<_Erased_promise>;

      generator(coroutine_handle<promise_type> __coro) noexcept
      : _M_coro { move(__coro) }
      {}

      void
      _M_mark_as_started() noexcept
      {
        __glibcxx_assert(!this->_M_began);
        this->_M_began = true;
      }

      coroutine_handle<promise_type> _M_coro;
      bool _M_began = false;  // whether begin() has already been called
    };
753 
  // Move-only input iterator over the values yielded by a generator.
  template<class _Ref, class _Val, class _Alloc>
    struct generator<_Ref, _Val, _Alloc>::_Iterator
    {
      using value_type = _Value;
      using difference_type = ptrdiff_t;

      // Equal to the sentinel once the coroutine has run to completion.
      friend bool
      operator==(const _Iterator& __i, default_sentinel_t) noexcept
      { return __i._M_coro.done(); }

      friend class generator;

      _Iterator(_Iterator&& __o) noexcept
      : _M_coro(std::__exchange(__o._M_coro, {}))
      {}

      _Iterator&
      operator=(_Iterator&& __o) noexcept
      {
        this->_M_coro = std::__exchange(__o._M_coro, {});
        return *this;
      }

      _Iterator&
      operator++()
      {
        _M_next();
        return *this;
      }

      void
      operator++(int)
      { this->operator++(); }

      _Reference
      operator*()
      const noexcept(is_nothrow_move_constructible_v<_Reference>)
      {
        auto& __p = this->_M_coro.promise();
        return static_cast<_Reference>(*__p._M_value());
      }

    private:
      friend class generator;

      // Constructed by generator::begin(); resumes once to produce the
      // first value.
      _Iterator(_Coro_handle __g)
      : _M_coro { __g }
      { this->_M_next(); }

      // Resume the top of the generator stack (the innermost nested
      // generator, when elements_of nesting is in effect).
      void _M_next()
      {
        auto& __t = this->_M_coro.promise()._M_nest._M_top();
        __t.resume();
      }

      _Coro_handle _M_coro;
    };
811 
812  /// @}
813 
814 #if _GLIBCXX_HOSTED
  namespace pmr {
    /// Convenience alias: a generator whose frames are allocated with a
    /// std::pmr::polymorphic_allocator.
    template<typename _Ref, typename _Val = void>
      using generator = std::generator<_Ref, _Val, polymorphic_allocator<std::byte>>;
  }
819 #endif // HOSTED
820 
821 _GLIBCXX_END_NAMESPACE_VERSION
822 } // namespace std
823 #endif // __cpp_lib_generator
824 
825 #endif // _GLIBCXX_GENERATOR