1import copy
2import gc
3import pickle
4import sys
5import doctest
6import unittest
7import weakref
8import inspect
9
10from test import support
11
12try:
13    import _testcapi
14except ImportError:
15    _testcapi = None
16
17
# This test makes sure that if a SIGINT arrives just before we send into a
# "yield from" chain, the KeyboardInterrupt is raised in the innermost
# generator (see bpo-30039).
21@unittest.skipUnless(_testcapi is not None and
22                     hasattr(_testcapi, "raise_SIGINT_then_send_None"),
23                     "needs _testcapi.raise_SIGINT_then_send_None")
24class SignalAndYieldFromTest(unittest.TestCase):
25
26    def generator1(self):
27        return (yield from self.generator2())
28
29    def generator2(self):
30        try:
31            yield
32        except KeyboardInterrupt:
33            return "PASSED"
34        else:
35            return "FAILED"
36
37    def test_raise_and_yield_from(self):
38        gen = self.generator1()
39        gen.send(None)
40        try:
41            _testcapi.raise_SIGINT_then_send_None(gen)
42        except BaseException as _exc:
43            exc = _exc
44        self.assertIs(type(exc), StopIteration)
45        self.assertEqual(exc.value, "PASSED")
46
47
48class FinalizationTest(unittest.TestCase):
49
50    def test_frame_resurrect(self):
51        # A generator frame can be resurrected by a generator's finalization.
52        def gen():
53            nonlocal frame
54            try:
55                yield
56            finally:
57                frame = sys._getframe()
58
59        g = gen()
60        wr = weakref.ref(g)
61        next(g)
62        del g
63        support.gc_collect()
64        self.assertIs(wr(), None)
65        self.assertTrue(frame)
66        del frame
67        support.gc_collect()
68
69    def test_refcycle(self):
70        # A generator caught in a refcycle gets finalized anyway.
71        old_garbage = gc.garbage[:]
72        finalized = False
73        def gen():
74            nonlocal finalized
75            try:
76                g = yield
77                yield 1
78            finally:
79                finalized = True
80
81        g = gen()
82        next(g)
83        g.send(g)
84        self.assertGreater(sys.getrefcount(g), 2)
85        self.assertFalse(finalized)
86        del g
87        support.gc_collect()
88        self.assertTrue(finalized)
89        self.assertEqual(gc.garbage, old_garbage)
90
91    def test_lambda_generator(self):
92        # Issue #23192: Test that a lambda returning a generator behaves
93        # like the equivalent function
94        f = lambda: (yield 1)
95        def g(): return (yield 1)
96
97        # test 'yield from'
98        f2 = lambda: (yield from g())
99        def g2(): return (yield from g())
100
101        f3 = lambda: (yield from f())
102        def g3(): return (yield from f())
103
104        for gen_fun in (f, g, f2, g2, f3, g3):
105            gen = gen_fun()
106            self.assertEqual(next(gen), 1)
107            with self.assertRaises(StopIteration) as cm:
108                gen.send(2)
109            self.assertEqual(cm.exception.value, 2)
110
111
112class GeneratorTest(unittest.TestCase):
113
114    def test_name(self):
115        def func():
116            yield 1
117
118        # check generator names
119        gen = func()
120        self.assertEqual(gen.__name__, "func")
121        self.assertEqual(gen.__qualname__,
122                         "GeneratorTest.test_name.<locals>.func")
123
124        # modify generator names
125        gen.__name__ = "name"
126        gen.__qualname__ = "qualname"
127        self.assertEqual(gen.__name__, "name")
128        self.assertEqual(gen.__qualname__, "qualname")
129
        # generator names must be strings and cannot be deleted
131        self.assertRaises(TypeError, setattr, gen, '__name__', 123)
132        self.assertRaises(TypeError, setattr, gen, '__qualname__', 123)
133        self.assertRaises(TypeError, delattr, gen, '__name__')
134        self.assertRaises(TypeError, delattr, gen, '__qualname__')
135
136        # modify names of the function creating the generator
137        func.__qualname__ = "func_qualname"
138        func.__name__ = "func_name"
139        gen = func()
140        self.assertEqual(gen.__name__, "func_name")
141        self.assertEqual(gen.__qualname__, "func_qualname")
142
143        # unnamed generator
144        gen = (x for x in range(10))
145        self.assertEqual(gen.__name__,
146                         "<genexpr>")
147        self.assertEqual(gen.__qualname__,
148                         "GeneratorTest.test_name.<locals>.<genexpr>")
149
150    def test_copy(self):
151        def f():
152            yield 1
153        g = f()
154        with self.assertRaises(TypeError):
155            copy.copy(g)
156
157    def test_pickle(self):
158        def f():
159            yield 1
160        g = f()
161        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
162            with self.assertRaises((TypeError, pickle.PicklingError)):
163                pickle.dumps(g, proto)
164
165    def test_send_non_none_to_new_gen(self):
166        def f():
167            yield 1
168        g = f()
169        with self.assertRaises(TypeError):
170            g.send(0)
171        self.assertEqual(next(g), 1)
172
173    def test_handle_frame_object_in_creation(self):
174
        # Attempt to expose partially constructed frames.
        # See https://github.com/python/cpython/issues/94262
177
178        def cb(*args):
179            inspect.stack()
180
181        def gen():
182            yield 1
183
184        thresholds = gc.get_threshold()
185
186        gc.callbacks.append(cb)
187        gc.set_threshold(1, 0, 0)
188        try:
189            gen()
190        finally:
191            gc.set_threshold(*thresholds)
192            gc.callbacks.pop()
193
194        class Sneaky:
195            def __del__(self):
196                inspect.stack()
197
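        # Build a self-referential pair whose finalizer walks the stack, so
        # that collecting the cycle while a generator frame is being set up
        # exercises the same issue as above.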
198        sneaky = Sneaky()
199        sneaky._s = Sneaky()
200        sneaky._s._s = sneaky
201
202        gc.set_threshold(1, 0, 0)
203        try:
204            del sneaky
205            gen()
206        finally:
207            gc.set_threshold(*thresholds)
208
209    def test_ag_frame_f_back(self):
210        async def f():
211            yield
212        ag = f()
213        self.assertIsNone(ag.ag_frame.f_back)
214
215    def test_cr_frame_f_back(self):
216        async def f():
217            pass
218        cr = f()
219        self.assertIsNone(cr.cr_frame.f_back)
220        cr.close()  # Suppress RuntimeWarning.
221
222    def test_gi_frame_f_back(self):
223        def f():
224            yield
225        gi = f()
226        self.assertIsNone(gi.gi_frame.f_back)
227
228
230class ExceptionTest(unittest.TestCase):
    # Tests for issue #23353: check that the currently handled exception
232    # is correctly saved/restored in PyEval_EvalFrameEx().
233
234    def test_except_throw(self):
235        def store_raise_exc_generator():
236            try:
237                self.assertEqual(sys.exc_info()[0], None)
238                yield
239            except Exception as exc:
240                # exception raised by gen.throw(exc)
241                self.assertEqual(sys.exc_info()[0], ValueError)
242                self.assertIsNone(exc.__context__)
243                yield
244
245                # ensure that the exception is not lost
246                self.assertEqual(sys.exc_info()[0], ValueError)
247                yield
248
249                # we should be able to raise back the ValueError
250                raise
251
252        make = store_raise_exc_generator()
253        next(make)
254
255        try:
256            raise ValueError()
257        except Exception as exc:
258            try:
259                make.throw(exc)
260            except Exception:
261                pass
262
263        next(make)
264        with self.assertRaises(ValueError) as cm:
265            next(make)
266        self.assertIsNone(cm.exception.__context__)
267
268        self.assertEqual(sys.exc_info(), (None, None, None))
269
270    def test_except_next(self):
271        def gen():
272            self.assertEqual(sys.exc_info()[0], ValueError)
273            yield "done"
274
275        g = gen()
276        try:
277            raise ValueError
278        except Exception:
279            self.assertEqual(next(g), "done")
280        self.assertEqual(sys.exc_info(), (None, None, None))
281
282    def test_except_gen_except(self):
283        def gen():
284            try:
285                self.assertEqual(sys.exc_info()[0], None)
286                yield
                # we are called from "except ValueError:", so the TypeError
                # must have the ValueError as its __context__
289                raise TypeError()
290            except TypeError as exc:
291                self.assertEqual(sys.exc_info()[0], TypeError)
292                self.assertEqual(type(exc.__context__), ValueError)
293            # here we are still called from the "except ValueError:"
294            self.assertEqual(sys.exc_info()[0], ValueError)
295            yield
296            self.assertIsNone(sys.exc_info()[0])
297            yield "done"
298
299        g = gen()
300        next(g)
301        try:
302            raise ValueError
303        except Exception:
304            next(g)
305
306        self.assertEqual(next(g), "done")
307        self.assertEqual(sys.exc_info(), (None, None, None))
308
309    def test_except_throw_exception_context(self):
310        def gen():
311            try:
312                try:
313                    self.assertEqual(sys.exc_info()[0], None)
314                    yield
315                except ValueError:
316                    # we are called from "except ValueError:"
317                    self.assertEqual(sys.exc_info()[0], ValueError)
318                    raise TypeError()
319            except Exception as exc:
320                self.assertEqual(sys.exc_info()[0], TypeError)
321                self.assertEqual(type(exc.__context__), ValueError)
322            # we are still called from "except ValueError:"
323            self.assertEqual(sys.exc_info()[0], ValueError)
324            yield
325            self.assertIsNone(sys.exc_info()[0])
326            yield "done"
327
328        g = gen()
329        next(g)
330        try:
331            raise ValueError
332        except Exception as exc:
333            g.throw(exc)
334
335        self.assertEqual(next(g), "done")
336        self.assertEqual(sys.exc_info(), (None, None, None))
337
338    def test_except_throw_bad_exception(self):
339        class E(Exception):
340            def __new__(cls, *args, **kwargs):
341                return cls
342
343        def boring_generator():
344            yield
345
346        gen = boring_generator()
347
348        err_msg = 'should have returned an instance of BaseException'
349
350        with self.assertRaisesRegex(TypeError, err_msg):
351            gen.throw(E)
352
353        self.assertRaises(StopIteration, next, gen)
354
355        def generator():
356            with self.assertRaisesRegex(TypeError, err_msg):
357                yield
358
359        gen = generator()
360        next(gen)
361        with self.assertRaises(StopIteration):
362            gen.throw(E)
363
364    def test_stopiteration_error(self):
365        # See also PEP 479.
366
367        def gen():
368            raise StopIteration
369            yield
370
371        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
372            next(gen())
373
374    def test_tutorial_stopiteration(self):
        # "raise StopIteration" stops the generator too:
376
377        def f():
378            yield 1
379            raise StopIteration
380            yield 2 # never reached
381
382        g = f()
383        self.assertEqual(next(g), 1)
384
385        with self.assertRaisesRegex(RuntimeError, 'raised StopIteration'):
386            next(g)
387
388    def test_return_tuple(self):
389        def g():
390            return (yield 1)
391
392        gen = g()
393        self.assertEqual(next(gen), 1)
394        with self.assertRaises(StopIteration) as cm:
395            gen.send((2,))
396        self.assertEqual(cm.exception.value, (2,))
397
398    def test_return_stopiteration(self):
399        def g():
400            return (yield 1)
401
402        gen = g()
403        self.assertEqual(next(gen), 1)
404        with self.assertRaises(StopIteration) as cm:
405            gen.send(StopIteration(2))
406        self.assertIsInstance(cm.exception.value, StopIteration)
407        self.assertEqual(cm.exception.value.value, 2)
408
409
410class GeneratorThrowTest(unittest.TestCase):
411
412    def test_exception_context_with_yield(self):
413        def f():
414            try:
415                raise KeyError('a')
416            except Exception:
417                yield
418
419        gen = f()
420        gen.send(None)
421        with self.assertRaises(ValueError) as cm:
422            gen.throw(ValueError)
423        context = cm.exception.__context__
424        self.assertEqual((type(context), context.args), (KeyError, ('a',)))
425
426    def test_exception_context_with_yield_inside_generator(self):
        # Check that the exception context is also available from inside the
        # generator, at the point of the yield, not just from the outside.
429        def f():
430            try:
431                raise KeyError('a')
432            except Exception:
433                try:
434                    yield
435                except Exception as exc:
436                    self.assertEqual(type(exc), ValueError)
437                    context = exc.__context__
438                    self.assertEqual((type(context), context.args),
439                        (KeyError, ('a',)))
440                    yield 'b'
441
442        gen = f()
443        gen.send(None)
444        actual = gen.throw(ValueError)
445        # This ensures that the assertions inside were executed.
446        self.assertEqual(actual, 'b')
447
448    def test_exception_context_with_yield_from(self):
449        def f():
450            yield
451
452        def g():
453            try:
454                raise KeyError('a')
455            except Exception:
456                yield from f()
457
458        gen = g()
459        gen.send(None)
460        with self.assertRaises(ValueError) as cm:
461            gen.throw(ValueError)
462        context = cm.exception.__context__
463        self.assertEqual((type(context), context.args), (KeyError, ('a',)))
464
465    def test_exception_context_with_yield_from_with_context_cycle(self):
466        # Check trying to create an exception context cycle:
467        # https://bugs.python.org/issue40696
468        has_cycle = None
469
470        def f():
471            yield
472
473        def g(exc):
474            nonlocal has_cycle
475            try:
476                raise exc
477            except Exception:
478                try:
479                    yield from f()
480                except Exception as exc:
481                    has_cycle = (exc is exc.__context__)
482            yield
483
484        exc = KeyError('a')
485        gen = g(exc)
486        gen.send(None)
487        gen.throw(exc)
488        # This also distinguishes from the initial has_cycle=None.
489        self.assertEqual(has_cycle, False)
490
491    def test_throw_after_none_exc_type(self):
492        def g():
493            try:
494                raise KeyError
495            except KeyError:
496                pass
497
498            try:
499                yield
500            except Exception:
501                raise RuntimeError
502
503        gen = g()
504        gen.send(None)
        with self.assertRaises(RuntimeError):
506            gen.throw(ValueError)
507
508
509class GeneratorStackTraceTest(unittest.TestCase):
510
511    def check_stack_names(self, frame, expected):
512        names = []
513        while frame:
514            name = frame.f_code.co_name
515            # Stop checking frames when we get to our test helper.
516            if name.startswith('check_') or name.startswith('call_'):
517                break
518
519            names.append(name)
520            frame = frame.f_back
521
522        self.assertEqual(names, expected)
523
524    def check_yield_from_example(self, call_method):
525        def f():
526            self.check_stack_names(sys._getframe(), ['f', 'g'])
527            try:
528                yield
529            except Exception:
530                pass
531            self.check_stack_names(sys._getframe(), ['f', 'g'])
532
533        def g():
534            self.check_stack_names(sys._getframe(), ['g'])
535            yield from f()
536            self.check_stack_names(sys._getframe(), ['g'])
537
538        gen = g()
539        gen.send(None)
540        try:
541            call_method(gen)
542        except StopIteration:
543            pass
544
545    def test_send_with_yield_from(self):
546        def call_send(gen):
547            gen.send(None)
548
549        self.check_yield_from_example(call_send)
550
551    def test_throw_with_yield_from(self):
552        def call_throw(gen):
553            gen.throw(RuntimeError)
554
555        self.check_yield_from_example(call_throw)
556
557
558class YieldFromTests(unittest.TestCase):
559    def test_generator_gi_yieldfrom(self):
560        def a():
561            self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
562            self.assertIsNone(gen_b.gi_yieldfrom)
563            yield
564            self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_RUNNING)
565            self.assertIsNone(gen_b.gi_yieldfrom)
566
567        def b():
568            self.assertIsNone(gen_b.gi_yieldfrom)
569            yield from a()
570            self.assertIsNone(gen_b.gi_yieldfrom)
571            yield
572            self.assertIsNone(gen_b.gi_yieldfrom)
573
574        gen_b = b()
575        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CREATED)
576        self.assertIsNone(gen_b.gi_yieldfrom)
577
578        gen_b.send(None)
579        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
580        self.assertEqual(gen_b.gi_yieldfrom.gi_code.co_name, 'a')
581
582        gen_b.send(None)
583        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_SUSPENDED)
584        self.assertIsNone(gen_b.gi_yieldfrom)
585
586        [] = gen_b  # Exhaust generator
587        self.assertEqual(inspect.getgeneratorstate(gen_b), inspect.GEN_CLOSED)
588        self.assertIsNone(gen_b.gi_yieldfrom)
589
590
591tutorial_tests = """
592Let's try a simple generator:
593
594    >>> def f():
595    ...    yield 1
596    ...    yield 2
597
598    >>> for i in f():
599    ...     print(i)
600    1
601    2
602    >>> g = f()
603    >>> next(g)
604    1
605    >>> next(g)
606    2
607
608"Falling off the end" stops the generator:
609
610    >>> next(g)
611    Traceback (most recent call last):
612      File "<stdin>", line 1, in ?
613      File "<stdin>", line 2, in g
614    StopIteration
615
616"return" also stops the generator:
617
618    >>> def f():
619    ...     yield 1
620    ...     return
621    ...     yield 2 # never reached
622    ...
623    >>> g = f()
624    >>> next(g)
625    1
626    >>> next(g)
627    Traceback (most recent call last):
628      File "<stdin>", line 1, in ?
629      File "<stdin>", line 3, in f
630    StopIteration
631    >>> next(g) # once stopped, can't be resumed
632    Traceback (most recent call last):
633      File "<stdin>", line 1, in ?
634    StopIteration
635
636However, "return" and StopIteration are not exactly equivalent:
637
638    >>> def g1():
639    ...     try:
640    ...         return
641    ...     except:
642    ...         yield 1
643    ...
644    >>> list(g1())
645    []
646
647    >>> def g2():
648    ...     try:
649    ...         raise StopIteration
650    ...     except:
651    ...         yield 42
652    >>> print(list(g2()))
653    [42]
654
655This may be surprising at first:
656
657    >>> def g3():
658    ...     try:
659    ...         return
660    ...     finally:
661    ...         yield 1
662    ...
663    >>> list(g3())
664    [1]
665
666Let's create an alternate range() function implemented as a generator:
667
668    >>> def yrange(n):
669    ...     for i in range(n):
670    ...         yield i
671    ...
672    >>> list(yrange(5))
673    [0, 1, 2, 3, 4]
674
675Generators always return to the most recent caller:
676
677    >>> def creator():
678    ...     r = yrange(5)
679    ...     print("creator", next(r))
680    ...     return r
681    ...
682    >>> def caller():
683    ...     r = creator()
684    ...     for i in r:
685    ...             print("caller", i)
686    ...
687    >>> caller()
688    creator 0
689    caller 1
690    caller 2
691    caller 3
692    caller 4
693
694Generators can call other generators:
695
696    >>> def zrange(n):
697    ...     for i in yrange(n):
698    ...         yield i
699    ...
700    >>> list(zrange(5))
701    [0, 1, 2, 3, 4]
702
703"""
704
705# The examples from PEP 255.
706
707pep_tests = """
708
709Specification:  Yield
710
711    Restriction:  A generator cannot be resumed while it is actively
712    running:
713
714    >>> def g():
715    ...     i = next(me)
716    ...     yield i
717    >>> me = g()
718    >>> next(me)
719    Traceback (most recent call last):
720     ...
721      File "<string>", line 2, in g
722    ValueError: generator already executing
723
724Specification: Return
725
726    Note that return isn't always equivalent to raising StopIteration:  the
727    difference lies in how enclosing try/except constructs are treated.
728    For example,
729
730        >>> def f1():
731        ...     try:
732        ...         return
733        ...     except:
734        ...        yield 1
735        >>> print(list(f1()))
736        []
737
738    because, as in any function, return simply exits, but
739
740        >>> def f2():
741        ...     try:
742        ...         raise StopIteration
743        ...     except:
744        ...         yield 42
745        >>> print(list(f2()))
746        [42]
747
748    because StopIteration is captured by a bare "except", as is any
749    exception.
750
751Specification: Generators and Exception Propagation
752
753    >>> def f():
754    ...     return 1//0
755    >>> def g():
756    ...     yield f()  # the zero division exception propagates
757    ...     yield 42   # and we'll never get here
758    >>> k = g()
759    >>> next(k)
760    Traceback (most recent call last):
761      File "<stdin>", line 1, in ?
762      File "<stdin>", line 2, in g
763      File "<stdin>", line 2, in f
764    ZeroDivisionError: integer division or modulo by zero
765    >>> next(k)  # and the generator cannot be resumed
766    Traceback (most recent call last):
767      File "<stdin>", line 1, in ?
768    StopIteration
769    >>>
770
771Specification: Try/Except/Finally
772
773    >>> def f():
774    ...     try:
775    ...         yield 1
776    ...         try:
777    ...             yield 2
778    ...             1//0
779    ...             yield 3  # never get here
780    ...         except ZeroDivisionError:
781    ...             yield 4
782    ...             yield 5
783    ...             raise
784    ...         except:
785    ...             yield 6
786    ...         yield 7     # the "raise" above stops this
787    ...     except:
788    ...         yield 8
789    ...     yield 9
790    ...     try:
791    ...         x = 12
792    ...     finally:
793    ...         yield 10
794    ...     yield 11
795    >>> print(list(f()))
796    [1, 2, 4, 5, 8, 9, 10, 11]
797    >>>
798
799Guido's binary tree example.
800
801    >>> # A binary tree class.
802    >>> class Tree:
803    ...
804    ...     def __init__(self, label, left=None, right=None):
805    ...         self.label = label
806    ...         self.left = left
807    ...         self.right = right
808    ...
809    ...     def __repr__(self, level=0, indent="    "):
810    ...         s = level*indent + repr(self.label)
811    ...         if self.left:
812    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
813    ...         if self.right:
814    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
815    ...         return s
816    ...
817    ...     def __iter__(self):
818    ...         return inorder(self)
819
820    >>> # Create a Tree from a list.
821    >>> def tree(list):
822    ...     n = len(list)
823    ...     if n == 0:
824    ...         return []
825    ...     i = n // 2
826    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
827
828    >>> # Show it off: create a tree.
829    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
830
831    >>> # A recursive generator that generates Tree labels in in-order.
832    >>> def inorder(t):
833    ...     if t:
834    ...         for x in inorder(t.left):
835    ...             yield x
836    ...         yield t.label
837    ...         for x in inorder(t.right):
838    ...             yield x
839
840    >>> # Show it off: create a tree.
841    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
842    >>> # Print the nodes of the tree in in-order.
843    >>> for x in t:
844    ...     print(' '+x, end='')
845     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
846
847    >>> # A non-recursive generator.
848    >>> def inorder(node):
849    ...     stack = []
850    ...     while node:
851    ...         while node.left:
852    ...             stack.append(node)
853    ...             node = node.left
854    ...         yield node.label
855    ...         while not node.right:
856    ...             try:
857    ...                 node = stack.pop()
858    ...             except IndexError:
859    ...                 return
860    ...             yield node.label
861    ...         node = node.right
862
863    >>> # Exercise the non-recursive generator.
864    >>> for x in t:
865    ...     print(' '+x, end='')
866     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
867
868"""
869
870# Examples from Iterator-List and Python-Dev and c.l.py.
871
872email_tests = """
873
874The difference between yielding None and returning it.
875
876>>> def g():
877...     for i in range(3):
878...         yield None
879...     yield None
880...     return
881>>> list(g())
882[None, None, None, None]
883
884Ensure that explicitly raising StopIteration acts like any other exception
885in try/except, not like a return.
886
887>>> def g():
888...     yield 1
889...     try:
890...         raise StopIteration
891...     except:
892...         yield 2
893...     yield 3
894>>> list(g())
895[1, 2, 3]
896
897Next one was posted to c.l.py.
898
899>>> def gcomb(x, k):
900...     "Generate all combinations of k elements from list x."
901...
902...     if k > len(x):
903...         return
904...     if k == 0:
905...         yield []
906...     else:
907...         first, rest = x[0], x[1:]
908...         # A combination does or doesn't contain first.
909...         # If it does, the remainder is a k-1 comb of rest.
910...         for c in gcomb(rest, k-1):
911...             c.insert(0, first)
912...             yield c
913...         # If it doesn't contain first, it's a k comb of rest.
914...         for c in gcomb(rest, k):
915...             yield c
916
917>>> seq = list(range(1, 5))
918>>> for k in range(len(seq) + 2):
919...     print("%d-combs of %s:" % (k, seq))
920...     for c in gcomb(seq, k):
921...         print("   ", c)
9220-combs of [1, 2, 3, 4]:
923    []
9241-combs of [1, 2, 3, 4]:
925    [1]
926    [2]
927    [3]
928    [4]
9292-combs of [1, 2, 3, 4]:
930    [1, 2]
931    [1, 3]
932    [1, 4]
933    [2, 3]
934    [2, 4]
935    [3, 4]
9363-combs of [1, 2, 3, 4]:
937    [1, 2, 3]
938    [1, 2, 4]
939    [1, 3, 4]
940    [2, 3, 4]
9414-combs of [1, 2, 3, 4]:
942    [1, 2, 3, 4]
9435-combs of [1, 2, 3, 4]:
944
945From the Iterators list, about the types of these things.
946
947>>> def g():
948...     yield 1
949...
950>>> type(g)
951<class 'function'>
952>>> i = g()
953>>> type(i)
954<class 'generator'>
955>>> [s for s in dir(i) if not s.startswith('_')]
956['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_suspended', 'gi_yieldfrom', 'send', 'throw']
957>>> from test.support import HAVE_DOCSTRINGS
958>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
959Implement next(self).
960>>> iter(i) is i
961True
962>>> import types
963>>> isinstance(i, types.GeneratorType)
964True
965
966And more, added later.
967
968>>> i.gi_running
9690
970>>> type(i.gi_frame)
971<class 'frame'>
972>>> i.gi_running = 42
973Traceback (most recent call last):
974  ...
975AttributeError: attribute 'gi_running' of 'generator' objects is not writable
976>>> def g():
977...     yield me.gi_running
978>>> me = g()
979>>> me.gi_running
9800
981>>> next(me)
9821
983>>> me.gi_running
9840
985
986A clever union-find implementation from c.l.py, due to David Eppstein.
987Sent: Friday, June 29, 2001 12:16 PM
988To: [email protected]
989Subject: Re: PEP 255: Simple Generators
990
991>>> class disjointSet:
992...     def __init__(self, name):
993...         self.name = name
994...         self.parent = None
995...         self.generator = self.generate()
996...
997...     def generate(self):
998...         while not self.parent:
999...             yield self
1000...         for x in self.parent.generator:
1001...             yield x
1002...
1003...     def find(self):
1004...         return next(self.generator)
1005...
1006...     def union(self, parent):
1007...         if self.parent:
1008...             raise ValueError("Sorry, I'm not a root!")
1009...         self.parent = parent
1010...
1011...     def __str__(self):
1012...         return self.name
1013
1014>>> names = "ABCDEFGHIJKLM"
1015>>> sets = [disjointSet(name) for name in names]
1016>>> roots = sets[:]
1017
1018>>> import random
1019>>> gen = random.Random(42)
1020>>> while 1:
1021...     for s in sets:
1022...         print(" %s->%s" % (s, s.find()), end='')
1023...     print()
1024...     if len(roots) > 1:
1025...         s1 = gen.choice(roots)
1026...         roots.remove(s1)
1027...         s2 = gen.choice(roots)
1028...         s1.union(s2)
1029...         print("merged", s1, "into", s2)
1030...     else:
1031...         break
1032 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
1033merged K into B
1034 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
1035merged A into F
1036 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
1037merged E into F
1038 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
1039merged D into C
1040 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
1041merged M into C
1042 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
1043merged J into B
1044 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
1045merged B into C
1046 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
1047merged F into G
1048 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
1049merged L into C
1050 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
1051merged G into I
1052 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
1053merged I into H
1054 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
1055merged C into H
1056 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
1057
1058"""
1059# Emacs turd '
1060
1061# Fun tests (for sufficiently warped notions of "fun").
1062
1063fun_tests = """
1064
1065Build up to a recursive Sieve of Eratosthenes generator.
1066
1067>>> def firstn(g, n):
1068...     return [next(g) for i in range(n)]
1069
1070>>> def intsfrom(i):
1071...     while 1:
1072...         yield i
1073...         i += 1
1074
1075>>> firstn(intsfrom(5), 7)
1076[5, 6, 7, 8, 9, 10, 11]
1077
1078>>> def exclude_multiples(n, ints):
1079...     for i in ints:
1080...         if i % n:
1081...             yield i
1082
1083>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
1084[1, 2, 4, 5, 7, 8]
1085
1086>>> def sieve(ints):
1087...     prime = next(ints)
1088...     yield prime
1089...     not_divisible_by_prime = exclude_multiples(prime, ints)
1090...     for p in sieve(not_divisible_by_prime):
1091...         yield p
1092
1093>>> primes = sieve(intsfrom(2))
1094>>> firstn(primes, 20)
1095[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
1096
1097
1098Another famous problem:  generate all integers of the form
1099    2**i * 3**j  * 5**k
1100in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
1101Try writing it without generators, and correctly, and without generating
11023 internal results for each result output.
1103
1104>>> def times(n, g):
1105...     for i in g:
1106...         yield n * i
1107>>> firstn(times(10, intsfrom(1)), 10)
1108[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
1109
1110>>> def merge(g, h):
1111...     ng = next(g)
1112...     nh = next(h)
1113...     while 1:
1114...         if ng < nh:
1115...             yield ng
1116...             ng = next(g)
1117...         elif ng > nh:
1118...             yield nh
1119...             nh = next(h)
1120...         else:
1121...             yield ng
1122...             ng = next(g)
1123...             nh = next(h)
1124
1125The following works, but is doing a whale of a lot of redundant work --
1126it's not clear how to get the internal uses of m235 to share a single
1127generator.  Note that me_times2 (etc) each need to see every element in the
1128result sequence.  So this is an example where lazy lists are more natural
1129(you can look at the head of a lazy list any number of times).
1130
1131>>> def m235():
1132...     yield 1
1133...     me_times2 = times(2, m235())
1134...     me_times3 = times(3, m235())
1135...     me_times5 = times(5, m235())
1136...     for i in merge(merge(me_times2,
1137...                          me_times3),
1138...                    me_times5):
1139...         yield i
1140
1141Don't print "too many" of these -- the implementation above is extremely
1142inefficient:  each call of m235() leads to 3 recursive calls, and in
1143turn each of those 3 more, and so on, and so on, until we've descended
1144enough levels to satisfy the print stmts.  Very odd:  when I printed 5
1145lines of results below, this managed to screw up Win98's malloc in "the
1146usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
1147address space, and it *looked* like a very slow leak.
1148
1149>>> result = m235()
1150>>> for i in range(3):
1151...     print(firstn(result, 15))
1152[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1153[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1154[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1155
1156Heh.  Here's one way to get a shared list, complete with an excruciating
1157namespace renaming trick.  The *pretty* part is that the times() and merge()
1158functions can be reused as-is, because they only assume their stream
1159arguments are iterable -- a LazyList is the same as a generator to times().
1160
1161>>> class LazyList:
1162...     def __init__(self, g):
1163...         self.sofar = []
1164...         self.fetch = g.__next__
1165...
1166...     def __getitem__(self, i):
1167...         sofar, fetch = self.sofar, self.fetch
1168...         while i >= len(sofar):
1169...             sofar.append(fetch())
1170...         return sofar[i]
1171
1172>>> def m235():
1173...     yield 1
1174...     # Gack:  m235 below actually refers to a LazyList.
1175...     me_times2 = times(2, m235)
1176...     me_times3 = times(3, m235)
1177...     me_times5 = times(5, m235)
1178...     for i in merge(merge(me_times2,
1179...                          me_times3),
1180...                    me_times5):
1181...         yield i
1182
1183Print as many of these as you like -- *this* implementation is memory-
1184efficient.
1185
1186>>> m235 = LazyList(m235())
1187>>> for i in range(5):
1188...     print([m235[j] for j in range(15*i, 15*(i+1))])
1189[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1190[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1191[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1192[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1193[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1194
1195Ye olde Fibonacci generator, LazyList style.
1196
1197>>> def fibgen(a, b):
1198...
1199...     def sum(g, h):
1200...         while 1:
1201...             yield next(g) + next(h)
1202...
1203...     def tail(g):
1204...         next(g)    # throw first away
1205...         for x in g:
1206...             yield x
1207...
1208...     yield a
1209...     yield b
1210...     for s in sum(iter(fib),
1211...                  tail(iter(fib))):
1212...         yield s
1213
1214>>> fib = LazyList(fibgen(1, 2))
1215>>> firstn(iter(fib), 17)
1216[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1217
1218
1219Running after your tail with itertools.tee (new in version 2.4)
1220
1221The algorithms "m235" (Hamming) and Fibonacci presented above are both
1222examples of a whole family of FP (functional programming) algorithms
1223where a function produces and returns a list while the production algorithm
presupposes that the list has already been produced by recursively
calling itself.
1225For these algorithms to work, they must:
1226
1227- produce at least a first element without presupposing the existence of
1228  the rest of the list
1229- produce their elements in a lazy manner
1230
1231To work efficiently, the beginning of the list must not be recomputed over
1232and over again. This is ensured in most FP languages as a built-in feature.
In Python, we have to explicitly maintain a list of already computed results
and abandon genuine recursion.
1235
1236This is what had been attempted above with the LazyList class. One problem
1237with that class is that it keeps a list of all of the generated results and
1238therefore continually grows. This partially defeats the goal of the generator
1239concept, viz. produce the results only as needed instead of producing them
1240all and thereby wasting memory.
1241
1242Thanks to itertools.tee, it is now clear "how to get the internal uses of
1243m235 to share a single generator".
1244
1245>>> from itertools import tee
1246>>> def m235():
1247...     def _m235():
1248...         yield 1
1249...         for n in merge(times(2, m2),
1250...                        merge(times(3, m3),
1251...                              times(5, m5))):
1252...             yield n
1253...     m1 = _m235()
1254...     m2, m3, m5, mRes = tee(m1, 4)
1255...     return mRes
1256
1257>>> it = m235()
1258>>> for i in range(5):
1259...     print(firstn(it, 15))
1260[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
1261[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
1262[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
1263[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
1264[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
1265
1266The "tee" function does just what we want. It internally keeps a generated
1267result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the Hamming
sequence for hours with little or no increase in memory usage.
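
A tiny added aside (not part of the original write-up) showing tee's
buffering directly: each duplicated iterator sees the whole sequence, and an
item is dropped once every copy has consumed it.

>>> a, b = tee(iter([1, 2, 3]))
>>> next(a), next(a), next(b)
(1, 2, 1)
>>> list(a), list(b)
([3], [2, 3])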
1270
1271The beauty of it is that recursive running-after-their-tail FP algorithms
1272are quite straightforwardly expressed with this Python idiom.
1273
1274Ye olde Fibonacci generator, tee style.
1275
1276>>> def fib():
1277...
1278...     def _isum(g, h):
1279...         while 1:
1280...             yield next(g) + next(h)
1281...
1282...     def _fib():
1283...         yield 1
1284...         yield 2
1285...         next(fibTail) # throw first away
1286...         for res in _isum(fibHead, fibTail):
1287...             yield res
1288...
1289...     realfib = _fib()
1290...     fibHead, fibTail, fibRes = tee(realfib, 3)
1291...     return fibRes
1292
1293>>> firstn(fib(), 17)
1294[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
1295
1296"""
1297
1298# syntax_tests mostly provokes SyntaxErrors.  Also fiddling with #if 0
1299# hackery.
1300
1301syntax_tests = """
1302
1303These are fine:
1304
1305>>> def f():
1306...     yield 1
1307...     return
1308
1309>>> def f():
1310...     try:
1311...         yield 1
1312...     finally:
1313...         pass
1314
1315>>> def f():
1316...     try:
1317...         try:
1318...             1//0
1319...         except ZeroDivisionError:
1320...             yield 666
1321...         except:
1322...             pass
1323...     finally:
1324...         pass
1325
1326>>> def f():
1327...     try:
1328...         try:
1329...             yield 12
1330...             1//0
1331...         except ZeroDivisionError:
1332...             yield 666
1333...         except:
1334...             try:
1335...                 x = 12
1336...             finally:
1337...                 yield 12
1338...     except:
1339...         return
1340>>> list(f())
1341[12, 666]
1342
1343>>> def f():
1344...    yield
1345>>> type(f())
1346<class 'generator'>
1347
1348
1349>>> def f():
1350...    if 0:
1351...        yield
1352>>> type(f())
1353<class 'generator'>
1354
1355
1356>>> def f():
1357...     if 0:
1358...         yield 1
1359>>> type(f())
1360<class 'generator'>
1361
1362>>> def f():
1363...    if "":
1364...        yield None
1365>>> type(f())
1366<class 'generator'>
1367
1368>>> def f():
1369...     return
1370...     try:
1371...         if x==4:
1372...             pass
1373...         elif 0:
1374...             try:
1375...                 1//0
1376...             except SyntaxError:
1377...                 pass
1378...             else:
1379...                 if 0:
1380...                     while 12:
1381...                         x += 1
1382...                         yield 2 # don't blink
1383...                         f(a, b, c, d, e)
1384...         else:
1385...             pass
1386...     except:
1387...         x = 1
1388...     return
1389>>> type(f())
1390<class 'generator'>
1391
1392>>> def f():
1393...     if 0:
1394...         def g():
1395...             yield 1
1396...
1397>>> type(f())
1398<class 'NoneType'>
1399
1400>>> def f():
1401...     if 0:
1402...         class C:
1403...             def __init__(self):
1404...                 yield 1
1405...             def f(self):
1406...                 yield 2
1407>>> type(f())
1408<class 'NoneType'>
1409
1410>>> def f():
1411...     if 0:
1412...         return
1413...     if 0:
1414...         yield 2
1415>>> type(f())
1416<class 'generator'>
1417
1418This one caused a crash (see SF bug 567538):
1419
1420>>> def f():
1421...     for i in range(3):
1422...         try:
1423...             continue
1424...         finally:
1425...             yield i
1426...
1427>>> g = f()
1428>>> print(next(g))
14290
1430>>> print(next(g))
14311
1432>>> print(next(g))
14332
1434>>> print(next(g))
1435Traceback (most recent call last):
1436StopIteration
1437
1438
1439Test the gi_code attribute
1440
1441>>> def f():
1442...     yield 5
1443...
1444>>> g = f()
1445>>> g.gi_code is f.__code__
1446True
1447>>> next(g)
14485
1449>>> next(g)
1450Traceback (most recent call last):
1451StopIteration
1452>>> g.gi_code is f.__code__
1453True
1454
1455
1456Test the __name__ attribute and the repr()
1457
1458>>> def f():
1459...    yield 5
1460...
1461>>> g = f()
1462>>> g.__name__
1463'f'
1464>>> repr(g)  # doctest: +ELLIPSIS
1465'<generator object f at ...>'
1466
1467Lambdas shouldn't have their usual return behavior.
1468
1469>>> x = lambda: (yield 1)
1470>>> list(x())
1471[1]
1472
1473>>> x = lambda: ((yield 1), (yield 2))
1474>>> list(x())
1475[1, 2]
1476"""
1477
1478# conjoin is a simple backtracking generator, named in honor of Icon's
1479# "conjunction" control structure.  Pass a list of no-argument functions
1480# that return iterable objects.  Easiest to explain by example:  assume the
1481# function list [x, y, z] is passed.  Then conjoin acts like:
1482#
1483# def g():
1484#     values = [None] * 3
1485#     for values[0] in x():
1486#         for values[1] in y():
1487#             for values[2] in z():
1488#                 yield values
1489#
1490# So some 3-lists of values *may* be generated, each time we successfully
1491# get into the innermost loop.  If an iterator fails (is exhausted) before
1492# then, it "backtracks" to get the next value from the nearest enclosing
1493# iterator (the one "to the left"), and starts all over again at the next
1494# slot (pumps a fresh iterator).  Of course this is most useful when the
1495# iterators have side-effects, so that which values *can* be generated at
1496# each slot depend on the values iterated at previous slots.
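
# An illustrative sketch of the side-effect pattern described above.  The
# helper below is ours and is not exercised by the tests; it relies on the
# conjoin() defined further down.  Each slot's generator consults shared
# state set up by the slots to its left, so conjoin() ends up enumerating
# the permutations of a sequence of distinct items by backtracking, e.g.
# sorted(_permutations_via_conjoin("abc")) gives all six orderings.

def _permutations_via_conjoin(seq):
    used = set()   # items already placed in slots to the left

    def slot():
        for item in seq:
            if item not in used:
                used.add(item)
                yield item          # tentatively place item in this slot
                used.remove(item)   # backtrack: free it for later attempts

    for values in conjoin([slot] * len(seq)):
        yield tuple(values)         # copy -- conjoin reuses the same list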
1497
1498def simple_conjoin(gs):
1499
1500    values = [None] * len(gs)
1501
1502    def gen(i):
1503        if i >= len(gs):
1504            yield values
1505        else:
1506            for values[i] in gs[i]():
1507                for x in gen(i+1):
1508                    yield x
1509
1510    for x in gen(0):
1511        yield x
1512
1513# That works fine, but recursing a level and checking i against len(gs) for
1514# each item produced is inefficient.  By doing manual loop unrolling across
1515# generator boundaries, it's possible to eliminate most of that overhead.
1516# This isn't worth the bother *in general* for generators, but conjoin() is
1517# a core building block for some CPU-intensive generator applications.
1518
1519def conjoin(gs):
1520
1521    n = len(gs)
1522    values = [None] * n
1523
    # Do one loop nest at a time recursively, until the # of loop nests
1525    # remaining is divisible by 3.
1526
1527    def gen(i):
1528        if i >= n:
1529            yield values
1530
1531        elif (n-i) % 3:
1532            ip1 = i+1
1533            for values[i] in gs[i]():
1534                for x in gen(ip1):
1535                    yield x
1536
1537        else:
1538            for x in _gen3(i):
1539                yield x
1540
1541    # Do three loop nests at a time, recursing only if at least three more
1542    # remain.  Don't call directly:  this is an internal optimization for
1543    # gen's use.
1544
1545    def _gen3(i):
1546        assert i < n and (n-i) % 3 == 0
1547        ip1, ip2, ip3 = i+1, i+2, i+3
1548        g, g1, g2 = gs[i : ip3]
1549
1550        if ip3 >= n:
1551            # These are the last three, so we can yield values directly.
1552            for values[i] in g():
1553                for values[ip1] in g1():
1554                    for values[ip2] in g2():
1555                        yield values
1556
1557        else:
1558            # At least 6 loop nests remain; peel off 3 and recurse for the
1559            # rest.
1560            for values[i] in g():
1561                for values[ip1] in g1():
1562                    for values[ip2] in g2():
1563                        for x in _gen3(ip3):
1564                            yield x
1565
1566    for x in gen(0):
1567        yield x
1568
1569# And one more approach:  For backtracking apps like the Knight's Tour
1570# solver below, the number of backtracking levels can be enormous (one
1571# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1572# needs 10,000 levels).  In such cases Python is likely to run out of
1573# stack space due to recursion.  So here's a recursion-free version of
1574# conjoin too.
1575# NOTE WELL:  This allows large problems to be solved with only trivial
1576# demands on stack space.  Without explicitly resumable generators, this is
1577# much harder to achieve.  OTOH, this is much slower (up to a factor of 2)
1578# than the fancy unrolled recursive conjoin.
1579
1580def flat_conjoin(gs):  # rename to conjoin to run tests with this instead
1581    n = len(gs)
1582    values = [None] * n
1583    iters  = [None] * n
1584    _StopIteration = StopIteration  # make local because caught a *lot*
1585    i = 0
1586    while 1:
1587        # Descend.
1588        try:
1589            while i < n:
1590                it = iters[i] = gs[i]().__next__
1591                values[i] = it()
1592                i += 1
1593        except _StopIteration:
1594            pass
1595        else:
1596            assert i == n
1597            yield values
1598
1599        # Backtrack until an older iterator can be resumed.
1600        i -= 1
1601        while i >= 0:
1602            try:
1603                values[i] = iters[i]()
1604                # Success!  Start fresh at next level.
1605                i += 1
1606                break
1607            except _StopIteration:
1608                # Continue backtracking.
1609                i -= 1
1610        else:
1611            assert i < 0
1612            break
1613
1614# A conjoin-based N-Queens solver.
1615
1616class Queens:
1617    def __init__(self, n):
1618        self.n = n
1619        rangen = range(n)
1620
1621        # Assign a unique int to each column and diagonal.
1622        # columns:  n of those, range(n).
1623        # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
1624        # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
1625        # based.
1626        # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
1627        # each, smallest i+j is 0, largest is 2n-2.
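        # Worked example for n == 4:  square (i, j) == (1, 2) sets column
        # bit 2, NW-SE bit n + (i-j) + (n-1) == 4 - 1 + 3 == 6, and NE-SW
        # bit n + (2*n-1) + (i+j) == 4 + 7 + 3 == 14 in rowuses below.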
1628
1629        # For each square, compute a bit vector of the columns and
1630        # diagonals it covers, and for each row compute a function that
1631        # generates the possibilities for the columns in that row.
1632        self.rowgenerators = []
1633        for i in rangen:
1634            rowuses = [(1 << j) |                  # column ordinal
1635                       (1 << (n + i-j + n-1)) |    # NW-SE ordinal
1636                       (1 << (n + 2*n-1 + i+j))    # NE-SW ordinal
1637                            for j in rangen]
1638
1639            def rowgen(rowuses=rowuses):
1640                for j in rangen:
1641                    uses = rowuses[j]
1642                    if uses & self.used == 0:
1643                        self.used |= uses
1644                        yield j
1645                        self.used &= ~uses
1646
1647            self.rowgenerators.append(rowgen)
1648
1649    # Generate solutions.
1650    def solve(self):
1651        self.used = 0
1652        for row2col in conjoin(self.rowgenerators):
1653            yield row2col
1654
1655    def printsolution(self, row2col):
1656        n = self.n
1657        assert n == len(row2col)
1658        sep = "+" + "-+" * n
1659        print(sep)
1660        for i in range(n):
1661            squares = [" " for j in range(n)]
1662            squares[row2col[i]] = "Q"
1663            print("|" + "|".join(squares) + "|")
1664            print(sep)
1665
1666# A conjoin-based Knight's Tour solver.  This is pretty sophisticated
1667# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1668# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1669# creating 10s of thousands of generators then!), and is lengthy.
1670
1671class Knights:
1672    def __init__(self, m, n, hard=0):
1673        self.m, self.n = m, n
1674
1675        # solve() will set up succs[i] to be a list of square #i's
1676        # successors.
1677        succs = self.succs = []
1678
        # Remove i0 from each of its successors' successor lists, i.e.
1680        # successors can't go back to i0 again.  Return 0 if we can
1681        # detect this makes a solution impossible, else return 1.
1682
1683        def remove_from_successors(i0, len=len):
1684            # If we remove all exits from a free square, we're dead:
1685            # even if we move to it next, we can't leave it again.
1686            # If we create a square with one exit, we must visit it next;
1687            # else somebody else will have to visit it, and since there's
1688            # only one adjacent, there won't be a way to leave it again.
1689            # Finally, if we create more than one free square with a
1690            # single exit, we can only move to one of them next, leaving
1691            # the other one a dead end.
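            # ne0/ne1 count i0's successors that are left with zero exits
            # (dead) or exactly one exit (forced next move) after the removal.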
1692            ne0 = ne1 = 0
1693            for i in succs[i0]:
1694                s = succs[i]
1695                s.remove(i0)
1696                e = len(s)
1697                if e == 0:
1698                    ne0 += 1
1699                elif e == 1:
1700                    ne1 += 1
1701            return ne0 == 0 and ne1 < 2
1702
        # Put i0 back in each of its successors' successor lists.
1704
1705        def add_to_successors(i0):
1706            for i in succs[i0]:
1707                succs[i].append(i0)
1708
1709        # Generate the first move.
1710        def first():
1711            if m < 1 or n < 1:
1712                return
1713
1714            # Since we're looking for a cycle, it doesn't matter where we
1715            # start.  Starting in a corner makes the 2nd move easy.
1716            corner = self.coords2index(0, 0)
1717            remove_from_successors(corner)
1718            self.lastij = corner
1719            yield corner
1720            add_to_successors(corner)
1721
1722        # Generate the second moves.
1723        def second():
1724            corner = self.coords2index(0, 0)
1725            assert self.lastij == corner  # i.e., we started in the corner
1726            if m < 3 or n < 3:
1727                return
1728            assert len(succs[corner]) == 2
1729            assert self.coords2index(1, 2) in succs[corner]
1730            assert self.coords2index(2, 1) in succs[corner]
1731            # Only two choices.  Whichever we pick, the other must be the
1732            # square picked on move m*n, as it's the only way to get back
1733            # to (0, 0).  Save its index in self.final so that moves before
1734            # the last know it must be kept free.
1735            for i, j in (1, 2), (2, 1):
1736                this  = self.coords2index(i, j)
1737                final = self.coords2index(3-i, 3-j)
1738                self.final = final
1739
1740                remove_from_successors(this)
1741                succs[final].append(corner)
1742                self.lastij = this
1743                yield this
1744                succs[final].remove(corner)
1745                add_to_successors(this)
1746
1747        # Generate moves 3 through m*n-1.
1748        def advance(len=len):
1749            # If some successor has only one exit, must take it.
1750            # Else favor successors with fewer exits.
1751            candidates = []
1752            for i in succs[self.lastij]:
1753                e = len(succs[i])
1754                assert e > 0, "else remove_from_successors() pruning flawed"
1755                if e == 1:
1756                    candidates = [(e, i)]
1757                    break
1758                candidates.append((e, i))
1759            else:
1760                candidates.sort()
1761
1762            for e, i in candidates:
1763                if i != self.final:
1764                    if remove_from_successors(i):
1765                        self.lastij = i
1766                        yield i
1767                    add_to_successors(i)
1768
1769        # Generate moves 3 through m*n-1.  Alternative version using a
1770        # stronger (but more expensive) heuristic to order successors.
1771        # Since the # of backtracking levels is m*n, a poor move early on
1772        # can take eons to undo.  Smallest square board for which this
1773        # matters a lot is 52x52.
1774        def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
1775            # If some successor has only one exit, must take it.
1776            # Else favor successors with fewer exits.
1777            # Break ties via max distance from board centerpoint (favor
1778            # corners and edges whenever possible).
1779            candidates = []
1780            for i in succs[self.lastij]:
1781                e = len(succs[i])
1782                assert e > 0, "else remove_from_successors() pruning flawed"
1783                if e == 1:
1784                    candidates = [(e, 0, i)]
1785                    break
1786                i1, j1 = self.index2coords(i)
1787                d = (i1 - vmid)**2 + (j1 - hmid)**2
1788                candidates.append((e, -d, i))
1789            else:
1790                candidates.sort()
1791
1792            for e, d, i in candidates:
1793                if i != self.final:
1794                    if remove_from_successors(i):
1795                        self.lastij = i
1796                        yield i
1797                    add_to_successors(i)
1798
1799        # Generate the last move.
1800        def last():
1801            assert self.final in succs[self.lastij]
1802            yield self.final
1803
1804        if m*n < 4:
1805            self.squaregenerators = [first]
1806        else:
1807            self.squaregenerators = [first, second] + \
1808                [hard and advance_hard or advance] * (m*n - 3) + \
1809                [last]
1810
1811    def coords2index(self, i, j):
1812        assert 0 <= i < self.m
1813        assert 0 <= j < self.n
1814        return i * self.n + j
1815
1816    def index2coords(self, index):
1817        assert 0 <= index < self.m * self.n
1818        return divmod(index, self.n)
1819
1820    def _init_board(self):
1821        succs = self.succs
1822        del succs[:]
1823        m, n = self.m, self.n
1824        c2i = self.coords2index
1825
1826        offsets = [( 1,  2), ( 2,  1), ( 2, -1), ( 1, -2),
1827                   (-1, -2), (-2, -1), (-2,  1), (-1,  2)]
1828        rangen = range(n)
1829        for i in range(m):
1830            for j in rangen:
1831                s = [c2i(i+io, j+jo) for io, jo in offsets
1832                                     if 0 <= i+io < m and
1833                                        0 <= j+jo < n]
1834                succs.append(s)
1835
1836    # Generate solutions.
1837    def solve(self):
1838        self._init_board()
1839        for x in conjoin(self.squaregenerators):
1840            yield x
1841
1842    def printsolution(self, x):
1843        m, n = self.m, self.n
1844        assert len(x) == m*n
1845        w = len(str(m*n))
1846        format = "%" + str(w) + "d"
1847
1848        squares = [[None] * n for i in range(m)]
1849        k = 1
1850        for i in x:
1851            i1, j1 = self.index2coords(i)
1852            squares[i1][j1] = format % k
1853            k += 1
1854
1855        sep = "+" + ("-" * w + "+") * n
1856        print(sep)
1857        for i in range(m):
1858            row = squares[i]
1859            print("|" + "|".join(row) + "|")
1860            print(sep)
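

# An illustrative sketch added here, not part of the original suite: the
# board helpers above map (row, column) pairs to flat square indexes and
# back, so a quick round-trip check documents that contract.  The class
# and method names are ours, chosen only for this example.
class KnightsCoordsSketchTest(unittest.TestCase):

    def test_coords_index_roundtrip(self):
        k = Knights(4, 5)
        for index in range(4 * 5):
            i, j = k.index2coords(index)
            self.assertEqual(k.coords2index(i, j), index)
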
1861
1862conjoin_tests = """
1863
1864Generate the 3-bit binary numbers in order.  This illustrates dumbest-
1865possible use of conjoin, just to generate the full cross-product.
1866
1867>>> for c in conjoin([lambda: iter((0, 1))] * 3):
1868...     print(c)
1869[0, 0, 0]
1870[0, 0, 1]
1871[0, 1, 0]
1872[0, 1, 1]
1873[1, 0, 0]
1874[1, 0, 1]
1875[1, 1, 0]
1876[1, 1, 1]
1877
1878For efficiency in typical backtracking apps, conjoin() yields the same list
1879object each time.  So if you want to save away a full account of its
1880generated sequence, you need to copy its results.
1881
1882>>> def gencopy(iterator):
1883...     for x in iterator:
1884...         yield x[:]
1885
1886>>> for n in range(10):
1887...     all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
1888...     print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
18890 1 True True
18901 2 True True
18912 4 True True
18923 8 True True
18934 16 True True
18945 32 True True
18956 64 True True
18967 128 True True
18978 256 True True
18989 512 True True
1899
1900And run an 8-queens solver.
1901
1902>>> q = Queens(8)
1903>>> LIMIT = 2
1904>>> count = 0
1905>>> for row2col in q.solve():
1906...     count += 1
1907...     if count <= LIMIT:
1908...         print("Solution", count)
1909...         q.printsolution(row2col)
1910Solution 1
1911+-+-+-+-+-+-+-+-+
1912|Q| | | | | | | |
1913+-+-+-+-+-+-+-+-+
1914| | | | |Q| | | |
1915+-+-+-+-+-+-+-+-+
1916| | | | | | | |Q|
1917+-+-+-+-+-+-+-+-+
1918| | | | | |Q| | |
1919+-+-+-+-+-+-+-+-+
1920| | |Q| | | | | |
1921+-+-+-+-+-+-+-+-+
1922| | | | | | |Q| |
1923+-+-+-+-+-+-+-+-+
1924| |Q| | | | | | |
1925+-+-+-+-+-+-+-+-+
1926| | | |Q| | | | |
1927+-+-+-+-+-+-+-+-+
1928Solution 2
1929+-+-+-+-+-+-+-+-+
1930|Q| | | | | | | |
1931+-+-+-+-+-+-+-+-+
1932| | | | | |Q| | |
1933+-+-+-+-+-+-+-+-+
1934| | | | | | | |Q|
1935+-+-+-+-+-+-+-+-+
1936| | |Q| | | | | |
1937+-+-+-+-+-+-+-+-+
1938| | | | | | |Q| |
1939+-+-+-+-+-+-+-+-+
1940| | | |Q| | | | |
1941+-+-+-+-+-+-+-+-+
1942| |Q| | | | | | |
1943+-+-+-+-+-+-+-+-+
1944| | | | |Q| | | |
1945+-+-+-+-+-+-+-+-+
1946
1947>>> print(count, "solutions in all.")
194892 solutions in all.
1949
1950And run a Knight's Tour on a 10x10 board.  Note that there are about
195120,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
1952
1953>>> k = Knights(10, 10)
1954>>> LIMIT = 2
1955>>> count = 0
1956>>> for x in k.solve():
1957...     count += 1
1958...     if count <= LIMIT:
1959...         print("Solution", count)
1960...         k.printsolution(x)
1961...     else:
1962...         break
1963Solution 1
1964+---+---+---+---+---+---+---+---+---+---+
1965|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1966+---+---+---+---+---+---+---+---+---+---+
1967| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1968+---+---+---+---+---+---+---+---+---+---+
1969| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1970+---+---+---+---+---+---+---+---+---+---+
1971| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1972+---+---+---+---+---+---+---+---+---+---+
1973| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1974+---+---+---+---+---+---+---+---+---+---+
1975| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1976+---+---+---+---+---+---+---+---+---+---+
1977| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
1978+---+---+---+---+---+---+---+---+---+---+
1979| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
1980+---+---+---+---+---+---+---+---+---+---+
1981| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
1982+---+---+---+---+---+---+---+---+---+---+
1983| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
1984+---+---+---+---+---+---+---+---+---+---+
1985Solution 2
1986+---+---+---+---+---+---+---+---+---+---+
1987|  1| 58| 27| 34|  3| 40| 29| 10|  5|  8|
1988+---+---+---+---+---+---+---+---+---+---+
1989| 26| 35|  2| 57| 28| 33|  4|  7| 30| 11|
1990+---+---+---+---+---+---+---+---+---+---+
1991| 59|100| 73| 36| 41| 56| 39| 32|  9|  6|
1992+---+---+---+---+---+---+---+---+---+---+
1993| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1994+---+---+---+---+---+---+---+---+---+---+
1995| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1996+---+---+---+---+---+---+---+---+---+---+
1997| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1998+---+---+---+---+---+---+---+---+---+---+
1999| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
2000+---+---+---+---+---+---+---+---+---+---+
2001| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
2002+---+---+---+---+---+---+---+---+---+---+
2003| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
2004+---+---+---+---+---+---+---+---+---+---+
2005| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
2006+---+---+---+---+---+---+---+---+---+---+
2007"""
2008
2009weakref_tests = """\
Generators are weakly referenceable:
2011
2012>>> import weakref
2013>>> def gen():
2014...     yield 'foo!'
2015...
2016>>> wr = weakref.ref(gen)
2017>>> wr() is gen
2018True
2019>>> p = weakref.proxy(gen)
2020
Generator-iterators are weakly referenceable as well:
2022
2023>>> gi = gen()
2024>>> wr = weakref.ref(gi)
2025>>> wr() is gi
2026True
2027>>> p = weakref.proxy(gi)
2028>>> list(p)
2029['foo!']
2030
2031"""
2032
2033coroutine_tests = """\
2034>>> from test.support import gc_collect
2035
2036Sending a value into a started generator:
2037
2038>>> def f():
2039...     print((yield 1))
2040...     yield 2
2041>>> g = f()
2042>>> next(g)
20431
2044>>> g.send(42)
204542
20462
2047
2048Sending a value into a new generator produces a TypeError:
2049
2050>>> f().send("foo")
2051Traceback (most recent call last):
2052...
2053TypeError: can't send non-None value to a just-started generator
2054
2055
2056Yield by itself yields None:
2057
2058>>> def f(): yield
2059>>> list(f())
2060[None]
2061
2062
Yield is allowed only in the outermost iterable of a generator expression:
2064
2065>>> def f(): list(i for i in [(yield 26)])
2066>>> type(f())
2067<class 'generator'>
2068
2069
2070A yield expression with augmented assignment.
2071
2072>>> def coroutine(seq):
2073...     count = 0
2074...     while count < 200:
2075...         count += yield
2076...         seq.append(count)
2077>>> seq = []
2078>>> c = coroutine(seq)
2079>>> next(c)
2080>>> print(seq)
2081[]
2082>>> c.send(10)
2083>>> print(seq)
2084[10]
2085>>> c.send(10)
2086>>> print(seq)
2087[10, 20]
2088>>> c.send(10)
2089>>> print(seq)
2090[10, 20, 30]
2091
2092
2093Check some syntax errors for yield expressions:
2094
2095>>> f=lambda: (yield 1),(yield 2)
2096Traceback (most recent call last):
2097  ...
2098SyntaxError: 'yield' outside function
2099
2100>>> def f(): x = yield = y
2101Traceback (most recent call last):
2102  ...
2103SyntaxError: assignment to yield expression not possible
2104
2105>>> def f(): (yield bar) = y
2106Traceback (most recent call last):
2107  ...
2108SyntaxError: cannot assign to yield expression here. Maybe you meant '==' instead of '='?
2109
2110>>> def f(): (yield bar) += y
2111Traceback (most recent call last):
2112  ...
2113SyntaxError: 'yield expression' is an illegal expression for augmented assignment
2114
2115
2116Now check some throw() conditions:
2117
2118>>> def f():
2119...     while True:
2120...         try:
2121...             print((yield))
2122...         except ValueError as v:
2123...             print("caught ValueError (%s)" % (v))
2124>>> import sys
2125>>> g = f()
2126>>> next(g)
2127
2128>>> g.throw(ValueError) # type only
2129caught ValueError ()
2130
2131>>> g.throw(ValueError("xyz"))  # value only
2132caught ValueError (xyz)
2133
2134>>> g.throw(ValueError, ValueError(1))   # value+matching type
2135caught ValueError (1)
2136
2137>>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
2138caught ValueError (1)
2139
2140>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
2141caught ValueError (1)
2142
2143>>> g.throw(ValueError(1), "foo")       # bad args
2144Traceback (most recent call last):
2145  ...
2146TypeError: instance exception may not have a separate value
2147
2148>>> g.throw(ValueError, "foo", 23)      # bad args
2149Traceback (most recent call last):
2150  ...
2151TypeError: throw() third argument must be a traceback object
2152
2153>>> g.throw("abc")
2154Traceback (most recent call last):
2155  ...
2156TypeError: exceptions must be classes or instances deriving from BaseException, not str
2157
2158>>> g.throw(0)
2159Traceback (most recent call last):
2160  ...
2161TypeError: exceptions must be classes or instances deriving from BaseException, not int
2162
2163>>> g.throw(list)
2164Traceback (most recent call last):
2165  ...
2166TypeError: exceptions must be classes or instances deriving from BaseException, not type
2167
2168>>> def throw(g,exc):
2169...     try:
2170...         raise exc
2171...     except:
2172...         g.throw(*sys.exc_info())
2173>>> throw(g,ValueError) # do it with traceback included
2174caught ValueError ()
2175
2176>>> g.send(1)
21771
2178
2179>>> throw(g,TypeError)  # terminate the generator
2180Traceback (most recent call last):
2181  ...
2182TypeError
2183
2184>>> print(g.gi_frame)
2185None
2186
2187>>> g.send(2)
2188Traceback (most recent call last):
2189  ...
2190StopIteration
2191
2192>>> g.throw(ValueError,6)       # throw on closed generator
2193Traceback (most recent call last):
2194  ...
2195ValueError: 6
2196
2197>>> f().throw(ValueError,7)     # throw on just-opened generator
2198Traceback (most recent call last):
2199  ...
2200ValueError: 7
2201
2202Plain "raise" inside a generator should preserve the traceback (#13188).
2203The traceback should have 3 levels:
2204- g.throw()
2205- f()
2206- 1/0
2207
2208>>> def f():
2209...     try:
2210...         yield
2211...     except:
2212...         raise
2213>>> g = f()
2214>>> try:
2215...     1/0
2216... except ZeroDivisionError as v:
2217...     try:
2218...         g.throw(v)
2219...     except Exception as w:
2220...         tb = w.__traceback__
2221>>> levels = 0
2222>>> while tb:
2223...     levels += 1
2224...     tb = tb.tb_next
2225>>> levels
22263
2227
2228Now let's try closing a generator:
2229
2230>>> def f():
2231...     try: yield
2232...     except GeneratorExit:
2233...         print("exiting")
2234
2235>>> g = f()
2236>>> next(g)
2237>>> g.close()
2238exiting
2239>>> g.close()  # should be no-op now
2240
2241>>> f().close()  # close on just-opened generator should be fine
2242
2243>>> def f(): yield      # an even simpler generator
2244>>> f().close()         # close before opening
2245>>> g = f()
2246>>> next(g)
2247>>> g.close()           # close normally
2248
2249And finalization:
2250
2251>>> def f():
2252...     try: yield
2253...     finally:
2254...         print("exiting")
2255
2256>>> g = f()
2257>>> next(g)
2258>>> del g; gc_collect()  # For PyPy or other GCs.
2259exiting
2260
2261
2262GeneratorExit is not caught by except Exception:
2263
2264>>> def f():
2265...     try: yield
2266...     except Exception:
2267...         print('except')
2268...     finally:
2269...         print('finally')
2270
2271>>> g = f()
2272>>> next(g)
2273>>> del g; gc_collect()  # For PyPy or other GCs.
2274finally
2275
2276
2277Now let's try some ill-behaved generators:
2278
2279>>> def f():
2280...     try: yield
2281...     except GeneratorExit:
2282...         yield "foo!"
2283>>> g = f()
2284>>> next(g)
2285>>> g.close()
2286Traceback (most recent call last):
2287  ...
2288RuntimeError: generator ignored GeneratorExit
2289>>> g.close()
2290
2291
Our ill-behaved generator should also misbehave during finalization, with the
error surfacing as an unraisable exception:
2293
2294>>> with support.catch_unraisable_exception() as cm:
2295...     g = f()
2296...     next(g)
2297...     del g
2298...
2299...     cm.unraisable.exc_type == RuntimeError
2300...     "generator ignored GeneratorExit" in str(cm.unraisable.exc_value)
2301...     cm.unraisable.exc_traceback is not None
2302True
2303True
2304True
2305
2306And errors thrown during closing should propagate:
2307
2308>>> def f():
2309...     try: yield
2310...     except GeneratorExit:
2311...         raise TypeError("fie!")
2312>>> g = f()
2313>>> next(g)
2314>>> g.close()
2315Traceback (most recent call last):
2316  ...
2317TypeError: fie!
2318
2319
2320Ensure that various yield expression constructs make their
2321enclosing function a generator:
2322
2323>>> def f(): x += yield
2324>>> type(f())
2325<class 'generator'>
2326
2327>>> def f(): x = yield
2328>>> type(f())
2329<class 'generator'>
2330
2331>>> def f(): lambda x=(yield): 1
2332>>> type(f())
2333<class 'generator'>
2334
2335>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
2336>>> data = [1,2]
2337>>> g = f(data)
2338>>> type(g)
2339<class 'generator'>
2340>>> g.send(None)
2341'a'
2342>>> data
2343[1, 2]
2344>>> g.send(0)
2345'b'
2346>>> data
2347[27, 2]
2348>>> try: g.send(1)
2349... except StopIteration: pass
2350>>> data
2351[27, 27]
2352
2353"""
2354
2355refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so that the routine refleak
tests will trigger if it ever becomes uncleanable again.
2359
2360>>> import itertools
2361>>> def leak():
2362...     class gen:
2363...         def __iter__(self):
2364...             return self
2365...         def __next__(self):
2366...             return self.item
2367...     g = gen()
2368...     head, tail = itertools.tee(g)
2369...     g.item = head
2370...     return head
2371>>> it = leak()
2372
2373Make sure to also test the involvement of the tee-internal teedataobject,
2374which stores returned items.
2375
2376>>> item = next(it)
2377
2378
2379
2380This test leaked at one point due to generator finalization/destruction.
2381It was copied from Lib/test/leakers/test_generator_cycle.py before the file
2382was removed.
2383
2384>>> def leak():
2385...    def gen():
2386...        while True:
2387...            yield g
2388...    g = gen()
2389
2390>>> leak()
2391
2392
2393
This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We use catch_unraisable_exception() to
capture the error (instead of letting it be printed to stderr) and to
double-check that we actually tested what we wanted to test.
2400
2401>>> from test import support
2402>>> class Leaker:
2403...     def __del__(self):
2404...         def invoke(message):
2405...             raise RuntimeError(message)
2406...         invoke("del failed")
2407...
2408>>> with support.catch_unraisable_exception() as cm:
2409...     l = Leaker()
2410...     del l
2411...
2412...     cm.unraisable.object == Leaker.__del__
2413...     cm.unraisable.exc_type == RuntimeError
2414...     str(cm.unraisable.exc_value) == "del failed"
2415...     cm.unraisable.exc_traceback is not None
2416True
2417True
2418True
2419True
2420
2421
These refleak tests should perhaps live in a test file of their own;
test_generators just happened to be the test that drew them out.
2424
2425"""
2426
2427__test__ = {"tut":      tutorial_tests,
2428            "pep":      pep_tests,
2429            "email":    email_tests,
2430            "fun":      fun_tests,
2431            "syntax":   syntax_tests,
2432            "conjoin":  conjoin_tests,
2433            "weakref":  weakref_tests,
2434            "coroutine":  coroutine_tests,
2435            "refleaks": refleaks_tests,
2436            }
2437
2438def load_tests(loader, tests, pattern):
2439    tests.addTest(doctest.DocTestSuite())
2440    return tests
2441
2442
2443if __name__ == "__main__":
2444    unittest.main()
2445