Core Development > Code coverage > Lib/test/test_generators.py

Python code coverage for Lib/test/test_generators.py

# count | content
1n/aimport copy
2n/aimport gc
3n/aimport pickle
4n/aimport sys
5n/aimport unittest
6n/aimport warnings
7n/aimport weakref
8n/aimport inspect
9n/aimport types
10n/a
11n/afrom test import support
12n/a
13n/a
class FinalizationTest(unittest.TestCase):
    """Tests for generator finalization: frame resurrection and cleanup of
    generators caught in reference cycles."""

    def test_frame_resurrect(self):
        # A generator frame can be resurrected by a generator's finalization.
        def gen():
            nonlocal frame
            try:
                yield
            finally:
                # Runs during finalization of the generator; stashing the
                # frame here makes it outlive the generator object itself.
                frame = sys._getframe()

        g = gen()
        wr = weakref.ref(g)
        next(g)
        del g
        support.gc_collect()
        # The generator object itself is gone...
        self.assertIs(wr(), None)
        # ...but its frame was resurrected by the finally block above.
        self.assertTrue(frame)
        del frame
        support.gc_collect()

    def test_refcycle(self):
        # A generator caught in a refcycle gets finalized anyway.
        old_garbage = gc.garbage[:]
        finalized = False
        def gen():
            nonlocal finalized
            try:
                g = yield
                yield 1
            finally:
                finalized = True

        g = gen()
        next(g)
        # Send the generator into itself: its frame now references g,
        # creating a cycle (refcount > 2: our local, getrefcount arg, frame).
        g.send(g)
        self.assertGreater(sys.getrefcount(g), 2)
        self.assertFalse(finalized)
        del g
        support.gc_collect()
        # The cyclic GC must still have run the finally block...
        self.assertTrue(finalized)
        # ...without leaving anything uncollectable behind.
        self.assertEqual(gc.garbage, old_garbage)

    def test_lambda_generator(self):
        # Issue #23192: Test that a lambda returning a generator behaves
        # like the equivalent function
        f = lambda: (yield 1)
        def g(): return (yield 1)

        # test 'yield from'
        f2 = lambda: (yield from g())
        def g2(): return (yield from g())

        f3 = lambda: (yield from f())
        def g3(): return (yield from f())

        for gen_fun in (f, g, f2, g2, f3, g3):
            gen = gen_fun()
            self.assertEqual(next(gen), 1)
            with self.assertRaises(StopIteration) as cm:
                gen.send(2)
            # The value sent in becomes the generator's return value,
            # carried on StopIteration.value.
            self.assertEqual(cm.exception.value, 2)
77n/a
class GeneratorTest(unittest.TestCase):
    """Generator object metadata (__name__/__qualname__) and the fact that
    generators can be neither copied nor pickled."""

    def test_name(self):
        def func():
            yield 1

        # A fresh generator inherits both names from its function.
        gen = func()
        self.assertEqual(gen.__name__, "func")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.func")

        # Both attributes are writable on the generator object itself.
        gen.__name__ = "name"
        gen.__qualname__ = "qualname"
        self.assertEqual(gen.__name__, "name")
        self.assertEqual(gen.__qualname__, "qualname")

        # They must be strings and cannot be deleted.
        for attr in ('__name__', '__qualname__'):
            self.assertRaises(TypeError, setattr, gen, attr, 123)
            self.assertRaises(TypeError, delattr, gen, attr)

        # Renaming the function only affects generators created afterwards.
        func.__qualname__ = "func_qualname"
        func.__name__ = "func_name"
        gen = func()
        self.assertEqual(gen.__name__, "func_name")
        self.assertEqual(gen.__qualname__, "func_qualname")

        # A genexp gets the conventional placeholder name.
        gen = (x for x in range(10))
        self.assertEqual(gen.__name__, "<genexpr>")
        self.assertEqual(gen.__qualname__,
                         "GeneratorTest.test_name.<locals>.<genexpr>")

    def test_copy(self):
        def f():
            yield 1
        # Generators do not support the copy protocol.
        with self.assertRaises(TypeError):
            copy.copy(f())

    def test_pickle(self):
        def f():
            yield 1
        g = f()
        # Nor can they be pickled, under any protocol.
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            with self.assertRaises((TypeError, pickle.PicklingError)):
                pickle.dumps(g, proto)
131n/a
class ExceptionTest(unittest.TestCase):
    # Tests for the issue #23353: check that the currently handled exception
    # is correctly saved/restored in PyEval_EvalFrameEx().

    def test_except_throw(self):
        # gen.throw() must make the thrown exception the "currently handled"
        # one inside the generator, and restore the caller's state afterwards.
        def store_raise_exc_generator():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
            except Exception as exc:
                # exception raised by gen.throw(exc)
                self.assertEqual(sys.exc_info()[0], ValueError)
                self.assertIsNone(exc.__context__)
                yield

                # ensure that the exception is not lost
                self.assertEqual(sys.exc_info()[0], ValueError)
                yield

                # we should be able to raise back the ValueError
                raise

        make = store_raise_exc_generator()
        next(make)

        try:
            raise ValueError()
        except Exception as exc:
            try:
                make.throw(exc)
            except Exception:
                pass

        next(make)
        with self.assertRaises(ValueError) as cm:
            next(make)
        # The bare re-raise must not have gained a spurious __context__.
        self.assertIsNone(cm.exception.__context__)

        # Caller's exception state is fully restored.
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_next(self):
        # A generator resumed from inside an except block sees the caller's
        # currently handled exception.
        def gen():
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield "done"

        g = gen()
        try:
            raise ValueError
        except Exception:
            self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_gen_except(self):
        # Exception context chaining across a yield boundary.
        def gen():
            try:
                self.assertEqual(sys.exc_info()[0], None)
                yield
                # we are called from "except ValueError:", TypeError must
                # inherit ValueError in its context
                raise TypeError()
            except TypeError as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # here we are still called from the "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            # Resumed from outside any except block: state is clean again.
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception:
            next(g)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_except_throw_exception_context(self):
        # Same as above but the ValueError arrives via gen.throw().
        def gen():
            try:
                try:
                    self.assertEqual(sys.exc_info()[0], None)
                    yield
                except ValueError:
                    # we are called from "except ValueError:"
                    self.assertEqual(sys.exc_info()[0], ValueError)
                    raise TypeError()
            except Exception as exc:
                self.assertEqual(sys.exc_info()[0], TypeError)
                self.assertEqual(type(exc.__context__), ValueError)
            # we are still called from "except ValueError:"
            self.assertEqual(sys.exc_info()[0], ValueError)
            yield
            self.assertIsNone(sys.exc_info()[0])
            yield "done"

        g = gen()
        next(g)
        try:
            raise ValueError
        except Exception as exc:
            g.throw(exc)

        self.assertEqual(next(g), "done")
        self.assertEqual(sys.exc_info(), (None, None, None))

    def test_stopiteration_warning(self):
        # See also PEP 479.
        # NOTE(review): this expects the transitional (pre-3.7) PEP 479
        # behaviour, where StopIteration escaping a generator body raises a
        # DeprecationWarning; from 3.7 on it becomes RuntimeError — confirm
        # the target interpreter version before relying on this test.

        def gen():
            raise StopIteration
            yield

        with self.assertRaises(StopIteration), \
             self.assertWarnsRegex(DeprecationWarning, "StopIteration"):

            next(gen())

        with self.assertRaisesRegex(DeprecationWarning,
                                    "generator .* raised StopIteration"), \
             warnings.catch_warnings():

            warnings.simplefilter('error')
            next(gen())


    def test_tutorial_stopiteration(self):
        # Raise StopIteration" stops the generator too:
        # NOTE(review): also assumes pre-3.7 PEP 479 DeprecationWarning
        # behaviour (see test_stopiteration_warning above).

        def f():
            yield 1
            raise StopIteration
            yield 2 # never reached

        g = f()
        self.assertEqual(next(g), 1)

        with self.assertWarnsRegex(DeprecationWarning, "StopIteration"):
            with self.assertRaises(StopIteration):
                next(g)

        with self.assertRaises(StopIteration):
            # This time StopIteration isn't raised from the generator's body,
            # hence no warning.
            next(g)

    def test_return_tuple(self):
        # A returned tuple must arrive intact, not be mistaken for
        # StopIteration constructor arguments.
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send((2,))
        self.assertEqual(cm.exception.value, (2,))

    def test_return_stopiteration(self):
        # Returning a StopIteration instance must wrap it, not merge it
        # with the StopIteration used to signal exhaustion.
        def g():
            return (yield 1)

        gen = g()
        self.assertEqual(next(gen), 1)
        with self.assertRaises(StopIteration) as cm:
            gen.send(StopIteration(2))
        self.assertIsInstance(cm.exception.value, StopIteration)
        self.assertEqual(cm.exception.value.value, 2)
301n/a
class YieldFromTests(unittest.TestCase):
    """gi_yieldfrom must expose the delegate iterator only while the outer
    generator is suspended inside a ``yield from``."""

    def test_generator_gi_yieldfrom(self):
        def check(state):
            # Helper: assert the outer generator's current state.
            self.assertEqual(inspect.getgeneratorstate(gen_b), state)

        # Note: the inner generator must stay named 'a' — its co_name is
        # asserted below.
        def a():
            # While running (even via delegation), no delegate is reported.
            check(inspect.GEN_RUNNING)
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield
            check(inspect.GEN_RUNNING)
            self.assertIsNone(gen_b.gi_yieldfrom)

        def b():
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield from a()
            self.assertIsNone(gen_b.gi_yieldfrom)
            yield
            self.assertIsNone(gen_b.gi_yieldfrom)

        gen_b = b()
        check(inspect.GEN_CREATED)
        self.assertIsNone(gen_b.gi_yieldfrom)

        # First resume: suspended inside ``yield from a()`` — delegate visible.
        gen_b.send(None)
        check(inspect.GEN_SUSPENDED)
        self.assertEqual(gen_b.gi_yieldfrom.gi_code.co_name, 'a')

        # Second resume: a() finished; suspended at b's own yield.
        gen_b.send(None)
        check(inspect.GEN_SUSPENDED)
        self.assertIsNone(gen_b.gi_yieldfrom)

        # Exhaust the generator (it yields nothing further).
        self.assertEqual(list(gen_b), [])
        check(inspect.GEN_CLOSED)
        self.assertIsNone(gen_b.gi_yieldfrom)
334n/a
# Doctest payload: generator basics from the tutorial.
# NOTE(review): leading whitespace was collapsed by the coverage-report
# extraction; the indentation below is reconstructed — verify against the
# original Lib/test/test_generators.py before trusting exact spacing.
tutorial_tests = """
Let's try a simple generator:

    >>> def f():
    ...    yield 1
    ...    yield 2

    >>> for i in f():
    ...     print(i)
    1
    2
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    2

"Falling off the end" stops the generator:

    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
    StopIteration

"return" also stops the generator:

    >>> def f():
    ...     yield 1
    ...     return
    ...     yield 2 # never reached
    ...
    >>> g = f()
    >>> next(g)
    1
    >>> next(g)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 3, in f
    StopIteration
    >>> next(g) # once stopped, can't be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration

However, "return" and StopIteration are not exactly equivalent:

    >>> def g1():
    ...     try:
    ...         return
    ...     except:
    ...         yield 1
    ...
    >>> list(g1())
    []

    >>> def g2():
    ...     try:
    ...         raise StopIteration
    ...     except:
    ...         yield 42
    >>> print(list(g2()))
    [42]

This may be surprising at first:

    >>> def g3():
    ...     try:
    ...         return
    ...     finally:
    ...         yield 1
    ...
    >>> list(g3())
    [1]

Let's create an alternate range() function implemented as a generator:

    >>> def yrange(n):
    ...     for i in range(n):
    ...         yield i
    ...
    >>> list(yrange(5))
    [0, 1, 2, 3, 4]

Generators always return to the most recent caller:

    >>> def creator():
    ...     r = yrange(5)
    ...     print("creator", next(r))
    ...     return r
    ...
    >>> def caller():
    ...     r = creator()
    ...     for i in r:
    ...         print("caller", i)
    ...
    >>> caller()
    creator 0
    caller 1
    caller 2
    caller 3
    caller 4

Generators can call other generators:

    >>> def zrange(n):
    ...     for i in yrange(n):
    ...         yield i
    ...
    >>> list(zrange(5))
    [0, 1, 2, 3, 4]

"""
448n/a
# The examples from PEP 255.
# NOTE(review): indentation reconstructed — the coverage-report extraction
# collapsed leading whitespace in this doctest payload.
pep_tests = """

Specification:  Yield

    Restriction:  A generator cannot be resumed while it is actively
    running:

    >>> def g():
    ...     i = next(me)
    ...     yield i
    >>> me = g()
    >>> next(me)
    Traceback (most recent call last):
     ...
     File "<string>", line 2, in g
    ValueError: generator already executing

Specification: Return

    Note that return isn't always equivalent to raising StopIteration:  the
    difference lies in how enclosing try/except constructs are treated.
    For example,

        >>> def f1():
        ...     try:
        ...         return
        ...     except:
        ...        yield 1
        >>> print(list(f1()))
        []

    because, as in any function, return simply exits, but

        >>> def f2():
        ...     try:
        ...         raise StopIteration
        ...     except:
        ...         yield 42
        >>> print(list(f2()))
        [42]

    because StopIteration is captured by a bare "except", as is any
    exception.

Specification: Generators and Exception Propagation

    >>> def f():
    ...     return 1//0
    >>> def g():
    ...     yield f()  # the zero division exception propagates
    ...     yield 42   # and we'll never get here
    >>> k = g()
    >>> next(k)
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
      File "<stdin>", line 2, in g
      File "<stdin>", line 2, in f
    ZeroDivisionError: integer division or modulo by zero
    >>> next(k)  # and the generator cannot be resumed
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    >>>

Specification: Try/Except/Finally

    >>> def f():
    ...     try:
    ...         yield 1
    ...         try:
    ...             yield 2
    ...             1//0
    ...             yield 3  # never get here
    ...         except ZeroDivisionError:
    ...             yield 4
    ...             yield 5
    ...             raise
    ...         except:
    ...             yield 6
    ...         yield 7     # the "raise" above stops this
    ...     except:
    ...         yield 8
    ...     yield 9
    ...     try:
    ...         x = 12
    ...     finally:
    ...         yield 10
    ...     yield 11
    >>> print(list(f()))
    [1, 2, 4, 5, 8, 9, 10, 11]
    >>>

Guido's binary tree example.

    >>> # A binary tree class.
    >>> class Tree:
    ...
    ...     def __init__(self, label, left=None, right=None):
    ...         self.label = label
    ...         self.left = left
    ...         self.right = right
    ...
    ...     def __repr__(self, level=0, indent="    "):
    ...         s = level*indent + repr(self.label)
    ...         if self.left:
    ...             s = s + "\\n" + self.left.__repr__(level+1, indent)
    ...         if self.right:
    ...             s = s + "\\n" + self.right.__repr__(level+1, indent)
    ...         return s
    ...
    ...     def __iter__(self):
    ...         return inorder(self)

    >>> # Create a Tree from a list.
    >>> def tree(list):
    ...     n = len(list)
    ...     if n == 0:
    ...         return []
    ...     i = n // 2
    ...     return Tree(list[i], tree(list[:i]), tree(list[i+1:]))

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")

    >>> # A recursive generator that generates Tree labels in in-order.
    >>> def inorder(t):
    ...     if t:
    ...         for x in inorder(t.left):
    ...             yield x
    ...         yield t.label
    ...         for x in inorder(t.right):
    ...             yield x

    >>> # Show it off: create a tree.
    >>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
    >>> # Print the nodes of the tree in in-order.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

    >>> # A non-recursive generator.
    >>> def inorder(node):
    ...     stack = []
    ...     while node:
    ...         while node.left:
    ...             stack.append(node)
    ...             node = node.left
    ...         yield node.label
    ...         while not node.right:
    ...             try:
    ...                 node = stack.pop()
    ...             except IndexError:
    ...                 return
    ...             yield node.label
    ...         node = node.right

    >>> # Exercise the non-recursive generator.
    >>> for x in t:
    ...     print(' '+x, end='')
     A B C D E F G H I J K L M N O P Q R S T U V W X Y Z

"""
613n/a
# Examples from Iterator-List and Python-Dev and c.l.py.
# NOTE(review): indentation reconstructed — the coverage-report extraction
# collapsed leading whitespace in this doctest payload.
email_tests = """

The difference between yielding None and returning it.

>>> def g():
...     for i in range(3):
...         yield None
...     yield None
...     return
>>> list(g())
[None, None, None, None]

Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.

>>> def g():
...     yield 1
...     try:
...         raise StopIteration
...     except:
...         yield 2
...     yield 3
>>> list(g())
[1, 2, 3]

Next one was posted to c.l.py.

>>> def gcomb(x, k):
...     "Generate all combinations of k elements from list x."
...
...     if k > len(x):
...         return
...     if k == 0:
...         yield []
...     else:
...         first, rest = x[0], x[1:]
...         # A combination does or doesn't contain first.
...         # If it does, the remainder is a k-1 comb of rest.
...         for c in gcomb(rest, k-1):
...             c.insert(0, first)
...             yield c
...         # If it doesn't contain first, it's a k comb of rest.
...         for c in gcomb(rest, k):
...             yield c

>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
...     print("%d-combs of %s:" % (k, seq))
...     for c in gcomb(seq, k):
...         print("   ", c)
0-combs of [1, 2, 3, 4]:
    []
1-combs of [1, 2, 3, 4]:
    [1]
    [2]
    [3]
    [4]
2-combs of [1, 2, 3, 4]:
    [1, 2]
    [1, 3]
    [1, 4]
    [2, 3]
    [2, 4]
    [3, 4]
3-combs of [1, 2, 3, 4]:
    [1, 2, 3]
    [1, 2, 4]
    [1, 3, 4]
    [2, 3, 4]
4-combs of [1, 2, 3, 4]:
    [1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:

From the Iterators list, about the types of these things.

>>> def g():
...     yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'gi_yieldfrom', 'send', 'throw']
>>> from test.support import HAVE_DOCSTRINGS
>>> print(i.__next__.__doc__ if HAVE_DOCSTRINGS else 'Implement next(self).')
Implement next(self).
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True

And more, added later.

>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
  ...
AttributeError: readonly attribute
>>> def g():
...     yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0

A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators

>>> class disjointSet:
...     def __init__(self, name):
...         self.name = name
...         self.parent = None
...         self.generator = self.generate()
...
...     def generate(self):
...         while not self.parent:
...             yield self
...         for x in self.parent.generator:
...             yield x
...
...     def find(self):
...         return next(self.generator)
...
...     def union(self, parent):
...         if self.parent:
...             raise ValueError("Sorry, I'm not a root!")
...         self.parent = parent
...
...     def __str__(self):
...         return self.name

>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]

>>> import random
>>> gen = random.Random(42)
>>> while 1:
...     for s in sets:
...         print(" %s->%s" % (s, s.find()), end='')
...     print()
...     if len(roots) > 1:
...         s1 = gen.choice(roots)
...         roots.remove(s1)
...         s2 = gen.choice(roots)
...         s1.union(s2)
...         print("merged", s1, "into", s2)
...     else:
...         break
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
 A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
 A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
 A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
 A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
 A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
 A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
 A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
 A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
 A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H

"""
# Emacs turd '
804n/a
# Fun tests (for sufficiently warped notions of "fun").
# NOTE(review): indentation reconstructed — the coverage-report extraction
# collapsed leading whitespace in this doctest payload.
fun_tests = """

Build up to a recursive Sieve of Eratosthenes generator.

>>> def firstn(g, n):
...     return [next(g) for i in range(n)]

>>> def intsfrom(i):
...     while 1:
...         yield i
...         i += 1

>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]

>>> def exclude_multiples(n, ints):
...     for i in ints:
...         if i % n:
...             yield i

>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]

>>> def sieve(ints):
...     prime = next(ints)
...     yield prime
...     not_divisible_by_prime = exclude_multiples(prime, ints)
...     for p in sieve(not_divisible_by_prime):
...         yield p

>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]


Another famous problem:  generate all integers of the form
    2**i * 3**j * 5**k
in increasing order, where i,j,k >= 0.  Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.

>>> def times(n, g):
...     for i in g:
...         yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]

>>> def merge(g, h):
...     ng = next(g)
...     nh = next(h)
...     while 1:
...         if ng < nh:
...             yield ng
...             ng = next(g)
...         elif ng > nh:
...             yield nh
...             nh = next(h)
...         else:
...             yield ng
...             ng = next(g)
...             nh = next(h)

The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator.  Note that me_times2 (etc) each need to see every element in the
result sequence.  So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).

>>> def m235():
...     yield 1
...     me_times2 = times(2, m235())
...     me_times3 = times(3, m235())
...     me_times5 = times(5, m235())
...     for i in merge(merge(me_times2,
...                          me_times3),
...                    me_times5):
...         yield i

Don't print "too many" of these -- the implementation above is extremely
inefficient:  each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts.  Very odd:  when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.

>>> result = m235()
>>> for i in range(3):
...     print(firstn(result, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]

Heh.  Here's one way to get a shared list, complete with an excruciating
namespace renaming trick.  The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().

>>> class LazyList:
...     def __init__(self, g):
...         self.sofar = []
...         self.fetch = g.__next__
...
...     def __getitem__(self, i):
...         sofar, fetch = self.sofar, self.fetch
...         while i >= len(sofar):
...             sofar.append(fetch())
...         return sofar[i]

>>> def m235():
...     yield 1
...     # Gack:  m235 below actually refers to a LazyList.
...     me_times2 = times(2, m235)
...     me_times3 = times(3, m235)
...     me_times5 = times(5, m235)
...     for i in merge(merge(me_times2,
...                          me_times3),
...                    me_times5):
...         yield i

Print as many of these as you like -- *this* implementation is memory-
efficient.

>>> m235 = LazyList(m235())
>>> for i in range(5):
...     print([m235[j] for j in range(15*i, 15*(i+1))])
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]

Ye olde Fibonacci generator, LazyList style.

>>> def fibgen(a, b):
...
...     def sum(g, h):
...         while 1:
...             yield next(g) + next(h)
...
...     def tail(g):
...         next(g)    # throw first away
...         for x in g:
...             yield x
...
...     yield a
...     yield b
...     for s in sum(iter(fib),
...                  tail(iter(fib))):
...         yield s

>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]


Running after your tail with itertools.tee (new in version 2.4)

The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list while the production algorithm
suppose the list as already produced by recursively calling itself.
For these algorithms to work, they must:

- produce at least a first element without presupposing the existence of
  the rest of the list
- produce their elements in a lazy manner

To work efficiently, the beginning of the list must not be recomputed over
and over again. This is ensured in most FP languages as a built-in feature.
In python, we have to explicitly maintain a list of already computed results
and abandon genuine recursivity.

This is what had been attempted above with the LazyList class. One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows. This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.

Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".

>>> from itertools import tee
>>> def m235():
...     def _m235():
...         yield 1
...         for n in merge(times(2, m2),
...                        merge(times(3, m3),
...                              times(5, m5))):
...             yield n
...     m1 = _m235()
...     m2, m3, m5, mRes = tee(m1, 4)
...     return mRes

>>> it = m235()
>>> for i in range(5):
...     print(firstn(it, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]

The "tee" function does just what we want. It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the hamming
sequence during hours without increasing memory usage, or very little.

The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.

Ye olde Fibonacci generator, tee style.

>>> def fib():
...
...     def _isum(g, h):
...         while 1:
...             yield next(g) + next(h)
...
...     def _fib():
...         yield 1
...         yield 2
...         next(fibTail) # throw first away
...         for res in _isum(fibHead, fibTail):
...             yield res
...
...     realfib = _fib()
...     fibHead, fibTail, fibRes = tee(realfib, 3)
...     return fibRes

>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]

"""
1041n/a
1042n/a# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0
1043n/a# hackery.
1044n/a
1045n/asyntax_tests = """
1046n/a
1047n/aThese are fine:
1048n/a
1049n/a>>> def f():
1050n/a... yield 1
1051n/a... return
1052n/a
1053n/a>>> def f():
1054n/a... try:
1055n/a... yield 1
1056n/a... finally:
1057n/a... pass
1058n/a
1059n/a>>> def f():
1060n/a... try:
1061n/a... try:
1062n/a... 1//0
1063n/a... except ZeroDivisionError:
1064n/a... yield 666
1065n/a... except:
1066n/a... pass
1067n/a... finally:
1068n/a... pass
1069n/a
1070n/a>>> def f():
1071n/a... try:
1072n/a... try:
1073n/a... yield 12
1074n/a... 1//0
1075n/a... except ZeroDivisionError:
1076n/a... yield 666
1077n/a... except:
1078n/a... try:
1079n/a... x = 12
1080n/a... finally:
1081n/a... yield 12
1082n/a... except:
1083n/a... return
1084n/a>>> list(f())
1085n/a[12, 666]
1086n/a
1087n/a>>> def f():
1088n/a... yield
1089n/a>>> type(f())
1090n/a<class 'generator'>
1091n/a
1092n/a
1093n/a>>> def f():
1094n/a... if 0:
1095n/a... yield
1096n/a>>> type(f())
1097n/a<class 'generator'>
1098n/a
1099n/a
1100n/a>>> def f():
1101n/a... if 0:
1102n/a... yield 1
1103n/a>>> type(f())
1104n/a<class 'generator'>
1105n/a
1106n/a>>> def f():
1107n/a... if "":
1108n/a... yield None
1109n/a>>> type(f())
1110n/a<class 'generator'>
1111n/a
1112n/a>>> def f():
1113n/a... return
1114n/a... try:
1115n/a... if x==4:
1116n/a... pass
1117n/a... elif 0:
1118n/a... try:
1119n/a... 1//0
1120n/a... except SyntaxError:
1121n/a... pass
1122n/a... else:
1123n/a... if 0:
1124n/a... while 12:
1125n/a... x += 1
1126n/a... yield 2 # don't blink
1127n/a... f(a, b, c, d, e)
1128n/a... else:
1129n/a... pass
1130n/a... except:
1131n/a... x = 1
1132n/a... return
1133n/a>>> type(f())
1134n/a<class 'generator'>
1135n/a
1136n/a>>> def f():
1137n/a... if 0:
1138n/a... def g():
1139n/a... yield 1
1140n/a...
1141n/a>>> type(f())
1142n/a<class 'NoneType'>
1143n/a
1144n/a>>> def f():
1145n/a... if 0:
1146n/a... class C:
1147n/a... def __init__(self):
1148n/a... yield 1
1149n/a... def f(self):
1150n/a... yield 2
1151n/a>>> type(f())
1152n/a<class 'NoneType'>
1153n/a
1154n/a>>> def f():
1155n/a... if 0:
1156n/a... return
1157n/a... if 0:
1158n/a... yield 2
1159n/a>>> type(f())
1160n/a<class 'generator'>
1161n/a
1162n/aThis one caused a crash (see SF bug 567538):
1163n/a
1164n/a>>> def f():
1165n/a... for i in range(3):
1166n/a... try:
1167n/a... continue
1168n/a... finally:
1169n/a... yield i
1170n/a...
1171n/a>>> g = f()
1172n/a>>> print(next(g))
1173n/a0
1174n/a>>> print(next(g))
1175n/a1
1176n/a>>> print(next(g))
1177n/a2
1178n/a>>> print(next(g))
1179n/aTraceback (most recent call last):
1180n/aStopIteration
1181n/a
1182n/a
1183n/aTest the gi_code attribute
1184n/a
1185n/a>>> def f():
1186n/a... yield 5
1187n/a...
1188n/a>>> g = f()
1189n/a>>> g.gi_code is f.__code__
1190n/aTrue
1191n/a>>> next(g)
1192n/a5
1193n/a>>> next(g)
1194n/aTraceback (most recent call last):
1195n/aStopIteration
1196n/a>>> g.gi_code is f.__code__
1197n/aTrue
1198n/a
1199n/a
1200n/aTest the __name__ attribute and the repr()
1201n/a
1202n/a>>> def f():
1203n/a... yield 5
1204n/a...
1205n/a>>> g = f()
1206n/a>>> g.__name__
1207n/a'f'
1208n/a>>> repr(g) # doctest: +ELLIPSIS
1209n/a'<generator object f at ...>'
1210n/a
1211n/aLambdas shouldn't have their usual return behavior.
1212n/a
1213n/a>>> x = lambda: (yield 1)
1214n/a>>> list(x())
1215n/a[1]
1216n/a
1217n/a>>> x = lambda: ((yield 1), (yield 2))
1218n/a>>> list(x())
1219n/a[1, 2]
1220n/a"""
1221n/a
1222n/a# conjoin is a simple backtracking generator, named in honor of Icon's
1223n/a# "conjunction" control structure. Pass a list of no-argument functions
1224n/a# that return iterable objects. Easiest to explain by example: assume the
1225n/a# function list [x, y, z] is passed. Then conjoin acts like:
1226n/a#
1227n/a# def g():
1228n/a# values = [None] * 3
1229n/a# for values[0] in x():
1230n/a# for values[1] in y():
1231n/a# for values[2] in z():
1232n/a# yield values
1233n/a#
1234n/a# So some 3-lists of values *may* be generated, each time we successfully
1235n/a# get into the innermost loop. If an iterator fails (is exhausted) before
1236n/a# then, it "backtracks" to get the next value from the nearest enclosing
1237n/a# iterator (the one "to the left"), and starts all over again at the next
1238n/a# slot (pumps a fresh iterator). Of course this is most useful when the
1239n/a# iterators have side-effects, so that which values *can* be generated at
1240n/a# each slot depend on the values iterated at previous slots.
1241n/a
1242n/adef simple_conjoin(gs):
1243n/a
1244n/a values = [None] * len(gs)
1245n/a
1246n/a def gen(i):
1247n/a if i >= len(gs):
1248n/a yield values
1249n/a else:
1250n/a for values[i] in gs[i]():
1251n/a for x in gen(i+1):
1252n/a yield x
1253n/a
1254n/a for x in gen(0):
1255n/a yield x
1256n/a
1257n/a# That works fine, but recursing a level and checking i against len(gs) for
1258n/a# each item produced is inefficient. By doing manual loop unrolling across
1259n/a# generator boundaries, it's possible to eliminate most of that overhead.
1260n/a# This isn't worth the bother *in general* for generators, but conjoin() is
1261n/a# a core building block for some CPU-intensive generator applications.
1262n/a
1263n/adef conjoin(gs):
1264n/a
1265n/a n = len(gs)
1266n/a values = [None] * n
1267n/a
1268n/a # Do one loop nest at time recursively, until the # of loop nests
1269n/a # remaining is divisible by 3.
1270n/a
1271n/a def gen(i):
1272n/a if i >= n:
1273n/a yield values
1274n/a
1275n/a elif (n-i) % 3:
1276n/a ip1 = i+1
1277n/a for values[i] in gs[i]():
1278n/a for x in gen(ip1):
1279n/a yield x
1280n/a
1281n/a else:
1282n/a for x in _gen3(i):
1283n/a yield x
1284n/a
1285n/a # Do three loop nests at a time, recursing only if at least three more
1286n/a # remain. Don't call directly: this is an internal optimization for
1287n/a # gen's use.
1288n/a
1289n/a def _gen3(i):
1290n/a assert i < n and (n-i) % 3 == 0
1291n/a ip1, ip2, ip3 = i+1, i+2, i+3
1292n/a g, g1, g2 = gs[i : ip3]
1293n/a
1294n/a if ip3 >= n:
1295n/a # These are the last three, so we can yield values directly.
1296n/a for values[i] in g():
1297n/a for values[ip1] in g1():
1298n/a for values[ip2] in g2():
1299n/a yield values
1300n/a
1301n/a else:
1302n/a # At least 6 loop nests remain; peel off 3 and recurse for the
1303n/a # rest.
1304n/a for values[i] in g():
1305n/a for values[ip1] in g1():
1306n/a for values[ip2] in g2():
1307n/a for x in _gen3(ip3):
1308n/a yield x
1309n/a
1310n/a for x in gen(0):
1311n/a yield x
1312n/a
1313n/a# And one more approach: For backtracking apps like the Knight's Tour
1314n/a# solver below, the number of backtracking levels can be enormous (one
1315n/a# level per square, for the Knight's Tour, so that e.g. a 100x100 board
1316n/a# needs 10,000 levels). In such cases Python is likely to run out of
1317n/a# stack space due to recursion. So here's a recursion-free version of
1318n/a# conjoin too.
1319n/a# NOTE WELL: This allows large problems to be solved with only trivial
1320n/a# demands on stack space. Without explicitly resumable generators, this is
1321n/a# much harder to achieve. OTOH, this is much slower (up to a factor of 2)
1322n/a# than the fancy unrolled recursive conjoin.
1323n/a
1324n/adef flat_conjoin(gs): # rename to conjoin to run tests with this instead
1325n/a n = len(gs)
1326n/a values = [None] * n
1327n/a iters = [None] * n
1328n/a _StopIteration = StopIteration # make local because caught a *lot*
1329n/a i = 0
1330n/a while 1:
1331n/a # Descend.
1332n/a try:
1333n/a while i < n:
1334n/a it = iters[i] = gs[i]().__next__
1335n/a values[i] = it()
1336n/a i += 1
1337n/a except _StopIteration:
1338n/a pass
1339n/a else:
1340n/a assert i == n
1341n/a yield values
1342n/a
1343n/a # Backtrack until an older iterator can be resumed.
1344n/a i -= 1
1345n/a while i >= 0:
1346n/a try:
1347n/a values[i] = iters[i]()
1348n/a # Success! Start fresh at next level.
1349n/a i += 1
1350n/a break
1351n/a except _StopIteration:
1352n/a # Continue backtracking.
1353n/a i -= 1
1354n/a else:
1355n/a assert i < 0
1356n/a break
1357n/a
1358n/a# A conjoin-based N-Queens solver.
1359n/a
1360n/aclass Queens:
1361n/a def __init__(self, n):
1362n/a self.n = n
1363n/a rangen = range(n)
1364n/a
1365n/a # Assign a unique int to each column and diagonal.
1366n/a # columns: n of those, range(n).
1367n/a # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
1368n/a # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
1369n/a # based.
1370n/a # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
1371n/a # each, smallest i+j is 0, largest is 2n-2.
1372n/a
1373n/a # For each square, compute a bit vector of the columns and
1374n/a # diagonals it covers, and for each row compute a function that
1375n/a # generates the possibilities for the columns in that row.
1376n/a self.rowgenerators = []
1377n/a for i in rangen:
1378n/a rowuses = [(1 << j) | # column ordinal
1379n/a (1 << (n + i-j + n-1)) | # NW-SE ordinal
1380n/a (1 << (n + 2*n-1 + i+j)) # NE-SW ordinal
1381n/a for j in rangen]
1382n/a
1383n/a def rowgen(rowuses=rowuses):
1384n/a for j in rangen:
1385n/a uses = rowuses[j]
1386n/a if uses & self.used == 0:
1387n/a self.used |= uses
1388n/a yield j
1389n/a self.used &= ~uses
1390n/a
1391n/a self.rowgenerators.append(rowgen)
1392n/a
1393n/a # Generate solutions.
1394n/a def solve(self):
1395n/a self.used = 0
1396n/a for row2col in conjoin(self.rowgenerators):
1397n/a yield row2col
1398n/a
1399n/a def printsolution(self, row2col):
1400n/a n = self.n
1401n/a assert n == len(row2col)
1402n/a sep = "+" + "-+" * n
1403n/a print(sep)
1404n/a for i in range(n):
1405n/a squares = [" " for j in range(n)]
1406n/a squares[row2col[i]] = "Q"
1407n/a print("|" + "|".join(squares) + "|")
1408n/a print(sep)
1409n/a
1410n/a# A conjoin-based Knight's Tour solver. This is pretty sophisticated
1411n/a# (e.g., when used with flat_conjoin above, and passing hard=1 to the
1412n/a# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
1413n/a# creating 10s of thousands of generators then!), and is lengthy.
1414n/a
1415n/aclass Knights:
1416n/a def __init__(self, m, n, hard=0):
1417n/a self.m, self.n = m, n
1418n/a
1419n/a # solve() will set up succs[i] to be a list of square #i's
1420n/a # successors.
1421n/a succs = self.succs = []
1422n/a
1423n/a # Remove i0 from each of its successor's successor lists, i.e.
1424n/a # successors can't go back to i0 again. Return 0 if we can
1425n/a # detect this makes a solution impossible, else return 1.
1426n/a
1427n/a def remove_from_successors(i0, len=len):
1428n/a # If we remove all exits from a free square, we're dead:
1429n/a # even if we move to it next, we can't leave it again.
1430n/a # If we create a square with one exit, we must visit it next;
1431n/a # else somebody else will have to visit it, and since there's
1432n/a # only one adjacent, there won't be a way to leave it again.
1433n/a # Finelly, if we create more than one free square with a
1434n/a # single exit, we can only move to one of them next, leaving
1435n/a # the other one a dead end.
1436n/a ne0 = ne1 = 0
1437n/a for i in succs[i0]:
1438n/a s = succs[i]
1439n/a s.remove(i0)
1440n/a e = len(s)
1441n/a if e == 0:
1442n/a ne0 += 1
1443n/a elif e == 1:
1444n/a ne1 += 1
1445n/a return ne0 == 0 and ne1 < 2
1446n/a
1447n/a # Put i0 back in each of its successor's successor lists.
1448n/a
1449n/a def add_to_successors(i0):
1450n/a for i in succs[i0]:
1451n/a succs[i].append(i0)
1452n/a
1453n/a # Generate the first move.
1454n/a def first():
1455n/a if m < 1 or n < 1:
1456n/a return
1457n/a
1458n/a # Since we're looking for a cycle, it doesn't matter where we
1459n/a # start. Starting in a corner makes the 2nd move easy.
1460n/a corner = self.coords2index(0, 0)
1461n/a remove_from_successors(corner)
1462n/a self.lastij = corner
1463n/a yield corner
1464n/a add_to_successors(corner)
1465n/a
1466n/a # Generate the second moves.
1467n/a def second():
1468n/a corner = self.coords2index(0, 0)
1469n/a assert self.lastij == corner # i.e., we started in the corner
1470n/a if m < 3 or n < 3:
1471n/a return
1472n/a assert len(succs[corner]) == 2
1473n/a assert self.coords2index(1, 2) in succs[corner]
1474n/a assert self.coords2index(2, 1) in succs[corner]
1475n/a # Only two choices. Whichever we pick, the other must be the
1476n/a # square picked on move m*n, as it's the only way to get back
1477n/a # to (0, 0). Save its index in self.final so that moves before
1478n/a # the last know it must be kept free.
1479n/a for i, j in (1, 2), (2, 1):
1480n/a this = self.coords2index(i, j)
1481n/a final = self.coords2index(3-i, 3-j)
1482n/a self.final = final
1483n/a
1484n/a remove_from_successors(this)
1485n/a succs[final].append(corner)
1486n/a self.lastij = this
1487n/a yield this
1488n/a succs[final].remove(corner)
1489n/a add_to_successors(this)
1490n/a
1491n/a # Generate moves 3 thru m*n-1.
1492n/a def advance(len=len):
1493n/a # If some successor has only one exit, must take it.
1494n/a # Else favor successors with fewer exits.
1495n/a candidates = []
1496n/a for i in succs[self.lastij]:
1497n/a e = len(succs[i])
1498n/a assert e > 0, "else remove_from_successors() pruning flawed"
1499n/a if e == 1:
1500n/a candidates = [(e, i)]
1501n/a break
1502n/a candidates.append((e, i))
1503n/a else:
1504n/a candidates.sort()
1505n/a
1506n/a for e, i in candidates:
1507n/a if i != self.final:
1508n/a if remove_from_successors(i):
1509n/a self.lastij = i
1510n/a yield i
1511n/a add_to_successors(i)
1512n/a
1513n/a # Generate moves 3 thru m*n-1. Alternative version using a
1514n/a # stronger (but more expensive) heuristic to order successors.
1515n/a # Since the # of backtracking levels is m*n, a poor move early on
1516n/a # can take eons to undo. Smallest square board for which this
1517n/a # matters a lot is 52x52.
1518n/a def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
1519n/a # If some successor has only one exit, must take it.
1520n/a # Else favor successors with fewer exits.
1521n/a # Break ties via max distance from board centerpoint (favor
1522n/a # corners and edges whenever possible).
1523n/a candidates = []
1524n/a for i in succs[self.lastij]:
1525n/a e = len(succs[i])
1526n/a assert e > 0, "else remove_from_successors() pruning flawed"
1527n/a if e == 1:
1528n/a candidates = [(e, 0, i)]
1529n/a break
1530n/a i1, j1 = self.index2coords(i)
1531n/a d = (i1 - vmid)**2 + (j1 - hmid)**2
1532n/a candidates.append((e, -d, i))
1533n/a else:
1534n/a candidates.sort()
1535n/a
1536n/a for e, d, i in candidates:
1537n/a if i != self.final:
1538n/a if remove_from_successors(i):
1539n/a self.lastij = i
1540n/a yield i
1541n/a add_to_successors(i)
1542n/a
1543n/a # Generate the last move.
1544n/a def last():
1545n/a assert self.final in succs[self.lastij]
1546n/a yield self.final
1547n/a
1548n/a if m*n < 4:
1549n/a self.squaregenerators = [first]
1550n/a else:
1551n/a self.squaregenerators = [first, second] + \
1552n/a [hard and advance_hard or advance] * (m*n - 3) + \
1553n/a [last]
1554n/a
1555n/a def coords2index(self, i, j):
1556n/a assert 0 <= i < self.m
1557n/a assert 0 <= j < self.n
1558n/a return i * self.n + j
1559n/a
1560n/a def index2coords(self, index):
1561n/a assert 0 <= index < self.m * self.n
1562n/a return divmod(index, self.n)
1563n/a
1564n/a def _init_board(self):
1565n/a succs = self.succs
1566n/a del succs[:]
1567n/a m, n = self.m, self.n
1568n/a c2i = self.coords2index
1569n/a
1570n/a offsets = [( 1, 2), ( 2, 1), ( 2, -1), ( 1, -2),
1571n/a (-1, -2), (-2, -1), (-2, 1), (-1, 2)]
1572n/a rangen = range(n)
1573n/a for i in range(m):
1574n/a for j in rangen:
1575n/a s = [c2i(i+io, j+jo) for io, jo in offsets
1576n/a if 0 <= i+io < m and
1577n/a 0 <= j+jo < n]
1578n/a succs.append(s)
1579n/a
1580n/a # Generate solutions.
1581n/a def solve(self):
1582n/a self._init_board()
1583n/a for x in conjoin(self.squaregenerators):
1584n/a yield x
1585n/a
1586n/a def printsolution(self, x):
1587n/a m, n = self.m, self.n
1588n/a assert len(x) == m*n
1589n/a w = len(str(m*n))
1590n/a format = "%" + str(w) + "d"
1591n/a
1592n/a squares = [[None] * n for i in range(m)]
1593n/a k = 1
1594n/a for i in x:
1595n/a i1, j1 = self.index2coords(i)
1596n/a squares[i1][j1] = format % k
1597n/a k += 1
1598n/a
1599n/a sep = "+" + ("-" * w + "+") * n
1600n/a print(sep)
1601n/a for i in range(m):
1602n/a row = squares[i]
1603n/a print("|" + "|".join(row) + "|")
1604n/a print(sep)
1605n/a
1606n/aconjoin_tests = """
1607n/a
1608n/aGenerate the 3-bit binary numbers in order. This illustrates dumbest-
1609n/apossible use of conjoin, just to generate the full cross-product.
1610n/a
1611n/a>>> for c in conjoin([lambda: iter((0, 1))] * 3):
1612n/a... print(c)
1613n/a[0, 0, 0]
1614n/a[0, 0, 1]
1615n/a[0, 1, 0]
1616n/a[0, 1, 1]
1617n/a[1, 0, 0]
1618n/a[1, 0, 1]
1619n/a[1, 1, 0]
1620n/a[1, 1, 1]
1621n/a
1622n/aFor efficiency in typical backtracking apps, conjoin() yields the same list
1623n/aobject each time. So if you want to save away a full account of its
1624n/agenerated sequence, you need to copy its results.
1625n/a
1626n/a>>> def gencopy(iterator):
1627n/a... for x in iterator:
1628n/a... yield x[:]
1629n/a
1630n/a>>> for n in range(10):
1631n/a... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
1632n/a... print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
1633n/a0 1 True True
1634n/a1 2 True True
1635n/a2 4 True True
1636n/a3 8 True True
1637n/a4 16 True True
1638n/a5 32 True True
1639n/a6 64 True True
1640n/a7 128 True True
1641n/a8 256 True True
1642n/a9 512 True True
1643n/a
1644n/aAnd run an 8-queens solver.
1645n/a
1646n/a>>> q = Queens(8)
1647n/a>>> LIMIT = 2
1648n/a>>> count = 0
1649n/a>>> for row2col in q.solve():
1650n/a... count += 1
1651n/a... if count <= LIMIT:
1652n/a... print("Solution", count)
1653n/a... q.printsolution(row2col)
1654n/aSolution 1
1655n/a+-+-+-+-+-+-+-+-+
1656n/a|Q| | | | | | | |
1657n/a+-+-+-+-+-+-+-+-+
1658n/a| | | | |Q| | | |
1659n/a+-+-+-+-+-+-+-+-+
1660n/a| | | | | | | |Q|
1661n/a+-+-+-+-+-+-+-+-+
1662n/a| | | | | |Q| | |
1663n/a+-+-+-+-+-+-+-+-+
1664n/a| | |Q| | | | | |
1665n/a+-+-+-+-+-+-+-+-+
1666n/a| | | | | | |Q| |
1667n/a+-+-+-+-+-+-+-+-+
1668n/a| |Q| | | | | | |
1669n/a+-+-+-+-+-+-+-+-+
1670n/a| | | |Q| | | | |
1671n/a+-+-+-+-+-+-+-+-+
1672n/aSolution 2
1673n/a+-+-+-+-+-+-+-+-+
1674n/a|Q| | | | | | | |
1675n/a+-+-+-+-+-+-+-+-+
1676n/a| | | | | |Q| | |
1677n/a+-+-+-+-+-+-+-+-+
1678n/a| | | | | | | |Q|
1679n/a+-+-+-+-+-+-+-+-+
1680n/a| | |Q| | | | | |
1681n/a+-+-+-+-+-+-+-+-+
1682n/a| | | | | | |Q| |
1683n/a+-+-+-+-+-+-+-+-+
1684n/a| | | |Q| | | | |
1685n/a+-+-+-+-+-+-+-+-+
1686n/a| |Q| | | | | | |
1687n/a+-+-+-+-+-+-+-+-+
1688n/a| | | | |Q| | | |
1689n/a+-+-+-+-+-+-+-+-+
1690n/a
1691n/a>>> print(count, "solutions in all.")
1692n/a92 solutions in all.
1693n/a
1694n/aAnd run a Knight's Tour on a 10x10 board. Note that there are about
1695n/a20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
1696n/a
1697n/a>>> k = Knights(10, 10)
1698n/a>>> LIMIT = 2
1699n/a>>> count = 0
1700n/a>>> for x in k.solve():
1701n/a... count += 1
1702n/a... if count <= LIMIT:
1703n/a... print("Solution", count)
1704n/a... k.printsolution(x)
1705n/a... else:
1706n/a... break
1707n/aSolution 1
1708n/a+---+---+---+---+---+---+---+---+---+---+
1709n/a| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
1710n/a+---+---+---+---+---+---+---+---+---+---+
1711n/a| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
1712n/a+---+---+---+---+---+---+---+---+---+---+
1713n/a| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
1714n/a+---+---+---+---+---+---+---+---+---+---+
1715n/a| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1716n/a+---+---+---+---+---+---+---+---+---+---+
1717n/a| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1718n/a+---+---+---+---+---+---+---+---+---+---+
1719n/a| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1720n/a+---+---+---+---+---+---+---+---+---+---+
1721n/a| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
1722n/a+---+---+---+---+---+---+---+---+---+---+
1723n/a| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
1724n/a+---+---+---+---+---+---+---+---+---+---+
1725n/a| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
1726n/a+---+---+---+---+---+---+---+---+---+---+
1727n/a| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
1728n/a+---+---+---+---+---+---+---+---+---+---+
1729n/aSolution 2
1730n/a+---+---+---+---+---+---+---+---+---+---+
1731n/a| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
1732n/a+---+---+---+---+---+---+---+---+---+---+
1733n/a| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
1734n/a+---+---+---+---+---+---+---+---+---+---+
1735n/a| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
1736n/a+---+---+---+---+---+---+---+---+---+---+
1737n/a| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
1738n/a+---+---+---+---+---+---+---+---+---+---+
1739n/a| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
1740n/a+---+---+---+---+---+---+---+---+---+---+
1741n/a| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
1742n/a+---+---+---+---+---+---+---+---+---+---+
1743n/a| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
1744n/a+---+---+---+---+---+---+---+---+---+---+
1745n/a| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
1746n/a+---+---+---+---+---+---+---+---+---+---+
1747n/a| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
1748n/a+---+---+---+---+---+---+---+---+---+---+
1749n/a| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
1750n/a+---+---+---+---+---+---+---+---+---+---+
1751n/a"""
1752n/a
1753n/aweakref_tests = """\
1754n/aGenerators are weakly referencable:
1755n/a
1756n/a>>> import weakref
1757n/a>>> def gen():
1758n/a... yield 'foo!'
1759n/a...
1760n/a>>> wr = weakref.ref(gen)
1761n/a>>> wr() is gen
1762n/aTrue
1763n/a>>> p = weakref.proxy(gen)
1764n/a
1765n/aGenerator-iterators are weakly referencable as well:
1766n/a
1767n/a>>> gi = gen()
1768n/a>>> wr = weakref.ref(gi)
1769n/a>>> wr() is gi
1770n/aTrue
1771n/a>>> p = weakref.proxy(gi)
1772n/a>>> list(p)
1773n/a['foo!']
1774n/a
1775n/a"""
1776n/a
1777n/acoroutine_tests = """\
1778n/aSending a value into a started generator:
1779n/a
1780n/a>>> def f():
1781n/a... print((yield 1))
1782n/a... yield 2
1783n/a>>> g = f()
1784n/a>>> next(g)
1785n/a1
1786n/a>>> g.send(42)
1787n/a42
1788n/a2
1789n/a
1790n/aSending a value into a new generator produces a TypeError:
1791n/a
1792n/a>>> f().send("foo")
1793n/aTraceback (most recent call last):
1794n/a...
1795n/aTypeError: can't send non-None value to a just-started generator
1796n/a
1797n/a
1798n/aYield by itself yields None:
1799n/a
1800n/a>>> def f(): yield
1801n/a>>> list(f())
1802n/a[None]
1803n/a
1804n/a
1805n/a
1806n/aAn obscene abuse of a yield expression within a generator expression:
1807n/a
1808n/a>>> list((yield 21) for i in range(4))
1809n/a[21, None, 21, None, 21, None, 21, None]
1810n/a
1811n/aAnd a more sane, but still weird usage:
1812n/a
1813n/a>>> def f(): list(i for i in [(yield 26)])
1814n/a>>> type(f())
1815n/a<class 'generator'>
1816n/a
1817n/a
1818n/aA yield expression with augmented assignment.
1819n/a
1820n/a>>> def coroutine(seq):
1821n/a... count = 0
1822n/a... while count < 200:
1823n/a... count += yield
1824n/a... seq.append(count)
1825n/a>>> seq = []
1826n/a>>> c = coroutine(seq)
1827n/a>>> next(c)
1828n/a>>> print(seq)
1829n/a[]
1830n/a>>> c.send(10)
1831n/a>>> print(seq)
1832n/a[10]
1833n/a>>> c.send(10)
1834n/a>>> print(seq)
1835n/a[10, 20]
1836n/a>>> c.send(10)
1837n/a>>> print(seq)
1838n/a[10, 20, 30]
1839n/a
1840n/a
1841n/aCheck some syntax errors for yield expressions:
1842n/a
1843n/a>>> f=lambda: (yield 1),(yield 2)
1844n/aTraceback (most recent call last):
1845n/a ...
1846n/aSyntaxError: 'yield' outside function
1847n/a
1848n/a>>> def f(): x = yield = y
1849n/aTraceback (most recent call last):
1850n/a ...
1851n/aSyntaxError: assignment to yield expression not possible
1852n/a
1853n/a>>> def f(): (yield bar) = y
1854n/aTraceback (most recent call last):
1855n/a ...
1856n/aSyntaxError: can't assign to yield expression
1857n/a
1858n/a>>> def f(): (yield bar) += y
1859n/aTraceback (most recent call last):
1860n/a ...
1861n/aSyntaxError: can't assign to yield expression
1862n/a
1863n/a
1864n/aNow check some throw() conditions:
1865n/a
1866n/a>>> def f():
1867n/a... while True:
1868n/a... try:
1869n/a... print((yield))
1870n/a... except ValueError as v:
1871n/a... print("caught ValueError (%s)" % (v))
1872n/a>>> import sys
1873n/a>>> g = f()
1874n/a>>> next(g)
1875n/a
1876n/a>>> g.throw(ValueError) # type only
1877n/acaught ValueError ()
1878n/a
1879n/a>>> g.throw(ValueError("xyz")) # value only
1880n/acaught ValueError (xyz)
1881n/a
1882n/a>>> g.throw(ValueError, ValueError(1)) # value+matching type
1883n/acaught ValueError (1)
1884n/a
1885n/a>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped
1886n/acaught ValueError (1)
1887n/a
1888n/a>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback
1889n/acaught ValueError (1)
1890n/a
1891n/a>>> g.throw(ValueError(1), "foo") # bad args
1892n/aTraceback (most recent call last):
1893n/a ...
1894n/aTypeError: instance exception may not have a separate value
1895n/a
1896n/a>>> g.throw(ValueError, "foo", 23) # bad args
1897n/aTraceback (most recent call last):
1898n/a ...
1899n/aTypeError: throw() third argument must be a traceback object
1900n/a
1901n/a>>> g.throw("abc")
1902n/aTraceback (most recent call last):
1903n/a ...
1904n/aTypeError: exceptions must be classes or instances deriving from BaseException, not str
1905n/a
1906n/a>>> g.throw(0)
1907n/aTraceback (most recent call last):
1908n/a ...
1909n/aTypeError: exceptions must be classes or instances deriving from BaseException, not int
1910n/a
1911n/a>>> g.throw(list)
1912n/aTraceback (most recent call last):
1913n/a ...
1914n/aTypeError: exceptions must be classes or instances deriving from BaseException, not type
1915n/a
1916n/a>>> def throw(g,exc):
1917n/a... try:
1918n/a... raise exc
1919n/a... except:
1920n/a... g.throw(*sys.exc_info())
1921n/a>>> throw(g,ValueError) # do it with traceback included
1922n/acaught ValueError ()
1923n/a
1924n/a>>> g.send(1)
1925n/a1
1926n/a
1927n/a>>> throw(g,TypeError) # terminate the generator
1928n/aTraceback (most recent call last):
1929n/a ...
1930n/aTypeError
1931n/a
1932n/a>>> print(g.gi_frame)
1933n/aNone
1934n/a
1935n/a>>> g.send(2)
1936n/aTraceback (most recent call last):
1937n/a ...
1938n/aStopIteration
1939n/a
1940n/a>>> g.throw(ValueError,6) # throw on closed generator
1941n/aTraceback (most recent call last):
1942n/a ...
1943n/aValueError: 6
1944n/a
1945n/a>>> f().throw(ValueError,7) # throw on just-opened generator
1946n/aTraceback (most recent call last):
1947n/a ...
1948n/aValueError: 7
1949n/a
1950n/aPlain "raise" inside a generator should preserve the traceback (#13188).
1951n/aThe traceback should have 3 levels:
1952n/a- g.throw()
1953n/a- f()
1954n/a- 1/0
1955n/a
1956n/a>>> def f():
1957n/a... try:
1958n/a... yield
1959n/a... except:
1960n/a... raise
1961n/a>>> g = f()
1962n/a>>> try:
1963n/a... 1/0
1964n/a... except ZeroDivisionError as v:
1965n/a... try:
1966n/a... g.throw(v)
1967n/a... except Exception as w:
1968n/a... tb = w.__traceback__
1969n/a>>> levels = 0
1970n/a>>> while tb:
1971n/a... levels += 1
1972n/a... tb = tb.tb_next
1973n/a>>> levels
1974n/a3
1975n/a
1976n/aNow let's try closing a generator:
1977n/a
1978n/a>>> def f():
1979n/a... try: yield
1980n/a... except GeneratorExit:
1981n/a... print("exiting")
1982n/a
1983n/a>>> g = f()
1984n/a>>> next(g)
1985n/a>>> g.close()
1986n/aexiting
1987n/a>>> g.close() # should be no-op now
1988n/a
1989n/a>>> f().close() # close on just-opened generator should be fine
1990n/a
1991n/a>>> def f(): yield # an even simpler generator
1992n/a>>> f().close() # close before opening
1993n/a>>> g = f()
1994n/a>>> next(g)
1995n/a>>> g.close() # close normally
1996n/a
1997n/aAnd finalization:
1998n/a
1999n/a>>> def f():
2000n/a... try: yield
2001n/a... finally:
2002n/a... print("exiting")
2003n/a
2004n/a>>> g = f()
2005n/a>>> next(g)
2006n/a>>> del g
2007n/aexiting
2008n/a
2009n/a
2010n/aGeneratorExit is not caught by except Exception:
2011n/a
2012n/a>>> def f():
2013n/a... try: yield
2014n/a... except Exception:
2015n/a... print('except')
2016n/a... finally:
2017n/a... print('finally')
2018n/a
2019n/a>>> g = f()
2020n/a>>> next(g)
2021n/a>>> del g
2022n/afinally
2023n/a
2024n/a
2025n/aNow let's try some ill-behaved generators:
2026n/a
2027n/a>>> def f():
2028n/a... try: yield
2029n/a... except GeneratorExit:
2030n/a... yield "foo!"
2031n/a>>> g = f()
2032n/a>>> next(g)
2033n/a>>> g.close()
2034n/aTraceback (most recent call last):
2035n/a ...
2036n/aRuntimeError: generator ignored GeneratorExit
2037n/a>>> g.close()
2038n/a
2039n/a
2040n/aOur ill-behaved code should be invoked during GC:
2041n/a
2042n/a>>> import sys, io
2043n/a>>> old, sys.stderr = sys.stderr, io.StringIO()
2044n/a>>> g = f()
2045n/a>>> next(g)
2046n/a>>> del g
2047n/a>>> "RuntimeError: generator ignored GeneratorExit" in sys.stderr.getvalue()
2048n/aTrue
2049n/a>>> sys.stderr = old
2050n/a
2051n/a
2052n/aAnd errors thrown during closing should propagate:
2053n/a
2054n/a>>> def f():
2055n/a... try: yield
2056n/a... except GeneratorExit:
2057n/a... raise TypeError("fie!")
2058n/a>>> g = f()
2059n/a>>> next(g)
2060n/a>>> g.close()
2061n/aTraceback (most recent call last):
2062n/a ...
2063n/aTypeError: fie!
2064n/a
2065n/a
2066n/aEnsure that various yield expression constructs make their
2067n/aenclosing function a generator:
2068n/a
2069n/a>>> def f(): x += yield
2070n/a>>> type(f())
2071n/a<class 'generator'>
2072n/a
2073n/a>>> def f(): x = yield
2074n/a>>> type(f())
2075n/a<class 'generator'>
2076n/a
2077n/a>>> def f(): lambda x=(yield): 1
2078n/a>>> type(f())
2079n/a<class 'generator'>
2080n/a
2081n/a>>> def f(): x=(i for i in (yield) if (yield))
2082n/a>>> type(f())
2083n/a<class 'generator'>
2084n/a
2085n/a>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
2086n/a>>> data = [1,2]
2087n/a>>> g = f(data)
2088n/a>>> type(g)
2089n/a<class 'generator'>
2090n/a>>> g.send(None)
2091n/a'a'
2092n/a>>> data
2093n/a[1, 2]
2094n/a>>> g.send(0)
2095n/a'b'
2096n/a>>> data
2097n/a[27, 2]
2098n/a>>> try: g.send(1)
2099n/a... except StopIteration: pass
2100n/a>>> data
2101n/a[27, 27]
2102n/a
2103n/a"""
2104n/a
2105n/arefleaks_tests = """
2106n/aPrior to adding cycle-GC support to itertools.tee, this code would leak
2107n/areferences. We add it to the standard suite so the routine refleak-tests
2108n/awould trigger if it starts being uncleanable again.
2109n/a
2110n/a>>> import itertools
2111n/a>>> def leak():
2112n/a... class gen:
2113n/a... def __iter__(self):
2114n/a... return self
2115n/a... def __next__(self):
2116n/a... return self.item
2117n/a... g = gen()
2118n/a... head, tail = itertools.tee(g)
2119n/a... g.item = head
2120n/a... return head
2121n/a>>> it = leak()
2122n/a
2123n/aMake sure to also test the involvement of the tee-internal teedataobject,
2124n/awhich stores returned items.
2125n/a
2126n/a>>> item = next(it)
2127n/a
2128n/a
2129n/a
2130n/aThis test leaked at one point due to generator finalization/destruction.
2131n/aIt was copied from Lib/test/leakers/test_generator_cycle.py before the file
2132n/awas removed.
2133n/a
2134n/a>>> def leak():
2135n/a... def gen():
2136n/a... while True:
2137n/a... yield g
2138n/a... g = gen()
2139n/a
2140n/a>>> leak()
2141n/a
2142n/a
2143n/a
2144n/aThis test isn't really generator related, but rather exception-in-cleanup
2145n/arelated. The coroutine tests (above) just happen to cause an exception in
2146n/athe generator's __del__ (tp_del) method. We can also test for this
2147n/aexplicitly, without generators. We do have to redirect stderr to avoid
2148n/aprinting warnings and to doublecheck that we actually tested what we wanted
2149n/ato test.
2150n/a
2151n/a>>> import sys, io
2152n/a>>> old = sys.stderr
2153n/a>>> try:
2154n/a... sys.stderr = io.StringIO()
2155n/a... class Leaker:
2156n/a... def __del__(self):
2157n/a... def invoke(message):
2158n/a... raise RuntimeError(message)
2159n/a... invoke("test")
2160n/a...
2161n/a... l = Leaker()
2162n/a... del l
2163n/a... err = sys.stderr.getvalue().strip()
2164n/a... "Exception ignored in" in err
2165n/a... "RuntimeError: test" in err
2166n/a... "Traceback" in err
2167n/a... "in invoke" in err
2168n/a... finally:
2169n/a... sys.stderr = old
2170n/aTrue
2171n/aTrue
2172n/aTrue
2173n/aTrue
2174n/a
2175n/a
2176n/aThese refleak tests should perhaps be in a testfile of their own,
2177n/atest_generators just happened to be the test that drew these out.
2178n/a
2179n/a"""
2180n/a
2181n/a__test__ = {"tut": tutorial_tests,
2182n/a "pep": pep_tests,
2183n/a "email": email_tests,
2184n/a "fun": fun_tests,
2185n/a "syntax": syntax_tests,
2186n/a "conjoin": conjoin_tests,
2187n/a "weakref": weakref_tests,
2188n/a "coroutine": coroutine_tests,
2189n/a "refleaks": refleaks_tests,
2190n/a }
2191n/a
2192n/a# Magic test name that regrtest.py invokes *after* importing this module.
2193n/a# This worms around a bootstrap problem.
2194n/a# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
2195n/a# so this works as expected in both ways of running regrtest.
2196n/adef test_main(verbose=None):
2197n/a from test import support, test_generators
2198n/a support.run_unittest(__name__)
2199n/a support.run_doctest(test_generators, verbose)
2200n/a
2201n/a# This part isn't needed for regrtest, but for running the test directly.
2202n/aif __name__ == "__main__":
2203n/a test_main(1)