Merge p3yk branch with the trunk up to revision 45595. This breaks a fair
number of tests, all because of the codecs/_multibytecodec issue described
here (it's not a Py3K issue, just something Py3K discovers):
http://mail.python.org/pipermail/python-dev/2006-April/064051.html

Hye-Shik Chang promised to look for a fix, so no need to fix it here. The
tests that are expected to break are:

test_codecencodings_cn
test_codecencodings_hk
test_codecencodings_jp
test_codecencodings_kr
test_codecencodings_tw
test_codecs
test_multibytecodec

This merge fixes an actual test failure (test_weakref) in this branch,
though, so I believe merging is the right thing to do anyway.
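
For context, the refleaks_tests doctests added below exercise a reference
cycle threaded through itertools.tee. A rough standalone sketch of that
cycle (Python 2 style, to match the file; the leak()/gen names come from
the patch itself, while the gc-based check at the end is only illustrative
and is not part of the committed tests):

    import gc, itertools

    def leak():
        class gen:
            def __iter__(self):
                return self
            def next(self):
                return self.item
        g = gen()
        # tee() holds a reference to g, and g.item points back into the
        # tee chain, so the whole construct forms a reference cycle.
        head, tail = itertools.tee(g)
        g.item = head
        return head

    it = leak()
    item = it.next()   # also pulls the tee-internal teedataobject into the cycle
    del it, item
    # With cycle-GC support in itertools.tee this cycle is collectable;
    # before that support was added, the objects simply leaked.
    print gc.collect()
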
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index 4be1b4c..a60a768 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -421,7 +421,6 @@
 ...         self.name = name
 ...         self.parent = None
 ...         self.generator = self.generate()
-...         self.close = self.generator.close
 ...
 ...     def generate(self):
 ...         while not self.parent:
@@ -484,8 +483,6 @@
 merged A into G
 A->G B->G C->G D->G E->G F->G G->G H->G I->G J->G K->G L->G M->G
 
->>> for s in sets: s.close()    # break cycles
-
 """
 # Emacs turd '
 
@@ -593,7 +590,6 @@
 ...     def __init__(self, g):
 ...         self.sofar = []
 ...         self.fetch = g.next
-...         self.close = g.close
 ...
 ...     def __getitem__(self, i):
 ...         sofar, fetch = self.sofar, self.fetch
@@ -624,8 +620,6 @@
 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
 
->>> m235.close()
-
 Ye olde Fibonacci generator, LazyList style.
 
 >>> def fibgen(a, b):
@@ -648,7 +642,6 @@
 >>> fib = LazyList(fibgen(1, 2))
 >>> firstn(iter(fib), 17)
 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
->>> fib.close()
 
 
 Running after your tail with itertools.tee (new in version 2.4)
@@ -685,7 +678,8 @@
 ...                        merge(times(3, m3),
 ...                              times(5, m5))):
 ...             yield n
-...     m2, m3, m5, mRes = tee(_m235(), 4)
+...     m1 = _m235()
+...     m2, m3, m5, mRes = tee(m1, 4)
 ...     return mRes
 
 >>> it = m235()
@@ -702,10 +696,9 @@
 iterators, whereupon it is deleted. You can therefore print the hamming
 sequence for hours with no, or very little, increase in memory usage.
 
-The beauty of it is that recursive running after their tail FP algorithms
+The beauty of it is that recursive running-after-their-tail FP algorithms
 are quite straightforwardly expressed with this Python idiom.
 
-
 Ye olde Fibonacci generator, tee style.
 
 >>> def fib():
@@ -721,7 +714,8 @@
 ...         for res in _isum(fibHead, fibTail):
 ...             yield res
 ...
-...     fibHead, fibTail, fibRes = tee(_fib(), 3)
+...     realfib = _fib()
+...     fibHead, fibTail, fibRes = tee(realfib, 3)
 ...     return fibRes
 
 >>> firstn(fib(), 17)
@@ -1545,6 +1539,9 @@
 >>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
 caught ValueError (1)
 
+>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
+caught ValueError (1)
+
 >>> g.throw(ValueError(1), "foo")       # bad args
 Traceback (most recent call last):
   ...
@@ -1592,8 +1589,7 @@
 >>> f().throw("abc")     # throw on just-opened generator
 Traceback (most recent call last):
   ...
-TypeError: exceptions must be classes, or instances, not str
-
+abc
 
 Now let's try closing a generator:
 
@@ -1711,6 +1707,81 @@
 
 """
 
+refleaks_tests = """
+Prior to adding cycle-GC support to itertools.tee, this code would leak
+references. We add it to the standard suite so that the routine refleak tests
+will trigger if this code ever becomes uncleanable again.
+
+>>> import itertools
+>>> def leak():
+...     class gen:
+...         def __iter__(self):
+...             return self
+...         def next(self):
+...             return self.item
+...     g = gen()
+...     head, tail = itertools.tee(g)
+...     g.item = head
+...     return head
+>>> it = leak()
+
+Make sure to also test the involvement of the tee-internal teedataobject,
+which stores returned items.
+
+>>> item = it.next()
+
+
+
+This test leaked at one point due to generator finalization/destruction.
+It was copied from Lib/test/leakers/test_generator_cycle.py before the file
+was removed.
+
+>>> def leak():
+...     def gen():
+...         while True:
+...             yield g
+...     g = gen()
+
+>>> leak()
+
+
+
+This test isn't really generator related, but rather exception-in-cleanup
+related. The coroutine tests (above) just happen to cause an exception in
+the generator's __del__ (tp_del) method. We can also test for this
+explicitly, without generators. We do have to redirect stderr, both to avoid
+printing warnings and to double-check that we actually tested what we wanted
+to test.
+
+>>> import sys, StringIO
+>>> old = sys.stderr
+>>> try:
+...     sys.stderr = StringIO.StringIO()
+...     class Leaker:
+...         def __del__(self):
+...             raise RuntimeError
+...
+...     l = Leaker()
+...     del l
+...     err = sys.stderr.getvalue().strip()
+...     err.startswith(
+...         "Exception exceptions.RuntimeError: RuntimeError() in <"
+...     )
+...     err.endswith("> ignored")
+...     len(err.splitlines())
+... finally:
+...     sys.stderr = old
+True
+True
+1
+
+
+
+These refleak tests should perhaps be in a test file of their own;
+test_generators just happened to be the test that drew them out.
+
+"""
+
 __test__ = {"tut":      tutorial_tests,
             "pep":      pep_tests,
             "email":    email_tests,
@@ -1719,6 +1790,7 @@
             "conjoin":  conjoin_tests,
             "weakref":  weakref_tests,
             "coroutine":  coroutine_tests,
+            "refleaks": refleaks_tests,
             }
 
 # Magic test name that regrtest.py invokes *after* importing this module.